/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.handler;

import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.KeywordTokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.solr.common.params.AnalysisParams;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.client.solrj.request.FieldAnalysisRequest;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.List;
/**
 * A test for {@link FieldAnalysisRequestHandler}.
 *
 * @version $Id: FieldAnalysisRequestHandlerTest.java 1175580 2011-09-25 21:17:25Z rmuir $
 */
public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestBase {
44
private FieldAnalysisRequestHandler handler;
48
public void setUp() throws Exception {
50
handler = new FieldAnalysisRequestHandler();
54
public static void beforeClass() throws Exception {
55
initCore("solrconfig.xml", "schema.xml");
59
* Tests the {@link FieldAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)}
62
public void testResolveAnalysisRequest() throws Exception {
63
ModifiableSolrParams params = new ModifiableSolrParams();
64
params.add(AnalysisParams.FIELD_NAME, "text,nametext");
65
params.add(AnalysisParams.FIELD_TYPE, "whitetok,keywordtok");
66
params.add(AnalysisParams.FIELD_VALUE, "the quick red fox jumped over the lazy brown dogs");
67
params.add(CommonParams.Q, "fox brown");
69
SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params);
70
FieldAnalysisRequest request = handler.resolveAnalysisRequest(req);
71
List<String> fieldNames = request.getFieldNames();
72
assertEquals("Expecting 2 field names", 2, fieldNames.size());
73
assertEquals("text", fieldNames.get(0));
74
assertEquals("nametext", fieldNames.get(1));
75
List<String> fieldTypes = request.getFieldTypes();
76
assertEquals("Expecting 2 field types", 2, fieldTypes.size());
77
assertEquals("whitetok", fieldTypes.get(0));
78
assertEquals("keywordtok", fieldTypes.get(1));
79
assertEquals("the quick red fox jumped over the lazy brown dogs", request.getFieldValue());
80
assertEquals("fox brown", request.getQuery());
81
assertFalse(request.isShowMatch());
84
// testing overide of query value using analysis.query param
85
params.add(AnalysisParams.QUERY, "quick lazy");
86
req=new LocalSolrQueryRequest(h.getCore(), params);
87
request = handler.resolveAnalysisRequest(req);
88
assertEquals("quick lazy", request.getQuery());
91
// testing analysis.showmatch param
92
params.add(AnalysisParams.SHOW_MATCH, "false");
93
req=new LocalSolrQueryRequest(h.getCore(), params);
94
request = handler.resolveAnalysisRequest(req);
95
assertFalse(request.isShowMatch());
98
params.set(AnalysisParams.SHOW_MATCH, "true");
99
req=new LocalSolrQueryRequest(h.getCore(), params);
100
request = handler.resolveAnalysisRequest(req);
101
assertTrue(request.isShowMatch());
104
// testing absence of query value
105
params.remove(CommonParams.Q);
106
params.remove(AnalysisParams.QUERY);
107
req=new LocalSolrQueryRequest(h.getCore(), params);
108
request = handler.resolveAnalysisRequest(req);
109
assertNull(request.getQuery());
114
* Tests the {@link FieldAnalysisRequestHandler#handleAnalysisRequest(org.apache.solr.client.solrj.request.FieldAnalysisRequest,
115
* org.apache.solr.schema.IndexSchema)}
118
public void testHandleAnalysisRequest() throws Exception {
120
FieldAnalysisRequest request = new FieldAnalysisRequest();
121
request.addFieldName("whitetok");
122
request.addFieldName("keywordtok");
123
request.addFieldType("text");
124
request.addFieldType("nametext");
125
request.setFieldValue("the quick red fox jumped over the lazy brown dogs");
126
request.setQuery("fox brown");
127
request.setShowMatch(true);
129
NamedList<NamedList> result = handler.handleAnalysisRequest(request, h.getCore().getSchema());
130
assertTrue("result is null and it shouldn't be", result != null);
132
NamedList<NamedList> fieldTypes = result.get("field_types");
133
assertNotNull("field_types should never be null", fieldTypes);
134
NamedList<NamedList> textType = fieldTypes.get("text");
135
assertNotNull("expecting result for field type 'text'", textType);
137
NamedList<List<NamedList>> indexPart = textType.get("index");
138
assertNotNull("expecting an index token analysis for field type 'text'", indexPart);
140
List<NamedList> tokenList = indexPart.get("org.apache.lucene.analysis.standard.StandardTokenizer");
141
assertNotNull("Expcting StandardTokenizer analysis breakdown", tokenList);
142
assertEquals(tokenList.size(), 10);
143
assertToken(tokenList.get(0), new TokenInfo("the", null, "<ALPHANUM>", 0, 3, 1, new int[]{1}, null, false));
144
assertToken(tokenList.get(1), new TokenInfo("quick", null, "<ALPHANUM>", 4, 9, 2, new int[]{2}, null, false));
145
assertToken(tokenList.get(2), new TokenInfo("red", null, "<ALPHANUM>", 10, 13, 3, new int[]{3}, null, false));
146
assertToken(tokenList.get(3), new TokenInfo("fox", null, "<ALPHANUM>", 14, 17, 4, new int[]{4}, null, true));
147
assertToken(tokenList.get(4), new TokenInfo("jumped", null, "<ALPHANUM>", 18, 24, 5, new int[]{5}, null, false));
148
assertToken(tokenList.get(5), new TokenInfo("over", null, "<ALPHANUM>", 25, 29, 6, new int[]{6}, null, false));
149
assertToken(tokenList.get(6), new TokenInfo("the", null, "<ALPHANUM>", 30, 33, 7, new int[]{7}, null, false));
150
assertToken(tokenList.get(7), new TokenInfo("lazy", null, "<ALPHANUM>", 34, 38, 8, new int[]{8}, null, false));
151
assertToken(tokenList.get(8), new TokenInfo("brown", null, "<ALPHANUM>", 39, 44, 9, new int[]{9}, null, true));
152
assertToken(tokenList.get(9), new TokenInfo("dogs", null, "<ALPHANUM>", 45, 49, 10, new int[]{10}, null, false));
153
tokenList = indexPart.get("org.apache.lucene.analysis.standard.StandardFilter");
154
assertNotNull("Expcting StandardFilter analysis breakdown", tokenList);
155
assertEquals(tokenList.size(), 10);
156
assertToken(tokenList.get(0), new TokenInfo("the", null, "<ALPHANUM>", 0, 3, 1, new int[]{1,1}, null, false));
157
assertToken(tokenList.get(1), new TokenInfo("quick", null, "<ALPHANUM>", 4, 9, 2, new int[]{2,2}, null, false));
158
assertToken(tokenList.get(2), new TokenInfo("red", null, "<ALPHANUM>", 10, 13, 3, new int[]{3,3}, null, false));
159
assertToken(tokenList.get(3), new TokenInfo("fox", null, "<ALPHANUM>", 14, 17, 4, new int[]{4,4}, null, true));
160
assertToken(tokenList.get(4), new TokenInfo("jumped", null, "<ALPHANUM>", 18, 24, 5, new int[]{5,5}, null, false));
161
assertToken(tokenList.get(5), new TokenInfo("over", null, "<ALPHANUM>", 25, 29, 6, new int[]{6,6}, null, false));
162
assertToken(tokenList.get(6), new TokenInfo("the", null, "<ALPHANUM>", 30, 33, 7, new int[]{7,7}, null, false));
163
assertToken(tokenList.get(7), new TokenInfo("lazy", null, "<ALPHANUM>", 34, 38, 8, new int[]{8,8}, null, false));
164
assertToken(tokenList.get(8), new TokenInfo("brown", null, "<ALPHANUM>", 39, 44, 9, new int[]{9,9}, null, true));
165
assertToken(tokenList.get(9), new TokenInfo("dogs", null, "<ALPHANUM>", 45, 49, 10, new int[]{10,10}, null, false));
166
tokenList = indexPart.get("org.apache.lucene.analysis.LowerCaseFilter");
167
assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList);
168
assertEquals(tokenList.size(), 10);
169
assertToken(tokenList.get(0), new TokenInfo("the", null, "<ALPHANUM>", 0, 3, 1, new int[]{1,1,1}, null, false));
170
assertToken(tokenList.get(1), new TokenInfo("quick", null, "<ALPHANUM>", 4, 9, 2, new int[]{2,2,2}, null, false));
171
assertToken(tokenList.get(2), new TokenInfo("red", null, "<ALPHANUM>", 10, 13, 3, new int[]{3,3,3}, null, false));
172
assertToken(tokenList.get(3), new TokenInfo("fox", null, "<ALPHANUM>", 14, 17, 4, new int[]{4,4,4}, null, true));
173
assertToken(tokenList.get(4), new TokenInfo("jumped", null, "<ALPHANUM>", 18, 24, 5, new int[]{5,5,5}, null, false));
174
assertToken(tokenList.get(5), new TokenInfo("over", null, "<ALPHANUM>", 25, 29, 6, new int[]{6,6,6}, null, false));
175
assertToken(tokenList.get(6), new TokenInfo("the", null, "<ALPHANUM>", 30, 33, 7, new int[]{7,7,7}, null, false));
176
assertToken(tokenList.get(7), new TokenInfo("lazy", null, "<ALPHANUM>", 34, 38, 8, new int[]{8,8,8}, null, false));
177
assertToken(tokenList.get(8), new TokenInfo("brown", null, "<ALPHANUM>", 39, 44, 9, new int[]{9,9,9}, null, true));
178
assertToken(tokenList.get(9), new TokenInfo("dogs", null, "<ALPHANUM>", 45, 49, 10, new int[]{10,10,10}, null, false));
179
tokenList = indexPart.get("org.apache.lucene.analysis.StopFilter");
180
assertNotNull("Expcting StopFilter analysis breakdown", tokenList);
181
assertEquals(tokenList.size(), 8);
182
assertToken(tokenList.get(0), new TokenInfo("quick", null, "<ALPHANUM>", 4, 9, 1, new int[]{2,2,2,1}, null, false));
183
assertToken(tokenList.get(1), new TokenInfo("red", null, "<ALPHANUM>", 10, 13, 2, new int[]{3,3,3,2}, null, false));
184
assertToken(tokenList.get(2), new TokenInfo("fox", null, "<ALPHANUM>", 14, 17, 3, new int[]{4,4,4,3}, null, true));
185
assertToken(tokenList.get(3), new TokenInfo("jumped", null, "<ALPHANUM>", 18, 24, 4, new int[]{5,5,5,4}, null, false));
186
assertToken(tokenList.get(4), new TokenInfo("over", null, "<ALPHANUM>", 25, 29, 5, new int[]{6,6,6,5}, null, false));
187
assertToken(tokenList.get(5), new TokenInfo("lazy", null, "<ALPHANUM>", 34, 38, 6, new int[]{8,8,8,6}, null, false));
188
assertToken(tokenList.get(6), new TokenInfo("brown", null, "<ALPHANUM>", 39, 44, 7, new int[]{9,9,9,7}, null, true));
189
assertToken(tokenList.get(7), new TokenInfo("dogs", null, "<ALPHANUM>", 45, 49, 8, new int[]{10,10,10,8}, null, false));
190
tokenList = indexPart.get("org.apache.lucene.analysis.PorterStemFilter");
191
assertNotNull("Expcting PorterStemFilter analysis breakdown", tokenList);
192
assertEquals(tokenList.size(), 8);
193
assertToken(tokenList.get(0), new TokenInfo("quick", null, "<ALPHANUM>", 4, 9, 1, new int[]{2,2,2,1,1}, null, false));
194
assertToken(tokenList.get(1), new TokenInfo("red", null, "<ALPHANUM>", 10, 13, 2, new int[]{3,3,3,2,2}, null, false));
195
assertToken(tokenList.get(2), new TokenInfo("fox", null, "<ALPHANUM>", 14, 17, 3, new int[]{4,4,4,3,3}, null, true));
196
assertToken(tokenList.get(3), new TokenInfo("jump", null, "<ALPHANUM>", 18, 24, 4, new int[]{5,5,5,4,4}, null, false));
197
assertToken(tokenList.get(4), new TokenInfo("over", null, "<ALPHANUM>", 25, 29, 5, new int[]{6,6,6,5,5}, null, false));
198
assertToken(tokenList.get(5), new TokenInfo("lazi", null, "<ALPHANUM>", 34, 38, 6, new int[]{8,8,8,6,6}, null, false));
199
assertToken(tokenList.get(6), new TokenInfo("brown", null, "<ALPHANUM>", 39, 44, 7, new int[]{9,9,9,7,7}, null, true));
200
assertToken(tokenList.get(7), new TokenInfo("dog", null, "<ALPHANUM>", 45, 49, 8, new int[]{10,10,10,8,8}, null, false));
202
NamedList<List<NamedList>> queryPart = textType.get("query");
203
assertNotNull("expecting a query token analysis for field type 'text'", queryPart);
205
tokenList = queryPart.get("org.apache.lucene.analysis.standard.StandardTokenizer");
206
assertNotNull("Expecting StandardTokenizer analysis breakdown", tokenList);
207
assertEquals("Expecting StandardTokenizer to produce 2 tokens from '" + request.getQuery() + "'", 2, tokenList.size());
208
assertToken(tokenList.get(0), new TokenInfo("fox", null, "<ALPHANUM>", 0, 3, 1, new int[]{1}, null, false));
209
assertToken(tokenList.get(1), new TokenInfo("brown", null, "<ALPHANUM>", 4, 9, 2, new int[]{2}, null, false));
210
tokenList = queryPart.get("org.apache.lucene.analysis.standard.StandardFilter");
211
assertNotNull("Expcting StandardFilter analysis breakdown", tokenList);
212
assertEquals(2, tokenList.size());
213
assertToken(tokenList.get(0), new TokenInfo("fox", null, "<ALPHANUM>", 0, 3, 1, new int[]{1,1}, null, false));
214
assertToken(tokenList.get(1), new TokenInfo("brown", null, "<ALPHANUM>", 4, 9, 2, new int[]{2,2}, null, false));
215
tokenList = queryPart.get("org.apache.lucene.analysis.LowerCaseFilter");
216
assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList);
217
assertEquals(2, tokenList.size());
218
assertToken(tokenList.get(0), new TokenInfo("fox", null, "<ALPHANUM>", 0, 3, 1, new int[]{1,1,1}, null, false));
219
assertToken(tokenList.get(1), new TokenInfo("brown", null, "<ALPHANUM>", 4, 9, 2, new int[]{2,2,2}, null, false));
220
tokenList = queryPart.get("org.apache.lucene.analysis.StopFilter");
221
assertNotNull("Expcting StopFilter analysis breakdown", tokenList);
222
assertEquals(2, tokenList.size());
223
assertToken(tokenList.get(0), new TokenInfo("fox", null, "<ALPHANUM>", 0, 3, 1, new int[]{1,1,1,1}, null, false));
224
assertToken(tokenList.get(1), new TokenInfo("brown", null, "<ALPHANUM>", 4, 9, 2, new int[]{2,2,2,2}, null, false));
225
tokenList = queryPart.get("org.apache.lucene.analysis.PorterStemFilter");
226
assertNotNull("Expcting PorterStemFilter analysis breakdown", tokenList);
227
assertEquals(2, tokenList.size());
228
assertToken(tokenList.get(0), new TokenInfo("fox", null, "<ALPHANUM>", 0, 3, 1, new int[]{1,1,1,1,1}, null, false));
229
assertToken(tokenList.get(1), new TokenInfo("brown", null, "<ALPHANUM>", 4, 9, 2, new int[]{2,2,2,2,2}, null, false));
231
NamedList<NamedList> nameTextType = fieldTypes.get("nametext");
232
assertNotNull("expecting result for field type 'nametext'", nameTextType);
234
indexPart = nameTextType.get("index");
235
assertNotNull("expecting an index token analysis for field type 'nametext'", indexPart);
237
tokenList = indexPart.get("org.apache.lucene.analysis.WhitespaceTokenizer");
238
assertNotNull("Expcting WhitespaceTokenizer analysis breakdown", tokenList);
239
assertEquals(10, tokenList.size());
240
assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false));
241
assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false));
242
assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false));
243
assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[]{4}, null, true));
244
assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, new int[]{5}, null, false));
245
assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, new int[]{6}, null, false));
246
assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, new int[]{7}, null, false));
247
assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, new int[]{8}, null, false));
248
assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, new int[]{9}, null, true));
249
assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, new int[]{10}, null, false));
251
queryPart = nameTextType.get("query");
252
assertNotNull("expecting a query token analysis for field type 'nametext'", queryPart);
253
tokenList = queryPart.get(WhitespaceTokenizer.class.getName());
254
assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false));
255
assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false));
257
NamedList<NamedList> fieldNames = result.get("field_names");
258
assertNotNull("field_nameds should never be null", fieldNames);
260
NamedList<NamedList> whitetok = fieldNames.get("whitetok");
261
assertNotNull("expecting result for field 'whitetok'", whitetok);
263
indexPart = whitetok.get("index");
264
assertNotNull("expecting an index token analysis for field 'whitetok'", indexPart);
265
assertEquals("expecting only MockTokenizer to be applied", 1, indexPart.size());
266
tokenList = indexPart.get(MockTokenizer.class.getName());
267
assertNotNull("expecting only MockTokenizer to be applied", tokenList);
268
assertEquals("expecting MockTokenizer to produce 10 tokens", 10, tokenList.size());
269
assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false));
270
assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false));
271
assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false));
272
assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[]{4}, null, true));
273
assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, new int[]{5}, null, false));
274
assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, new int[]{6}, null, false));
275
assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, new int[]{7}, null, false));
276
assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, new int[]{8}, null, false));
277
assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, new int[]{9}, null, true));
278
assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, new int[]{10}, null, false));
280
queryPart = whitetok.get("query");
281
assertNotNull("expecting a query token analysis for field 'whitetok'", queryPart);
282
assertEquals("expecting only MockTokenizer to be applied", 1, queryPart.size());
283
tokenList = queryPart.get(MockTokenizer.class.getName());
284
assertNotNull("expecting only MockTokenizer to be applied", tokenList);
285
assertEquals("expecting MockTokenizer to produce 2 tokens", 2, tokenList.size());
286
assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false));
287
assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false));
289
NamedList<NamedList> keywordtok = fieldNames.get("keywordtok");
290
assertNotNull("expecting result for field 'keywordtok'", keywordtok);
292
indexPart = keywordtok.get("index");
293
assertNotNull("expecting an index token analysis for field 'keywordtok'", indexPart);
294
assertEquals("expecting only MockTokenizer to be applied", 1, indexPart.size());
295
tokenList = indexPart.get(MockTokenizer.class.getName());
296
assertNotNull("expecting only MockTokenizer to be applied", tokenList);
297
assertEquals("expecting MockTokenizer to produce 1 token", 1, tokenList.size());
298
assertToken(tokenList.get(0), new TokenInfo("the quick red fox jumped over the lazy brown dogs", null, "word", 0, 49, 1, new int[]{1}, null, false));
300
queryPart = keywordtok.get("query");
301
assertNotNull("expecting a query token analysis for field 'keywordtok'", queryPart);
302
assertEquals("expecting only MockTokenizer to be applied", 1, queryPart.size());
303
tokenList = queryPart.get(MockTokenizer.class.getName());
304
assertNotNull("expecting only MockTokenizer to be applied", tokenList);
305
assertEquals("expecting MockTokenizer to produce 1 token", 1, tokenList.size());
306
assertToken(tokenList.get(0), new TokenInfo("fox brown", null, "word", 0, 9, 1, new int[]{1}, null, false));
311
public void testCharFilterAnalysis() throws Exception {
313
FieldAnalysisRequest request = new FieldAnalysisRequest();
314
request.addFieldType("charfilthtmlmap");
315
request.setFieldValue("<html><body>whátëvêr</body></html>");
316
request.setShowMatch(false);
318
NamedList<NamedList> result = handler.handleAnalysisRequest(request, h.getCore().getSchema());
319
assertTrue("result is null and it shouldn't be", result != null);
321
NamedList<NamedList> fieldTypes = result.get("field_types");
322
assertNotNull("field_types should never be null", fieldTypes);
323
NamedList<NamedList> textType = fieldTypes.get("charfilthtmlmap");
324
assertNotNull("expecting result for field type 'charfilthtmlmap'", textType);
326
NamedList indexPart = textType.get("index");
327
assertNotNull("expecting an index token analysis for field type 'charfilthtmlmap'", indexPart);
329
assertEquals(" whátëvêr ", indexPart.get("org.apache.solr.analysis.HTMLStripCharFilter"));
330
assertEquals(" whatever ", indexPart.get("org.apache.lucene.analysis.MappingCharFilter"));
332
List<NamedList> tokenList = (List<NamedList>)indexPart.get(MockTokenizer.class.getName());
333
assertNotNull("Expecting MockTokenizer analysis breakdown", tokenList);
334
assertEquals(tokenList.size(), 1);
335
assertToken(tokenList.get(0), new TokenInfo("whatever", null, "word", 12, 20, 1, new int[]{1}, null, false));
339
public void testPositionHistoryWithWDF() throws Exception {
341
FieldAnalysisRequest request = new FieldAnalysisRequest();
342
request.addFieldType("skutype1");
343
request.setFieldValue("hi, 3456-12 a Test");
344
request.setShowMatch(false);
346
NamedList<NamedList> result = handler.handleAnalysisRequest(request, h.getCore().getSchema());
347
assertTrue("result is null and it shouldn't be", result != null);
349
NamedList<NamedList> fieldTypes = result.get("field_types");
350
assertNotNull("field_types should never be null", fieldTypes);
351
NamedList<NamedList> textType = fieldTypes.get("skutype1");
352
assertNotNull("expecting result for field type 'skutype1'", textType);
354
NamedList<List<NamedList>> indexPart = textType.get("index");
355
assertNotNull("expecting an index token analysis for field type 'skutype1'", indexPart);
357
List<NamedList> tokenList = indexPart.get(MockTokenizer.class.getName());
358
assertNotNull("Expcting MockTokenizer analysis breakdown", tokenList);
359
assertEquals(4, tokenList.size());
360
assertToken(tokenList.get(0), new TokenInfo("hi,", null, "word", 0, 3, 1, new int[]{1}, null, false));
361
assertToken(tokenList.get(1), new TokenInfo("3456-12", null, "word", 4, 11, 2, new int[]{2}, null, false));
362
assertToken(tokenList.get(2), new TokenInfo("a", null, "word", 12, 13, 3, new int[]{3}, null, false));
363
assertToken(tokenList.get(3), new TokenInfo("Test", null, "word", 14, 18, 4, new int[]{4}, null, false));
364
tokenList = indexPart.get("org.apache.solr.analysis.WordDelimiterFilter");
365
assertNotNull("Expcting WordDelimiterFilter analysis breakdown", tokenList);
366
assertEquals(6, tokenList.size());
367
assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, new int[]{1,1}, null, false));
368
assertToken(tokenList.get(1), new TokenInfo("3456", null, "word", 4, 8, 2, new int[]{2,2}, null, false));
369
assertToken(tokenList.get(2), new TokenInfo("12", null, "word", 9, 11, 3, new int[]{2,3}, null, false));
370
assertToken(tokenList.get(3), new TokenInfo("345612", null, "word", 4, 11, 3, new int[]{2,3}, null, false));
371
assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, new int[]{3,4}, null, false));
372
assertToken(tokenList.get(5), new TokenInfo("Test", null, "word", 14, 18, 5, new int[]{4,5}, null, false));
373
tokenList = indexPart.get("org.apache.lucene.analysis.LowerCaseFilter");
374
assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList);
375
assertEquals(6, tokenList.size());
376
assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, new int[]{1,1,1}, null, false));
377
assertToken(tokenList.get(1), new TokenInfo("3456", null, "word", 4, 8, 2, new int[]{2,2,2}, null, false));
378
assertToken(tokenList.get(2), new TokenInfo("12", null, "word", 9, 11, 3, new int[]{2,3,3}, null, false));
379
assertToken(tokenList.get(3), new TokenInfo("345612", null, "word", 4, 11, 3, new int[]{2,3,3}, null, false));
380
assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, new int[]{3,4,4}, null, false));
381
assertToken(tokenList.get(5), new TokenInfo("test", null, "word", 14, 18, 5, new int[]{4,5,5}, null, false));