@@ -214,88 +214,88 @@ Which responds with:
[source,console-result]
--------------------------------------------------
{
-    "detail": {
-        "custom_analyzer": true,
-        "charfilters": [],
-        "tokenizer": {
-            "name": "nori_tokenizer",
-            "tokens": [
-                {
-                    "token": "뿌리",
-                    "start_offset": 0,
-                    "end_offset": 2,
-                    "type": "word",
-                    "position": 0,
-                    "leftPOS": "NNG(General Noun)",
-                    "morphemes": null,
-                    "posType": "MORPHEME",
-                    "reading": null,
-                    "rightPOS": "NNG(General Noun)"
-                },
-                {
-                    "token": "가",
-                    "start_offset": 2,
-                    "end_offset": 3,
-                    "type": "word",
-                    "position": 1,
-                    "leftPOS": "J(Ending Particle)",
-                    "morphemes": null,
-                    "posType": "MORPHEME",
-                    "reading": null,
-                    "rightPOS": "J(Ending Particle)"
-                },
-                {
-                    "token": "깊",
-                    "start_offset": 4,
-                    "end_offset": 5,
-                    "type": "word",
-                    "position": 2,
-                    "leftPOS": "VA(Adjective)",
-                    "morphemes": null,
-                    "posType": "MORPHEME",
-                    "reading": null,
-                    "rightPOS": "VA(Adjective)"
-                },
-                {
-                    "token": "은",
-                    "start_offset": 5,
-                    "end_offset": 6,
-                    "type": "word",
-                    "position": 3,
-                    "leftPOS": "E(Verbal endings)",
-                    "morphemes": null,
-                    "posType": "MORPHEME",
-                    "reading": null,
-                    "rightPOS": "E(Verbal endings)"
-                },
-                {
-                    "token": "나무",
-                    "start_offset": 7,
-                    "end_offset": 9,
-                    "type": "word",
-                    "position": 4,
-                    "leftPOS": "NNG(General Noun)",
-                    "morphemes": null,
-                    "posType": "MORPHEME",
-                    "reading": null,
-                    "rightPOS": "NNG(General Noun)"
-                },
-                {
-                    "token": "는",
-                    "start_offset": 9,
-                    "end_offset": 10,
-                    "type": "word",
-                    "position": 5,
-                    "leftPOS": "J(Ending Particle)",
-                    "morphemes": null,
-                    "posType": "MORPHEME",
-                    "reading": null,
-                    "rightPOS": "J(Ending Particle)"
-                }
-            ]
+  "detail": {
+    "custom_analyzer": true,
+    "charfilters": [],
+    "tokenizer": {
+      "name": "nori_tokenizer",
+      "tokens": [
+        {
+          "token": "뿌리",
+          "start_offset": 0,
+          "end_offset": 2,
+          "type": "word",
+          "position": 0,
+          "leftPOS": "NNG(General Noun)",
+          "morphemes": null,
+          "posType": "MORPHEME",
+          "reading": null,
+          "rightPOS": "NNG(General Noun)"
        },
-        "tokenfilters": []
-    }
+        {
+          "token": "가",
+          "start_offset": 2,
+          "end_offset": 3,
+          "type": "word",
+          "position": 1,
+          "leftPOS": "J(Ending Particle)",
+          "morphemes": null,
+          "posType": "MORPHEME",
+          "reading": null,
+          "rightPOS": "J(Ending Particle)"
+        },
+        {
+          "token": "깊",
+          "start_offset": 4,
+          "end_offset": 5,
+          "type": "word",
+          "position": 2,
+          "leftPOS": "VA(Adjective)",
+          "morphemes": null,
+          "posType": "MORPHEME",
+          "reading": null,
+          "rightPOS": "VA(Adjective)"
+        },
+        {
+          "token": "은",
+          "start_offset": 5,
+          "end_offset": 6,
+          "type": "word",
+          "position": 3,
+          "leftPOS": "E(Verbal endings)",
+          "morphemes": null,
+          "posType": "MORPHEME",
+          "reading": null,
+          "rightPOS": "E(Verbal endings)"
+        },
+        {
+          "token": "나무",
+          "start_offset": 7,
+          "end_offset": 9,
+          "type": "word",
+          "position": 4,
+          "leftPOS": "NNG(General Noun)",
+          "morphemes": null,
+          "posType": "MORPHEME",
+          "reading": null,
+          "rightPOS": "NNG(General Noun)"
+        },
+        {
+          "token": "는",
+          "start_offset": 9,
+          "end_offset": 10,
+          "type": "word",
+          "position": 5,
+          "leftPOS": "J(Ending Particle)",
+          "morphemes": null,
+          "posType": "MORPHEME",
+          "reading": null,
+          "rightPOS": "J(Ending Particle)"
+        }
+      ]
+    },
+    "tokenfilters": []
+  }
}
--------------------------------------------------

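(Note: the `leftPOS`, `rightPOS`, `morphemes`, `posType` and `reading` attributes shown above are only emitted when the analyze API is called with `"explain": true`. The triggering request sits just above this hunk and is not part of the diff; the sketch below is reconstructed from the response, so the sample sentence and attribute list are grounded in the output above while the rest is assumption.)

[source,console]
--------------------------------------------------
GET _analyze
{
  "tokenizer": "nori_tokenizer",
  "text": "뿌리가 깊은 나무는",
  "attributes": [ "posType", "leftPOS", "rightPOS", "morphemes", "reading" ],
  "explain": true
}
--------------------------------------------------
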
@@ -400,18 +400,18 @@ The `nori_readingform` token filter rewrites tokens written in Hanja to their Ha
--------------------------------------------------
PUT nori_sample
{
-    "settings": {
-        "index":{
-            "analysis":{
-                "analyzer" : {
-                    "my_analyzer" : {
-                        "tokenizer" : "nori_tokenizer",
-                        "filter" : ["nori_readingform"]
-                    }
-                }
-            }
+  "settings": {
+    "index": {
+      "analysis": {
+        "analyzer": {
+          "my_analyzer": {
+            "tokenizer": "nori_tokenizer",
+            "filter": [ "nori_readingform" ]
+          }
        }
+      }
    }
+  }
}

GET nori_sample/_analyze
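(The hunk ends just as the follow-up request begins; its body and response fall outside the changed lines. The sketch below is illustrative only, and the Hanja sample text is an assumption rather than something taken from this diff.)

[source,console]
--------------------------------------------------
GET nori_sample/_analyze
{
  "analyzer": "my_analyzer",
  "text": "鄕歌"
}
--------------------------------------------------

With `nori_readingform` in the chain, a Hanja token such as 鄕歌 should come back rewritten as its Hangul reading, 향가.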