@@ -1,6 +1,7 @@
 "use strict";
+const { createTextlintMatcher } = require("morpheme-match-textlint");
+
 const tokenize = require("kuromojin").tokenize;
-const createMatchAll = require("morpheme-match-all");
 const path = require("path");
 const untildify = require("untildify");
 
@@ -64,54 +65,43 @@ const reporter = (context, options) => {
 {
     dictionaryPathList: ["./path/to/dictionary.js", "./path/to/dictionary.json"]
 }
-`)
+`);
     }
 
     if (!Array.isArray(options.dictionaryPathList)) {
         throw new Error(`"dictionaryPathList" option should be array.
 {
     dictionaryPathList: ["./path/to/dictionary.js", "./path/to/dictionary.json"]
 }
-`)
+`);
     }
 
     const textlintRcDir = context.getConfigBaseDir() || process.cwd();
     const dictionaryList = loadDictionaries(textlintRcDir, options.dictionaryPathList);
-    const matchAll = createMatchAll(dictionaryList);
+    const matchAll = createTextlintMatcher({
+        tokenize: tokenize,
+        dictionaries: dictionaryList
+    });
     return {
         [Syntax.Str](node) {
             const text = getSource(node);
-            return tokenize(text).then(currentTokens => {
-                /**
-                 * @type {MatchResult[]}
-                 */
-                const matchResults = matchAll(currentTokens);
-                matchResults.forEach(matchResult => {
-                    const firstToken = matchResult.tokens[0];
-                    const lastToken = matchResult.tokens[matchResult.tokens.length - 1];
-                    const firstWordIndex = Math.max(firstToken.word_position - 1, 0);
-                    const lastWorkIndex = Math.max(lastToken.word_position - 1, 0);
-                    // replace $1
-                    const message = replaceWithCaptureTokens(matchResult.dict.message, matchResult.dict.tokens, matchResult.tokens);
-                    const expected = matchResult.dict.expected
-                        ? replaceWithCaptureTokens(matchResult.dict.expected, matchResult.dict.tokens, matchResult.tokens)
-                        : undefined;
-                    if (expected) {
-                        report(node, new RuleError(message, {
-                            index: firstWordIndex,
-                            fix: fixer.replaceTextRange([
-                                firstWordIndex, lastWorkIndex + lastToken.surface_form.length
-                            ], expected)
+            return matchAll(text).then(results => {
+                results.forEach(result => {
+                    if (result.expected) {
+                        report(node, new RuleError(result.message, {
+                            index: result.index,
+                            fix: fixer.replaceTextRange(result.range, result.expected)
                         }));
                     } else {
-                        report(node, new RuleError(message, {
-                            index: firstWordIndex
+                        report(node, new RuleError(result.message, {
+                            index: result.index
                         }));
                     }
+
                 });
             });
         }
-    }
+    };
 };
 module.exports = {
     linter: reporter,
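
For context, here is a minimal usage sketch of the new matcher flow outside of textlint. It assumes only what the diff shows: createTextlintMatcher takes the kuromojin tokenizer plus loaded dictionaries, and matchAll(text) resolves to results carrying message, expected, index, and range. The dictionary entry format below is inferred from the fields the removed code read from dict (message, expected, tokens) and is illustrative, not the library's documented schema.

// Usage sketch (illustrative assumptions noted above).
const { tokenize } = require("kuromojin");
const { createTextlintMatcher } = require("morpheme-match-textlint");

// Hypothetical dictionary entry; field names follow what the removed code
// accessed on `dict` (message, expected, tokens with surface_form).
const dictionaries = [
    {
        message: "「することができる」は冗長な表現です。",
        expected: "できる",
        tokens: [
            { surface_form: "する" },
            { surface_form: "こと" },
            { surface_form: "が" },
            { surface_form: "できる" }
        ]
    }
];

const matchAll = createTextlintMatcher({
    tokenize: tokenize,
    dictionaries: dictionaries
});

matchAll("これはすることができる。").then(results => {
    results.forEach(result => {
        // result.index -> start offset of the match in the text
        // result.range -> [start, end] offsets, usable with fixer.replaceTextRange
        console.log(result.message, result.index, result.range, result.expected);
    });
});

Compared with the removed morpheme-match-all path, the rule no longer computes text offsets from token word_position and surface_form lengths itself; the matcher's index and range are passed straight to RuleError and fixer.replaceTextRange.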