@@ -4,7 +4,7 @@ import { match } from "ts-pattern";
 import type { functions } from "./functions";
 
 /**
- * Represents an error where the tokeniser couldn't match the input to any token.
+ * Represents an error where the tokenizer couldn't match the input to any token.
  * The `idx` field points to the start of the unknown part in the input.
  */
 export type LexicalError = { type: "UNKNOWN_TOKEN"; idx: number };
@@ -124,17 +124,17 @@ const tokenMatchers = [
 
 /**
  * Reads an input expression and returns a `Result<Token[], LexicalError>` where
- * - `Token[]` is the tokenised expression, or
+ * - `Token[]` is the tokenized expression, or
  * - `LexicalError.idx` is the starting index of the *first lexical error* (i.e. unrecognised word) in the input expression.
  *
  * @see {@link Token}
  * @example
  * ```typescript
- * tokenise("1 + 2") // => Ok([{ type: "litr", value: Decimal(1) }, { type: "oper", name: "+" }, ...])
- * tokenise("1 ö 2") // => Err({ type: "UNKNOWN_TOKEN", idx: 2 }) // 2 === "1 ö 2".indexOf("ö")
+ * tokenize("1 + 2") // => Ok([{ type: "litr", value: Decimal(1) }, { type: "oper", name: "+" }, ...])
+ * tokenize("1 ö 2") // => Err({ type: "UNKNOWN_TOKEN", idx: 2 }) // 2 === "1 ö 2".indexOf("ö")
 * ```
 */
-export default function tokenise(
+export default function tokenize(
	expression: string,
 ): Result<Token[], LexicalError> {
	return Result.combine([...tokens(expression)]);
@@ -149,7 +149,7 @@ export default function tokenise(
 * The generator stops on the first lexical error.
 * I.e. if an error is encountered, it will be the last value output by the generator.
 *
- * @see {@link tokenise}
+ * @see {@link tokenize}
 * @see {@link Token}
 */
function* tokens(
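For readers following the rename, here is a minimal sketch of how a caller might consume the new `tokenize` entry point. It assumes the module is imported as `./tokenize` (the actual file path isn't visible in these hunks) and that `Result` comes from neverthrow, which the `Result.combine` call in the function body suggests; neverthrow results expose a `match(onOk, onErr)` method used below.

```typescript
import tokenize from "./tokenize"; // hypothetical path; not shown in the diff

const input = "1 ö 2";

tokenize(input).match(
	// Ok: the full token list, e.g. [{ type: "litr", ... }, { type: "oper", name: "+" }, ...]
	(tokens) => console.log("tokens:", tokens),
	// Err: `idx` points at the first unrecognised character, so we can underline it
	(error) => {
		console.error(input);
		console.error(" ".repeat(error.idx) + "^ unknown token");
	},
);
```

Because the `tokens` generator stops at the first unmatched input, `idx` always refers to the earliest lexical error in the expression.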