@@ -17,14 +17,10 @@ import { type TokenKindEnum, TokenKind } from './tokenKind';
  * EOF, after which the lexer will repeatedly return the same EOF token
  * whenever called.
  */
-export function createLexer<TOptions>(
-  source: Source,
-  options: TOptions,
-): Lexer<TOptions> {
+export function createLexer(source: Source): Lexer {
   const startOfFileToken = new Tok(TokenKind.SOF, 0, 0, 0, 0, null);
-  const lexer: Lexer<TOptions> = {
+  const lexer: Lexer = {
     source,
-    options,
     lastToken: startOfFileToken,
     token: startOfFileToken,
     line: 1,
@@ -55,9 +51,8 @@ function lookahead() {
 /**
  * The return type of createLexer.
  */
-export type Lexer<TOptions> = {
+export type Lexer = {
   source: Source,
-  options: TOptions,

   /**
    * The previously focused non-ignored token.
@@ -167,7 +162,7 @@ function printCharCode(code) {
  * punctuators immediately or calls the appropriate helper function for more
  * complicated tokens.
  */
-function readToken(lexer: Lexer<mixed>, prev: Token): Token {
+function readToken(lexer: Lexer, prev: Token): Token {
   const source = lexer.source;
   const body = source.body;
   const bodyLength = body.length;
@@ -334,7 +329,7 @@ function unexpectedCharacterMessage(code) {
 function positionAfterWhitespace(
   body: string,
   startPosition: number,
-  lexer: Lexer<mixed>,
+  lexer: Lexer,
 ): number {
   const bodyLength = body.length;
   let position = startPosition;
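For context, a minimal usage sketch of the simplified lexer API after this change (not part of the commit): callers now pass only a Source, since Lexer is no longer generic over an options type. The import path, the Source constructor, and the lexer's advance() method are assumptions based on the rest of graphql-js and are not shown in this diff.

// Hypothetical caller of the simplified createLexer(source) signature.
// Module path is illustrative; Source, createLexer, and TokenKind are
// assumed to be exported from the language module of graphql-js.
import { Source, createLexer, TokenKind } from 'graphql/language';

const source = new Source('{ field }');
// Previously: createLexer(source, options); the options argument is gone.
const lexer = createLexer(source);

// advance() is assumed to return the next non-ignored token; per the doc
// comment above, it keeps returning the same EOF token once input ends.
let token = lexer.advance();
while (token.kind !== TokenKind.EOF) {
  console.log(token.kind, token.value);
  token = lexer.advance();
}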