Skip to content

Commit 26474de

Browse files
Fix bigint generation
1 parent 0b277b4 commit 26474de

File tree

3 files changed

+5
-20
lines changed

3 files changed

+5
-20
lines changed

src/nearley/lexer.ts

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -18,9 +18,8 @@ export const lexer = moo.compile({
1818

1919
// Numbers
2020
complex: /(?:\d+\.?\d*|\.\d+)[jJ]/,
21-
bigint: /\d+[nN]/,
21+
bigint: /\d+/,
2222
float: /(?:\d+\.\d*|\.\d+)(?:[eE][+-]?\d+)?/,
23-
integer: /\d+/,
2423

2524
// Strings (simplified - doesn't handle all edge cases yet)
2625
stringTripleDouble: /"""(?:[^"\\]|\\["\\/bfnrt]|\\u[a-fA-F0-9]{4})*?"""/,

src/nearley/python-grammar.ts

Lines changed: 2 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -9,7 +9,6 @@ import { TokenType } from '../tokens';
99

1010
const tokenTypeMap: { [key: string]: TokenType } = {
1111
'identifier': TokenType.NAME,
12-
'integer': TokenType.NUMBER,
1312
'float': TokenType.NUMBER,
1413
'bigint': TokenType.BIGINT,
1514
'complex': TokenType.COMPLEX,
@@ -86,7 +85,7 @@ function toAstToken(token: any): AstToken {
8685
return new AstToken(
8786
type,
8887
token.value,
89-
token.line || 0,
88+
token.line - 1 || 0,
9089
token.col || 0,
9190
token.offset || 0
9291
);
@@ -454,12 +453,6 @@ let ParserRules = [
454453
return new ExprNS.Variable(token, token, token);
455454
}
456455
},
457-
{"name": "atom", "symbols": [(pythonLexer.has("integer") ? {type: "integer"} : integer)], "postprocess":
458-
(d) => {
459-
const token = toAstToken(d[0]);
460-
return new ExprNS.Literal(token, token, parseInt(token.lexeme));
461-
}
462-
},
463456
{"name": "atom", "symbols": [(pythonLexer.has("float") ? {type: "float"} : float)], "postprocess":
464457
(d) => {
465458
const token = toAstToken(d[0]);
@@ -469,7 +462,7 @@ let ParserRules = [
469462
{"name": "atom", "symbols": [(pythonLexer.has("bigint") ? {type: "bigint"} : bigint)], "postprocess":
470463
(d) => {
471464
const token = toAstToken(d[0]);
472-
return new ExprNS.BigIntLiteral(token, token, token.lexeme.slice(0, -1));
465+
return new ExprNS.BigIntLiteral(token, token, token.lexeme);
473466
}
474467
},
475468
{"name": "atom", "symbols": [(pythonLexer.has("complex") ? {type: "complex"} : complex)], "postprocess":

src/nearley/python.ne

Lines changed: 2 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,6 @@ import { TokenType } from '../tokens';
1111

1212
const tokenTypeMap: { [key: string]: TokenType } = {
1313
'identifier': TokenType.NAME,
14-
'integer': TokenType.NUMBER,
1514
'float': TokenType.NUMBER,
1615
'bigint': TokenType.BIGINT,
1716
'complex': TokenType.COMPLEX,
@@ -88,7 +87,7 @@ function toAstToken(token: any): AstToken {
8887
return new AstToken(
8988
type,
9089
token.value,
91-
token.line || 0,
90+
token.line - 1 || 0,
9291
token.col || 0,
9392
token.offset || 0
9493
);
@@ -551,12 +550,6 @@ atom ->
551550
return new ExprNS.Variable(token, token, token);
552551
}
553552
%}
554-
| %integer {%
555-
(d) => {
556-
const token = toAstToken(d[0]);
557-
return new ExprNS.Literal(token, token, parseInt(token.lexeme));
558-
}
559-
%}
560553
| %float {%
561554
(d) => {
562555
const token = toAstToken(d[0]);
@@ -566,7 +559,7 @@ atom ->
566559
| %bigint {%
567560
(d) => {
568561
const token = toAstToken(d[0]);
569-
return new ExprNS.BigIntLiteral(token, token, token.lexeme.slice(0, -1));
562+
return new ExprNS.BigIntLiteral(token, token, token.lexeme);
570563
}
571564
%}
572565
| %complex {%

0 commit comments

Comments (0)