Commit f3a3ec0

fix: re-assign tokenizer.lexer and renderer.parser at start of each parse call (#3907)

1 parent 4625980 commit f3a3ec0

File tree

3 files changed: +27 / -0 lines changed

src/Lexer.ts (2 additions, 0 deletions)

@@ -105,6 +105,7 @@ export class _Lexer<ParserOutput = string, RendererOutput = string> {
   blockTokens(src: string, tokens?: Token[], lastParagraphClipped?: boolean): Token[];
   blockTokens(src: string, tokens?: TokensList, lastParagraphClipped?: boolean): TokensList;
   blockTokens(src: string, tokens: Token[] = [], lastParagraphClipped = false) {
+    this.tokenizer.lexer = this;
     if (this.options.pedantic) {
       src = src.replace(other.tabCharGlobal, ' ').replace(other.spaceLine, '');
     }
@@ -297,6 +298,7 @@ export class _Lexer<ParserOutput = string, RendererOutput = string> {
    * Lexing/Compiling
    */
   inlineTokens(src: string, tokens: Token[] = []): Token[] {
+    this.tokenizer.lexer = this;
     // String with links masked to avoid interference with em and strong
     let maskedSrc = src;
     let match: RegExpExecArray | null = null;

src/Parser.ts (2 additions, 0 deletions)

@@ -40,6 +40,7 @@ export class _Parser<ParserOutput = string, RendererOutput = string> {
    * Parse Loop
    */
   parse(tokens: Token[]): ParserOutput {
+    this.renderer.parser = this;
     let out = '';

     for (let i = 0; i < tokens.length; i++) {
@@ -126,6 +127,7 @@ export class _Parser<ParserOutput = string, RendererOutput = string> {
    * Parse Inline Tokens
    */
   parseInline(tokens: Token[], renderer: _Renderer<ParserOutput, RendererOutput> | _TextRenderer<RendererOutput> = this.renderer): ParserOutput {
+    this.renderer.parser = this;
     let out = '';

     for (let i = 0; i < tokens.length; i++) {

test/unit/Lexer.test.js (23 additions, 0 deletions)

@@ -1929,4 +1929,27 @@ paragraph
     });
   });
 });
+
+describe('multiple instances', () => {
+  it('should produce identical tokens when multiple Lexer instances lex the same input (issue #3854)', () => {
+    // Create the first instance and lex — this may mutate shared _defaults.tokenizer.lexer
+    const lexer1 = new Lexer();
+    const tokensA = lexer1.lex('Test');
+
+    // Create a second instance after the first — previously tokensB.tokens would be empty
+    // because lexer1's constructor overwrote _defaults.tokenizer.lexer to point to lexer1
+    const lexer2 = new Lexer();
+    const tokensB = lexer2.lex('Test');
+
+    assert.deepEqual(tokensA, tokensB);
+  });
+
+  it('should produce identical tokens using static lex and instance lex (issue #3854)', () => {
+    const lexer = new Lexer();
+    const tokensA = Lexer.lex('Test');
+    const tokensB = lexer.lex('Test');
+
+    assert.deepEqual(tokensA, tokensB);
+  });
+});
 });

0 commit comments

Comments (0)