
Commit b0c99b4

fix(message-compiler): linked modifier breaks with parenthesis (#1506)
* fix(message-compiler): modifier breaks with parenthesis
* update
* update lock file
* drop rollup cjs hack
* update deps
1 parent daf9603 commit b0c99b4

File tree

6 files changed (+943, -2032 lines)


package.json

Lines changed: 1 addition & 1 deletion

@@ -84,7 +84,7 @@
     "@types/node": "^18.16.18",
     "@typescript-eslint/eslint-plugin": "^6.0.0",
     "@typescript-eslint/parser": "^6.0.0",
-    "@vitest/coverage-c8": "^0.33.0",
+    "@vitest/coverage-v8": "^0.34.3",
     "api-docs-gen": "^0.4.0",
     "benchmark": "^2.1.4",
     "brotli": "^1.3.2",

packages/message-compiler/src/tokenizer.ts

Lines changed: 4 additions & 1 deletion

@@ -35,6 +35,7 @@ const enum TokenChars {
 }

 const EOF = undefined
+const DOT = '.'
 const LITERAL_DELIMITER = "'"
 export const ERROR_DOMAIN = 'tokenizer'

@@ -659,10 +660,12 @@ export function createTokenizer(
         return buf
       } else if (ch === SPACE) {
         return buf
-      } else if (ch === NEW_LINE) {
+      } else if (ch === NEW_LINE || ch === DOT) {
         buf += ch
         scnr.next()
         return fn(detect, buf)
+      } else if (!isIdentifierStart(ch)) {
+        return buf
       } else {
         buf += ch
         scnr.next()
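Before this change, the scanner reading a linked modifier/key only stopped at EOF, a space, or a newline, so a trailing `)` was appended to the buffer. The new `!isIdentifierStart(ch)` branch returns the buffer as soon as the character can no longer belong to an identifier, and `DOT` joins `NEW_LINE` as a consumed-then-rescan character. A standalone sketch of the terminator logic (the `take` helper and the regex are illustrative; the real code threads a scanner and a `detect` callback through `fn`):

```ts
// Illustrative predicate; the real isIdentifierStart may accept a
// different character set.
const isIdentifierStart = (ch: string): boolean => /[a-zA-Z_]/.test(ch)

// take() mirrors the patched branch order: EOF/space end the buffer,
// '\n' and '.' are consumed and scanning continues, and any other
// non-identifier character (')', '}', ...) now ends the buffer too.
function take(input: string, pos: number, buf = ''): string {
  const ch = input[pos]
  if (ch === undefined || ch === ' ') {
    return buf
  } else if (ch === '\n' || ch === '.') {
    return take(input, pos + 1, buf + ch)
  } else if (!isIdentifierStart(ch)) {
    return buf // new branch: ')' falls through to plain text
  } else {
    return take(input, pos + 1, buf + ch)
  }
}

console.log(take('foo) bar', 0)) // "foo" — previously the ')' was swallowed
```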

packages/message-compiler/test/tokenizer/linked.test.ts

Lines changed: 150 additions & 0 deletions

@@ -553,6 +553,156 @@ test('multiple', () => {
   })
 })

+test('wrap with paren', () => {
+  const tokenizer = createTokenizer(`Welcome (@.upper:foo)`)
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.Text,
+    value: 'Welcome (',
+    loc: {
+      start: { line: 1, column: 1, offset: 0 },
+      end: { line: 1, column: 10, offset: 9 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedAlias,
+    value: '@',
+    loc: {
+      start: { line: 1, column: 10, offset: 9 },
+      end: { line: 1, column: 11, offset: 10 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedDot,
+    value: '.',
+    loc: {
+      start: { line: 1, column: 11, offset: 10 },
+      end: { line: 1, column: 12, offset: 11 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedModifier,
+    value: 'upper',
+    loc: {
+      start: { line: 1, column: 12, offset: 11 },
+      end: { line: 1, column: 17, offset: 16 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedDelimiter,
+    value: ':',
+    loc: {
+      start: { line: 1, column: 17, offset: 16 },
+      end: { line: 1, column: 18, offset: 17 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedKey,
+    value: 'foo',
+    loc: {
+      start: { line: 1, column: 18, offset: 17 },
+      end: { line: 1, column: 21, offset: 20 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.Text,
+    value: ')',
+    loc: {
+      start: { line: 1, column: 21, offset: 20 },
+      end: { line: 1, column: 22, offset: 21 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.EOF,
+    loc: {
+      start: { line: 1, column: 22, offset: 21 },
+      end: { line: 1, column: 22, offset: 21 }
+    }
+  })
+})
+
+test('wrap with paren, inside brace', () => {
+  const tokenizer = createTokenizer(`Welcome (@.upper:{param} )`)
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.Text,
+    value: 'Welcome (',
+    loc: {
+      start: { line: 1, column: 1, offset: 0 },
+      end: { line: 1, column: 10, offset: 9 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedAlias,
+    value: '@',
+    loc: {
+      start: { line: 1, column: 10, offset: 9 },
+      end: { line: 1, column: 11, offset: 10 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedDot,
+    value: '.',
+    loc: {
+      start: { line: 1, column: 11, offset: 10 },
+      end: { line: 1, column: 12, offset: 11 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedModifier,
+    value: 'upper',
+    loc: {
+      start: { line: 1, column: 12, offset: 11 },
+      end: { line: 1, column: 17, offset: 16 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.LinkedDelimiter,
+    value: ':',
+    loc: {
+      start: { line: 1, column: 17, offset: 16 },
+      end: { line: 1, column: 18, offset: 17 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.BraceLeft,
+    value: '{',
+    loc: {
+      start: { line: 1, column: 18, offset: 17 },
+      end: { line: 1, column: 19, offset: 18 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.Named,
+    value: 'param',
+    loc: {
+      start: { line: 1, column: 19, offset: 18 },
+      end: { line: 1, column: 24, offset: 23 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.BraceRight,
+    value: '}',
+    loc: {
+      start: { line: 1, column: 24, offset: 23 },
+      end: { line: 1, column: 25, offset: 24 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.Text,
+    value: ' )',
+    loc: {
+      start: { line: 1, column: 25, offset: 24 },
+      end: { line: 1, column: 27, offset: 26 }
+    }
+  })
+  expect(tokenizer.nextToken()).toEqual({
+    type: TokenTypes.EOF,
+    loc: {
+      start: { line: 1, column: 27, offset: 26 },
+      end: { line: 1, column: 27, offset: 26 }
+    }
+  })
+})
+
 describe('errors', () => {
   let errors: CompileError[], options: TokenizeOptions
   beforeEach(() => {
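For a quick manual check of the fixed behavior, the whole token stream can be dumped in a loop (a sketch assuming `createTokenizer` and `TokenTypes` are in scope as in this test file):

```ts
const tokenizer = createTokenizer(`Welcome (@.upper:foo)`)
let token = tokenizer.nextToken()
while (token.type !== TokenTypes.EOF) {
  // With this fix the trailing ')' comes back as a Text token instead of
  // being absorbed into the LinkedKey.
  console.log(token.type, JSON.stringify(token.value))
  token = tokenizer.nextToken()
}
```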
