Skip to content

Commit c810cd9

Browse files
committed
Reduce stack size for errors
1 parent 2962bc0 commit c810cd9

File tree

2 files changed

+49
-34
lines changed

2 files changed

+49
-34
lines changed

src/index.spec.ts

Lines changed: 14 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -143,6 +143,20 @@ describe("path-to-regexp", () => {
143143
),
144144
);
145145
});
146+
147+
it("should contain the error line", () => {
148+
expect.hasAssertions();
149+
150+
try {
151+
pathToRegexp("/:");
152+
} catch (error) {
153+
const stack = (error as Error).stack
154+
?.split("\n")
155+
.slice(0, 6)
156+
.join("\n");
157+
expect(stack).toContain("index.spec.ts");
158+
}
159+
});
146160
});
147161

148162
describe.each(PARSER_TESTS)(

src/index.ts

Lines changed: 35 additions & 34 deletions
Original file line number | Diff line number | Diff line change
@@ -125,8 +125,9 @@ function errorMessage(text: string, originalPath: string | undefined) {
125125
/**
126126
* Tokenize input string.
127127
*/
128-
function* lexer(str: string): Generator<LexToken, LexToken> {
128+
function lexer(str: string): Iter {
129129
const chars = [...str];
130+
const tokens: Array<LexToken> = [];
130131
let i = 0;
131132

132133
function name() {
@@ -175,43 +176,44 @@ function* lexer(str: string): Generator<LexToken, LexToken> {
175176
const type = SIMPLE_TOKENS[value];
176177

177178
if (type) {
178-
yield { type, index: i++, value };
179+
tokens.push({ type, index: i++, value });
179180
} else if (value === "\\") {
180-
yield { type: "ESCAPED", index: i++, value: chars[i++] };
181+
tokens.push({ type: "ESCAPED", index: i++, value: chars[i++] });
181182
} else if (value === ":") {
182183
const value = name();
183-
yield { type: "PARAM", index: i, value };
184+
tokens.push({ type: "PARAM", index: i, value });
184185
} else if (value === "*") {
185186
const value = name();
186-
yield { type: "WILDCARD", index: i, value };
187+
tokens.push({ type: "WILDCARD", index: i, value });
187188
} else {
188-
yield { type: "CHAR", index: i, value: chars[i++] };
189+
tokens.push({ type: "CHAR", index: i, value: chars[i++] });
189190
}
190191
}
191192

192-
return { type: "END", index: i, value: "" };
193+
tokens.push({ type: "END", index: i, value: "" });
194+
return new Iter(tokens, str);
193195
}
194196

195197
class Iter {
196-
private _peek?: LexToken;
197-
private _tokens: Generator<LexToken, LexToken>;
198+
private _tokens: Array<LexToken>;
199+
private _index = 0;
198200

199-
constructor(private originalPath: string) {
200-
this._tokens = lexer(originalPath);
201+
constructor(
202+
tokens: Array<LexToken>,
203+
private originalPath: string,
204+
) {
205+
this._index = 0;
206+
this._tokens = tokens;
201207
}
202208

203209
peek(): LexToken {
204-
if (!this._peek) {
205-
const next = this._tokens.next();
206-
this._peek = next.value;
207-
}
208-
return this._peek;
210+
return this._tokens[this._index];
209211
}
210212

211213
tryConsume(type: TokenType): string | undefined {
212214
const token = this.peek();
213215
if (token.type !== type) return;
214-
this._peek = undefined; // Reset after consumed.
216+
this._index++;
215217
return token.value;
216218
}
217219

@@ -299,7 +301,7 @@ export class TokenData {
299301
*/
300302
export function parse(str: string, options: ParseOptions = {}): TokenData {
301303
const { encodePath = NOOP_VALUE } = options;
302-
const it = new Iter(str);
304+
const it = lexer(str);
303305

304306
function consume(endType: TokenType): Token[] {
305307
const tokens: Token[] = [];
@@ -520,7 +522,14 @@ export function pathToRegexp(
520522
} = options;
521523
const keys: Keys = [];
522524
const flags = sensitive ? "" : "i";
523-
const sources = Array.from(toRegExps(path, delimiter, keys, options));
525+
const sources: string[] = [];
526+
527+
for (const input of pathsToArray(path, [])) {
528+
const data = input instanceof TokenData ? input : parse(input, options);
529+
for (const tokens of flatten(data.tokens, 0, [])) {
530+
sources.push(toRegExp(tokens, delimiter, keys, data.originalPath));
531+
}
532+
}
524533

525534
let pattern = `^(?:${sources.join("|")})`;
526535
if (trailing) pattern += `(?:${escape(delimiter)}$)?`;
@@ -531,23 +540,15 @@ export function pathToRegexp(
531540
}
532541

533542
/**
534-
* Path or array of paths to normalize.
543+
* Convert a path or array of paths into a flat array.
535544
*/
536-
function* toRegExps(
537-
path: Path | Path[],
538-
delimiter: string,
539-
keys: Keys,
540-
options: ParseOptions,
541-
): Generator<string> {
542-
if (Array.isArray(path)) {
543-
for (const p of path) yield* toRegExps(p, delimiter, keys, options);
544-
return;
545-
}
546-
547-
const data = path instanceof TokenData ? path : parse(path, options);
548-
for (const tokens of flatten(data.tokens, 0, [])) {
549-
yield toRegExp(tokens, delimiter, keys, data.originalPath);
545+
function pathsToArray(paths: Path | Path[], init: Path[]): Path[] {
546+
if (Array.isArray(paths)) {
547+
for (const p of paths) pathsToArray(p, init);
548+
} else {
549+
init.push(paths);
550550
}
551+
return init;
551552
}
552553

553554
/**

0 commit comments

Comments (0)