Skip to content

Commit 84c1fbf

Browse files
committed
test(llm): add unit tests for tokenReferenceRule functionality
- Introduced tests for the tokenReferenceRule to validate its behavior in converting TokenReference components to markdown tables.
- Added scenarios for handling groups, including cases with known and unknown groups, to ensure robustness of the normalization process.
1 parent b04298a commit 84c1fbf

File tree

1 file changed

+34
-0
lines changed

1 file changed

+34
-0
lines changed
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
import { describe, expect, it } from "bun:test";
import { normalizeLLMBodyWithRules } from "../normalize-llm-body";
import { tokenReferenceRule } from "./token-reference-rule";

// Runs the normalization pipeline with only the token-reference rule active,
// so each test exercises this rule in isolation.
const normalize = (body) => normalizeLLMBodyWithRules(body, [tokenReferenceRule]);

describe("tokenReferenceRule", () => {
  it("converts TokenReference with groups to a markdown table", () => {
    const result = normalize(`<TokenReference groups={["radius"]} />`);

    // A markdown table header, separator row, and radius token names.
    expect(result).toContain("| Token |");
    expect(result).toContain("| --- |");
    expect(result).toContain("$radius.");
  });

  it("converts TokenReference without groups to all token tables with headings", () => {
    const result = normalize("<TokenReference />");

    // With no groups specified, every token group is emitted, each under
    // its own heading.
    expect(result).toContain("## ");
    expect(result).toContain("| Token |");
    expect(result).toContain("$radius.");
    expect(result).toContain("$color.");
  });

  it("keeps the original node when group is unknown", () => {
    const result = normalize(`<TokenReference groups={["nonexistent"]} />`);

    // An unrecognized group must not be silently dropped — the source
    // node is left in place.
    expect(result).toContain("TokenReference");
  });
});

0 commit comments

Comments (0)