Skip to content

Commit 53e4c08

Browse files
committed
test: migrate unit tests from bun to jest

Standardize test runner to jest across CI and local development:

- Update CI workflow to use jest for coverage reporting
- Update Makefile test targets to use jest
- Update tokenizer worker to use node:assert (jest-compatible)
- Remove async/await from tokenizer.loadTokenizerModules (Promise.allSettled already returns a Promise)
- Change test model to gpt-5 for future-proofing

This ensures consistent test execution between local and CI environments and prepares for better coverage reporting.
1 parent c92b633 commit 53e4c08

File tree

9 files changed

+21
-36
lines changed

9 files changed

+21
-36
lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ jobs:
7575
- uses: ./.github/actions/setup-cmux
7676

7777
- name: Run tests with coverage
78-
run: bun test --coverage --coverage-reporter=lcov ${{ github.event.inputs.test_filter || 'src' }}
78+
run: bun x jest --coverage --coverageReporters=lcov ${{ github.event.inputs.test_filter || 'src' }}
7979

8080
- name: Upload coverage to Codecov
8181
uses: codecov/codecov-action@v5

Makefile

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -204,11 +204,11 @@ check-deadcode: node_modules/.installed ## Check for potential dead code (manual
204204

205205
## Testing
206206
test-integration: node_modules/.installed ## Run all tests (unit + integration)
207-
@bun test src
207+
@bun x jest src
208208
@TEST_INTEGRATION=1 bun x jest tests
209209

210-
test-unit: node_modules/.installed ## Run unit tests
211-
@bun test src
210+
test-unit: node_modules/.installed build-main ## Run unit tests (requires build-main for worker compilation)
211+
@bun x jest src
212212

213213
test: test-unit ## Alias for test-unit
214214

src/utils/main/tokenizer.test.ts

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { beforeEach, describe, expect, jest, test } from "@jest/globals";
1+
import { beforeAll, beforeEach, describe, expect, jest, test } from "@jest/globals";
22

33
import {
44
__resetTokenizerForTests,
@@ -10,22 +10,24 @@ import {
1010

1111
jest.setTimeout(20000);
1212

13+
const model = "openai:gpt-5";
14+
beforeAll(async () => {
15+
// warm up the worker_thread and tokenizer before running tests
16+
await expect(loadTokenizerModules([model])).resolves.toHaveLength(1);
17+
});
18+
1319
beforeEach(() => {
1420
__resetTokenizerForTests();
1521
});
1622

1723
describe("tokenizer", () => {
18-
const model = "openai:gpt-4-turbo";
19-
2024
test("loadTokenizerModules warms known encodings", async () => {
21-
await expect(loadTokenizerModules([model])).resolves.toBeUndefined();
2225
const tokenizer = await getTokenizerForModel(model);
2326
expect(typeof tokenizer.encoding).toBe("string");
2427
expect(tokenizer.encoding.length).toBeGreaterThan(0);
2528
});
2629

2730
test("countTokens returns stable values", async () => {
28-
await loadTokenizerModules([model]);
2931
const text = "cmux-tokenizer-smoke-test";
3032
const first = await countTokens(model, text);
3133
const second = await countTokens(model, text);
@@ -34,7 +36,6 @@ describe("tokenizer", () => {
3436
});
3537

3638
test("countTokensBatch matches individual calls", async () => {
37-
await loadTokenizerModules([model]);
3839
const texts = ["alpha", "beta", "gamma"];
3940
const batch = await countTokensBatch(model, texts);
4041
expect(batch).toHaveLength(texts.length);

src/utils/main/tokenizer.ts

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ function normalizeModelKey(modelName: string): ModelName {
5656
return normalized as ModelName;
5757
}
5858

59-
async function resolveEncoding(modelName: ModelName): Promise<string> {
59+
function resolveEncoding(modelName: ModelName): Promise<string> {
6060
let promise = encodingPromises.get(modelName);
6161
if (!promise) {
6262
promise = tokenizerPool
@@ -117,17 +117,13 @@ async function countTokensInternal(modelName: ModelName, text: string): Promise<
117117
return pending;
118118
}
119119

120-
export async function loadTokenizerModules(
120+
export function loadTokenizerModules(
121121
modelsToWarm: string[] = Array.from(DEFAULT_WARM_MODELS)
122-
): Promise<void> {
123-
await Promise.allSettled(
124-
modelsToWarm.map(async (modelString) => {
125-
try {
126-
const modelName = normalizeModelKey(modelString);
127-
await resolveEncoding(modelName);
128-
} catch (error) {
129-
console.warn(`[tokenizer] Failed to warm encoding for '${modelString}':`, error);
130-
}
122+
): Promise<Array<PromiseSettledResult<string>>> {
123+
return Promise.allSettled(
124+
modelsToWarm.map((modelString) => {
125+
const modelName = normalizeModelKey(modelString);
126+
return resolveEncoding(modelName);
131127
})
132128
);
133129
}

src/utils/main/tokenizer.worker.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import assert from "@/utils/assert";
1+
import assert from "node:assert";
22
import { Tokenizer, models } from "ai-tokenizer";
33
import type { ModelName } from "ai-tokenizer";
44
import * as encoding from "ai-tokenizer/encoding";

tests/ipcMain/forkWorkspace.test.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@ import {
1717
import { detectDefaultTrunkBranch } from "../../src/git";
1818
import { HistoryService } from "../../src/services/historyService";
1919
import { createCmuxMessage } from "../../src/types/message";
20-
import * as path from "path";
2120

2221
// Skip all tests if TEST_INTEGRATION is not set
2322
const describeIntegration = shouldRunIntegrationTests() ? describe : describe.skip;

tests/ipcMain/openai-web-search.test.ts

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,4 @@
1-
import {
2-
setupWorkspace,
3-
shouldRunIntegrationTests,
4-
validateApiKeys,
5-
type TestEnvironment,
6-
} from "./setup";
1+
import { setupWorkspace, shouldRunIntegrationTests, validateApiKeys } from "./setup";
72
import { sendMessageWithModel, createEventCollector, assertStreamSuccess } from "./helpers";
83

94
// Skip all tests if TEST_INTEGRATION is not set

tests/ipcMain/resumeStream.test.ts

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,4 @@
1-
import {
2-
setupWorkspace,
3-
shouldRunIntegrationTests,
4-
validateApiKeys,
5-
type TestEnvironment,
6-
} from "./setup";
1+
import { setupWorkspace, shouldRunIntegrationTests, validateApiKeys } from "./setup";
72
import { sendMessageWithModel, createEventCollector, waitFor } from "./helpers";
83
import { IPC_CHANNELS } from "../../src/constants/ipc-constants";
94
import type { Result } from "../../src/types/result";

tests/ipcMain/sendMessage.test.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@ import {
55
setupWorkspaceWithoutProvider,
66
shouldRunIntegrationTests,
77
validateApiKeys,
8-
type TestEnvironment,
98
} from "./setup";
109
import {
1110
sendMessageWithModel,

0 commit comments

Comments (0)