
Commit 7154cb8

Disable all network connections in jest

1 parent 41dfca2 commit 7154cb8

6 files changed: 129 additions, 3 deletions

scripts/run-tests.js

Lines changed: 2 additions & 2 deletions
@@ -2,7 +2,7 @@
 const { execSync } = require("child_process")
 
 if (process.platform === "win32") {
-	execSync("npm-run-all test:* lint:*", { stdio: "inherit" })
+	execSync("npm-run-all test:*", { stdio: "inherit" })
 } else {
-	execSync("npm-run-all -p test:* lint:*", { stdio: "inherit" })
+	execSync("npm-run-all -p test:*", { stdio: "inherit" })
 }

src/__mocks__/jest.setup.ts

Lines changed: 12 additions & 0 deletions
@@ -1,3 +1,15 @@
+import nock from "nock"
+
+nock.disableNetConnect()
+
+export function allowNetConnect(host?: string | RegExp) {
+	if (host) {
+		nock.enableNetConnect(host)
+	} else {
+		nock.enableNetConnect()
+	}
+}
+
 // Mock the logger globally for all tests
 jest.mock("../utils/logging", () => ({
 	logger: {
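
For context, here is a minimal sketch of how a test could opt back into real network access using the new helper. The import path, host value, and test shape are illustrative assumptions, not part of this commit:

// Hypothetical test file: re-enable connections to one host only;
// everything else stays blocked by nock.disableNetConnect().
import { allowNetConnect } from "../../__mocks__/jest.setup"

describe("integration test against a local server", () => {
	beforeAll(() => {
		allowNetConnect("127.0.0.1") // accepts a string or RegExp host filter
	})

	it("talks to the local server", async () => {
		// ...requests to 127.0.0.1 succeed; all other hosts are still refused
	})
})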

src/api/providers/__tests__/glama.test.ts

Lines changed: 30 additions & 0 deletions
@@ -5,6 +5,36 @@ import { Anthropic } from "@anthropic-ai/sdk"
 import { GlamaHandler } from "../glama"
 import { ApiHandlerOptions } from "../../../shared/api"
 
+// Mock dependencies
+jest.mock("../fetchers/cache", () => ({
+	getModels: jest.fn().mockImplementation(() => {
+		return Promise.resolve({
+			"anthropic/claude-3-7-sonnet": {
+				maxTokens: 8192,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: "Claude 3.7 Sonnet",
+				thinking: false,
+				supportsComputerUse: true,
+			},
+			"openai/gpt-4o": {
+				maxTokens: 4096,
+				contextWindow: 128000,
+				supportsImages: true,
+				supportsPromptCache: false,
+				inputPrice: 5,
+				outputPrice: 15,
+				description: "GPT-4o",
+			},
+		})
+	}),
+}))
+
 // Mock OpenAI client
 const mockCreate = jest.fn()
 const mockWithResponse = jest.fn()
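
The same pattern repeats in the provider tests below: with nock blocking every socket, anything that would normally fetch model metadata over HTTP has to be stubbed. A minimal sketch of how the mocked getModels behaves inside a test; the no-argument call is an illustration only (the real fetcher may take options):

import { getModels } from "../fetchers/cache"

it("resolves model metadata without touching the network", async () => {
	// jest.mock above replaces the real fetcher, so this resolves locally.
	const models = await (getModels as jest.Mock)()
	expect(models["anthropic/claude-3-7-sonnet"].contextWindow).toBe(200000)
})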

src/api/providers/__tests__/openrouter.test.ts

Lines changed: 32 additions & 0 deletions
@@ -9,6 +9,38 @@ import { ApiHandlerOptions } from "../../../shared/api"
 // Mock dependencies
 jest.mock("openai")
 jest.mock("delay", () => jest.fn(() => Promise.resolve()))
+jest.mock("../fetchers/cache", () => ({
+	getModels: jest.fn().mockImplementation(() => {
+		return Promise.resolve({
+			"anthropic/claude-3.7-sonnet": {
+				maxTokens: 8192,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: "Claude 3.7 Sonnet",
+				thinking: false,
+				supportsComputerUse: true,
+			},
+			"anthropic/claude-3.7-sonnet:thinking": {
+				maxTokens: 128000,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: "Claude 3.7 Sonnet with thinking",
+				thinking: true,
+				supportsComputerUse: true,
+			},
+		})
+	}),
+}))
 
 describe("OpenRouterHandler", () => {
 	const mockOptions: ApiHandlerOptions = {

src/api/providers/__tests__/unbound.test.ts

Lines changed: 52 additions & 0 deletions
@@ -6,6 +6,58 @@ import { ApiHandlerOptions } from "../../../shared/api"
 
 import { UnboundHandler } from "../unbound"
 
+// Mock dependencies
+jest.mock("../fetchers/cache", () => ({
+	getModels: jest.fn().mockImplementation(() => {
+		return Promise.resolve({
+			"anthropic/claude-3-5-sonnet-20241022": {
+				maxTokens: 8192,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: "Claude 3.5 Sonnet",
+				thinking: false,
+				supportsComputerUse: true,
+			},
+			"anthropic/claude-3-7-sonnet-20250219": {
+				maxTokens: 8192,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: "Claude 3.7 Sonnet",
+				thinking: false,
+				supportsComputerUse: true,
+			},
+			"openai/gpt-4o": {
+				maxTokens: 4096,
+				contextWindow: 128000,
+				supportsImages: true,
+				supportsPromptCache: false,
+				inputPrice: 5,
+				outputPrice: 15,
+				description: "GPT-4o",
+			},
+			"openai/o3-mini": {
+				maxTokens: 4096,
+				contextWindow: 128000,
+				supportsImages: true,
+				supportsPromptCache: false,
+				inputPrice: 1,
+				outputPrice: 3,
+				description: "O3 Mini",
+			},
+		})
+	}),
+}))
+
 // Mock OpenAI client
 const mockCreate = jest.fn()
 const mockWithResponse = jest.fn()

src/api/providers/glama.ts

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ export class GlamaHandler extends RouterProvider implements SingleCompletionHand
 	constructor(options: ApiHandlerOptions) {
 		super({
 			options,
-			name: "unbound",
+			name: "glama",
 			baseURL: "https://glama.ai/api/gateway/openai/v1",
 			apiKey: options.glamaApiKey,
 			modelId: options.glamaModelId,
