Commit af3adde

Fix tests

1 parent: 58ecbdc

3 files changed: 32 additions and 44 deletions


src/api/providers/__tests__/unbound.test.ts

30 additions, 42 deletions
@@ -21,12 +21,7 @@ jest.mock("openai", () => {
 					[Symbol.asyncIterator]: async function* () {
 						// First chunk with content
 						yield {
-							choices: [
-								{
-									delta: { content: "Test response" },
-									index: 0,
-								},
-							],
+							choices: [{ delta: { content: "Test response" }, index: 0 }],
 						}
 						// Second chunk with usage data
 						yield {
@@ -52,15 +47,14 @@ jest.mock("openai", () => {
 				}

 				const result = mockCreate(...args)
+
 				if (args[0].stream) {
 					mockWithResponse.mockReturnValue(
-						Promise.resolve({
-							data: stream,
-							response: { headers: new Map() },
-						}),
+						Promise.resolve({ data: stream, response: { headers: new Map() } }),
 					)
 					result.withResponse = mockWithResponse
 				}
+
 				return result
 			},
 		},
@@ -75,10 +69,10 @@ describe("UnboundHandler", () => {

 	beforeEach(() => {
 		mockOptions = {
-			apiModelId: "anthropic/claude-3-5-sonnet-20241022",
 			unboundApiKey: "test-api-key",
 			unboundModelId: "anthropic/claude-3-5-sonnet-20241022",
 		}
+
 		handler = new UnboundHandler(mockOptions)
 		mockCreate.mockClear()
 		mockWithResponse.mockClear()
@@ -97,9 +91,9 @@ describe("UnboundHandler", () => {
 	})

 	describe("constructor", () => {
-		it("should initialize with provided options", () => {
+		it("should initialize with provided options", async () => {
 			expect(handler).toBeInstanceOf(UnboundHandler)
-			expect(handler.getModel().id).toBe(mockOptions.apiModelId)
+			expect((await handler.fetchModel()).id).toBe(mockOptions.unboundModelId)
 		})
 	})

@@ -115,24 +109,18 @@ describe("UnboundHandler", () => {
 		it("should handle streaming responses with text and usage data", async () => {
 			const stream = handler.createMessage(systemPrompt, messages)
 			const chunks: Array<{ type: string } & Record<string, any>> = []
+
 			for await (const chunk of stream) {
 				chunks.push(chunk)
 			}

 			expect(chunks.length).toBe(3)

 			// Verify text chunk
-			expect(chunks[0]).toEqual({
-				type: "text",
-				text: "Test response",
-			})
+			expect(chunks[0]).toEqual({ type: "text", text: "Test response" })

 			// Verify regular usage data
-			expect(chunks[1]).toEqual({
-				type: "usage",
-				inputTokens: 10,
-				outputTokens: 5,
-			})
+			expect(chunks[1]).toEqual({ type: "usage", inputTokens: 10, outputTokens: 5 })

 			// Verify usage data with cache information
 			expect(chunks[2]).toEqual({
@@ -149,6 +137,7 @@ describe("UnboundHandler", () => {
 					messages: expect.any(Array),
 					stream: true,
 				}),
+
 				expect.objectContaining({
 					headers: {
 						"X-Unbound-Metadata": expect.stringContaining("roo-code"),
@@ -169,6 +158,7 @@ describe("UnboundHandler", () => {
 				for await (const chunk of stream) {
 					chunks.push(chunk)
 				}
+
 				fail("Expected error to be thrown")
 			} catch (error) {
 				expect(error).toBeInstanceOf(Error)
@@ -181,6 +171,7 @@ describe("UnboundHandler", () => {
 		it("should complete prompt successfully", async () => {
 			const result = await handler.completePrompt("Test prompt")
 			expect(result).toBe("Test response")
+
 			expect(mockCreate).toHaveBeenCalledWith(
 				expect.objectContaining({
 					model: "claude-3-5-sonnet-20241022",
@@ -202,24 +193,22 @@ describe("UnboundHandler", () => {
 		})

 		it("should handle empty response", async () => {
-			mockCreate.mockResolvedValueOnce({
-				choices: [{ message: { content: "" } }],
-			})
+			mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: "" } }] })
 			const result = await handler.completePrompt("Test prompt")
 			expect(result).toBe("")
 		})

 		it("should not set max_tokens for non-Anthropic models", async () => {
 			mockCreate.mockClear()

-			const nonAnthropicOptions = {
+			const nonAnthropicHandler = new UnboundHandler({
 				apiModelId: "openai/gpt-4o",
 				unboundApiKey: "test-key",
 				unboundModelId: "openai/gpt-4o",
-			}
-			const nonAnthropicHandler = new UnboundHandler(nonAnthropicOptions)
+			})

 			await nonAnthropicHandler.completePrompt("Test prompt")
+
 			expect(mockCreate).toHaveBeenCalledWith(
 				expect.objectContaining({
 					model: "gpt-4o",
@@ -232,20 +221,21 @@ describe("UnboundHandler", () => {
 					}),
 				}),
 			)
+
 			expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("max_tokens")
 		})

 		it("should not set temperature for openai/o3-mini", async () => {
 			mockCreate.mockClear()

-			const openaiOptions = {
+			const openaiHandler = new UnboundHandler({
 				apiModelId: "openai/o3-mini",
 				unboundApiKey: "test-key",
 				unboundModelId: "openai/o3-mini",
-			}
-			const openaiHandler = new UnboundHandler(openaiOptions)
+			})

 			await openaiHandler.completePrompt("Test prompt")
+
 			expect(mockCreate).toHaveBeenCalledWith(
 				expect.objectContaining({
 					model: "o3-mini",
@@ -257,24 +247,22 @@ describe("UnboundHandler", () => {
 					}),
 				}),
 			)
+
 			expect(mockCreate.mock.calls[0][0]).not.toHaveProperty("temperature")
 		})
 	})

-	describe("getModel", () => {
-		it("should return model info", () => {
-			const modelInfo = handler.getModel()
-			expect(modelInfo.id).toBe(mockOptions.apiModelId)
+	describe("fetchModel", () => {
+		it("should return model info", async () => {
+			const modelInfo = await handler.fetchModel()
+			expect(modelInfo.id).toBe(mockOptions.unboundModelId)
 			expect(modelInfo.info).toBeDefined()
 		})

-		it("should return default model when invalid model provided", () => {
-			const handlerWithInvalidModel = new UnboundHandler({
-				...mockOptions,
-				unboundModelId: "invalid/model",
-			})
-			const modelInfo = handlerWithInvalidModel.getModel()
-			expect(modelInfo.id).toBe("anthropic/claude-3-5-sonnet-20241022") // Default model
+		it("should return default model when invalid model provided", async () => {
+			const handlerWithInvalidModel = new UnboundHandler({ ...mockOptions, unboundModelId: "invalid/model" })
+			const modelInfo = await handlerWithInvalidModel.fetchModel()
+			expect(modelInfo.id).toBe("anthropic/claude-3-7-sonnet-20250219")
 			expect(modelInfo.info).toBeDefined()
 		})
 	})

src/api/providers/router-provider.ts

1 addition, 1 deletion
@@ -43,7 +43,7 @@ export abstract class RouterProvider extends BaseProvider {
 		this.client = new OpenAI({ baseURL, apiKey })
 	}

-	protected async fetchModel() {
+	public async fetchModel() {
 		this.models = await getModels(this.name)
 		return this.getModel()
 	}
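The only change here is visibility: fetchModel() moves from protected to public, which is what allows the spec above to await it directly on an UnboundHandler instance. A minimal sketch of that call pattern, assuming the Jest setup from the test file (option values are illustrative):

	it("resolves the configured model via fetchModel", async () => {
		// Illustrative options; any valid Unbound key and model id would do.
		const handler = new UnboundHandler({
			unboundApiKey: "test-api-key",
			unboundModelId: "anthropic/claude-3-5-sonnet-20241022",
		})

		// fetchModel() refreshes this.models via getModels(this.name) and then
		// delegates to getModel(); it must be awaited, and it is only callable
		// from outside the class because it is now public.
		const { id, info } = await handler.fetchModel()

		expect(id).toBe("anthropic/claude-3-5-sonnet-20241022")
		expect(info).toBeDefined()
	})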

src/shared/api.ts

1 addition, 1 deletion
@@ -1087,7 +1087,7 @@ export const mistralModels = {

 // Unbound Security
 // https://www.unboundsecurity.ai/ai-gateway
-export const unboundDefaultModelId = "anthropic/claude-3-5-sonnet-20241022"
+export const unboundDefaultModelId = "anthropic/claude-3-7-sonnet-20250219"
 export const unboundDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
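This constant is what the invalid-model test above now expects: when the configured unboundModelId is not present in the fetched model list, the handler falls back to the Unbound default. A rough sketch of that fallback, assuming getModel() resolves ids against the cached list (the helper below is illustrative, not the actual implementation; the import path assumes a caller under src/api/providers):

	import { unboundDefaultModelId, unboundDefaultModelInfo, type ModelInfo } from "../../shared/api"

	// Illustrative helper, not the real getModel(): an id missing from the
	// fetched model list resolves to the Unbound default instead of throwing.
	function resolveUnboundModel(requestedId: string | undefined, models: Record<string, ModelInfo>) {
		if (requestedId && models[requestedId]) {
			return { id: requestedId, info: models[requestedId] }
		}

		return { id: unboundDefaultModelId, info: unboundDefaultModelInfo }
	}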
