
Commit d8c44a4

feat: format test

Author: xiaose (committed)
1 parent 8cd9e53 · commit d8c44a4

File tree: 1 file changed, +9 −14 lines

src/api/providers/__tests__/minimax.spec.ts

Lines changed: 9 additions & 14 deletions
@@ -80,8 +80,8 @@ describe("MiniMaxHandler", () => {
 			const model = handlerWithModel.getModel()
 			expect(model.id).toBe(testModelId)
 			expect(model.info).toEqual(minimaxModels[testModelId])
-			expect(model.info.contextWindow).toBe(1_000_192)
-			expect(model.info.maxTokens).toBe(25_600)
+			expect(model.info.contextWindow).toBe(192_000)
+			expect(model.info.maxTokens).toBe(128_000)
 			expect(model.info.supportsPromptCache).toBe(false)
 		})
 	})
@@ -129,11 +129,6 @@ describe("MiniMaxHandler", () => {
 			expect(model.info).toEqual(minimaxModels[minimaxDefaultModelId])
 		})
 
-		it("should use undefined as default API key when none is specified", () => {
-			new MiniMaxHandler({})
-			expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: undefined }))
-		})
-
 		it("should default to MiniMax-M2 model", () => {
 			const handlerDefault = new MiniMaxHandler({ minimaxApiKey: "test-minimax-api-key" })
 			const model = handlerDefault.getModel()
@@ -236,7 +231,7 @@ describe("MiniMaxHandler", () => {
 				expect.objectContaining({
 					model: modelId,
 					max_tokens: modelInfo.maxTokens,
-					temperature: 0,
+					temperature: 1,
 					messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]),
 					stream: true,
 					stream_options: { include_usage: true },
@@ -245,7 +240,7 @@ describe("MiniMaxHandler", () => {
 			)
 		})
 
-		it("should use temperature 0 by default", async () => {
+		it("should use temperature 1 by default", async () => {
 			mockCreate.mockImplementationOnce(() => {
 				return {
 					[Symbol.asyncIterator]: () => ({
@@ -261,7 +256,7 @@ describe("MiniMaxHandler", () => {
 
 			expect(mockCreate).toHaveBeenCalledWith(
 				expect.objectContaining({
-					temperature: 0,
+					temperature: 1,
 				}),
 				undefined,
 			)
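
The three temperature changes above shift the handler's expected default from 0 to 1. As a minimal sketch (not code from this repository), the fallback the revised tests imply could look like the following; the names resolveTemperature and MINIMAX_DEFAULT_TEMPERATURE are illustrative assumptions:

// Illustrative only: a default temperature of 1 applied when the caller
// configures none, matching the updated expectations. `??` (rather than `||`)
// keeps an explicit 0 from being silently replaced by the default.
const MINIMAX_DEFAULT_TEMPERATURE = 1

function resolveTemperature(configured?: number): number {
	return configured ?? MINIMAX_DEFAULT_TEMPERATURE
}

resolveTemperature() // -> 1 (new default)
resolveTemperature(0) // -> 0 (an explicit setting is still respected)
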
@@ -271,12 +266,12 @@ describe("MiniMaxHandler", () => {
 	describe("Model Configuration", () => {
 		it("should correctly configure MiniMax-M2 model properties", () => {
 			const model = minimaxModels["MiniMax-M2"]
-			expect(model.maxTokens).toBe(25_600)
-			expect(model.contextWindow).toBe(1_000_192)
+			expect(model.maxTokens).toBe(128_000)
+			expect(model.contextWindow).toBe(192_000)
 			expect(model.supportsImages).toBe(false)
 			expect(model.supportsPromptCache).toBe(false)
-			expect(model.inputPrice).toBe(0.4)
-			expect(model.outputPrice).toBe(2.2)
+			expect(model.inputPrice).toBe(0.3)
+			expect(model.outputPrice).toBe(1.2)
 		})
 	})
})
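
Read together, the assertions in the last hunk describe the MiniMax-M2 entry the tests now expect from minimaxModels. A hedged reconstruction is below; only the field values come from this commit, while the ModelInfo interface name, its shape, and the per-million-token pricing unit are assumptions:

// Sketch of the model entry implied by the updated assertions in this commit.
// Field values are taken from the tests; the type name and units are assumed.
interface ModelInfo {
	maxTokens: number
	contextWindow: number
	supportsImages: boolean
	supportsPromptCache: boolean
	inputPrice: number // assumed: USD per million input tokens
	outputPrice: number // assumed: USD per million output tokens
}

const miniMaxM2: ModelInfo = {
	maxTokens: 128_000,
	contextWindow: 192_000,
	supportsImages: false,
	supportsPromptCache: false,
	inputPrice: 0.3,
	outputPrice: 1.2,
}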
