 
 import { AnthropicHandler } from "../anthropic"
 import { ApiHandlerOptions } from "../../../shared/api"
+import delay from "delay"
 
 const mockCreate = vitest.fn()
+const mockFetch = vitest.fn()
+
+vitest.mock("delay", () => ({
+	default: vitest.fn(() => Promise.resolve()),
+}))
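+// Mocking delay keeps the batch polling loop (which presumably waits between status checks) from sleeping for real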
+
+// Mock global fetch
+global.fetch = mockFetch as any
 
 vitest.mock("@anthropic-ai/sdk", () => {
 	const mockAnthropicConstructor = vitest.fn().mockImplementation(() => ({
@@ -289,4 +298,342 @@ describe("AnthropicHandler", () => {
 			expect(model.info.outputPrice).toBe(22.5)
 		})
 	})
+
+	describe("Batch API", () => {
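+		// Exercises the Message Batches flow (create job, poll status, fetch results) through the mocked global fetch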
+		beforeEach(() => {
+			vitest.clearAllMocks()
+			// Reset fetch mock
+			mockFetch.mockReset()
+		})
+
+		it("should use batch API when anthropicUseBatchApi is enabled", async () => {
+			const handlerWithBatch = new AnthropicHandler({
+				...mockOptions,
+				anthropicUseBatchApi: true,
+			})
+
+			// Mock batch API responses
+			mockFetch
+				// First call: Create batch job
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "processing",
+						created_at: "2024-01-01T00:00:00Z",
+					}),
+				})
+				// Second call: Check job status (still processing)
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "processing",
+						created_at: "2024-01-01T00:00:00Z",
+					}),
+				})
+				// Third call: Check job status (ended)
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "ended",
+						created_at: "2024-01-01T00:00:00Z",
+						ended_at: "2024-01-01T00:00:30Z",
+					}),
+				})
+				// Fourth call: Get results
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						results: [
+							{
+								custom_id: "req_123",
+								result: {
+									type: "succeeded",
+									message: {
+										content: [{ type: "text", text: "Batch response" }],
+										usage: {
+											input_tokens: 100,
+											output_tokens: 50,
+										},
+									},
+								},
+							},
+						],
+					}),
+				})
+
+			const systemPrompt = "You are a helpful assistant"
+			const messages = [{ role: "user" as const, content: "Hello" }]
+
+			const stream = handlerWithBatch.createMessage(systemPrompt, messages)
+
+			const chunks: any[] = []
+			for await (const chunk of stream) {
+				chunks.push(chunk)
+			}
+
+			// Verify batch job was created
+			expect(mockFetch).toHaveBeenCalledWith(
+				expect.stringContaining("/v1/messages/batches"),
+				expect.objectContaining({
+					method: "POST",
+					headers: expect.objectContaining({
+						"Content-Type": "application/json",
+						"x-api-key": mockOptions.apiKey,
+						"anthropic-version": "2023-06-01",
+						"anthropic-beta": "message-batches-2024-09-24",
+					}),
+				}),
+			)
+
+			// Verify polling occurred
+			expect(mockFetch).toHaveBeenCalledWith(
+				expect.stringContaining("/v1/messages/batches/batch-123"),
+				expect.objectContaining({
+					method: "GET",
+				}),
+			)
+
+			// Verify results were retrieved
+			expect(mockFetch).toHaveBeenCalledWith(
+				expect.stringContaining("/v1/messages/batches/batch-123/results"),
+				expect.objectContaining({
+					method: "GET",
+				}),
+			)
+
+			// Verify response content
+			const textChunks = chunks.filter((chunk) => chunk.type === "text")
+			expect(textChunks.some((chunk) => chunk.text.includes("Batch response"))).toBe(true)
+
+			// Verify a usage chunk with cost information is emitted (batch pricing applies a 50% discount)
+			const usageChunk = chunks.find((chunk) => chunk.type === "usage" && chunk.totalCost !== undefined)
+			expect(usageChunk).toBeDefined()
+		})
+
+		it("should handle batch API timeout", async () => {
+			const handlerWithBatch = new AnthropicHandler({
+				...mockOptions,
+				anthropicUseBatchApi: true,
+			})
+
+			// Mock batch job creation
+			mockFetch
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "processing",
+						created_at: "2024-01-01T00:00:00Z",
+					}),
+				})
+				// Keep returning processing status
+				.mockResolvedValue({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "processing",
+						created_at: "2024-01-01T00:00:00Z",
+					}),
+				})
+
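+			// With delay mocked to resolve immediately, jumping the clock 11 minutes per Date.now call
+			// should exceed the 10-minute limit on the first elapsed-time check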
+			// Mock Date.now to simulate timeout
+			const originalDateNow = Date.now
+			let currentTime = originalDateNow()
+			Date.now = vitest.fn(() => {
+				currentTime += 11 * 60 * 1000 // Add 11 minutes each call
+				return currentTime
+			})
+
+			const systemPrompt = "You are a helpful assistant"
+			const messages = [{ role: "user" as const, content: "Hello" }]
+
+			const stream = handlerWithBatch.createMessage(systemPrompt, messages)
+
+			// Expect timeout error
+			await expect(async () => {
+				const chunks: any[] = []
+				for await (const chunk of stream) {
+					chunks.push(chunk)
+				}
+			}).rejects.toThrow("Batch job timed out after 10 minutes")
+
+			// Restore Date.now
+			Date.now = originalDateNow
+		})
+
+		it("should handle batch API failure", async () => {
+			const handlerWithBatch = new AnthropicHandler({
+				...mockOptions,
+				anthropicUseBatchApi: true,
+			})
+
+			// Mock batch job creation
+			mockFetch
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "processing",
+						created_at: "2024-01-01T00:00:00Z",
+					}),
+				})
+				// Return failed status
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "failed",
+						created_at: "2024-01-01T00:00:00Z",
+						error: {
+							type: "api_error",
+							message: "Batch processing failed",
+						},
+					}),
+				})
+
+			const systemPrompt = "You are a helpful assistant"
+			const messages = [{ role: "user" as const, content: "Hello" }]
+
+			const stream = handlerWithBatch.createMessage(systemPrompt, messages)
+
+			// Expect failure error
+			await expect(async () => {
+				const chunks: any[] = []
+				for await (const chunk of stream) {
+					chunks.push(chunk)
+				}
+			}).rejects.toThrow("Batch job failed: Batch processing failed")
+		})
+
+		it("should show progress updates during batch processing", async () => {
+			const handlerWithBatch = new AnthropicHandler({
+				...mockOptions,
+				anthropicUseBatchApi: true,
+			})
+
+			// delay is already mocked at module level; make the polling wait resolve immediately here as well
+			const mockDelay = vitest.mocked(delay)
+			mockDelay.mockResolvedValue(undefined as any)
+
+			let callCount = 0
+			mockFetch
+				// First call: Create batch job
+				.mockResolvedValueOnce({
+					ok: true,
+					json: async () => ({
+						id: "batch-123",
+						status: "processing",
+						created_at: "2024-01-01T00:00:00Z",
+					}),
+				})
+				// Subsequent calls: the once-value above consumes the first call, so this implementation serves the status checks and results
+				.mockImplementation(() => {
+					callCount++
+					if (callCount <= 5) {
+						return Promise.resolve({
+							ok: true,
+							json: async () => ({
+								id: "batch-123",
+								status: "processing",
+								created_at: "2024-01-01T00:00:00Z",
+							}),
+						})
+					} else if (callCount === 6) {
+						return Promise.resolve({
+							ok: true,
+							json: async () => ({
+								id: "batch-123",
+								status: "ended",
+								created_at: "2024-01-01T00:00:00Z",
+								ended_at: "2024-01-01T00:00:30Z",
+							}),
+						})
+					} else {
+						// Results
+						return Promise.resolve({
+							ok: true,
+							json: async () => ({
+								results: [
+									{
+										custom_id: "req_123",
+										result: {
+											type: "succeeded",
+											message: {
+												content: [{ type: "text", text: "Batch response" }],
+												usage: {
+													input_tokens: 100,
+													output_tokens: 50,
+												},
+											},
+										},
+									},
+								],
+							}),
+						})
+					}
+				})
+
+			// Mock Date.now for progress updates
+			const originalDateNow = Date.now
+			let currentTime = originalDateNow()
+			Date.now = vitest.fn(() => {
+				currentTime += 21000 // Add 21 seconds each call to trigger progress updates
+				return currentTime
+			})
+
+			const systemPrompt = "You are a helpful assistant"
+			const messages = [{ role: "user" as const, content: "Hello" }]
+
+			const stream = handlerWithBatch.createMessage(systemPrompt, messages)
+
+			const chunks: any[] = []
+			for await (const chunk of stream) {
+				chunks.push(chunk)
+			}
+
+			// Verify progress messages
+			const textChunks = chunks.filter((chunk) => chunk.type === "text")
+			expect(textChunks.some((chunk) => chunk.text.includes("Creating batch job"))).toBe(true)
+			expect(textChunks.some((chunk) => chunk.text.includes("[Batch API] Processing"))).toBe(true)
+			expect(textChunks.some((chunk) => chunk.text.includes("Retrieving batch results"))).toBe(true)
+
+			// Restore Date.now
+			Date.now = originalDateNow
+		})
+
+		it("should use regular streaming API when batch API is disabled", async () => {
+			const handlerWithoutBatch = new AnthropicHandler({
+				...mockOptions,
+				anthropicUseBatchApi: false,
+			})
+
+			const systemPrompt = "You are a helpful assistant"
+			const messages = [
+				{
+					role: "user" as const,
+					content: [{ type: "text" as const, text: "Hello" }],
+				},
+			]
+
+			const stream = handlerWithoutBatch.createMessage(systemPrompt, messages)
+
+			const chunks: any[] = []
+			for await (const chunk of stream) {
+				chunks.push(chunk)
+			}
+
+			// Should use regular API (mockCreate), not batch API (fetch)
+			expect(mockCreate).toHaveBeenCalled()
+			expect(mockFetch).not.toHaveBeenCalled()
+
+			// Verify regular streaming response
+			const textChunks = chunks.filter((chunk) => chunk.type === "text")
+			expect(textChunks).toHaveLength(2)
+			expect(textChunks[0].text).toBe("Hello")
+			expect(textChunks[1].text).toBe(" world")
+		})
+	})
 })