Skip to content

Commit 008b62e

Browse files
daniel-lxs and chrarnoldus
authored and committed
Fix LiteLLM test failures after merge (RooCodeInc#8870)
* Use monotonic clock for rate limiting * Fix LiteLLM test failures after merge - Remove supportsComputerUse from LiteLLM implementation as it's no longer part of ModelInfo interface - Update test expectations to include cacheWritesPrice and cacheReadsPrice fields - Fix test for max_output_tokens preference functionality --------- Co-authored-by: Christiaan Arnoldus <[email protected]>
1 parent ebac08e commit 008b62e

File tree

2 files changed

+6
-4
lines changed

2 files changed

+6
-4
lines changed

src/api/providers/fetchers/__tests__/litellm.spec.ts

Lines changed: 6 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -645,10 +645,11 @@ describe("getLiteLLMModels", () => {
645645
maxTokens: 64000,
646646
contextWindow: 200000,
647647
supportsImages: true,
648-
supportsComputerUse: true,
649648
supportsPromptCache: false,
650649
inputPrice: undefined,
651650
outputPrice: undefined,
651+
cacheWritesPrice: undefined,
652+
cacheReadsPrice: undefined,
652653
description: "claude-3-5-sonnet-4-5 via LiteLLM proxy",
653654
})
654655

@@ -657,10 +658,11 @@ describe("getLiteLLMModels", () => {
657658
maxTokens: 8192,
658659
contextWindow: 128000,
659660
supportsImages: false,
660-
supportsComputerUse: false,
661661
supportsPromptCache: false,
662662
inputPrice: undefined,
663663
outputPrice: undefined,
664+
cacheWritesPrice: undefined,
665+
cacheReadsPrice: undefined,
664666
description: "model-with-only-max-tokens via LiteLLM proxy",
665667
})
666668

@@ -669,10 +671,11 @@ describe("getLiteLLMModels", () => {
669671
maxTokens: 16384,
670672
contextWindow: 100000,
671673
supportsImages: false,
672-
supportsComputerUse: false,
673674
supportsPromptCache: false,
674675
inputPrice: undefined,
675676
outputPrice: undefined,
677+
cacheWritesPrice: undefined,
678+
cacheReadsPrice: undefined,
676679
description: "model-with-only-max-output-tokens via LiteLLM proxy",
677680
})
678681
})

src/api/providers/fetchers/litellm.ts

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -44,7 +44,6 @@ export async function getLiteLLMModels(apiKey: string, baseUrl: string): Promise
4444
maxTokens: modelInfo.max_output_tokens || modelInfo.max_tokens || 8192,
4545
contextWindow: modelInfo.max_input_tokens || 200000,
4646
supportsImages: Boolean(modelInfo.supports_vision),
47-
// litellm_params.model may have a prefix like openrouter/
4847
supportsPromptCache: Boolean(modelInfo.supports_prompt_caching),
4948
inputPrice: modelInfo.input_cost_per_token ? modelInfo.input_cost_per_token * 1000000 : undefined,
5049
outputPrice: modelInfo.output_cost_per_token

0 commit comments

Comments (0)