@@ -1,6 +1,5 @@
 import axios from "axios"
 import { getLiteLLMModels } from "../litellm"
-import { OPEN_ROUTER_COMPUTER_USE_MODELS } from "../../../../shared/api"
 
 // Mock axios
 jest.mock("axios")
@@ -26,6 +25,7 @@ describe("getLiteLLMModels", () => {
 							supports_prompt_caching: false,
 							input_cost_per_token: 0.000003,
 							output_cost_per_token: 0.000015,
+							supports_computer_use: true,
 						},
 						litellm_params: {
 							model: "anthropic/claude-3.5-sonnet",
@@ -40,6 +40,7 @@ describe("getLiteLLMModels", () => {
 							supports_prompt_caching: false,
 							input_cost_per_token: 0.00001,
 							output_cost_per_token: 0.00003,
+							supports_computer_use: false,
 						},
 						litellm_params: {
 							model: "openai/gpt-4-turbo",
@@ -105,7 +106,6 @@ describe("getLiteLLMModels", () => {
 	})
 
 	it("handles computer use models correctly", async () => {
-		const computerUseModel = Array.from(OPEN_ROUTER_COMPUTER_USE_MODELS)[0]
 		const mockResponse = {
 			data: {
 				data: [
@@ -115,9 +115,22 @@ describe("getLiteLLMModels", () => {
 							max_tokens: 4096,
 							max_input_tokens: 200000,
 							supports_vision: true,
+							supports_computer_use: true,
 						},
 						litellm_params: {
-							model: `anthropic/${computerUseModel}`,
+							model: `anthropic/test-computer-model`,
+						},
+					},
+					{
+						model_name: "test-non-computer-model",
+						model_info: {
+							max_tokens: 4096,
+							max_input_tokens: 200000,
+							supports_vision: false,
+							supports_computer_use: false,
+						},
+						litellm_params: {
+							model: `anthropic/test-non-computer-model`,
 						},
 					},
 				],
@@ -138,6 +151,17 @@ describe("getLiteLLMModels", () => {
 			outputPrice: undefined,
 			description: "test-computer-model via LiteLLM proxy",
 		})
+
+		expect(result["test-non-computer-model"]).toEqual({
+			maxTokens: 4096,
+			contextWindow: 200000,
+			supportsImages: false,
+			supportsComputerUse: false,
+			supportsPromptCache: false,
+			inputPrice: undefined,
+			outputPrice: undefined,
+			description: "test-non-computer-model via LiteLLM proxy",
+		})
 	})
 
 	it("throws error for unexpected response format", async () => {
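
For context on what the new assertions exercise: the updated test expects getLiteLLMModels to read the supports_computer_use flag from each entry's model_info and surface it as supportsComputerUse on the returned record, rather than matching model names against OPEN_ROUTER_COMPUTER_USE_MODELS. The sketch below illustrates that mapping under stated assumptions: the /model/info path, the auth header, the interface shapes, and the fetchLiteLLMModels name are hypothetical and not taken from the actual implementation in ../litellm; price mapping is omitted because the assertions shown here only expect undefined prices.

import axios from "axios"

// Hypothetical shape of one entry in the LiteLLM proxy's model listing (assumption).
interface LiteLLMEntry {
	model_name: string
	model_info: {
		max_tokens?: number
		max_input_tokens?: number
		supports_vision?: boolean
		supports_prompt_caching?: boolean
		supports_computer_use?: boolean
	}
	litellm_params: { model: string }
}

// Shape the tests above assert on (field names taken from the expectations).
interface ModelRecord {
	maxTokens?: number
	contextWindow?: number
	supportsImages: boolean
	supportsComputerUse: boolean
	supportsPromptCache: boolean
	inputPrice?: number
	outputPrice?: number
	description: string
}

// Sketch: fetch the model list and pass supports_computer_use straight
// through to supportsComputerUse; endpoint and header are assumptions.
async function fetchLiteLLMModels(baseUrl: string, apiKey: string): Promise<Record<string, ModelRecord>> {
	const response = await axios.get(`${baseUrl}/model/info`, {
		headers: { Authorization: `Bearer ${apiKey}` },
	})
	const models: Record<string, ModelRecord> = {}
	for (const entry of (response.data?.data ?? []) as LiteLLMEntry[]) {
		const info = entry.model_info
		models[entry.model_name] = {
			maxTokens: info.max_tokens,
			contextWindow: info.max_input_tokens,
			supportsImages: Boolean(info.supports_vision),
			supportsComputerUse: Boolean(info.supports_computer_use),
			supportsPromptCache: Boolean(info.supports_prompt_caching),
			inputPrice: undefined, // price mapping omitted in this sketch
			outputPrice: undefined,
			description: `${entry.model_name} via LiteLLM proxy`,
		}
	}
	return models
}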