@@ -66,31 +66,15 @@ export async function getRequestyModels() {
 				maxTokens: rawModel.max_output_tokens,
 				contextWindow: rawModel.context_window,
 				supportsPromptCache: rawModel.supports_caching,
+				supportsImages: rawModel.supports_vision,
+				supportsComputerUse: rawModel.supports_computer_use,
 				inputPrice: parseApiPrice(rawModel.input_price),
 				outputPrice: parseApiPrice(rawModel.output_price),
 				description: rawModel.description,
 				cacheWritesPrice: parseApiPrice(rawModel.caching_price),
 				cacheReadsPrice: parseApiPrice(rawModel.cached_price),
 			}

-			switch (rawModel.id) {
-				case rawModel.id.startsWith("anthropic/claude-3-7-sonnet"):
-					modelInfo.supportsComputerUse = true
-					modelInfo.supportsImages = true
-					modelInfo.maxTokens = 16384
-					break
-				case rawModel.id.startsWith("anthropic/claude-3-5-sonnet-20241022"):
-					modelInfo.supportsComputerUse = true
-					modelInfo.supportsImages = true
-					modelInfo.maxTokens = 8192
-					break
-				case rawModel.id.startsWith("anthropic/"):
-					modelInfo.maxTokens = 8192
-					break
-				default:
-					break
-			}
-
 			models[rawModel.id] = modelInfo
 		}
 	} catch (error) {
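For reference, here is a self-contained sketch of the mapping this hunk settles on: every capability flag (supportsImages, supportsComputerUse, supportsPromptCache) is read directly from the provider's model listing instead of being patched afterwards per model ID. The RequestyRawModel and ModelInfo shapes and the parseApiPrice stand-in below are assumptions reconstructed from the field names visible in the diff, not the project's actual definitions.

// Hypothetical shapes inferred from the field names in this diff; the
// repository's real definitions may differ.
interface RequestyRawModel {
	id: string
	max_output_tokens: number
	context_window: number
	supports_caching: boolean
	supports_vision: boolean
	supports_computer_use: boolean
	input_price?: number | string
	output_price?: number | string
	caching_price?: number | string
	cached_price?: number | string
	description: string
}

interface ModelInfo {
	maxTokens: number
	contextWindow: number
	supportsPromptCache: boolean
	supportsImages: boolean
	supportsComputerUse: boolean
	inputPrice?: number
	outputPrice?: number
	cacheWritesPrice?: number
	cacheReadsPrice?: number
	description: string
}

// Assumed stand-in for the project's parseApiPrice helper: coerces the API's
// price value to a number, or returns undefined when it is absent or invalid.
function parseApiPrice(price?: number | string): number | undefined {
	if (price === undefined || price === null) return undefined
	const parsed = Number(price)
	return Number.isFinite(parsed) ? parsed : undefined
}

// Map one raw Requesty model entry to ModelInfo. All capability flags come
// straight from the listing response, so no per-model-ID switch is needed.
function toModelInfo(rawModel: RequestyRawModel): ModelInfo {
	return {
		maxTokens: rawModel.max_output_tokens,
		contextWindow: rawModel.context_window,
		supportsPromptCache: rawModel.supports_caching,
		supportsImages: rawModel.supports_vision,
		supportsComputerUse: rawModel.supports_computer_use,
		inputPrice: parseApiPrice(rawModel.input_price),
		outputPrice: parseApiPrice(rawModel.output_price),
		description: rawModel.description,
		cacheWritesPrice: parseApiPrice(rawModel.caching_price),
		cacheReadsPrice: parseApiPrice(rawModel.cached_price),
	}
}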