@@ -11991,6 +11991,108 @@
         "mode": "chat",
         "supports_tool_choice": true
     },
+    "openrouter/openai/gpt-4.1": {
+        "max_tokens": 32768,
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 2e-06,
+        "output_cost_per_token": 8e-06,
+        "cache_read_input_token_cost": 5e-07,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_response_schema": true,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/openai/gpt-4.1-2025-04-14": {
+        "max_tokens": 32768,
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 2e-06,
+        "output_cost_per_token": 8e-06,
+        "cache_read_input_token_cost": 5e-07,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_response_schema": true,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/openai/gpt-4.1-mini": {
+        "max_tokens": 32768,
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 4e-07,
+        "output_cost_per_token": 1.6e-06,
+        "cache_read_input_token_cost": 1e-07,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_response_schema": true,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/openai/gpt-4.1-mini-2025-04-14": {
+        "max_tokens": 32768,
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 4e-07,
+        "output_cost_per_token": 1.6e-06,
+        "cache_read_input_token_cost": 1e-07,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_response_schema": true,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/openai/gpt-4.1-nano": {
+        "max_tokens": 32768,
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 1e-07,
+        "output_cost_per_token": 4e-07,
+        "cache_read_input_token_cost": 2.5e-08,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_response_schema": true,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
+    "openrouter/openai/gpt-4.1-nano-2025-04-14": {
+        "max_tokens": 32768,
+        "max_input_tokens": 1047576,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 1e-07,
+        "output_cost_per_token": 4e-07,
+        "cache_read_input_token_cost": 2.5e-08,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_response_schema": true,
+        "supports_vision": true,
+        "supports_prompt_caching": true,
+        "supports_system_messages": true,
+        "supports_tool_choice": true
+    },
     "openrouter/openai/gpt-5-mini": {
         "max_tokens": 128000,
         "max_input_tokens": 400000,
@@ -14970,32 +15072,32 @@
         "output_cost_per_token": 6e-06,
         "max_input_tokens": 262000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
-        "supports_tool_choice": false,
+        "supports_tool_choice": true,
         "source": "https://www.together.ai/models/qwen3-235b-a22b-instruct-2507-fp8"
     },
     "together_ai/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": {
         "input_cost_per_token": 2e-06,
         "output_cost_per_token": 2e-06,
         "max_input_tokens": 256000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
-        "supports_tool_choice": false,
+        "supports_tool_choice": true,
         "source": "https://www.together.ai/models/qwen3-coder-480b-a35b-instruct"
     },
     "together_ai/Qwen/Qwen3-235B-A22B-Thinking-2507": {
         "input_cost_per_token": 6.5e-07,
         "output_cost_per_token": 3e-06,
         "max_input_tokens": 256000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
-        "supports_tool_choice": false,
+        "supports_tool_choice": true,
         "source": "https://www.together.ai/models/qwen3-235b-a22b-thinking-2507"
     },
     "together_ai/Qwen/Qwen3-235B-A22B-fp8-tput": {
@@ -15038,10 +15140,10 @@
         "output_cost_per_token": 2.19e-06,
         "max_input_tokens": 128000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
-        "supports_tool_choice": false,
+        "supports_tool_choice": true,
         "source": "https://www.together.ai/models/deepseek-r1-0528-throughput"
     },
     "together_ai/mistralai/Mistral-Small-24B-Instruct-2501": {
@@ -15066,9 +15168,9 @@
         "output_cost_per_token": 6e-07,
         "max_input_tokens": 128000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_tool_choice": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
         "source": "https://www.together.ai/models/gpt-oss-120b"
     },
@@ -15077,9 +15179,9 @@
         "output_cost_per_token": 2e-07,
         "max_input_tokens": 128000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_tool_choice": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
         "source": "https://www.together.ai/models/gpt-oss-20b"
     },
@@ -15088,12 +15190,24 @@
         "output_cost_per_token": 1.1e-06,
         "max_input_tokens": 128000,
         "litellm_provider": "together_ai",
-        "supports_function_calling": false,
-        "supports_tool_choice": false,
-        "supports_parallel_function_calling": false,
+        "supports_function_calling": true,
+        "supports_tool_choice": true,
+        "supports_parallel_function_calling": true,
         "mode": "chat",
         "source": "https://www.together.ai/models/glm-4-5-air"
     },
+    "together_ai/deepseek-ai/DeepSeek-V3.1": {
+        "input_cost_per_token": 0.6e-06,
+        "output_cost_per_token": 1.7e-06,
+        "max_tokens": 128000,
+        "litellm_provider": "together_ai",
+        "supports_function_calling": true,
+        "supports_parallel_function_calling": true,
+        "supports_reasoning": true,
+        "mode": "chat",
+        "supports_tool_choice": true,
+        "source": "https://www.together.ai/models/deepseek-v3-1"
+    },
     "ollama/codegemma": {
         "max_tokens": 8192,
         "max_input_tokens": 8192,
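The new entries express prices as USD per token (`input_cost_per_token`, `output_cost_per_token`, and `cache_read_input_token_cost` for cached prompt tokens). A minimal sketch of how such an entry can be used to estimate the dollar cost of a request is shown below; the prices are copied from the `openrouter/openai/gpt-4.1` entry added in this diff, and `estimate_cost` is a hypothetical helper name, not litellm's own implementation.

```python
# Sketch: estimate request cost from a model-pricing entry like the ones added above.
# All fields are USD per token; "estimate_cost" is a made-up helper for illustration.

PRICES = {
    "openrouter/openai/gpt-4.1": {
        "input_cost_per_token": 2e-06,
        "output_cost_per_token": 8e-06,
        "cache_read_input_token_cost": 5e-07,
    },
}

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int,
                  cached_prompt_tokens: int = 0) -> float:
    """Return the estimated USD cost of one request against the given model."""
    p = PRICES[model]
    uncached = prompt_tokens - cached_prompt_tokens
    return (uncached * p["input_cost_per_token"]
            + cached_prompt_tokens * p["cache_read_input_token_cost"]
            + completion_tokens * p["output_cost_per_token"])

# Example: 10,000 prompt tokens (2,000 served from cache) and 1,000 output tokens:
# 8000*2e-06 + 2000*5e-07 + 1000*8e-06 = 0.025 USD
print(estimate_cost("openrouter/openai/gpt-4.1", 10_000, 1_000, cached_prompt_tokens=2_000))
```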