-
Notifications
You must be signed in to change notification settings - Fork 339
add models (Minimax M2.1, Bytedance, Mistral) #926
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -93,9 +93,16 @@ class ModelName(str, Enum): | |||||||||
| phi_4_5p6b = "phi_4_5p6b" | ||||||||||
| phi_4_mini = "phi_4_mini" | ||||||||||
| mistral_large = "mistral_large" | ||||||||||
| mistral_3_large_2512 = "mistral_3_large_2512" | ||||||||||
| mistral_nemo = "mistral_nemo" | ||||||||||
| mistral_small_3 = "mistral_small_3" | ||||||||||
| mistral_medium_3_1 = "mistral_medium_3_1" | ||||||||||
| mistral_small_creative = ( | ||||||||||
| "mistral_small_creative" # mistralai/mistral-small-creative | ||||||||||
| ) | ||||||||||
| ministral_3_14b_2512 = "ministral_3_14b_2512" | ||||||||||
| ministral_3_8b_2512 = "ministral_3_8b_2512" | ||||||||||
| ministral_3_3b_2512 = "ministral_3_3b_2512" | ||||||||||
| magistral_medium = "magistral_medium" | ||||||||||
| magistral_medium_thinking = "magistral_medium_thinking" | ||||||||||
| gemma_2_2b = "gemma_2_2b" | ||||||||||
|
|
@@ -209,8 +216,11 @@ class ModelName(str, Enum): | |||||||||
| hunyuan_a13b_no_thinking = "hunyuan_a13b_no_thinking" | ||||||||||
| minimax_m1_80k = "minimax_m1_80k" | ||||||||||
| minimax_m2 = "minimax_m2" | ||||||||||
| minimax_m2_1 = "minimax_m2_1" | ||||||||||
| pangu_pro_moe_72b_a16b = "pangu_pro_moe_72b_a16b" | ||||||||||
| bytedance_seed_oss_36b = "bytedance_seed_oss_36b" | ||||||||||
| bytedance_seed_1_6 = "bytedance_seed_1_6" | ||||||||||
| bytedance_seed_1_6_flash = "bytedance_seed_1_6_flash" | ||||||||||
| stepfun_step3 = "stepfun_step3" | ||||||||||
|
|
||||||||||
|
|
||||||||||
|
|
@@ -2320,6 +2330,64 @@ class KilnModel(BaseModel): | |||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Mistral Small Creative | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
| name=ModelName.mistral_small_creative, | ||||||||||
| friendly_name="Mistral Small Creative", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="mistralai/mistral-small-creative", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_instruction_and_object, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Ministral 3 14B 2512 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
| name=ModelName.ministral_3_14b_2512, | ||||||||||
| friendly_name="Ministral 3 14B 2512", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="mistralai/ministral-14b-2512", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| ), | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.together_ai, | ||||||||||
| model_id="mistralai/Ministral-3-14B-Instruct-2512", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_instruction_and_object, | ||||||||||
| supports_function_calling=False, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Ministral 3 8B 2512 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
| name=ModelName.ministral_3_8b_2512, | ||||||||||
| friendly_name="Ministral 3 8B 2512", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="mistralai/ministral-8b-2512", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Ministral 3 3B 2512 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
| name=ModelName.ministral_3_3b_2512, | ||||||||||
| friendly_name="Ministral 3 3B 2512", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="mistralai/ministral-3b-2512", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
|
Comment on lines
+2333
to
+2390
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 🧩 Analysis chain — 🏁 Script executed: # Find the StructuredOutputMode enum definition and adapter implementations
rg -n "class StructuredOutputMode" --type=py
fd -e py -path "*adapter*" | head -20Repository: Kiln-AI/Kiln Length of output: 330 🏁 Script executed: # Check OpenRouter and Together AI adapter implementations
rg -n "openrouter|together_ai" --type=py libs/core/kiln_ai/adapters/ -lRepository: Kiln-AI/Kiln Length of output: 1101 🏁 Script executed: # Look for how structured_output_mode is handled in adapters
rg -n "structured_output_mode" --type=py libs/core/kiln_ai/adapters/ -A 3Repository: Kiln-AI/Kiln Length of output: 50369 🏁 Script executed: # Get the StructuredOutputMode enum definition
cat -n libs/core/kiln_ai/datamodel/datamodel_enums.py | head -50Repository: Kiln-AI/Kiln Length of output: 2659 🏁 Script executed: # Find OpenRouter and Together AI adapter implementations
find libs/core/kiln_ai/adapters -name "*openrouter*" -o -name "*together*"Repository: Kiln-AI/Kiln Length of output: 176 🏁 Script executed: # Search for how structured_output_mode is processed in adapters
rg "def.*structured_output_mode|json_schema|json_instruction_and_object" libs/core/kiln_ai/adapters/model_adapters/ -A 3 -B 1Repository: Kiln-AI/Kiln Length of output: 35341 🏁 Script executed: # Check lines 2333-2390 and nearby to see what models are being added
sed -n '2333,2390p' libs/core/kiln_ai/adapters/ml_model_list.pyRepository: Kiln-AI/Kiln Length of output: 2113 🏁 Script executed: # Check lines 2450-2462 mentioned in the comment
sed -n '2450,2462p' libs/core/kiln_ai/adapters/ml_model_list.pyRepository: Kiln-AI/Kiln Length of output: 487 🏁 Script executed: # Search for provider-specific validation or mode handling
rg "openrouter|together_ai" libs/core/kiln_ai/adapters/model_adapters/ -B 2 -A 5 | head -100Repository: Kiln-AI/Kiln Length of output: 10068 🏁 Script executed: # Check if there's any validation that restricts modes per provider
rg "structured_output_mode.*openrouter|openrouter.*structured_output_mode|together_ai.*structured_output_mode" -iRepository: Kiln-AI/Kiln Length of output: 38 🏁 Script executed: # Look for any tests that verify OpenRouter or Together AI support
rg "openrouter|together_ai" libs/core/kiln_ai/adapters/test_ml_model_list.py -B 3 -A 3Repository: Kiln-AI/Kiln Length of output: 2317 🏁 Script executed: # Check for any special handling or documented restrictions for OpenRouter json_schema
rg "json_schema.*openrouter|openrouter.*json_schema" -i libs/core/Repository: Kiln-AI/Kiln Length of output: 257 🏁 Script executed: # Look for Together AI specific mode handling
rg "together_ai" libs/core/kiln_ai/adapters/model_adapters/ -B 3 -A 3Repository: Kiln-AI/Kiln Length of output: 871 🏁 Script executed: # Check the test suite for Together AI structured output mode testing
rg "together_ai" libs/core/kiln_ai/adapters/test_ml_model_list.py -B 3 -A 3 | head -60Repository: Kiln-AI/Kiln Length of output: 309 🏁 Script executed: # Check if there are any integration docs or known limitations
fd "README|readme|INTEGRATION|integration" libs/core/kiln_ai/adapters/ | xargs cat 2>/dev/nullRepository: Kiln-AI/Kiln Length of output: 3157 🏁 Script executed: # Find the context of the OpenRouter json_schema limitation comment
rg "json_schema.*sometimes works" -B 5 -A 5 libs/core/kiln_ai/adapters/ml_model_list.pyRepository: Kiln-AI/Kiln Length of output: 636 🏁 Script executed: # Check if there are similar limitations for Together AI
rg "together_ai.*json_schema|json_instruction_and_object.*together" -B 3 -A 3 libs/core/kiln_ai/adapters/ml_model_list.pyRepository: Kiln-AI/Kiln Length of output: 38 🏁 Script executed: # Look for any existing OpenRouter models to see what modes they use
rg "openrouter" libs/core/kiln_ai/adapters/ml_model_list.py | grep -i "structured_output_mode" | head -20Repository: Kiln-AI/Kiln Length of output: 38 🏁 Script executed: # Check for any tests that validate OpenRouter or Together AI model configurations
rg "test.*openrouter|test.*together_ai" libs/core/kiln_ai/adapters/ -l | xargs grep -l "structured_output_mode"Repository: Kiln-AI/Kiln Length of output: 274 🏁 Script executed: # Look for all OpenRouter models and their structured_output_mode settings
rg "ModelProviderName.openrouter" libs/core/kiln_ai/adapters/ml_model_list.py -A 5 | grep -A 1 "openrouter" | grep "structured_output_mode"Repository: Kiln-AI/Kiln Length of output: 1132 🏁 Script executed: # Check what modes are used across all OpenRouter models
rg "name=ModelProviderName.openrouter" libs/core/kiln_ai/adapters/ml_model_list.py -A 8 | grep "structured_output_mode" | sort | uniq -cRepository: Kiln-AI/Kiln Length of output: 497 🏁 Script executed: # Get the full context of that OpenRouter limitation comment
sed -n '1290,1310p' libs/core/kiln_ai/adapters/ml_model_list.pyRepository: Kiln-AI/Kiln Length of output: 885 🏁 Script executed: # Check which Mistral models currently exist with OpenRouter
rg "mistral.*openrouter|openrouter.*mistral" -i libs/core/kiln_ai/adapters/ml_model_list.py -B 2 -A 5 — Repository: Kiln-AI/Kiln; Length of output: 38. OpenRouter has a documented limitation where
The adapter explicitly supports 🤖 Prompt for AI Agents |
||||||||||
| # Mistral Medium 3.1 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
|
|
@@ -2379,6 +2447,19 @@ class KilnModel(BaseModel): | |||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Mistral Large 2512 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
| name=ModelName.mistral_3_large_2512, | ||||||||||
| friendly_name="Mistral Large 3 2512", | ||||||||||
|
Comment on lines
+2453
to
+2454
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. To align with the suggested change in the
Suggested change
|
||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="mistralai/mistral-large-2512", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Mistral Large | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.mistral, | ||||||||||
|
|
@@ -5171,6 +5252,12 @@ class KilnModel(BaseModel): | |||||||||
| reasoning_capable=True, | ||||||||||
| reasoning_optional_for_structured_output=True, | ||||||||||
| ), | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.cerebras, | ||||||||||
| model_id="zai-glm-4.7", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # GLM 4.6 | ||||||||||
|
|
@@ -5190,6 +5277,12 @@ class KilnModel(BaseModel): | |||||||||
| model_id="zai-org/GLM-4.6", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_instructions, | ||||||||||
| ), | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.cerebras, | ||||||||||
| model_id="zai-glm-4.6", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # GLM 4.5V | ||||||||||
|
|
@@ -5556,6 +5649,32 @@ class KilnModel(BaseModel): | |||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Minimax M2.1 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.minimax, | ||||||||||
| name=ModelName.minimax_m2_1, | ||||||||||
| friendly_name="Minimax M2.1", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="minimax/minimax-m2.1", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| supports_data_gen=True, | ||||||||||
| r1_openrouter_options=True, | ||||||||||
| require_openrouter_reasoning=True, | ||||||||||
| parser=ModelParserID.r1_thinking, | ||||||||||
| ), | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.fireworks_ai, | ||||||||||
| model_id="accounts/fireworks/models/minimax-m2p1", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| supports_data_gen=True, | ||||||||||
| reasoning_optional_for_structured_output=True, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Minimax M2 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.minimax, | ||||||||||
|
|
@@ -5580,6 +5699,14 @@ class KilnModel(BaseModel): | |||||||||
| supports_data_gen=True, | ||||||||||
| reasoning_optional_for_structured_output=True, | ||||||||||
| ), | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.fireworks_ai, | ||||||||||
| model_id="accounts/fireworks/models/minimax-m2", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| supports_data_gen=True, | ||||||||||
| reasoning_optional_for_structured_output=True, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Pangu Pro MOE | ||||||||||
|
|
@@ -5598,7 +5725,7 @@ class KilnModel(BaseModel): | |||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Bytedance | ||||||||||
| # Bytedance Seed OSS 36B | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.bytedance, | ||||||||||
| name=ModelName.bytedance_seed_oss_36b, | ||||||||||
|
|
@@ -5623,6 +5750,36 @@ class KilnModel(BaseModel): | |||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Bytedance Seed 1.6 | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.bytedance, | ||||||||||
| name=ModelName.bytedance_seed_1_6, | ||||||||||
| friendly_name="ByteDance Seed 1.6", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="bytedance-seed/seed-1.6", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| supports_data_gen=True, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # Bytedance Seed 1.6 Flash | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.bytedance, | ||||||||||
| name=ModelName.bytedance_seed_1_6_flash, | ||||||||||
| friendly_name="ByteDance Seed 1.6 Flash", | ||||||||||
| providers=[ | ||||||||||
| KilnModelProvider( | ||||||||||
| name=ModelProviderName.openrouter, | ||||||||||
| model_id="bytedance-seed/seed-1.6-flash", | ||||||||||
| structured_output_mode=StructuredOutputMode.json_schema, | ||||||||||
| reasoning_capable=True, | ||||||||||
| supports_data_gen=True, | ||||||||||
| ), | ||||||||||
| ], | ||||||||||
| ), | ||||||||||
| # StepFun | ||||||||||
| KilnModel( | ||||||||||
| family=ModelFamily.stepfun, | ||||||||||
|
|
||||||||||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
There's an inconsistency in this model's naming. On OpenRouter, the model ID is
`mistralai/mistral-large-2512` and it's named "Mistral Large 2512", without the "3". For consistency with the provider's naming, it's better to remove the `_3_` from the enum member name. There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Disagree with Gemini — it is called Mistral Large 3: https://openrouter.ai/mistralai/mistral-large-2512