Skip to content

Commit ec98fd6

Browse files
committed
testing schema for haiku4.5, sonnet4.5
1 parent f853ac6 commit ec98fd6

5 files changed

+278
-5
lines changed

lib/ruby_llm/models.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -373,7 +373,8 @@
373373
]
374374
},
375375
"capabilities": [
376-
"function_calling"
376+
"function_calling",
377+
"structured_output"
377378
],
378379
"pricing": {
379380
"text_tokens": {

spec/fixtures/vcr_cassettes/chat_with_schema_with_anthropic_claude-sonnet-4-5_accepts_a_json_schema_and_returns_structured_output.yml

Lines changed: 84 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

spec/fixtures/vcr_cassettes/chat_with_schema_with_anthropic_claude-sonnet-4-5_allows_removing_schema_with_nil_mid-conversation.yml

Lines changed: 180 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

spec/ruby_llm/chat_schema_spec.rb

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,17 @@
1919
end
2020

2121
# Test providers that support JSON Schema structured output
22-
CHAT_MODELS.select { |m| %i[openai anthropic].include?(m[:provider]) }.each do |model_info|
22+
CHAT_SCHEMA_MODELS.reject { _1[:provider] == :gemini }.each do |model_info|
2323
model = model_info[:model]
2424
provider = model_info[:provider]
2525

2626
context "with #{provider}/#{model}" do
2727
let(:chat) { RubyLLM.chat(model: model, provider: provider) }
2828

2929
it 'accepts a JSON schema and returns structured output' do
30-
skip 'Model does not support structured output' unless chat.model.structured_output?
30+
# All models listed here should support structured output and the
31+
# metadata should confirm that
32+
raise 'Model returns false for structured_output?' unless chat.model.structured_output?
3133

3234
response = chat
3335
.with_schema(person_schema)
@@ -66,7 +68,7 @@
6668
end
6769

6870
# Test Gemini provider separately due to different schema format
69-
CHAT_MODELS.select { |model_info| model_info[:provider] == :gemini }.each do |model_info|
71+
CHAT_SCHEMA_MODELS.select { _1[:provider] == :gemini }.each do |model_info|
7072
model = model_info[:model]
7173
provider = model_info[:provider]
7274

spec/support/models_to_test.rb

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
# frozen_string_literal: true
22

33
CHAT_MODELS = [
4-
{ provider: :anthropic, model: 'claude-haiku-4-5' },
54
{ provider: :openrouter, model: 'claude-haiku-4-5' },
65
{ provider: :bedrock, model: 'claude-3-5-haiku' },
76
{ provider: :deepseek, model: 'deepseek-chat' },
@@ -15,6 +14,13 @@
1514
{ provider: :vertexai, model: 'gemini-2.5-flash' }
1615
].freeze
1716

17+
CHAT_SCHEMA_MODELS = [
18+
{ provider: :anthropic, model: 'claude-haiku-4-5' },
19+
{ provider: :anthropic, model: 'claude-sonnet-4-5' },
20+
{ provider: :gemini, model: 'gemini-2.5-flash' },
21+
{ provider: :openai, model: 'gpt-4.1-nano' }
22+
].freeze
23+
1824
PDF_MODELS = [
1925
{ provider: :anthropic, model: 'claude-haiku-4-5' },
2026
{ provider: :bedrock, model: 'claude-3-7-sonnet' },

0 commit comments

Comments (0)