Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions lib/ruby_llm/providers/bedrock/chat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,9 @@ def render_payload(messages, tools:, temperature:, model:, stream: false, schema
additional_fields = render_additional_model_request_fields(thinking)
payload[:additionalModelRequestFields] = additional_fields if additional_fields

output_config = build_output_config(schema)
payload[:outputConfig] = output_config if output_config

payload
end

Expand Down Expand Up @@ -238,6 +241,26 @@ def render_additional_model_request_fields(thinking)
fields.empty? ? nil : fields
end

# Builds the Converse API outputConfig entry for structured output.
#
# @param schema [Hash, nil] JSON schema requested by the caller
# @return [Hash, nil] outputConfig hash, or nil when no schema was given
#   (callers omit the :outputConfig key entirely in that case)
def build_output_config(schema)
  return nil unless schema

  # Deep-copy so the caller's schema hash is never mutated, then remove
  # the `strict` key (symbol and string forms) before serializing — it is
  # not part of the schema payload sent to Bedrock.
  sanitized = RubyLLM::Utils.deep_dup(schema)
  sanitized.delete(:strict)
  sanitized.delete('strict')

  # The schema travels as a JSON *string* inside the jsonSchema envelope.
  json_schema = {
    schema: JSON.generate(sanitized),
    name: 'response'
  }

  { textFormat: { type: 'json_schema', structure: { jsonSchema: json_schema } } }
end

def render_reasoning_fields(thinking)
return nil unless thinking&.enabled?

Expand Down
18 changes: 17 additions & 1 deletion lib/ruby_llm/providers/bedrock/models.rb
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ class Bedrock
module Models
module_function

REGION_PREFIXES = %w[us eu ap sa ca me af il].freeze
REGION_PREFIXES = %w[global us eu ap sa ca me af il].freeze

def models_api_base
"https://bedrock.#{bedrock_region}.amazonaws.com"
Expand Down Expand Up @@ -100,10 +100,26 @@ def parse_capabilities(model_data)
converse = model_data['converse'] || {}
capabilities << 'function_calling' if converse.is_a?(Hash)
capabilities << 'reasoning' if converse.dig('reasoningSupported', 'embedded')
capabilities << 'structured_output' if supports_structured_output?(model_data['modelId'])

capabilities
end

# Structured output is supported on Claude 4.5+ and assumed for future
# major versions. Bedrock IDs look like:
#   us.anthropic.claude-haiku-4-5-20251001-v1:0
# so the optional inference-region prefix (us./eu./global./...) and the
# "anthropic." vendor prefix are stripped before inspecting the version.
def supports_structured_output?(model_id)
  return false unless model_id

  family = model_id
           .sub(/\A(?:#{REGION_PREFIXES.join('|')})\./, '')
           .delete_prefix('anthropic.')

  # Match "claude-<family>-<major>-<minor>" followed by a word boundary or
  # dash; the boundary requirement keeps date stamps (e.g. -20250514) from
  # being misread as a minor version.
  version = family.match(/claude-(?:opus|sonnet|haiku)-(\d+)-(\d{1,2})(?:\b|-)/)
  return false unless version

  major, minor = version.captures.map(&:to_i)
  major > 4 || (major == 4 && minor >= 5)
end

def reasoning_embedded?(model)
metadata = RubyLLM::Utils.deep_symbolize_keys(model.metadata || {})
converse = metadata[:converse] || {}
Expand Down

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions spec/ruby_llm/chat_schema_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@

# Test providers that support structured output with JSON schema
# Note: Only test models that have json_schema support, not just json_object
CHAT_MODELS.select { |model_info| %i[openai anthropic].include?(model_info[:provider]) }.each do |model_info|
STRUCTURED_OUTPUT_MODELS.each do |model_info|
model = model_info[:model]
provider = model_info[:provider]

Expand Down Expand Up @@ -125,7 +125,7 @@
end

test_model = CHAT_MODELS.find do |model_info|
%i[openai gemini].include?(model_info[:provider])
%i[openai gemini bedrock].include?(model_info[:provider])
end

if test_model
Expand Down
77 changes: 77 additions & 0 deletions spec/ruby_llm/providers/bedrock/chat_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe RubyLLM::Providers::Bedrock::Chat do
  describe '.render_payload' do
    # Doubles Model::Info: render_payload only reads id, max_tokens and
    # metadata here, so nothing else is stubbed.
    let(:model) do
      instance_double(RubyLLM::Model::Info,
                      id: 'anthropic.claude-haiku-4-5-20251001-v1:0',
                      max_tokens: nil,
                      metadata: {})
    end

    # Keyword arguments shared by every render_payload call in this group.
    let(:base_args) do
      {
        tools: {},
        temperature: nil,
        model: model,
        stream: false
      }
    end

    # Convenience wrapper so individual examples only spell out what varies.
    def render_payload(messages = [], **overrides)
      described_class.render_payload(messages, **base_args, **overrides)
    end

    context 'when schema is provided' do
      # Includes :strict on purpose — a later example asserts it is stripped.
      let(:schema) do
        {
          type: 'object',
          properties: { name: { type: 'string' } },
          required: ['name'],
          additionalProperties: false,
          strict: true
        }
      end

      it 'includes outputConfig with stringified schema' do
        payload = render_payload(schema: schema)

        output_config = payload[:outputConfig]
        expect(output_config).not_to be_nil
        expect(output_config[:textFormat][:type]).to eq('json_schema')

        # The schema is serialized to a JSON *string*, not a nested hash.
        json_schema = output_config[:textFormat][:structure][:jsonSchema]
        expect(json_schema[:name]).to eq('response')
        expect(json_schema[:schema]).to be_a(String)

        parsed = JSON.parse(json_schema[:schema])
        expect(parsed['type']).to eq('object')
        expect(parsed['properties']).to eq({ 'name' => { 'type' => 'string' } })
      end

      it 'strips :strict from the schema' do
        payload = render_payload(schema: schema)

        json_schema = payload[:outputConfig][:textFormat][:structure][:jsonSchema]
        parsed = JSON.parse(json_schema[:schema])
        expect(parsed).not_to have_key('strict')
        # NOTE(review): JSON.parse yields string keys only, so this symbol
        # check can never fail; it documents intent rather than adding coverage.
        expect(parsed).not_to have_key(:strict)
      end

      it 'does not mutate the original schema' do
        # Shallow dup is a sufficient snapshot here: eq compares nested
        # values, and render_payload is expected to deep-dup internally.
        original = schema.dup
        render_payload(schema: schema)
        expect(schema).to eq(original)
      end
    end

    context 'when schema is nil' do
      it 'does not include outputConfig' do
        payload = render_payload(schema: nil)
        expect(payload).not_to have_key(:outputConfig)
      end
    end
  end
end
24 changes: 24 additions & 0 deletions spec/ruby_llm/providers/bedrock/models_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe RubyLLM::Providers::Bedrock::Models do
  describe '.supports_structured_output?' do
    # Claude 4.5+ IDs, with and without region prefixes, must be detected.
    supported_ids = [
      'anthropic.claude-haiku-4-5-20251001-v1:0',
      'anthropic.claude-sonnet-4-5-20250929-v1:0',
      'anthropic.claude-opus-4-5-20250514-v1:0',
      'us.anthropic.claude-opus-4-6-v1',
      'eu.anthropic.claude-haiku-4-5-20251001-v1:0',
      'global.anthropic.claude-haiku-4-5-20251001-v1:0'
    ]

    # Older Claude versions, non-Anthropic models, and nil must be rejected.
    unsupported_ids = [
      'anthropic.claude-opus-4-20250514-v1:0',
      'anthropic.claude-3-5-sonnet-20241022-v2:0',
      'amazon.nova-2-lite-v1:0',
      nil
    ]

    supported_ids.each do |model_id|
      it "returns true for #{model_id.inspect}" do
        expect(described_class.supports_structured_output?(model_id)).to eq(true)
      end
    end

    unsupported_ids.each do |model_id|
      it "returns false for #{model_id.inspect}" do
        expect(described_class.supports_structured_output?(model_id)).to eq(false)
      end
    end
  end
end
7 changes: 7 additions & 0 deletions spec/support/models_to_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,13 @@ def filter_local_providers(models)
].freeze
CHAT_MODELS = filter_local_providers(chat_models).freeze

# Provider/model pairs exercised by the structured-output (JSON schema)
# specs. Unlike CHAT_MODELS these are not passed through
# filter_local_providers — presumably because no local provider is listed;
# NOTE(review): confirm that is intentional for consistency.
structured_output_models = [
  { provider: :openai, model: 'gpt-5-nano' },
  { provider: :anthropic, model: 'claude-haiku-4-5' },
  { provider: :bedrock, model: 'claude-haiku-4-5' }
]
STRUCTURED_OUTPUT_MODELS = structured_output_models.freeze

thinking_models = [
{ provider: :anthropic, model: 'claude-haiku-4-5' },
{ provider: :azure, model: 'Kimi-K2.5' },
Expand Down