From 0329a410c647f9ce4675ebba0de3ade4ff4ea658 Mon Sep 17 00:00:00 2001 From: aoki-ryusei Date: Wed, 4 Mar 2026 14:54:59 +0900 Subject: [PATCH 1/5] Add Gemini provider via OpenAI-compatible API MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enables using Google's Gemini models through the OpenAI-compatible API endpoint at generativelanguage.googleapis.com. The provider inherits from OpenAI::ChatProvider, reusing streaming, tool use, and structured output functionality while overriding only Gemini-specific behaviors. Implementation: - GeminiProvider inherits OpenAI::ChatProvider, overrides message_merge_delta to fix Gemini's streaming role duplication (Gemini sends role in every chunk, causing "assistantassistant...") - Gemini::Options handles API key resolution: explicit api_key, access_token alias, then environment variables (GEMINI_API_KEY, GOOGLE_API_KEY in priority order) - Reuses OpenAI::Chat::RequestType — no protocol translation needed as Gemini implements OpenAI-compatible format - organization_id and project_id disabled (not used by Gemini API) - Connection error handling with instrumentation logging Follows the same pattern established by OllamaProvider, which also inherits from OpenAI::ChatProvider for OpenAI-compatible endpoints. 
--- lib/active_agent/providers/gemini/_types.rb | 13 ++++ lib/active_agent/providers/gemini/options.rb | 41 ++++++++++ lib/active_agent/providers/gemini_provider.rb | 77 +++++++++++++++++++ 3 files changed, 131 insertions(+) create mode 100644 lib/active_agent/providers/gemini/_types.rb create mode 100644 lib/active_agent/providers/gemini/options.rb create mode 100644 lib/active_agent/providers/gemini_provider.rb diff --git a/lib/active_agent/providers/gemini/_types.rb b/lib/active_agent/providers/gemini/_types.rb new file mode 100644 index 00000000..87c4ac6e --- /dev/null +++ b/lib/active_agent/providers/gemini/_types.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require_relative "options" +require_relative "../open_ai/chat/_types" + +module ActiveAgent + module Providers + module Gemini + # Reuse OpenAI Chat request type (same API format) + RequestType = OpenAI::Chat::RequestType + end + end +end diff --git a/lib/active_agent/providers/gemini/options.rb b/lib/active_agent/providers/gemini/options.rb new file mode 100644 index 00000000..b1184c83 --- /dev/null +++ b/lib/active_agent/providers/gemini/options.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require_relative "../open_ai/options" + +module ActiveAgent + module Providers + module Gemini + # Configuration options for Gemini provider + # + # Extends OpenAI::Options with Gemini-specific settings including + # the default base URL for Gemini's OpenAI-compatible API endpoint. 
+ # + # @example Basic configuration + # options = Options.new(api_key: 'your-api-key') + # + # @example With environment variable + # # Set GEMINI_API_KEY or GOOGLE_API_KEY + # options = Options.new({}) + # + # @see https://ai.google.dev/gemini-api/docs/openai + class Options < ActiveAgent::Providers::OpenAI::Options + GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/" + + attribute :base_url, :string, fallback: GEMINI_BASE_URL + + private + + def resolve_api_key(kwargs) + kwargs[:api_key] || + kwargs[:access_token] || + ENV["GEMINI_API_KEY"] || + ENV["GOOGLE_API_KEY"] + end + + # Not used as part of Gemini + def resolve_organization_id(kwargs) = nil + def resolve_project_id(kwargs) = nil + end + end + end +end diff --git a/lib/active_agent/providers/gemini_provider.rb b/lib/active_agent/providers/gemini_provider.rb new file mode 100644 index 00000000..4277688c --- /dev/null +++ b/lib/active_agent/providers/gemini_provider.rb @@ -0,0 +1,77 @@ +require_relative "_base_provider" + +require_gem!(:openai, __FILE__) + +require_relative "open_ai_provider" +require_relative "gemini/_types" + +module ActiveAgent + module Providers + # Provides access to Google's Gemini API via OpenAI-compatible endpoint. + # + # Extends OpenAI provider to work with Gemini's OpenAI-compatible API, + # enabling access to Gemini models through a familiar interface. + # + # @see OpenAI::ChatProvider + # @see https://ai.google.dev/gemini-api/docs/openai + class GeminiProvider < OpenAI::ChatProvider + # @return [String] + def self.service_name + "Gemini" + end + + # @return [Class] + def self.options_klass + namespace::Options + end + + # @return [ActiveModel::Type::Value] + def self.prompt_request_type + namespace::RequestType.new + end + + protected + + # Executes chat completion request with Gemini-specific error handling. 
+ # + # @see OpenAI::ChatProvider#api_prompt_execute + # @param parameters [Hash] + # @return [Object, nil] response object or nil for streaming + # @raise [OpenAI::Errors::APIConnectionError] when Gemini API unreachable + def api_prompt_execute(parameters) + super + + rescue ::OpenAI::Errors::APIConnectionError => exception + log_connection_error(exception) + raise exception + end + + # Merges streaming delta into the message with role cleanup. + # + # Overrides parent to handle Gemini's role copying behavior which duplicates + # the role field in every streaming chunk, requiring manual cleanup to prevent + # message corruption. + # + # @see OpenAI::ChatProvider#message_merge_delta + # @param message [Hash] + # @param delta [Hash] + # @return [Hash] + def message_merge_delta(message, delta) + message[:role] = delta.delete(:role) if delta[:role] + + hash_merge_delta(message, delta) + end + + # Logs connection failures with Gemini API details for debugging. + # + # @param error [Exception] + # @return [void] + def log_connection_error(error) + instrument("connection_error.provider.active_agent", + uri_base: options.base_url, + exception: error.class, + message: error.message) + end + end + end +end From aaafbdfd221857af11d311e6a7cb6f16cc502b21 Mon Sep 17 00:00:00 2001 From: aoki-ryusei Date: Wed, 4 Mar 2026 15:04:15 +0900 Subject: [PATCH 2/5] Add tests for Gemini provider Comprehensive test coverage for GeminiProvider, Gemini::Options, and streaming lifecycle behaviors. 
Test coverage (21 tests, 35 assertions): - Provider class (6 tests): service_name, options_klass, prompt_request_type delegation to OpenAI::Chat::RequestType, initialization, inheritance from OpenAI::ChatProvider, client construction - Options (8 tests): api_key validation, GEMINI_API_KEY env resolution, GOOGLE_API_KEY env resolution, GEMINI over GOOGLE precedence, explicit-over-ENV precedence, access_token alias, organization_id returns nil, project_id returns nil - Streaming lifecycle (7 tests): inherits :open event emission from OpenAI::ChatProvider, broadcast_stream_open idempotency, message_merge_delta handles Gemini role duplication correctly, full lifecycle event ordering (open -> update -> close), streaming flag state transitions The streaming tests specifically verify the message_merge_delta override prevents role concatenation when Gemini sends role in every chunk. --- .../app/agents/providers/gemini_agent.rb | 22 ++ test/dummy/config/active_agent.yml | 12 ++ test/providers/gemini/gemini_provider_test.rb | 57 +++++ test/providers/gemini/options_test.rb | 116 ++++++++++ .../gemini/streaming_lifecycle_test.rb | 201 ++++++++++++++++++ 5 files changed, 408 insertions(+) create mode 100644 test/dummy/app/agents/providers/gemini_agent.rb create mode 100644 test/providers/gemini/gemini_provider_test.rb create mode 100644 test/providers/gemini/options_test.rb create mode 100644 test/providers/gemini/streaming_lifecycle_test.rb diff --git a/test/dummy/app/agents/providers/gemini_agent.rb b/test/dummy/app/agents/providers/gemini_agent.rb new file mode 100644 index 00000000..4abc1075 --- /dev/null +++ b/test/dummy/app/agents/providers/gemini_agent.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module Providers + # Example agent using Google's Gemini models. + # + # Demonstrates basic prompt generation with the Gemini provider. + # Configured to use Gemini 2.0 Flash with default instructions. 
+ # + # @example Basic usage + # response = Providers::GeminiAgent.ask(message: "Hello").generate_now + # response.message.content #=> "Hi! How can I help you today?" + # region agent + class GeminiAgent < ApplicationAgent + generate_with :gemini, model: "gemini-2.0-flash" + + # @return [ActiveAgent::Generation] + def ask + prompt(message: params[:message]) + end + end + # endregion agent +end diff --git a/test/dummy/config/active_agent.yml b/test/dummy/config/active_agent.yml index 08b8e2bd..ca646697 100644 --- a/test/dummy/config/active_agent.yml +++ b/test/dummy/config/active_agent.yml @@ -38,6 +38,12 @@ mock: &mock ruby_llm: &ruby_llm service: "RubyLLM" # endregion ruby_llm_anchor +# region gemini_anchor +gemini: &gemini + service: "Gemini" + model: "gemini-2.0-flash" + api_key: <%= Rails.application.credentials.dig(:gemini, :api_key) %> +# endregion gemini_anchor # endregion config_anchors # region config_development @@ -72,6 +78,10 @@ development: ruby_llm: <<: *ruby_llm # endregion ruby_llm_dev_config + # region gemini_dev_config + gemini: + <<: *gemini + # endregion gemini_dev_config # endregion config_development # region config_test @@ -92,4 +102,6 @@ test: <<: *mock ruby_llm: <<: *ruby_llm + gemini: + <<: *gemini # endregion config_test diff --git a/test/providers/gemini/gemini_provider_test.rb b/test/providers/gemini/gemini_provider_test.rb new file mode 100644 index 00000000..4d652edf --- /dev/null +++ b/test/providers/gemini/gemini_provider_test.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +require "test_helper" + +begin + require "openai" +rescue LoadError + puts "OpenAI gem not available, skipping Gemini provider tests" + return +end + +require_relative "../../../lib/active_agent/providers/gemini_provider" + +class GeminiProviderTest < ActiveSupport::TestCase + setup do + @valid_config = { + service: "Gemini", + api_key: "test-api-key", + messages: [ { role: "user", content: "Hello" } ] + } + end + + test "service_name returns Gemini" do + 
assert_equal "Gemini", ActiveAgent::Providers::GeminiProvider.service_name + end + + test "options_klass returns Gemini::Options" do + assert_equal( + ActiveAgent::Providers::Gemini::Options, + ActiveAgent::Providers::GeminiProvider.options_klass + ) + end + + test "prompt_request_type returns Gemini::RequestType" do + request_type = ActiveAgent::Providers::GeminiProvider.prompt_request_type + + # Gemini::RequestType is aliased to OpenAI::Chat::RequestType + assert_instance_of ActiveAgent::Providers::OpenAI::Chat::RequestType, request_type + end + + test "initializes provider with valid configuration" do + provider = ActiveAgent::Providers::GeminiProvider.new(@valid_config) + + assert_instance_of ActiveAgent::Providers::GeminiProvider, provider + end + + test "inherits from OpenAI::ChatProvider" do + assert ActiveAgent::Providers::GeminiProvider < ActiveAgent::Providers::OpenAI::ChatProvider + end + + test "client is configured with Gemini base_url" do + provider = ActiveAgent::Providers::GeminiProvider.new(@valid_config) + client = provider.client + + assert_kind_of ::OpenAI::Client, client + end +end diff --git a/test/providers/gemini/options_test.rb b/test/providers/gemini/options_test.rb new file mode 100644 index 00000000..1a8709c1 --- /dev/null +++ b/test/providers/gemini/options_test.rb @@ -0,0 +1,116 @@ +# frozen_string_literal: true + +require "test_helper" + +begin + require "openai" +rescue LoadError + puts "OpenAI gem not available, skipping Gemini options tests" + return +end + +require_relative "../../../lib/active_agent/providers/gemini_provider" + +class GeminiOptionsTest < ActiveSupport::TestCase + setup do + @valid_options = { + api_key: "test-api-key" + } + end + + test "validates presence of api_key" do + original_keys = [ + ENV["GEMINI_API_KEY"], + ENV["GOOGLE_API_KEY"] + ] + ENV.delete("GEMINI_API_KEY") + ENV.delete("GOOGLE_API_KEY") + + options = ActiveAgent::Providers::Gemini::Options.new({}) + + assert_not options.valid? 
+ assert_includes options.errors[:api_key], "can't be blank" + ensure + ENV["GEMINI_API_KEY"] = original_keys[0] + ENV["GOOGLE_API_KEY"] = original_keys[1] + end + + test "resolves api_key from GEMINI_API_KEY environment variable" do + original_keys = [ + ENV["GEMINI_API_KEY"], + ENV["GOOGLE_API_KEY"] + ] + ENV["GEMINI_API_KEY"] = "env-gemini-key" + ENV.delete("GOOGLE_API_KEY") + + options = ActiveAgent::Providers::Gemini::Options.new({}) + + assert_equal "env-gemini-key", options.api_key + ensure + ENV["GEMINI_API_KEY"] = original_keys[0] + ENV["GOOGLE_API_KEY"] = original_keys[1] + end + + test "resolves api_key from GOOGLE_API_KEY environment variable" do + original_keys = [ + ENV["GEMINI_API_KEY"], + ENV["GOOGLE_API_KEY"] + ] + ENV.delete("GEMINI_API_KEY") + ENV["GOOGLE_API_KEY"] = "env-google-key" + + options = ActiveAgent::Providers::Gemini::Options.new({}) + + assert_equal "env-google-key", options.api_key + ensure + ENV["GEMINI_API_KEY"] = original_keys[0] + ENV["GOOGLE_API_KEY"] = original_keys[1] + end + + test "prefers GEMINI_API_KEY over GOOGLE_API_KEY" do + original_keys = [ + ENV["GEMINI_API_KEY"], + ENV["GOOGLE_API_KEY"] + ] + ENV["GEMINI_API_KEY"] = "gemini-key" + ENV["GOOGLE_API_KEY"] = "google-key" + + options = ActiveAgent::Providers::Gemini::Options.new({}) + + assert_equal "gemini-key", options.api_key + ensure + ENV["GEMINI_API_KEY"] = original_keys[0] + ENV["GOOGLE_API_KEY"] = original_keys[1] + end + + test "prefers explicit api_key over environment variables" do + original_key = ENV["GEMINI_API_KEY"] + ENV["GEMINI_API_KEY"] = "env-key" + + options = ActiveAgent::Providers::Gemini::Options.new(@valid_options) + + assert_equal "test-api-key", options.api_key + ensure + ENV["GEMINI_API_KEY"] = original_key + end + + test "accepts access_token as alias for api_key" do + options = ActiveAgent::Providers::Gemini::Options.new( + access_token: "token-via-access-token" + ) + + assert_equal "token-via-access-token", options.api_key + end + + test 
"organization_id returns nil" do + options = ActiveAgent::Providers::Gemini::Options.new(@valid_options) + + assert_nil options.organization + end + + test "project_id returns nil" do + options = ActiveAgent::Providers::Gemini::Options.new(@valid_options) + + assert_nil options.project + end +end diff --git a/test/providers/gemini/streaming_lifecycle_test.rb b/test/providers/gemini/streaming_lifecycle_test.rb new file mode 100644 index 00000000..167a5195 --- /dev/null +++ b/test/providers/gemini/streaming_lifecycle_test.rb @@ -0,0 +1,201 @@ +# frozen_string_literal: true + +require "test_helper" + +begin + require "openai" +rescue LoadError + puts "OpenAI gem not available, skipping Gemini streaming lifecycle tests" + return +end + +require_relative "../../../lib/active_agent/providers/gemini_provider" + +module Providers + module Gemini + class StreamingLifecycleTest < ActiveSupport::TestCase + setup do + @stream_events = [] + + @provider = ActiveAgent::Providers::GeminiProvider.new( + service: "Gemini", + api_key: "test-api-key", + model: "gemini-2.0-flash", + messages: [ { role: "user", content: "Hello" } ], + stream: true, + stream_broadcaster: ->(message, delta, event_type) { + @stream_events << { message: message, delta: delta, type: event_type } + } + ) + + # Initialize message stack for streaming + @provider.send(:message_stack).push({ + index: 0, + role: "assistant", + content: "" + }) + end + + # Reuse OpenAI mock structures since Gemini inherits from OpenAI::ChatProvider + MockChunk = Struct.new(:choices, keyword_init: true) + MockChoice = Struct.new(:index, :delta, keyword_init: true) + MockDelta = Struct.new(:content, :role, keyword_init: true) do + def as_json + { content: content, role: role }.compact + end + end + + MockChunkEvent = Struct.new(:type, :chunk, keyword_init: true) + MockContentDoneEvent = Struct.new(:type, :content, :parsed, keyword_init: true) + + test "inherits streaming lifecycle from OpenAI::ChatProvider - emits :open event" do + 
chunk = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: "Hi", role: "assistant")) ] + ) + event = MockChunkEvent.new(type: :chunk, chunk: chunk) + + @provider.send(:process_stream_chunk, event) + + open_events = @stream_events.select { |e| e[:type] == :open } + assert_equal 1, open_events.size, "Gemini should emit :open event via inherited process_stream_chunk" + end + + test "broadcast_stream_open is idempotent - only fires once" do + 3.times do + chunk = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: "x")) ] + ) + event = MockChunkEvent.new(type: :chunk, chunk: chunk) + @provider.send(:process_stream_chunk, event) + end + + open_events = @stream_events.select { |e| e[:type] == :open } + assert_equal 1, open_events.size, "Expected only one :open event even after multiple chunks" + end + + test "message_merge_delta handles Gemini role duplication correctly" do + # Gemini sends role in every streaming chunk (unlike OpenAI which only sends it in first chunk) + # This test verifies the role is not concatenated (e.g., "assistantassistant") + + message = {} + + # First chunk sets the role + delta1 = { role: "assistant", content: "Hi" } + result = @provider.send(:message_merge_delta, message, delta1) + assert_equal "assistant", result[:role] + assert_equal "Hi", result[:content] + + # Second chunk also has role (Gemini behavior) + delta2 = { role: "assistant", content: " there" } + result = @provider.send(:message_merge_delta, result, delta2) + + # Role should NOT be "assistantassistant" + assert_equal "assistant", result[:role], "Role should not be concatenated" + assert_equal "Hi there", result[:content], "Content should be concatenated" + + # Third chunk + delta3 = { role: "assistant", content: "!" 
} + result = @provider.send(:message_merge_delta, result, delta3) + + assert_equal "assistant", result[:role], "Role should still be 'assistant'" + assert_equal "Hi there!", result[:content] + end + + test "full streaming lifecycle with Gemini role handling" do + # Gemini duplicates role in every delta - message_merge_delta handles this + chunk1 = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: "Hi", role: "assistant")) ] + ) + @provider.send(:process_stream_chunk, MockChunkEvent.new(type: :chunk, chunk: chunk1)) + + # Subsequent chunks also have role (Gemini behavior) + chunk2 = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: " there", role: "assistant")) ] + ) + @provider.send(:process_stream_chunk, MockChunkEvent.new(type: :chunk, chunk: chunk2)) + + chunk3 = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: "!", role: "assistant")) ] + ) + @provider.send(:process_stream_chunk, MockChunkEvent.new(type: :chunk, chunk: chunk3)) + + done_event = MockContentDoneEvent.new( + type: :"content.done", + content: "Hi there!", + parsed: nil + ) + + # Stub process_prompt_finished to just call broadcast_stream_close + @provider.stub(:process_prompt_finished, ->(*_) { @provider.send(:broadcast_stream_close) }) do + @provider.send(:process_stream_chunk, done_event) + end + + event_types = @stream_events.map { |e| e[:type] } + + assert_equal :open, event_types.first, "First event should be :open" + assert_equal :close, event_types.last, "Last event should be :close" + assert event_types.include?(:update), "Should have :update events" + + # Verify ordering + open_index = event_types.index(:open) + first_update_index = event_types.index(:update) + close_index = event_types.index(:close) + assert open_index < first_update_index, ":open should appear before first :update" + assert first_update_index < close_index, ":update should appear before :close" + + # Verify role is not 
corrupted in final message + final_message = @provider.send(:message_stack).last + assert_equal "assistant", final_message[:role], "Final message role should be 'assistant', not concatenated" + end + + test "streaming flag is set to true after broadcast_stream_open" do + refute @provider.send(:streaming), "streaming should be false initially" + + chunk = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: "Hi")) ] + ) + event = MockChunkEvent.new(type: :chunk, chunk: chunk) + @provider.send(:process_stream_chunk, event) + + assert @provider.send(:streaming), "streaming should be true after open" + end + + test "streaming flag is reset to false after broadcast_stream_close" do + # Open the stream + chunk = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: "Hi", role: "assistant")) ] + ) + @provider.send(:process_stream_chunk, MockChunkEvent.new(type: :chunk, chunk: chunk)) + + assert @provider.send(:streaming), "streaming should be true after open" + + # Close the stream + done_event = MockContentDoneEvent.new( + type: :"content.done", + content: "Hi", + parsed: nil + ) + + # Stub process_prompt_finished to just call broadcast_stream_close + @provider.stub(:process_prompt_finished, ->(*_) { @provider.send(:broadcast_stream_close) }) do + @provider.send(:process_stream_chunk, done_event) + end + + refute @provider.send(:streaming), "streaming should be false after close" + end + + test "process_stream_chunk emits :update events for content" do + %w[Hi there !].each do |content| + chunk = MockChunk.new( + choices: [ MockChoice.new(index: 0, delta: MockDelta.new(content: content, role: "assistant")) ] + ) + event = MockChunkEvent.new(type: :chunk, chunk: chunk) + @provider.send(:process_stream_chunk, event) + end + + update_events = @stream_events.select { |e| e[:type] == :update } + assert_equal 3, update_events.size, "Expected three :update events" + end + end + end +end From 
934f6602ba58ac4fe0b3e0cf74ee6e43d034b266 Mon Sep 17 00:00:00 2001 From: aoki-ryusei Date: Wed, 4 Mar 2026 15:11:25 +0900 Subject: [PATCH 3/5] Add embedding support for Gemini provider Enables text embedding functionality using Gemini's OpenAI-compatible embeddings endpoint. Implementation: - Add embed_request_type class method returning OpenAI::Embedding::RequestType (Gemini uses same request format as OpenAI) - Add api_embed_execute with connection error handling and instrumentation - Add Gemini::Embedding::RequestType alias in _types.rb Test coverage (1 test, 1 assertion): - embed_request_type returns OpenAI::Embedding::RequestType instance --- lib/active_agent/providers/gemini/_types.rb | 6 ++++++ lib/active_agent/providers/gemini_provider.rb | 17 +++++++++++++++++ test/providers/gemini/gemini_provider_test.rb | 7 +++++++ 3 files changed, 30 insertions(+) diff --git a/lib/active_agent/providers/gemini/_types.rb b/lib/active_agent/providers/gemini/_types.rb index 87c4ac6e..4cec6911 100644 --- a/lib/active_agent/providers/gemini/_types.rb +++ b/lib/active_agent/providers/gemini/_types.rb @@ -2,12 +2,18 @@ require_relative "options" require_relative "../open_ai/chat/_types" +require_relative "../open_ai/embedding/_types" module ActiveAgent module Providers module Gemini # Reuse OpenAI Chat request type (same API format) RequestType = OpenAI::Chat::RequestType + + # Reuse OpenAI Embedding types (same API format) + module Embedding + RequestType = OpenAI::Embedding::RequestType + end end end end diff --git a/lib/active_agent/providers/gemini_provider.rb b/lib/active_agent/providers/gemini_provider.rb index 4277688c..39e52bd3 100644 --- a/lib/active_agent/providers/gemini_provider.rb +++ b/lib/active_agent/providers/gemini_provider.rb @@ -30,6 +30,11 @@ def self.prompt_request_type namespace::RequestType.new end + # @return [ActiveModel::Type::Value] + def self.embed_request_type + namespace::Embedding::RequestType.new + end + protected # Executes chat completion 
request with Gemini-specific error handling. @@ -46,6 +51,18 @@ def api_prompt_execute(parameters) raise exception end + # Executes embedding request with Gemini-specific error handling. + # + # @param parameters [Hash] + # @return [Hash] symbolized API response + # @raise [OpenAI::Errors::APIConnectionError] when Gemini API unreachable + def api_embed_execute(parameters) + client.embeddings.create(**parameters).as_json.deep_symbolize_keys + rescue ::OpenAI::Errors::APIConnectionError => exception + log_connection_error(exception) + raise exception + end + # Merges streaming delta into the message with role cleanup. # # Overrides parent to handle Gemini's role copying behavior which duplicates diff --git a/test/providers/gemini/gemini_provider_test.rb b/test/providers/gemini/gemini_provider_test.rb index 4d652edf..a2cc3a01 100644 --- a/test/providers/gemini/gemini_provider_test.rb +++ b/test/providers/gemini/gemini_provider_test.rb @@ -38,6 +38,13 @@ class GeminiProviderTest < ActiveSupport::TestCase assert_instance_of ActiveAgent::Providers::OpenAI::Chat::RequestType, request_type end + test "embed_request_type returns OpenAI::Embedding::RequestType" do + request_type = ActiveAgent::Providers::GeminiProvider.embed_request_type + + # Gemini::Embedding::RequestType is aliased to OpenAI::Embedding::RequestType + assert_instance_of ActiveAgent::Providers::OpenAI::Embedding::RequestType, request_type + end + test "initializes provider with valid configuration" do provider = ActiveAgent::Providers::GeminiProvider.new(@valid_config) From a30d62ee4e5a30ea2e5084805e7c750e0faccf08 Mon Sep 17 00:00:00 2001 From: aoki-ryusei Date: Thu, 5 Mar 2026 13:30:20 +0900 Subject: [PATCH 4/5] Replace top-level return with Minitest skip in Gemini tests - Follows Minitest best practices for optional dependency handling - Skipped tests are now visible in test reports (e.g., "3 skips") - Use Minitest's official skip method for conditional test skipping - Use unique constant names per file 
to avoid conflicts - Change puts to warn for proper stderr output --- test/providers/gemini/gemini_provider_test.rb | 10 ++++++---- test/providers/gemini/options_test.rb | 10 ++++++---- test/providers/gemini/streaming_lifecycle_test.rb | 10 ++++++---- 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/test/providers/gemini/gemini_provider_test.rb b/test/providers/gemini/gemini_provider_test.rb index a2cc3a01..73358e9e 100644 --- a/test/providers/gemini/gemini_provider_test.rb +++ b/test/providers/gemini/gemini_provider_test.rb @@ -2,17 +2,19 @@ require "test_helper" -begin +GEMINI_PROVIDER_OPENAI_AVAILABLE = begin require "openai" + true rescue LoadError - puts "OpenAI gem not available, skipping Gemini provider tests" - return + warn "OpenAI gem not available, skipping Gemini provider tests" + false end -require_relative "../../../lib/active_agent/providers/gemini_provider" +require_relative "../../../lib/active_agent/providers/gemini_provider" if GEMINI_PROVIDER_OPENAI_AVAILABLE class GeminiProviderTest < ActiveSupport::TestCase setup do + skip "OpenAI gem not available" unless GEMINI_PROVIDER_OPENAI_AVAILABLE @valid_config = { service: "Gemini", api_key: "test-api-key", diff --git a/test/providers/gemini/options_test.rb b/test/providers/gemini/options_test.rb index 1a8709c1..4b0ad176 100644 --- a/test/providers/gemini/options_test.rb +++ b/test/providers/gemini/options_test.rb @@ -2,17 +2,19 @@ require "test_helper" -begin +GEMINI_OPTIONS_OPENAI_AVAILABLE = begin require "openai" + true rescue LoadError - puts "OpenAI gem not available, skipping Gemini options tests" - return + warn "OpenAI gem not available, skipping Gemini options tests" + false end -require_relative "../../../lib/active_agent/providers/gemini_provider" +require_relative "../../../lib/active_agent/providers/gemini_provider" if GEMINI_OPTIONS_OPENAI_AVAILABLE class GeminiOptionsTest < ActiveSupport::TestCase setup do + skip "OpenAI gem not available" unless 
GEMINI_OPTIONS_OPENAI_AVAILABLE @valid_options = { api_key: "test-api-key" } diff --git a/test/providers/gemini/streaming_lifecycle_test.rb b/test/providers/gemini/streaming_lifecycle_test.rb index 167a5195..7198b7c1 100644 --- a/test/providers/gemini/streaming_lifecycle_test.rb +++ b/test/providers/gemini/streaming_lifecycle_test.rb @@ -2,19 +2,21 @@ require "test_helper" -begin +GEMINI_STREAMING_OPENAI_AVAILABLE = begin require "openai" + true rescue LoadError - puts "OpenAI gem not available, skipping Gemini streaming lifecycle tests" - return + warn "OpenAI gem not available, skipping Gemini streaming lifecycle tests" + false end -require_relative "../../../lib/active_agent/providers/gemini_provider" +require_relative "../../../lib/active_agent/providers/gemini_provider" if GEMINI_STREAMING_OPENAI_AVAILABLE module Providers module Gemini class StreamingLifecycleTest < ActiveSupport::TestCase setup do + skip "OpenAI gem not available" unless GEMINI_STREAMING_OPENAI_AVAILABLE @stream_events = [] @provider = ActiveAgent::Providers::GeminiProvider.new( From a635251cd53490857836838199d2c292b301a12b Mon Sep 17 00:00:00 2001 From: aoki-ryusei Date: Thu, 5 Mar 2026 13:34:21 +0900 Subject: [PATCH 5/5] Rename test to match actual assertion in Gemini provider test The test "client is configured with Gemini base_url" only asserts the client type, not the base_url. Renamed to "client returns OpenAI::Client instance" to accurately reflect what it tests. 
--- test/providers/gemini/gemini_provider_test.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/providers/gemini/gemini_provider_test.rb b/test/providers/gemini/gemini_provider_test.rb index 73358e9e..bc4a6ba1 100644 --- a/test/providers/gemini/gemini_provider_test.rb +++ b/test/providers/gemini/gemini_provider_test.rb @@ -57,7 +57,7 @@ class GeminiProviderTest < ActiveSupport::TestCase assert ActiveAgent::Providers::GeminiProvider < ActiveAgent::Providers::OpenAI::ChatProvider end - test "client is configured with Gemini base_url" do + test "client returns OpenAI::Client instance" do provider = ActiveAgent::Providers::GeminiProvider.new(@valid_config) client = provider.client