lib/embeddings/vector_representations/gemini.rb — 2 changes: 1 addition & 1 deletion
```diff
@@ -43,7 +43,7 @@ def pg_index_type
   end
 
   def vector_from(text, asymetric: false)
-    inference_client.perform!(text).dig(:embedding, :values)
+    inference_client.perform!(text)
   end
 
   # There is no public tokenizer for Gemini, and from the ones we already ship in the plugin
```
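The net effect is a change of contract between the vector representation and its inference client: `vector_from` used to receive the full parsed Gemini response and dig out the values itself; it now expects the client to return the bare array. A small hedged sketch of the two shapes (the sample numbers are made up):

```ruby
# Illustration only; the float values are invented for the example.

# Before this PR, perform! returned the parsed response hash, so the
# caller had to dig out the values:
response = { embedding: { values: [0.0045, -0.0123, 0.0311] } }
response.dig(:embedding, :values) # => [0.0045, -0.0123, 0.0311]

# After this PR, perform! does the dig itself, so vector_from can simply
# return whatever the client hands back:
embedding = [0.0045, -0.0123, 0.0311]
embedding # => already the flat array that gets stored as the vector
```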
lib/inference/gemini_embeddings.rb — 6 changes: 5 additions & 1 deletion
```diff
@@ -3,6 +3,10 @@
 module ::DiscourseAi
   module Inference
     class GeminiEmbeddings
+      def self.instance
+        new(SiteSetting.ai_gemini_api_key)
+      end
+
       def initialize(api_key, referer = Discourse.base_url)
         @api_key = api_key
         @referer = referer
@@ -21,7 +25,7 @@ def perform!(content)
 
         case response.status
         when 200
-          JSON.parse(response.body, symbolize_names: true)
+          JSON.parse(response.body, symbolize_names: true).dig(:embedding, :values)
         when 429
           # TODO add a AdminDashboard Problem?
         else
```
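With the new `self.instance` constructor and the `dig` moved into `perform!`, a caller can go from text to a storable vector in two lines. A rough usage sketch, assuming `SiteSetting.ai_gemini_api_key` is configured and the Gemini API responds normally:

```ruby
# Sketch only: assumes ai_gemini_api_key is set and the embedContent
# endpoint returns { "embedding": { "values": [...] } }.
client = DiscourseAi::Inference::GeminiEmbeddings.instance

# perform! now parses the JSON body and digs out :embedding, :values,
# so the caller gets the flat Float array directly.
values = client.perform!("What is Discourse?")
values # => e.g. [0.0045, -0.0123, 0.0311]
```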
spec/lib/modules/embeddings/vector_representations/gemini_spec.rb — 18 changes: 18 additions & 0 deletions
```diff
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require_relative "vector_rep_shared_examples"
+
+RSpec.describe DiscourseAi::Embeddings::VectorRepresentations::Gemini do
+  subject(:vector_rep) { described_class.new(truncation) }
+
+  let(:truncation) { DiscourseAi::Embeddings::Strategies::Truncation.new }
+  let!(:api_key) { "test-123" }
+
+  before { SiteSetting.ai_gemini_api_key = api_key }
+
+  def stub_vector_mapping(text, expected_embedding)
+    EmbeddingsGenerationStubs.gemini_service(api_key, text, expected_embedding)
+  end
+
+  it_behaves_like "generates and store embedding using with vector representation"
+end
```
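The Gemini-specific pieces here are just the API key setting and `stub_vector_mapping`; everything else comes from the shared examples. As a hedged illustration (not part of this PR), an explicit expectation built on the same stub might look like:

```ruby
# Hypothetical spec, assuming the Gemini vector representation's
# inference client picks up SiteSetting.ai_gemini_api_key (as stubbed
# in the `before` block above):
it "returns the stubbed embedding values" do
  text = "discourse is cool"
  stub_vector_mapping(text, [0.1, 0.2, 0.3])

  expect(vector_rep.vector_from(text)).to eq([0.1, 0.2, 0.3])
end
```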
spec/support/embeddings_generation_stubs.rb — 10 changes: 10 additions & 0 deletions
```diff
@@ -19,5 +19,15 @@ def openai_service(model, string, embedding, extra_args: {})
        .with(body: JSON.dump({ model: model, input: string }.merge(extra_args)))
        .to_return(status: 200, body: JSON.dump({ data: [{ embedding: embedding }] }))
    end
+
+    def gemini_service(api_key, string, embedding)
+      WebMock
+        .stub_request(
+          :post,
+          "https://generativelanguage.googleapis.com/v1beta/models/embedding-001:embedContent\?key\=#{api_key}",
+        )
+        .with(body: JSON.dump({ content: { parts: [{ text: string }] } }))
+        .to_return(status: 200, body: JSON.dump({ embedding: { values: embedding } }))
+    end
   end
 end
```
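The stub mirrors the request `GeminiEmbeddings#perform!` makes and the response it expects back. A hedged sketch of what a spec gets once the stub is installed (the key and values are illustrative):

```ruby
# Illustrative usage: after this call, WebMock intercepts the matching
# POST to the embedContent endpoint for key "test-123"...
EmbeddingsGenerationStubs.gemini_service(
  "test-123",
  "some post text",
  [0.1, 0.2, 0.3],
)

# ...and returns { embedding: { values: [0.1, 0.2, 0.3] } }, which the
# updated perform! digs down to the bare [0.1, 0.2, 0.3] array.
```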