This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 9d21f38

endpoints
1 parent 0ef5289 commit 9d21f38

13 files changed: +335 additions, −93 deletions

Lines changed: 116 additions & 0 deletions
@@ -0,0 +1,116 @@
# frozen_string_literal: true

module DiscourseAi
  module Admin
    class AiEmbeddingsController < ::Admin::AdminController
      requires_plugin ::DiscourseAi::PLUGIN_NAME

      def index
        embedding_defs = EmbeddingDefinition.all.order(:display_name)

        render json: {
                 ai_embeddings:
                   ActiveModel::ArraySerializer.new(
                     embedding_defs,
                     each_serializer: AiEmbeddingDefinitionSerializer,
                     root: false,
                   ).as_json,
                 meta: {
                   provider_params: EmbeddingDefinition.provider_params,
                   providers: EmbeddingDefinition.provider_names,
                   tokenizers:
                     EmbeddingDefinition.tokenizer_names.map { |tn|
                       { id: tn, name: tn.split("::").last }
                     },
                 },
               }
      end

      def new
      end

      def edit
        embedding_def = EmbeddingDefinition.find(params[:id])
        render json: AiEmbeddingDefinitionSerializer.new(embedding_def)
      end

      def create
        embedding_def = EmbeddingDefinition.new(ai_embeddings_params)

        if embedding_def.save
          render json: AiEmbeddingDefinitionSerializer.new(embedding_def), status: :created
        else
          render_json_error embedding_def
        end
      end

      def update
        embedding_def = EmbeddingDefinition.find(params[:id])

        if embedding_def.update(ai_embeddings_params)
          render json: AiEmbeddingDefinitionSerializer.new(embedding_def)
        else
          render_json_error embedding_def
        end
      end

      def destroy
        embedding_def = EmbeddingDefinition.find(params[:id])

        if embedding_def.id == SiteSetting.ai_embeddings_selected_model.to_i
          return render_json_error(I18n.t("discourse_ai.embeddings.delete_failed"), status: 409)
        end

        if embedding_def.destroy
          head :no_content
        else
          render_json_error embedding_def
        end
      end

      def test
        RateLimiter.new(
          current_user,
          "ai_embeddings_test_#{current_user.id}",
          3,
          1.minute,
        ).performed!

        embedding_def = EmbeddingDefinition.new(ai_embeddings_params)
        DiscourseAi::Embeddings::Vector.new(embedding_def).vector_from("this is a test")

        render json: { success: true }
      rescue Net::HTTPBadResponse => e
        render json: { success: false, error: e.message }
      end

      private

      def ai_embeddings_params
        permitted =
          params.require(:ai_embedding).permit(
            :display_name,
            :dimensions,
            :max_sequence_length,
            :pg_function,
            :provider,
            :url,
            :api_key,
            :tokenizer_class,
          )

        extra_field_names = EmbeddingDefinition.provider_params.dig(permitted[:provider]&.to_sym)
        if extra_field_names.present?
          received_prov_params =
            params.dig(:ai_embedding, :provider_params)&.slice(*extra_field_names.keys)

          if received_prov_params.present?
            permitted[:provider_params] = received_prov_params.permit!
          end
        end

        permitted
      end
    end
  end
end
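
For orientation, a minimal sketch of the request body the create, update, and test actions expect, derived from ai_embeddings_params above. The concrete values (provider, URL, tokenizer class, pg_function, dimensions) are illustrative assumptions, not defaults shipped by the plugin:

# Hypothetical payload for the create action (all values are examples only).
payload = {
  ai_embedding: {
    display_name: "My embedding model",        # label, max 100 chars per the model validation
    provider: "open_ai",                       # must be one of EmbeddingDefinition.provider_names
    url: "https://api.openai.com/v1/embeddings",
    api_key: "sk-example",
    tokenizer_class: "DiscourseAi::Tokenizer::OpenAiTokenizer", # assumed name; must be in tokenizer_names
    dimensions: 1536,
    max_sequence_length: 8191,
    pg_function: "<=>",                        # assumed pgvector distance operator
    provider_params: { model_name: "text-embedding-3-small" },  # only keys whitelisted for the provider are kept
  },
}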

app/models/embedding_definition.rb

Lines changed: 3 additions & 2 deletions
@@ -23,14 +23,15 @@ def tokenizer_names
       ].map(&:name)
     end

-    def self.provider_params
+    def provider_params
       { discourse: { model_name: :text }, open_ai: { model_name: :text } }
     end
   end

   validates :provider, presence: true, inclusion: provider_names
   validates :display_name, presence: true, length: { maximum: 100 }
   validates :tokenizer_class, presence: true, inclusion: tokenizer_names
+  validates_presence_of :url, :api_key, :dimensions, :max_sequence_length, :pg_function

   def tokenizer
     tokenizer_class.constantize
@@ -60,7 +61,7 @@ def lookup_custom_param(key)
   def endpoint_url
     return url if !url.starts_with?("srv://")

-    service = DiscourseAi::Utils::DnsSrv.lookup(url)
+    service = DiscourseAi::Utils::DnsSrv.lookup(url.sub("srv://", ""))
     "https://#{service.target}:#{service.port}"
   end
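
The endpoint_url change strips the srv:// scheme before the SRV lookup, so DnsSrv.lookup now receives a bare record name. A rough sketch of the flow after this change (the hostname is made up):

# url stored on the definition, scheme included:
url = "srv://_tei._tcp.embeddings.example.internal"
# before: DnsSrv.lookup("srv://_tei._tcp.embeddings.example.internal")
# after:  DnsSrv.lookup("_tei._tcp.embeddings.example.internal")
service = DiscourseAi::Utils::DnsSrv.lookup(url.sub("srv://", ""))
"https://#{service.target}:#{service.port}"  # e.g. "https://tei-1.internal:8443"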

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
# frozen_string_literal: true

class AiEmbeddingDefinitionSerializer < ApplicationSerializer
  root "ai_embedding"

  attributes :display_name,
             :dimensions,
             :max_sequence_length,
             :pg_function,
             :provider,
             :url,
             :api_key,
             :tokenizer_class,
             :provider_params
end
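
Because the serializer declares root "ai_embedding", single-object responses from edit and create are wrapped under that key, while index passes root: false to the array serializer and returns a bare array under ai_embeddings. A rough sketch of the edit response, with placeholder values and an envelope that depends on the serializer defaults in core:

# Illustrative response from the edit action:
# {
#   "ai_embedding": {
#     "display_name": "My embedding model",
#     "provider": "open_ai",
#     "url": "https://api.openai.com/v1/embeddings",
#     "dimensions": 1536,
#     "max_sequence_length": 8191,
#     "pg_function": "<=>",
#     "api_key": "sk-example",
#     "tokenizer_class": "DiscourseAi::Tokenizer::OpenAiTokenizer",
#     "provider_params": { "model_name": "text-embedding-3-small" }
#   }
# }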

config/locales/server.en.yml

Lines changed: 1 addition & 0 deletions
@@ -434,6 +434,7 @@ en:
       cannot_edit_builtin: "You can't edit a built-in model."

     embeddings:
+      delete_failed: "This model is currently in use. Update the `ai embeddings selected model` first."
       configuration:
         disable_embeddings: "You have to disable 'ai embeddings enabled' first."
         choose_model: "Set 'ai embeddings selected model' first."

config/routes.rb

Lines changed: 7 additions & 0 deletions
@@ -96,6 +96,13 @@
               controller: "discourse_ai/admin/ai_llm_quotas",
               path: "quotas",
               only: %i[index create update destroy]
+
+    resources :ai_embeddings,
+              only: %i[index new create edit update destroy],
+              path: "ai-embeddings",
+              controller: "discourse_ai/admin/ai_embeddings" do
+      collection { get :test }
+    end
   end
 end
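
For reference, the routes this resources block generates, shown relative to the enclosing admin scope (the scope prefix is not visible in this hunk):

GET       ai-embeddings           -> discourse_ai/admin/ai_embeddings#index
GET       ai-embeddings/new       -> discourse_ai/admin/ai_embeddings#new
POST      ai-embeddings           -> discourse_ai/admin/ai_embeddings#create
GET       ai-embeddings/:id/edit  -> discourse_ai/admin/ai_embeddings#edit
PATCH/PUT ai-embeddings/:id       -> discourse_ai/admin/ai_embeddings#update
DELETE    ai-embeddings/:id       -> discourse_ai/admin/ai_embeddings#destroy
GET       ai-embeddings/test      -> discourse_ai/admin/ai_embeddings#test (collection route)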

lib/inference/cloudflare_workers_ai.rb

Lines changed: 1 addition & 9 deletions
@@ -9,14 +9,6 @@ def initialize(endpoint, api_token, referer = Discourse.base_url)
         @referer = referer
       end

-      def self.instance(model)
-        new(
-          SiteSetting.ai_cloudflare_workers_account_id,
-          SiteSetting.ai_cloudflare_workers_api_token,
-          model,
-        )
-      end
-
       attr_reader :endpoint, :api_token, :referer

       def perform!(content)
@@ -40,7 +32,7 @@ def perform!(content)
           Rails.logger.warn(
             "Cloudflare Workers AI Embeddings failed with status: #{response.status} body: #{response.body}",
           )
-          raise Net::HTTPBadResponse
+          raise Net::HTTPBadResponse.new(response.body.to_s)
         end
       end

lib/inference/discourse_classifier.rb

Lines changed: 3 additions & 20 deletions
@@ -10,25 +10,6 @@ def initialize(endpoint, api_key, model, referer = Discourse.base_url)
         @referer = referer
       end

-      def self.instance(model)
-        endpoint =
-          if SiteSetting.ai_embeddings_discourse_service_api_endpoint_srv.present?
-            service =
-              DiscourseAi::Utils::DnsSrv.lookup(
-                SiteSetting.ai_embeddings_discourse_service_api_endpoint_srv,
-              )
-            "https://#{service.target}:#{service.port}"
-          else
-            SiteSetting.ai_embeddings_discourse_service_api_endpoint
-          end
-
-        new(
-          "#{endpoint}/api/v1/classify",
-          SiteSetting.ai_embeddings_discourse_service_api_key,
-          model,
-        )
-      end
-
       attr_reader :endpoint, :api_key, :model, :referer

       def perform!(content)
@@ -38,7 +19,9 @@ def perform!(content)
         conn = Faraday.new { |f| f.adapter FinalDestination::FaradayAdapter }
         response = conn.post(endpoint, { model: model, content: content }.to_json, headers)

-        raise Net::HTTPBadResponse if ![200, 415].include?(response.status)
+        if ![200, 415].include?(response.status)
+          raise Net::HTTPBadResponse.new(response.body.to_s)
+        end

         JSON.parse(response.body, symbolize_names: true)
       end

lib/inference/gemini_embeddings.rb

Lines changed: 1 addition & 8 deletions
@@ -3,13 +3,6 @@
 module ::DiscourseAi
   module Inference
     class GeminiEmbeddings
-      def self.instance
-        new(
-          "https://generativelanguage.googleapis.com/v1beta/models/embedding-001:embedContent",
-          SiteSetting.ai_gemini_api_key,
-        )
-      end
-
       def initialize(embedding_url, api_key, referer = Discourse.base_url)
         @api_key = api_key
         @embedding_url = embedding_url
@@ -35,7 +28,7 @@ def perform!(content)
           Rails.logger.warn(
             "Google Gemini Embeddings failed with status: #{response.status} body: #{response.body}",
           )
-          raise Net::HTTPBadResponse
+          raise Net::HTTPBadResponse.new(response.body.to_s)
         end
       end
     end

lib/inference/hugging_face_text_embeddings.rb

Lines changed: 1 addition & 14 deletions
@@ -12,19 +12,6 @@ def initialize(endpoint, key, referer = Discourse.base_url)
       attr_reader :endpoint, :key, :referer

       class << self
-        def instance
-          endpoint =
-            if SiteSetting.ai_hugging_face_tei_endpoint_srv.present?
-              service =
-                DiscourseAi::Utils::DnsSrv.lookup(SiteSetting.ai_hugging_face_tei_endpoint_srv)
-              "https://#{service.target}:#{service.port}"
-            else
-              SiteSetting.ai_hugging_face_tei_endpoint
-            end
-
-          new(endpoint, SiteSetting.ai_hugging_face_tei_api_key)
-        end
-
         def configured?
           SiteSetting.ai_hugging_face_tei_endpoint.present? ||
             SiteSetting.ai_hugging_face_tei_endpoint_srv.present?
@@ -100,7 +87,7 @@ def perform!(content)
         conn = Faraday.new { |f| f.adapter FinalDestination::FaradayAdapter }
         response = conn.post(endpoint, body, headers)

-        raise Net::HTTPBadResponse if ![200].include?(response.status)
+        raise Net::HTTPBadResponse.new(response.body.to_s) if ![200].include?(response.status)

         JSON.parse(response.body, symbolize_names: true).first
       end

lib/inference/open_ai_embeddings.rb

Lines changed: 2 additions & 6 deletions
@@ -12,10 +12,6 @@ def initialize(endpoint, api_key, model, dimensions)

       attr_reader :endpoint, :api_key, :model, :dimensions

-      def self.instance(model:, dimensions: nil)
-        new(SiteSetting.ai_openai_embeddings_url, SiteSetting.ai_openai_api_key, model, dimensions)
-      end
-
       def perform!(content)
         headers = { "Content-Type" => "application/json" }

@@ -29,7 +25,7 @@ def perform!(content)
         payload[:dimensions] = dimensions if dimensions.present?

         conn = Faraday.new { |f| f.adapter FinalDestination::FaradayAdapter }
-        response = conn.post(SiteSetting.ai_openai_embeddings_url, payload.to_json, headers)
+        response = conn.post(endpoint, payload.to_json, headers)

         case response.status
         when 200
@@ -40,7 +36,7 @@ def perform!(content)
           Rails.logger.warn(
             "OpenAI Embeddings failed with status: #{response.status} body: #{response.body}",
           )
-          raise Net::HTTPBadResponse
+          raise Net::HTTPBadResponse.new(response.body.to_s)
         end
       end
     end
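
Across these inference backends, the exception now carries the upstream response body, which is what the controller's test action relays back to the admin UI. A rough sketch of that flow, with an invented failure body:

# Sketch of how the richer exception surfaces in the new test endpoint.
begin
  DiscourseAi::Embeddings::Vector.new(embedding_def).vector_from("this is a test")
  render json: { success: true }
rescue Net::HTTPBadResponse => e
  # e.message is now the upstream response body rather than the bare class name,
  # e.g. '{"error":{"message":"Incorrect API key provided"}}'
  render json: { success: false, error: e.message }
end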
