
Commit 273a1fa

FIX: occurrences of old model setting
1 parent: 7af3ce8

2 files changed: +23 −19 lines

lib/embeddings/semantic_search.rb (22 additions, 18 deletions)
@@ -6,8 +6,8 @@ class SemanticSearch
   def self.clear_cache_for(query)
     digest = OpenSSL::Digest::SHA1.hexdigest(query)
 
-    hyde_key =
-      "semantic-search-#{digest}-#{SiteSetting.ai_embeddings_semantic_search_hyde_model}"
+    hyde_model_id = find_ai_hyde_model_id
+    hyde_key = "semantic-search-#{digest}-#{hyde_model_id}"
 
     Discourse.cache.delete(hyde_key)
     Discourse.cache.delete("#{hyde_key}-#{SiteSetting.ai_embeddings_selected_model}")
@@ -20,12 +20,9 @@ def initialize(guardian)
 
   def cached_query?(query)
     digest = OpenSSL::Digest::SHA1.hexdigest(query)
+    hyde_model_id = find_ai_hyde_model_id
     embedding_key =
-      build_embedding_key(
-        digest,
-        SiteSetting.ai_embeddings_semantic_search_hyde_model,
-        SiteSetting.ai_embeddings_selected_model,
-      )
+      build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model)
 
     Discourse.cache.read(embedding_key).present?
   end
@@ -36,14 +33,11 @@ def vector
 
   def hyde_embedding(search_term)
     digest = OpenSSL::Digest::SHA1.hexdigest(search_term)
-    hyde_key = build_hyde_key(digest, SiteSetting.ai_embeddings_semantic_search_hyde_model)
+    hyde_model_id = find_ai_hyde_model_id
+    hyde_key = build_hyde_key(digest, hyde_model_id)
 
     embedding_key =
-      build_embedding_key(
-        digest,
-        SiteSetting.ai_embeddings_semantic_search_hyde_model,
-        SiteSetting.ai_embeddings_selected_model,
-      )
+      build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model)
 
     hypothetical_post =
       Discourse
@@ -111,6 +105,7 @@ def quick_search(query)
     max_semantic_results_per_page = 100
     search = Search.new(query, { guardian: guardian })
     search_term = search.term
+    hyde_model_id = find_ai_hyde_model_id
 
     return [] if search_term.nil? || search_term.length < SiteSetting.min_search_term_length
 
@@ -119,11 +114,7 @@ def quick_search(query)
     digest = OpenSSL::Digest::SHA1.hexdigest(search_term)
 
     embedding_key =
-      build_embedding_key(
-        digest,
-        SiteSetting.ai_embeddings_semantic_search_hyde_model,
-        SiteSetting.ai_embeddings_selected_model,
-      )
+      build_embedding_key(digest, hyde_model_id, SiteSetting.ai_embeddings_selected_model)
 
     search_term_embedding =
       Discourse
@@ -221,6 +212,19 @@ def find_ai_hyde_model(persona_klass)
     end
   end
 
+  def find_ai_hyde_model_id
+    persona_llm_id =
+      AiPersona.find_by(
+        id: SiteSetting.ai_embeddings_semantic_search_hyde_persona,
+      )&.default_llm_id
+
+    if persona_llm_id.present?
+      persona_llm_id
+    else
+      SiteSetting.ai_default_llm_model.to_i || LlmModel.last&.id
+    end
+  end
+
   private
 
   attr_reader :guardian
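
For orientation, a minimal sketch of the key resolution after this change, assuming a Discourse environment where the constants referenced in the diff (AiPersona, LlmModel, SiteSetting, OpenSSL, Discourse.cache) are loaded; the query string and local variable names are illustrative only and not part of the commit:

    # Resolve the HyDE model id the way the new find_ai_hyde_model_id helper does:
    # the configured HyDE persona's default LLM when present, otherwise the
    # fallback chain copied from the diff above.
    persona_llm_id =
      AiPersona.find_by(id: SiteSetting.ai_embeddings_semantic_search_hyde_persona)&.default_llm_id
    hyde_model_id =
      persona_llm_id.presence || SiteSetting.ai_default_llm_model.to_i || LlmModel.last&.id

    # Cache keys are now derived from the resolved id instead of the removed
    # ai_embeddings_semantic_search_hyde_model setting (same shape as clear_cache_for).
    digest = OpenSSL::Digest::SHA1.hexdigest("illustrative query")
    hyde_key = "semantic-search-#{digest}-#{hyde_model_id}"
    embedding_cache_key = "#{hyde_key}-#{SiteSetting.ai_embeddings_selected_model}"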

lib/tasks/create_topics.rake (1 addition, 1 deletion)
@@ -88,7 +88,7 @@ namespace :ai do
         messages: [{ type: :user, content: prompt, id: "user" }],
       )
 
-      DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).generate(
+      DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_default_llm_model).generate(
         prompt,
         user: Discourse.system_user,
         feature_name: "topic-generator",
