Skip to content

Commit 7c6ddda

Browse files
committed
[DERCBOT-919] Fix TU
1 parent 21b7681 commit 7c6ddda

File tree

3 files changed

+30
-15
lines changed

3 files changed

+30
-15
lines changed

bot/admin/server/src/main/kotlin/service/RAGValidationService.kt

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -37,22 +37,22 @@ object RAGValidationService {
3737
private val vectorStoreProviderService: VectorStoreProviderService get() = injector.provide()
3838

3939
fun validate(ragConfig: BotRAGConfiguration): Set<ErrorMessage> {
40+
val observabilitySetting = ObservabilityService.getObservabilityConfiguration(
41+
ragConfig.namespace, ragConfig.botId, enabled = true
42+
)?.setting
43+
4044
return mutableSetOf<ErrorMessage>().apply {
4145
val questionCondensingLlmErrors = llmProviderService.checkSetting(
4246
LLMProviderSettingStatusQuery(
4347
ragConfig.questionCondensingLlmSetting!!,
44-
ObservabilityService.getObservabilityConfiguration(
45-
ragConfig.namespace, ragConfig.botId, enabled = true
46-
)?.setting
48+
observabilitySetting
4749
)
4850
).getErrors("LLM setting check failed (for question condensing)")
4951

5052
val questionAnsweringLlmErrors = llmProviderService.checkSetting(
5153
LLMProviderSettingStatusQuery(
5254
ragConfig.questionAnsweringLlmSetting!!,
53-
ObservabilityService.getObservabilityConfiguration(
54-
ragConfig.namespace, ragConfig.botId, enabled = true
55-
)?.setting
55+
observabilitySetting
5656
)
5757
).getErrors("LLM setting check failed (for question answering)")
5858

bot/admin/server/src/test/kotlin/service/RAGServiceTest.kt

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -70,8 +70,13 @@ class RAGServiceTest : AbstractTest() {
7070
namespace = NAMESPACE,
7171
botId = BOT_ID,
7272
enabled = false,
73-
questionCondensingLlmSetting = null,
74-
questionCondensingPrompt = null,
73+
questionCondensingLlmSetting = OpenAILLMSettingDTO(
74+
apiKey = "apikey",
75+
model = MODEL,
76+
temperature = TEMPERATURE,
77+
baseUrl = "https://api.openai.com/v1"
78+
),
79+
questionCondensingPrompt = PromptTemplate(template = PROMPT),
7580
questionAnsweringLlmSetting = OpenAILLMSettingDTO(
7681
apiKey = "apikey",
7782
model = MODEL,
@@ -86,7 +91,11 @@ class RAGServiceTest : AbstractTest() {
8691
model = "model",
8792
apiBase = "url"
8893
),
89-
noAnswerSentence = "No answer sentence"
94+
noAnswerSentence = "No answer sentence",
95+
documentsRequired = true,
96+
debugEnabled = false,
97+
maxDocumentsRetrieved = 2,
98+
maxMessagesFromHistory = 2,
9099
)
91100

92101
private val DEFAULT_BOT_CONFIG = aApplication.copy(namespace = NAMESPACE, botId = BOT_ID)

bot/admin/server/src/test/kotlin/service/RAGValidationServiceTest.kt

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -79,12 +79,16 @@ class RAGValidationServiceTest {
7979
private val ragConfiguration = BotRAGConfigurationDTO(
8080
namespace = "namespace",
8181
botId = "botId",
82-
questionCondensingLlmSetting = null,
83-
questionCondensingPrompt = null,
82+
questionCondensingLlmSetting = openAILLMSetting,
83+
questionCondensingPrompt = PromptTemplate(template = "test"),
8484
questionAnsweringLlmSetting = openAILLMSetting,
8585
questionAnsweringPrompt = PromptTemplate(template = "How to bike in the rain"),
8686
emSetting = azureOpenAIEMSetting,
8787
noAnswerSentence = " No answer sentence",
88+
documentsRequired = true,
89+
debugEnabled = false,
90+
maxDocumentsRetrieved = 2,
91+
maxMessagesFromHistory = 2,
8892
)
8993

9094
@Test
@@ -167,7 +171,7 @@ class RAGValidationServiceTest {
167171
fun `validation of the RAG configuration when the Orchestrator returns 2 errors for LLM and 1 for Embedding model, the RAG function has not been activated`() {
168172

169173
// GIVEN
170-
// - 3 errors returned by Generative AI Orchestrator for LLM (2) and EM (1)
174+
// - 5 errors returned by Generative AI Orchestrator for LLM (4 = 2 for condensing + 2 for answering) and EM (1)
171175
// - RAG is not enabled
172176
every {
173177
llmProviderService.checkSetting(any())
@@ -191,11 +195,13 @@ class RAGValidationServiceTest {
191195
)
192196

193197
// THEN :
194-
// Check that 3 errors have been found
195-
assertEquals(2, errors.size)
198+
// Check that 3 groups of errors have been found
199+
assertEquals(3, errors.size)
196200
assertEquals("10", (((errors.elementAt(0).params) as List<*>)[0] as ErrorResponse).code)
197201
assertEquals("20", (((errors.elementAt(0).params) as List<*>)[1] as ErrorResponse).code)
198-
assertEquals("30", (((errors.elementAt(1).params) as List<*>)[0] as ErrorResponse).code)
202+
assertEquals("10", (((errors.elementAt(1).params) as List<*>)[0] as ErrorResponse).code)
203+
assertEquals("20", (((errors.elementAt(1).params) as List<*>)[1] as ErrorResponse).code)
204+
assertEquals("30", (((errors.elementAt(2).params) as List<*>)[0] as ErrorResponse).code)
199205
}
200206

201207
private fun createFakeErrorResponse(code: String) = ErrorResponse(

0 commit comments

Comments
 (0)