Skip to content

Commit 95b76dc

Browse files
authored
[Obs AI Assistant] Attempt to fix flaky KB migration test (elastic#206324)
## Summary Closes elastic#202926 ### Problem The KB migration test is sometimes flaky because the migration takes a few seconds to run. When the test asserts that all entries have semantic_text before the migration has completed, it fails. ### Solution Wrap the assertions in a retry statement so the test tolerates cases where the migration takes a little while to finish. ### Checklist - [x] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed - [x] The PR description includes the appropriate Release Notes section, and the correct `release_note:*` label is applied per the [guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
1 parent d04318f commit 95b76dc

File tree

1 file changed

+29
-26
lines changed

1 file changed

+29
-26
lines changed

x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/knowledge_base_migration.spec.ts

Lines changed: 29 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
2424
const esArchiver = getService('esArchiver');
2525
const es = getService('es');
2626
const ml = getService('ml');
27+
const retry = getService('retry');
2728

2829
const archive =
2930
'x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15';
@@ -91,32 +92,34 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
9192
});
9293

9394
it('the docs have semantic_text embeddings', async () => {
94-
const hits = await getKnowledgeBaseEntries();
95-
const hasSemanticTextEmbeddings = hits.every((hit) => hit._source?.semantic_text);
96-
expect(hasSemanticTextEmbeddings).to.be(true);
97-
98-
expect(
99-
orderBy(hits, '_source.title').map(({ _source }) => {
100-
const { text, inference } = _source?.semantic_text!;
101-
102-
return {
103-
text,
104-
inferenceId: inference.inference_id,
105-
chunkCount: inference.chunks.length,
106-
};
107-
})
108-
).to.eql([
109-
{
110-
text: 'To infinity and beyond!',
111-
inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
112-
chunkCount: 1,
113-
},
114-
{
115-
text: "The user's favourite color is blue.",
116-
inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
117-
chunkCount: 1,
118-
},
119-
]);
95+
await retry.try(async () => {
96+
const hits = await getKnowledgeBaseEntries();
97+
const hasSemanticTextEmbeddings = hits.every((hit) => hit._source?.semantic_text);
98+
expect(hasSemanticTextEmbeddings).to.be(true);
99+
100+
expect(
101+
orderBy(hits, '_source.title').map(({ _source }) => {
102+
const { text, inference } = _source?.semantic_text!;
103+
104+
return {
105+
text,
106+
inferenceId: inference.inference_id,
107+
chunkCount: inference.chunks.length,
108+
};
109+
})
110+
).to.eql([
111+
{
112+
text: 'To infinity and beyond!',
113+
inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
114+
chunkCount: 1,
115+
},
116+
{
117+
text: "The user's favourite color is blue.",
118+
inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
119+
chunkCount: 1,
120+
},
121+
]);
122+
});
120123
});
121124

122125
it('returns entries correctly via API', async () => {

0 commit comments

Comments (0)