
Commit ec01897 (parent: b340107)

feat: support other configured models in AI assistant

5 files changed: +83 -43 lines changed

report-app/report-server.ts

Lines changed: 37 additions & 14 deletions

@@ -10,7 +10,7 @@ import {fileURLToPath} from 'node:url';
 import {chatWithReportAI} from '../runner/reporting/report-ai-chat';
 import {convertV2ReportToV3Report} from '../runner/reporting/migrations/v2_to_v3';
 import {FetchedLocalReports, fetchReportsFromDisk} from '../runner/reporting/report-local-disk';
-import {AiChatRequest, RunInfo} from '../runner/shared-interfaces';
+import {AiChatRequest, AIConfigState, RunInfo} from '../runner/shared-interfaces';

 // This will result in a lot of loading and would slow down the serving,
 // so it's loaded lazily below.
@@ -88,19 +88,42 @@ app.post('/api/reports/:id/chat', async (req, res) => {
     return;
   }

-  const {prompt, pastMessages, model} = req.body as AiChatRequest;
-  const assessments = reports.flatMap(run => run.results);
-  const abortController = new AbortController();
-  const summary = await chatWithReportAI(
-    await (llm ?? getOrCreateGenkitLlmRunner()),
-    prompt,
-    abortController.signal,
-    assessments,
-    pastMessages,
-    model,
-  );
-
-  res.json(summary);
+  try {
+    const {prompt, pastMessages, model} = req.body as AiChatRequest;
+    const assessments = reports.flatMap(run => run.results);
+    const abortController = new AbortController();
+    const summary = await chatWithReportAI(
+      await (llm ?? getOrCreateGenkitLlmRunner()),
+      prompt,
+      abortController.signal,
+      assessments,
+      pastMessages,
+      model,
+    );
+    res.json(summary);
+  } catch (e) {
+    console.error(e);
+    if (e instanceof Error) {
+      console.error(e.stack);
+    }
+    res.status(500);
+    res.end(`Unexpected error. See terminal logs.`);
+  }
+});
+
+app.get('/api/ai-config-state', async (req, res) => {
+  try {
+    const llm = await getOrCreateGenkitLlmRunner();
+    return res.json({
+      configuredModels: llm.getSupportedModelsWithAPIKey(),
+    } satisfies AIConfigState);
+  } catch (e) {
+    console.error('Could not instantiate LLM instance. Error:', e);
+    if (e instanceof Error) {
+      console.error(e.stack);
+    }
+    return res.json({configuredModels: []});
+  }
 });

 app.use(
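
For illustration (not part of the commit): the new endpoint can be exercised directly from a script. A minimal sketch, assuming the report server is listening on http://localhost:4200 (the base URL and port are assumptions, not taken from this diff); runnable as a Node 18+ ES module:

// Sketch: query the new AI config state endpoint.
// The URL below is an assumption; point it at wherever report-server runs.
const res = await fetch('http://localhost:4200/api/ai-config-state');
const state = (await res.json()) as {configuredModels: string[]};

if (state.configuredModels.length === 0) {
  console.warn('No models configured; set provider API keys first.');
} else {
  console.log('Models with API keys:', state.configuredModels.join(', '));
}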

report-app/src/app/shared/ai-assistant/ai-assistant.html

Lines changed: 30 additions & 15 deletions

@@ -6,8 +6,8 @@ <h2>AI Assistant</h2>
   </div>
   <div class="model-selector-container">
     <select [(ngModel)]="selectedModel">
-      @for (model of models; track model) {
-        <option [value]="model.id">{{ model.name }}</option>
+      @for (model of models(); track model) {
+        <option [value]="model">{{ model }}</option>
       }
     </select>
   </div>
@@ -29,19 +29,34 @@ <h2>AI Assistant</h2>
 </header>
 <div class="ai-assistant-body">
   <div class="messages-container" #messagesContainer>
-    @for (message of messages; track $index) {
-      <div
-        class="message"
-        [class.user-message]="message.role === 'user'"
-        [class.model-message]="message.role === 'model'"
-      >
-        <div [innerHTML]="message.text"></div>
-      </div>
-    }
-    @if (isLoading()) {
-      <div class="message model-message">
-        <message-spinner message="Thinking..." />
-      </div>
+    @if (aiConfigState.isLoading()) {
+      <message-spinner message="Checking available models for chat..." />
+    } @else if (aiConfigState.error() !== undefined) {
+      Error fetching available models.
+    } @else if (models().length === 0) {
+      @if (models().length === 0) {
+        No models available. Make sure you set API keys as per
+        <a
+          href="https://github.com/angular/web-codegen-scorer?tab=readme-ov-file#setup"
+          target="_blank"
+          >Setup instructions.</a
+        >
+      }
+    } @else {
+      @for (message of messages; track $index) {
+        <div
+          class="message"
+          [class.user-message]="message.role === 'user'"
+          [class.model-message]="message.role === 'model'"
+        >
+          <div [innerHTML]="message.text"></div>
+        </div>
+      }
+      @if (isLoading()) {
+        <div class="message model-message">
+          <message-spinner message="Thinking..." />
+        </div>
+      }
     }
   </div>
   <div class="input-container">
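
With this change the message area renders one of four states: a spinner while /api/ai-config-state is still loading, an error notice if that request failed, setup instructions when no model has an API key configured, and the regular chat history (with the "Thinking..." spinner) otherwise. Note that the inner @if (models().length === 0) check repeats the condition of its enclosing @else if branch, so it always passes.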

report-app/src/app/shared/ai-assistant/ai-assistant.ts

Lines changed: 7 additions & 14 deletions

@@ -1,19 +1,15 @@
-import {HttpClient} from '@angular/common/http';
-import {Component, inject, input, output, signal} from '@angular/core';
+import {HttpClient, httpResource} from '@angular/common/http';
+import {Component, computed, inject, input, linkedSignal, output, signal} from '@angular/core';
 import {FormsModule} from '@angular/forms';
 import {firstValueFrom} from 'rxjs';
 import {
   AiChatMessage,
   AiChatRequest,
   AiChatResponse,
+  AIConfigState,
 } from '../../../../../runner/shared-interfaces';
 import {MessageSpinner} from '../message-spinner';

-interface Model {
-  id: string;
-  name: string;
-}
-
 @Component({
   selector: 'app-ai-assistant',
   templateUrl: './ai-assistant.html',
@@ -34,12 +30,9 @@ export class AiAssistant {

   private readonly http = inject(HttpClient);

-  protected readonly models: Model[] = [
-    {id: 'gemini-2.5-flash', name: 'Gemini 2.5 Flash'},
-    {id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro'},
-    {id: 'gemini-2.5-flash-lite', name: 'Gemini 2.5 Flash Lite'},
-  ];
-  protected selectedModel = this.models[0].id;
+  protected readonly aiConfigState = httpResource<AIConfigState>(() => '/api/ai-config-state');
+  protected readonly models = computed(() => this.aiConfigState.value()?.configuredModels ?? []);
+  protected selectedModel = linkedSignal(() => this.models()[0]);

   protected toggleExpanded(): void {
     this.isExpanded.set(!this.isExpanded());
@@ -60,7 +53,7 @@ export class AiAssistant {
     const payload: AiChatRequest = {
       prompt,
       pastMessages,
-      model: this.selectedModel,
+      model: this.selectedModel(),
     };

     try {
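
The switch from a hard-coded array to httpResource plus linkedSignal means the selected model tracks whatever list the server returns. A minimal standalone sketch of the linkedSignal behavior (illustrative only; the model names are placeholders):

import {linkedSignal, signal} from '@angular/core';

const models = signal<string[]>([]);
// Recomputes from `models` whenever it changes, so the selection
// always defaults to the first available model.
const selectedModel = linkedSignal(() => models()[0]);

models.set(['gemini-2.5-pro', 'gemini-2.5-flash']);
console.log(selectedModel()); // 'gemini-2.5-pro'

// It stays writable, so a user-driven <select> binding still works:
selectedModel.set('gemini-2.5-flash');
console.log(selectedModel()); // 'gemini-2.5-flash'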

runner/codegen/genkit/genkit-runner.ts

Lines changed: 4 additions & 0 deletions

@@ -96,6 +96,10 @@ export class GenkitRunner implements LlmRunner {
     return MODEL_PROVIDERS.flatMap(p => p.getSupportedModels());
   }

+  getSupportedModelsWithAPIKey(): string[] {
+    return MODEL_PROVIDERS.filter(p => p.getApiKey() !== null).flatMap(p => p.getSupportedModels());
+  }
+
   private async _genkitRequest(
     provider: GenkitModelProvider,
     model: ModelReference<any>,
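
The GenkitModelProvider interface itself is not shown in this diff. A hypothetical provider shape that would satisfy the filter above, for illustration (the interface name, env variable, and model list are assumptions):

// Hypothetical shape (assumed, not from this commit): the filter only
// needs providers to expose getApiKey() and getSupportedModels().
interface ProviderLike {
  getApiKey(): string | null; // null when the key is not configured
  getSupportedModels(): string[];
}

const exampleProvider: ProviderLike = {
  getApiKey: () => process.env['GEMINI_API_KEY'] ?? null,
  getSupportedModels: () => ['gemini-2.5-pro', 'gemini-2.5-flash'],
};

// Same logic as getSupportedModelsWithAPIKey(): providers without a key
// contribute no models, so the UI only offers usable options.
const configured = [exampleProvider]
  .filter(p => p.getApiKey() !== null)
  .flatMap(p => p.getSupportedModels());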

runner/shared-interfaces.ts

Lines changed: 5 additions & 0 deletions

@@ -323,6 +323,11 @@ export interface AiChatMessage {
   text: string;
 }

+/** Interface describing the response of the AI config state report-server endpoint. */
+export interface AIConfigState {
+  configuredModels: string[];
+}
+
 /**
  * A summary of build outcomes and code quality scores for an entire assessment run.
  */
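
The server handler returns this shape with the satisfies operator (see the report-server diff above), which type-checks the literal against AIConfigState without widening its inferred type. A minimal illustration:

interface AIConfigState {
  configuredModels: string[];
}

// `satisfies` rejects typos like `configuredModel` at compile time,
// while `ok` keeps its narrower inferred literal type.
const ok = {configuredModels: ['gemini-2.5-flash']} satisfies AIConfigState;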
