Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 37 additions & 14 deletions report-app/report-server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import {fileURLToPath} from 'node:url';
import {chatWithReportAI} from '../runner/reporting/report-ai-chat';
import {convertV2ReportToV3Report} from '../runner/reporting/migrations/v2_to_v3';
import {FetchedLocalReports, fetchReportsFromDisk} from '../runner/reporting/report-local-disk';
import {AiChatRequest, RunInfo} from '../runner/shared-interfaces';
import {AiChatRequest, AIConfigState, RunInfo} from '../runner/shared-interfaces';

// This will result in a lot of loading and would slow down the serving,
// so it's loaded lazily below.
Expand Down Expand Up @@ -88,19 +88,42 @@ app.post('/api/reports/:id/chat', async (req, res) => {
return;
}

const {prompt, pastMessages, model} = req.body as AiChatRequest;
const assessments = reports.flatMap(run => run.results);
const abortController = new AbortController();
const summary = await chatWithReportAI(
await (llm ?? getOrCreateGenkitLlmRunner()),
prompt,
abortController.signal,
assessments,
pastMessages,
model,
);

res.json(summary);
try {
const {prompt, pastMessages, model} = req.body as AiChatRequest;
const assessments = reports.flatMap(run => run.results);
const abortController = new AbortController();
const summary = await chatWithReportAI(
await (llm ?? getOrCreateGenkitLlmRunner()),
prompt,
abortController.signal,
assessments,
pastMessages,
model,
);
res.json(summary);
} catch (e) {
console.error(e);
if (e instanceof Error) {
console.error(e.stack);
}
res.status(500);
res.end(`Unexpected error. See terminal logs.`);
}
});

// Reports which models are usable for AI chat, so the UI can populate its
// model selector. When the LLM runner cannot be instantiated (e.g. no API
// keys are configured at all), responds with an empty model list instead
// of an error.
app.get('/api/ai-config-state', async (req, res) => {
  try {
    const runner = await getOrCreateGenkitLlmRunner();
    const state: AIConfigState = {
      configuredModels: runner.getSupportedModelsWithAPIKey(),
    };
    return res.json(state);
  } catch (e) {
    console.error('Could not instantiate LLM instance. Error:', e);
    if (e instanceof Error) {
      console.error(e.stack);
    }
    return res.json({configuredModels: []});
  }
});

app.use(
Expand Down
16 changes: 10 additions & 6 deletions report-app/src/app/pages/report-viewer/report-viewer.html
Original file line number Diff line number Diff line change
Expand Up @@ -173,15 +173,17 @@ <h4>Repair System Prompt</h4>
</expansion-panel>

@if (report.details.summary.aiSummary !== undefined) {
  <button #aiAssistButton class="fab" (click)="isAiAssistantVisible.set(true)">
    <span class="material-symbols-outlined">smart_toy</span>
  </button>

  <!-- Deferred so the assistant component is only loaded once the user
       first interacts with the button that reveals it. -->
  @defer (on interaction(aiAssistButton)) {
    <app-ai-assistant
      [class.hidden]="!isAiAssistantVisible()"
      [reportGroupId]="reportGroupId()"
      (close)="isAiAssistantVisible.set(false)"
    />
  }
}

@if (missingDeps().length > 0) {
Expand Down Expand Up @@ -346,6 +348,8 @@ <h5>





Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not sure what prettier does here..

}}%)
</span>
</div>
Expand Down
45 changes: 30 additions & 15 deletions report-app/src/app/shared/ai-assistant/ai-assistant.html
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ <h2>AI Assistant</h2>
</div>
<div class="model-selector-container">
<select [(ngModel)]="selectedModel">
@for (model of models; track model) {
<option [value]="model.id">{{ model.name }}</option>
@for (model of models(); track model) {
<option [value]="model">{{ model }}</option>
}
</select>
</div>
Expand All @@ -29,19 +29,34 @@ <h2>AI Assistant</h2>
</header>
<div class="ai-assistant-body">
<div class="messages-container" #messagesContainer>
@for (message of messages; track $index) {
<div
class="message"
[class.user-message]="message.role === 'user'"
[class.model-message]="message.role === 'model'"
>
<div [innerHTML]="message.text"></div>
</div>
}
@if (isLoading()) {
<div class="message model-message">
<message-spinner message="Thinking..." />
</div>
<!-- Chat body states: loading config, config error, no usable models,
     or the normal message list. -->
@if (aiConfigState.isLoading()) {
  <message-spinner message="Checking available models for chat..." />
} @else if (aiConfigState.error() !== undefined) {
  Error fetching available models.
} @else if (models().length === 0) {
  No models available. Make sure you set API keys as per
  <a
    href="https://github.com/angular/web-codegen-scorer?tab=readme-ov-file#setup"
    target="_blank"
    >Setup instructions.</a
  >
} @else {
  @for (message of messages; track $index) {
    <div
      class="message"
      [class.user-message]="message.role === 'user'"
      [class.model-message]="message.role === 'model'"
    >
      <div [innerHTML]="message.text"></div>
    </div>
  }
  @if (isLoading()) {
    <div class="message model-message">
      <message-spinner message="Thinking..." />
    </div>
  }
}
</div>
<div class="input-container">
Expand Down
21 changes: 7 additions & 14 deletions report-app/src/app/shared/ai-assistant/ai-assistant.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,15 @@
import {HttpClient} from '@angular/common/http';
import {Component, inject, input, output, signal} from '@angular/core';
import {HttpClient, httpResource} from '@angular/common/http';
import {Component, computed, inject, input, linkedSignal, output, signal} from '@angular/core';
import {FormsModule} from '@angular/forms';
import {firstValueFrom} from 'rxjs';
import {
AiChatMessage,
AiChatRequest,
AiChatResponse,
AIConfigState,
} from '../../../../../runner/shared-interfaces';
import {MessageSpinner} from '../message-spinner';

interface Model {
id: string;
name: string;
}

@Component({
selector: 'app-ai-assistant',
templateUrl: './ai-assistant.html',
Expand All @@ -34,12 +30,9 @@ export class AiAssistant {

private readonly http = inject(HttpClient);

protected readonly models: Model[] = [
{id: 'gemini-2.5-flash', name: 'Gemini 2.5 Flash'},
{id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro'},
{id: 'gemini-2.5-flash-lite', name: 'Gemini 2.5 Flash Lite'},
];
protected selectedModel = this.models[0].id;
protected readonly aiConfigState = httpResource<AIConfigState>(() => '/api/ai-config-state');
protected readonly models = computed(() => this.aiConfigState.value()?.configuredModels ?? []);
protected selectedModel = linkedSignal(() => this.models()[0]);

protected toggleExpanded(): void {
this.isExpanded.set(!this.isExpanded());
Expand All @@ -60,7 +53,7 @@ export class AiAssistant {
const payload: AiChatRequest = {
prompt,
pastMessages,
model: this.selectedModel,
model: this.selectedModel(),
};

try {
Expand Down
4 changes: 4 additions & 0 deletions runner/codegen/genkit/genkit-runner.ts
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,10 @@ export class GenkitRunner implements LlmRunner {
return MODEL_PROVIDERS.flatMap(p => p.getSupportedModels());
}

/**
 * Lists the supported models of every provider that currently has an API
 * key configured; providers without a key contribute no models.
 */
getSupportedModelsWithAPIKey(): string[] {
  return MODEL_PROVIDERS.flatMap(provider =>
    provider.getApiKey() !== null ? provider.getSupportedModels() : [],
  );
}

private async _genkitRequest(
provider: GenkitModelProvider,
model: ModelReference<any>,
Expand Down
5 changes: 5 additions & 0 deletions runner/shared-interfaces.ts
Original file line number Diff line number Diff line change
Expand Up @@ -323,6 +323,11 @@ export interface AiChatMessage {
text: string;
}

/** Interface describing the response of the AI config state report-server endpoint. */
export interface AIConfigState {
  /**
   * Models that can be used for chat because their provider has an API key
   * configured in the serving environment.
   */
  configuredModels: string[];
}

/**
* A summary of build outcomes and code quality scores for an entire assessment run.
*/
Expand Down
Loading