Commit b39edbc

feat: add AI-powered analysis for meeting recordings
Implements automatic extraction of a summary and action items from meeting transcripts using OpenAI GPT-4o-mini. Analysis runs in the renderer process, with zustand state management providing live UI updates.

Key features:
- Generate 3-7 word summary titles from transcripts
- Extract actionable tasks with context
- Display analysis status with live updates in the UI
- Manual retrigger via buttons if analysis fails or is skipped
- Gracefully handle a missing OpenAI API key (skip analysis but keep the UI)
- Store results in the backend via updateDesktopRecordingTranscript

Changes:
- Extended activeRecordingStore with analysis fields and a state machine
- Created aiAnalysisService for LLM integration
- Updated recordingService to trigger analysis when a meeting ends
- Enhanced the RecordingView UI with conditional rendering for analysis states
- Updated PostHogAPIClient to support summary/tasks in transcript updates
1 parent 4076241 commit b39edbc

5 files changed, +420 -15 lines changed

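Note: the "Extended activeRecordingStore with analysis fields and state machine" change is part of this commit, but its diff is not reproduced below. For orientation only, a minimal sketch of what that extension might look like follows; the type names, status strings, and setter names are taken from the diffs on this page, while the overall store shape (a zustand map keyed by recording id), the "idle" status, and the patch helper are assumptions.

import { create } from "zustand";

// Status values used by the diffs below; "idle" is an assumed initial value.
export type AnalysisStatus =
  | "idle"
  | "analyzing_summary"
  | "analyzing_tasks"
  | "completed"
  | "skipped"
  | "error";

export interface ExtractedTask {
  title: string;
  description: string;
}

// Simplified; the real segment type likely carries more fields.
export interface TranscriptSegment {
  text: string;
  is_final: boolean;
}

interface ActiveRecording {
  id: string;
  segments: TranscriptSegment[];
  analysisStatus?: AnalysisStatus;
  summary?: string;
  extractedTasks?: ExtractedTask[];
  analysisError?: string;
}

interface ActiveRecordingState {
  recordings: Record<string, ActiveRecording>;
  getRecording: (id: string) => ActiveRecording | undefined;
  setAnalysisStatus: (id: string, status: AnalysisStatus) => void;
  setSummary: (id: string, summary: string) => void;
  setExtractedTasks: (id: string, tasks: ExtractedTask[]) => void;
  setAnalysisError: (id: string, error: string) => void;
}

export const useActiveRecordingStore = create<ActiveRecordingState>()((set, get) => {
  // Shallow-merge a partial update into one recording (assumes the id exists).
  const patch = (id: string, fields: Partial<ActiveRecording>) =>
    set((state) => ({
      recordings: {
        ...state.recordings,
        [id]: { ...state.recordings[id], ...fields },
      },
    }));

  return {
    recordings: {},
    getRecording: (id) => get().recordings[id],
    setAnalysisStatus: (id, status) => patch(id, { analysisStatus: status }),
    setSummary: (id, summary) => patch(id, { summary }),
    setExtractedTasks: (id, tasks) => patch(id, { extractedTasks: tasks }),
    // Also flips the status, since the UI branches on analysisStatus === "error".
    setAnalysisError: (id, error) =>
      patch(id, { analysisStatus: "error", analysisError: error }),
  };
});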

src/api/posthogClient.ts

Lines changed: 2 additions & 0 deletions

@@ -393,6 +393,8 @@ export class PostHogAPIClient {
         is_final: boolean;
       }>;
       full_text?: string;
+      summary?: string;
+      extracted_tasks?: Array<{ title: string; description: string }>;
     },
   ) {
     this.validateRecordingId(recordingId);
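Both new fields are optional, so existing callers are unaffected. As an illustration only (the client instance, recording id, and values below are placeholders, not part of this commit), an update that carries just the analysis results could look like:

await client.updateDesktopRecordingTranscript(recordingId, {
  summary: "Q3 notetaker roadmap planning",
  extracted_tasks: [
    {
      title: "Add an OpenAI API key field to settings",
      description: "Requested so transcript analysis can run after meetings.",
    },
  ],
});

This mirrors the call made from recordingService.ts further down the page.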

src/renderer/features/notetaker/components/RecordingView.tsx

Lines changed: 159 additions & 12 deletions

@@ -1,6 +1,10 @@
-import { Badge, Box, Card, Flex, Text } from "@radix-ui/themes";
+import { Badge, Box, Button, Card, Flex, Text } from "@radix-ui/themes";
 import type { RecordingItem } from "@renderer/features/notetaker/hooks/useAllRecordings";
-import type { TranscriptSegment } from "@renderer/stores/activeRecordingStore";
+import { analyzeRecording } from "@renderer/services/recordingService";
+import type {
+  AnalysisStatus,
+  TranscriptSegment,
+} from "@renderer/stores/activeRecordingStore";
 import { useEffect, useRef, useState } from "react";
 
 interface RecordingViewProps {
@@ -58,6 +62,42 @@ export function RecordingView({ recordingItem }: RecordingViewProps) {
     ),
   ];
 
+  const analysisStatus: AnalysisStatus | undefined =
+    recordingItem.type === "active"
+      ? recordingItem.recording.analysisStatus
+      : undefined;
+
+  const summary =
+    recordingItem.type === "active"
+      ? recordingItem.recording.summary
+      : recordingItem.recording.transcript?.summary;
+
+  const extractedTasks =
+    recordingItem.type === "active"
+      ? recordingItem.recording.extractedTasks
+      : (recordingItem.recording.transcript?.extracted_tasks as
+          | Array<{
+              title: string;
+              description: string;
+            }>
+          | undefined);
+
+  const analysisError =
+    recordingItem.type === "active"
+      ? recordingItem.recording.analysisError
+      : undefined;
+
+  const [isAnalyzing, setIsAnalyzing] = useState(false);
+
+  const handleAnalyze = async () => {
+    setIsAnalyzing(true);
+    try {
+      await analyzeRecording(recording.id);
+    } finally {
+      setIsAnalyzing(false);
+    }
+  };
+
   return (
     <Box
       p="4"
@@ -107,11 +147,57 @@ export function RecordingView({ recordingItem }: RecordingViewProps) {
             Summary
           </Text>
          <Card>
-            <Flex align="center" justify="center" py="4">
-              <Text size="2" color="gray">
-                Coming soon
-              </Text>
-            </Flex>
+            {summary ? (
+              <Text size="2">{summary}</Text>
+            ) : (
+              <Flex
+                direction="column"
+                align="center"
+                justify="center"
+                py="4"
+                gap="2"
+              >
+                {analysisStatus === "analyzing_summary" ? (
+                  <Text size="2" color="gray">
+                    Analyzing...
+                  </Text>
+                ) : analysisStatus === "error" ? (
+                  <>
+                    <Text size="2" color="red">
+                      {analysisError || "Analysis failed"}
+                    </Text>
+                    <Button
+                      size="1"
+                      onClick={handleAnalyze}
+                      disabled={isAnalyzing}
+                    >
+                      Retry
+                    </Button>
+                  </>
+                ) : analysisStatus === "skipped" ? (
+                  <>
+                    <Text size="2" color="gray">
+                      Configure OpenAI API key to analyze
+                    </Text>
+                    <Button
+                      size="1"
+                      onClick={handleAnalyze}
+                      disabled={isAnalyzing}
+                    >
+                      Analyze now
+                    </Button>
+                  </>
+                ) : (
+                  <Button
+                    size="1"
+                    onClick={handleAnalyze}
+                    disabled={isAnalyzing}
+                  >
+                    Analyze with AI
+                  </Button>
+                )}
+              </Flex>
+            )}
           </Card>
         </Flex>
       )}
@@ -123,11 +209,72 @@ export function RecordingView({ recordingItem }: RecordingViewProps) {
             Action items
          </Text>
          <Card>
-            <Flex align="center" justify="center" py="4">
-              <Text size="2" color="gray">
-                Coming soon
-              </Text>
-            </Flex>
+            {extractedTasks && extractedTasks.length > 0 ? (
+              <Flex direction="column" gap="2">
+                {extractedTasks.map((task) => (
+                  <Box key={task.title}>
+                    <Text size="2" weight="bold">
+                      {task.title}
+                    </Text>
+                    <Text size="1" color="gray">
+                      {task.description}
+                    </Text>
+                  </Box>
+                ))}
+              </Flex>
+            ) : (
+              <Flex
+                direction="column"
+                align="center"
+                justify="center"
+                py="4"
+                gap="2"
+              >
+                {analysisStatus === "analyzing_tasks" ? (
+                  <Text size="2" color="gray">
+                    Extracting tasks...
+                  </Text>
+                ) : analysisStatus === "error" ? (
+                  <>
+                    <Text size="2" color="red">
+                      {analysisError || "Analysis failed"}
+                    </Text>
+                    <Button
+                      size="1"
+                      onClick={handleAnalyze}
+                      disabled={isAnalyzing}
+                    >
+                      Retry
+                    </Button>
+                  </>
+                ) : analysisStatus === "skipped" ? (
+                  <>
+                    <Text size="2" color="gray">
+                      Configure OpenAI API key to analyze
+                    </Text>
+                    <Button
+                      size="1"
+                      onClick={handleAnalyze}
+                      disabled={isAnalyzing}
+                    >
+                      Analyze now
+                    </Button>
+                  </>
+                ) : analysisStatus === "completed" ? (
+                  <Text size="2" color="gray">
+                    No tasks found
+                  </Text>
+                ) : (
+                  <Button
+                    size="1"
+                    onClick={handleAnalyze}
+                    disabled={isAnalyzing}
+                  >
+                    Analyze with AI
+                  </Button>
+                )}
+              </Flex>
+            )}
           </Card>
         </Flex>
       )}
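The recordingItem.type branches above imply that RecordingItem in useAllRecordings is a discriminated union over live and stored recordings. That hook is not touched by this commit, so the shape below is only inferred from the property accesses in this file; in particular, the tag of the non-active variant ("stored") is a guess:

import type {
  AnalysisStatus,
  ExtractedTask,
  TranscriptSegment,
} from "@renderer/stores/activeRecordingStore";

// Inferred sketch, not the actual definition from useAllRecordings.
export type RecordingItem =
  | {
      type: "active";
      recording: {
        id: string;
        segments: TranscriptSegment[];
        analysisStatus?: AnalysisStatus;
        summary?: string;
        extractedTasks?: ExtractedTask[];
        analysisError?: string;
      };
    }
  | {
      type: "stored"; // hypothetical tag for recordings loaded from the backend
      recording: {
        id: string;
        transcript?: {
          summary?: string;
          extracted_tasks?: Array<{ title: string; description: string }>;
        };
      };
    };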
src/renderer/services/aiAnalysisService.ts (new file; path inferred from the @renderer/services/aiAnalysisService import in recordingService.ts below)

Lines changed: 91 additions & 0 deletions

@@ -0,0 +1,91 @@
+import { createOpenAI } from "@ai-sdk/openai";
+import type { ExtractedTask } from "@renderer/stores/activeRecordingStore";
+import { generateObject } from "ai";
+import { z } from "zod";
+
+const SUMMARY_PROMPT = `Create a very brief (3-7 words) title that summarizes what this conversation is about.
+
+Transcript:`;
+
+const TASK_EXTRACTION_PROMPT = `Analyze the following conversation transcript and extract any actionable tasks, feature requests, bug fixes, or work items that were discussed or requested. This includes:
+- Explicit action items ("we need to...", "let's build...")
+- Feature requests ("I want...", "please build...")
+- Bug reports ("this is broken...", "fix the...")
+- Requirements ("it should have...", "make it...")
+
+For each task, provide a clear title and a description with relevant context from the conversation.
+
+If there are no actionable tasks, return an empty tasks array.
+
+Transcript:`;
+
+export async function generateTranscriptSummary(
+  transcriptText: string,
+  openaiApiKey: string,
+): Promise<string | null> {
+  try {
+    const openai = createOpenAI({ apiKey: openaiApiKey });
+
+    const { object } = await generateObject({
+      model: openai("gpt-4o-mini"),
+      schema: z.object({
+        title: z.string().describe("A brief 3-7 word summary title"),
+      }),
+      messages: [
+        {
+          role: "system",
+          content:
+            "You are a helpful assistant that creates concise titles for conversation transcripts. The title should be 3-7 words and capture the main topic.",
+        },
+        {
+          role: "user",
+          content: `${SUMMARY_PROMPT}\n${transcriptText}`,
+        },
+      ],
+    });
+
+    return object.title || null;
+  } catch (error) {
+    console.error("[AI Analysis] Failed to generate summary:", error);
+    throw error;
+  }
+}
+
+export async function extractTasksFromTranscript(
+  transcriptText: string,
+  openaiApiKey: string,
+): Promise<ExtractedTask[]> {
+  try {
+    const openai = createOpenAI({ apiKey: openaiApiKey });
+
+    const schema = z.object({
+      tasks: z.array(
+        z.object({
+          title: z.string().describe("Brief task title"),
+          description: z.string().describe("Detailed description with context"),
+        }),
+      ),
+    });
+
+    const { object } = await generateObject({
+      model: openai("gpt-4o-mini"),
+      schema,
+      messages: [
+        {
+          role: "system",
+          content:
+            "You are a helpful assistant that extracts actionable tasks from conversation transcripts. Be generous in identifying work items - include feature requests, requirements, and any work that needs to be done.",
+        },
+        {
+          role: "user",
+          content: `${TASK_EXTRACTION_PROMPT}\n${transcriptText}`,
+        },
+      ],
+    });
+
+    return object.tasks || [];
+  } catch (error) {
+    console.error("[AI Analysis] Failed to extract tasks:", error);
+    throw error;
+  }
+}
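For orientation, a minimal usage sketch of the two exports (the wrapper function and its arguments are illustrative, not part of the commit; both helpers rethrow LLM errors, so callers decide how to surface them):

import {
  extractTasksFromTranscript,
  generateTranscriptSummary,
} from "@renderer/services/aiAnalysisService";

async function analyzeTranscript(transcriptText: string, openaiApiKey: string) {
  // Both calls use gpt-4o-mini via generateObject and throw on failure.
  const summary = await generateTranscriptSummary(transcriptText, openaiApiKey);
  const tasks = await extractTasksFromTranscript(transcriptText, openaiApiKey);

  console.log("Summary:", summary ?? "(none)");
  for (const task of tasks) {
    console.log(`- ${task.title}: ${task.description}`);
  }
}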

src/renderer/services/recordingService.ts

Lines changed: 77 additions & 0 deletions

@@ -1,3 +1,7 @@
+import {
+  extractTasksFromTranscript,
+  generateTranscriptSummary,
+} from "@renderer/services/aiAnalysisService";
 import { useActiveRecordingStore } from "@renderer/stores/activeRecordingStore";
 import { useAuthStore } from "@/renderer/features/auth/stores/authStore";
 
@@ -129,6 +133,8 @@ export function initializeRecordingService() {
         );
       }
     }
+
+    analyzeRecording(data.posthog_recording_id);
   }
 
   store.updateStatus(data.posthog_recording_id, "uploading");
@@ -211,6 +217,77 @@ async function uploadPendingSegments(recordingId: string): Promise<void> {
   }
 }
 
+export async function analyzeRecording(recordingId: string): Promise<void> {
+  const store = useActiveRecordingStore.getState();
+  const authStore = useAuthStore.getState();
+
+  const recording = store.getRecording(recordingId);
+  if (!recording) {
+    console.warn(`[AI Analysis] Recording ${recordingId} not found`);
+    return;
+  }
+
+  const openaiApiKey = authStore.openaiApiKey;
+  if (!openaiApiKey) {
+    console.log("[AI Analysis] No OpenAI API key, skipping analysis");
+    store.setAnalysisStatus(recordingId, "skipped");
+    return;
+  }
+
+  if (recording.segments.length === 0) {
+    console.log("[AI Analysis] No transcript segments, skipping analysis");
+    store.setAnalysisStatus(recordingId, "skipped");
+    return;
+  }
+
+  const fullTranscript = recording.segments.map((s) => s.text).join(" ");
+
+  try {
+    store.setAnalysisStatus(recordingId, "analyzing_summary");
+    console.log("[AI Analysis] Generating summary...");
+
+    const summary = await generateTranscriptSummary(
+      fullTranscript,
+      openaiApiKey,
+    );
+    if (summary) {
+      store.setSummary(recordingId, summary);
+    }
+
+    store.setAnalysisStatus(recordingId, "analyzing_tasks");
+    console.log("[AI Analysis] Extracting tasks...");
+
+    const tasks = await extractTasksFromTranscript(
+      fullTranscript,
+      openaiApiKey,
+    );
+    store.setExtractedTasks(recordingId, tasks);
+
+    store.setAnalysisStatus(recordingId, "completed");
+    console.log(
+      `[AI Analysis] Complete - summary: "${summary}", tasks: ${tasks.length}`,
+    );
+
+    const client = authStore.client;
+    if (client && (summary || tasks.length > 0)) {
+      try {
+        await client.updateDesktopRecordingTranscript(recordingId, {
+          summary: summary || undefined,
+          extracted_tasks: tasks.length > 0 ? tasks : undefined,
+        });
+        console.log("[AI Analysis] Updated backend with analysis results");
+      } catch (error) {
+        console.error("[AI Analysis] Failed to update backend:", error);
+      }
+    }
+  } catch (error) {
+    const errorMessage =
+      error instanceof Error ? error.message : "Analysis failed";
+    console.error("[AI Analysis] Error:", errorMessage);
+    store.setAnalysisError(recordingId, errorMessage);
+  }
+}
+
 /**
  * Handle crash recovery - upload any pending segments and clear from IDB
  *
