Skip to content

Commit e6d1639

Browse files
committed
More progress
1 parent 8d6f475 commit e6d1639

26 files changed

+1265
-580
lines changed

src/services/deep-research/DeepResearchService.ts

Lines changed: 365 additions & 159 deletions
Large diffs are not rendered by default.
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
import { z } from "zod"
2+
3+
export interface ResearchInquiry {
4+
initialQuery?: string
5+
followUps: string[]
6+
responses: string[]
7+
query?: string
8+
learnings?: string[]
9+
urls?: string[]
10+
report?: string
11+
}
12+
13+
export type ResearchStep = {
14+
query: string
15+
breadth: number
16+
depth: number
17+
learnings?: string[]
18+
visitedUrls?: string[]
19+
onProgress: (progress: ResearchProgress) => void
20+
onNewLearnings: (learnings: ResearchLearnings & { urls: string[] }) => void
21+
}
22+
23+
export type ResearchProgress = {
24+
currentDepth: number
25+
totalDepth: number
26+
currentBreadth: number
27+
totalBreadth: number
28+
currentQuery?: string
29+
totalQueries: number
30+
completedQueries: number
31+
progressPercentage: number
32+
}
33+
34+
export type ResearchResult = {
35+
learnings: string[]
36+
visitedUrls: string[]
37+
}
38+
39+
export const researchLearningsSchema = z.object({
40+
learnings: z.array(z.string()),
41+
followUpQuestions: z.array(z.string()),
42+
})
43+
44+
export type ResearchLearnings = z.infer<typeof researchLearningsSchema>

src/services/deep-research/utils/prompt.ts

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ const MIN_CHUNK_SIZE = 140
77
const encoder = getEncoding("o200k_base")
88

99
// Trim prompt to maximum context size.
10-
export function trimPrompt(prompt: string, contextSize = Number(process.env.CONTEXT_SIZE) || 128_000) {
10+
export function truncatePrompt(prompt: string, contextSize = Number(process.env.CONTEXT_SIZE) || 128_000) {
1111
if (!prompt) {
1212
return ""
1313
}
@@ -33,15 +33,22 @@ export function trimPrompt(prompt: string, contextSize = Number(process.env.CONT
3333
chunkOverlap: 0,
3434
})
3535

36-
const trimmedPrompt = splitter.splitText(prompt)[0] ?? ""
36+
const truncated = splitter.splitText(prompt)[0] ?? ""
3737

3838
// Last catch, there's a chance that the trimmed prompt is same length as
3939
// the original prompt, due to how tokens are split & the inner workings of the
4040
// splitter, handle this case by just doing a hard cut.
41-
if (trimmedPrompt.length === prompt.length) {
42-
return trimPrompt(prompt.slice(0, chunkSize), contextSize)
41+
if (truncated.length === prompt.length) {
42+
return truncatePrompt(prompt.slice(0, chunkSize), contextSize)
4343
}
4444

4545
// Recursively trim until the prompt is within the context size.
46-
return trimPrompt(trimmedPrompt, contextSize)
46+
return truncatePrompt(truncated, contextSize)
47+
}
48+
49+
export function trimPrompt(prompt: string): string {
50+
return prompt
51+
.split("\n")
52+
.map((line) => line.trim())
53+
.join("\n")
4754
}

src/services/deep-research/utils/report.ts

Lines changed: 0 additions & 36 deletions
This file was deleted.

src/services/deep-research/utils/serp.ts

Lines changed: 0 additions & 89 deletions
This file was deleted.

src/shared/ExtensionMessage.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,9 @@ export interface ExtensionMessage {
4848
| "currentCheckpointUpdated"
4949
| "research.loading"
5050
| "research.output"
51-
| "research.question"
51+
| "research.followUp"
5252
| "research.progress"
53+
| "research.learnings"
5354
| "research.result"
5455
| "research.error"
5556
text?: string

0 commit comments

Comments
 (0)