
Commit d6769d1

More progress
1 parent 16bbcba commit d6769d1

14 files changed: +523 −194 lines changed

package-lock.json

Lines changed: 52 additions & 0 deletions

package.json

Lines changed: 1 addition & 0 deletions
@@ -311,6 +311,7 @@
 		"@mendable/firecrawl-js": "^1.16.0",
 		"@mistralai/mistralai": "^1.3.6",
 		"@modelcontextprotocol/sdk": "^1.0.1",
+		"@openrouter/ai-sdk-provider": "^0.2.1",
 		"@types/clone-deep": "^4.0.4",
 		"@types/pdf-parse": "^1.1.4",
 		"@types/tmp": "^0.2.6",

src/core/webview/ClineProvider.ts

Lines changed: 24 additions & 12 deletions
@@ -66,6 +66,7 @@ type SecretKey =
 	| "mistralApiKey"
 	| "unboundApiKey"
 	| "requestyApiKey"
+	| "firecrawlApiKey"
 
 type GlobalStateKey =
 	| "apiProvider"
@@ -1505,26 +1506,31 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			case "research.task": {
 				const result = researchTaskPayloadSchema.safeParse(message.payload)
 
-				if (result.success && !this.deepResearchService) {
-					const { modelId, breadth, depth, query, firecrawlApiKey, openaiApiKey } =
-						result.data.session
-					this.deepResearchService = new DeepResearchService(
-						this,
-						modelId,
-						breadth,
-						depth,
-						2,
-						firecrawlApiKey,
-						openaiApiKey,
+				if (!result.success) {
+					console.warn(
+						`[ClineProvider#research.task] Invalid payload: ${JSON.stringify(message.payload)}`,
 					)
-					this.deepResearchService.append(query)
+					break
+				}
+
+				if (result.success && !this.deepResearchService) {
+					const { session } = result.data
+					this.deepResearchService = new DeepResearchService(session, this)
+					this.deepResearchService.append(session.query)
 				}
 
 				break
 			}
 			case "research.input": {
 				const result = researchInputPayloadSchema.safeParse(message.payload)
 
+				if (!result.success) {
+					console.warn(
+						`[ClineProvider#research.input] Invalid payload: ${JSON.stringify(message.payload)}`,
+					)
+					break
+				}
+
 				if (result.success && this.deepResearchService) {
 					const { content } = result.data.message
 					this.deepResearchService.append(content)
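
Note on the new handler shape: both cases now validate the incoming payload with zod's safeParse and bail out early on malformed input instead of silently doing nothing. A minimal standalone sketch of the same pattern follows; the schema below is illustrative only, since the project's actual researchTaskPayloadSchema is defined in its shared code and may differ.

import { z } from "zod"

// Illustrative schema only; the real researchTaskPayloadSchema lives in the extension's shared code.
const researchTaskPayloadSchema = z.object({
	session: z.object({
		providerId: z.string(),
		providerApiKey: z.string(),
		firecrawlApiKey: z.string(),
		modelId: z.string(),
		breadth: z.number(),
		depth: z.number(),
		query: z.string(),
	}),
})

function handleResearchTask(payload: unknown) {
	const result = researchTaskPayloadSchema.safeParse(payload)

	if (!result.success) {
		// Malformed payloads are logged and dropped rather than thrown.
		console.warn(`Invalid payload: ${JSON.stringify(payload)}`)
		return
	}

	// Once success is true, result.data is fully typed from the schema.
	const { session } = result.data
	console.log(`Starting research for: ${session.query}`)
}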
@@ -1685,6 +1691,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 				requestyModelId,
 				requestyModelInfo,
 				modelTemperature,
+				firecrawlApiKey,
 			} = apiConfiguration
 			await this.updateGlobalState("apiProvider", apiProvider)
 			await this.updateGlobalState("apiModelId", apiModelId)
@@ -1730,6 +1737,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			await this.updateGlobalState("requestyModelId", requestyModelId)
 			await this.updateGlobalState("requestyModelInfo", requestyModelInfo)
 			await this.updateGlobalState("modelTemperature", modelTemperature)
+			await this.storeSecret("firecrawlApiKey", firecrawlApiKey)
 			if (this.cline) {
 				this.cline.api = buildApiHandler(apiConfiguration)
 			}
@@ -2609,6 +2617,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			requestyModelId,
 			requestyModelInfo,
 			modelTemperature,
+			firecrawlApiKey,
 		] = await Promise.all([
 			this.getGlobalState("apiProvider") as Promise<ApiProvider | undefined>,
 			this.getGlobalState("apiModelId") as Promise<string | undefined>,
@@ -2689,6 +2698,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			this.getGlobalState("requestyModelId") as Promise<string | undefined>,
 			this.getGlobalState("requestyModelInfo") as Promise<ModelInfo | undefined>,
 			this.getGlobalState("modelTemperature") as Promise<number | undefined>,
+			this.getSecret("firecrawlApiKey") as Promise<string | undefined>,
 		])
 
 		let apiProvider: ApiProvider
@@ -2751,6 +2761,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 				requestyModelId,
 				requestyModelInfo,
 				modelTemperature,
+				firecrawlApiKey,
 			},
 			lastShownAnnouncementId,
 			customInstructions,
@@ -2905,6 +2916,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			"mistralApiKey",
 			"unboundApiKey",
 			"requestyApiKey",
+			"firecrawlApiKey",
 		]
 		for (const key of secretKeys) {
 			await this.storeSecret(key, undefined)
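
Taken together, these hunks thread firecrawlApiKey through the same secret path as the other provider keys: added to the SecretKey union, persisted with storeSecret, read back with getSecret, and cleared on reset. Assuming storeSecret and getSecret are the usual thin wrappers over VS Code's SecretStorage (their implementations are not part of this diff), the round trip looks roughly like this sketch:

import * as vscode from "vscode"

// Sketch of the assumed wrappers; the actual ClineProvider methods are outside this diff.
async function storeSecret(context: vscode.ExtensionContext, key: string, value?: string) {
	// Passing undefined (as resetState does for each key) clears the stored value.
	if (value) {
		await context.secrets.store(key, value)
	} else {
		await context.secrets.delete(key)
	}
}

async function getSecret(context: vscode.ExtensionContext, key: string): Promise<string | undefined> {
	return context.secrets.get(key)
}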

src/services/deep-research/DeepResearchService.ts

Lines changed: 90 additions & 66 deletions
@@ -1,10 +1,12 @@
 import { createOpenAI } from "@ai-sdk/openai"
+import { createOpenRouter } from "@openrouter/ai-sdk-provider"
 import FirecrawlApp, { SearchResponse } from "@mendable/firecrawl-js"
 import { generateObject, LanguageModel, Message, streamText } from "ai"
 import { z } from "zod"
 import pLimit from "p-limit"
 
 import { ExtensionMessage } from "../../shared/ExtensionMessage"
+import { ResearchTaskPayload } from "../../shared/WebviewMessage"
 import { ClineProvider } from "../../core/webview/ClineProvider"
 
 import {
@@ -20,6 +22,14 @@ import {
 import { truncatePrompt, trimPrompt } from "./utils/prompt"
 
 export class DeepResearchService {
+	public readonly providerId: string
+	public readonly providerApiKey: string
+	public readonly firecrawlApiKey: string
+	public readonly modelId: string
+	public readonly breadth: number
+	public readonly depth: number
+	public readonly concurrency: number
+
 	private providerRef: WeakRef<ClineProvider>
 	private firecrawl: FirecrawlApp
 	private model: LanguageModel
@@ -30,22 +40,28 @@ export class DeepResearchService {
 	private messages: Message[] = []
 
 	constructor(
+		{ providerId, providerApiKey, firecrawlApiKey, modelId, breadth, depth }: ResearchTaskPayload["session"],
 		clineProvider: ClineProvider,
-		public readonly modelId: string,
-		public readonly breadth: number,
-		public readonly depth: number,
-		public readonly concurrency: number,
-		public readonly firecrawlApiKey: string,
-		public readonly openaiApiKey: string,
 	) {
+		this.providerId = providerId
+		this.providerApiKey = providerApiKey
+		this.firecrawlApiKey = firecrawlApiKey
+		this.modelId = modelId
+		this.breadth = breadth
+		this.depth = depth
+		this.concurrency = 2
+
 		this.providerRef = new WeakRef(clineProvider)
 
 		this.firecrawl = new FirecrawlApp({ apiKey: firecrawlApiKey })
 
-		this.model = createOpenAI({ apiKey: openaiApiKey })(modelId, {
-			// reasoningEffort: "medium",
-			structuredOutputs: true,
-		})
+		if (providerId === "openai-native") {
+			const openai = createOpenAI({ apiKey: providerApiKey })
+			this.model = openai(modelId, { structuredOutputs: true })
+		} else {
+			const openrouter = createOpenRouter({ apiKey: providerApiKey })
+			this.model = openrouter(modelId)
+		}
 	}
 
 	/**
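
The constructor now receives the whole session payload and selects an SDK from providerId: "openai-native" keeps the @ai-sdk/openai path with structured outputs, and any other provider id is routed through @openrouter/ai-sdk-provider. A standalone restatement of that branch, under the assumption that both providers only need an API key and a model id here:

import { createOpenAI } from "@ai-sdk/openai"
import { createOpenRouter } from "@openrouter/ai-sdk-provider"
import type { LanguageModel } from "ai"

// Restates the constructor's branch: "openai-native" keeps the OpenAI SDK
// (with structured outputs enabled); any other provider id resolves through OpenRouter.
function resolveModel(providerId: string, providerApiKey: string, modelId: string): LanguageModel {
	if (providerId === "openai-native") {
		const openai = createOpenAI({ apiKey: providerApiKey })
		return openai(modelId, { structuredOutputs: true })
	}
	const openrouter = createOpenRouter({ apiKey: providerApiKey })
	return openrouter(modelId)
}

Either branch yields an ai-SDK LanguageModel, so the rest of the service can keep calling generateObject and streamText without caring which provider produced it.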
@@ -72,34 +88,34 @@ export class DeepResearchService {
 	}
 
 	private chatSystemPrompt() {
-		// return trimPrompt(`
-		// You are an expert research assistant helping to explain and clarify research findings. Follow these guidelines:
-
-		// - You always answer the with markdown formatting. You will be penalized if you do not answer with markdown when it would be possible.
-		// - The markdown formatting you support: headings, bold, italic, links, tables, lists, code blocks, and blockquotes.
-		// - You do not support images and never include images. You will be penalized if you render images.
-		// - You also support Mermaid formatting. You will be penalized if you do not render Mermaid diagrams when it would be possible.
-		// - The Mermaid diagrams you support: sequenceDiagram, flowChart, classDiagram, stateDiagram, erDiagram, gantt, journey, gitGraph, pie.
-		// - Reference specific findings from the research when answering.
-		// - Be precise and detailed in explanations.
-		// - If asked about something outside the research scope, acknowledge this and stick to what was actually researched.
-		// - Feel free to make connections between different parts of the research.
-		// - When speculating or making inferences beyond the direct research, clearly label these as such.
-		// - If asked about sources, refer to the URLs provided in the research.
-		// - Maintain a professional, analytical tone.
-		// - Never include images in responses.
-		// `)
-
 		return trimPrompt(`
-		You are a general answering assistant that can comply with any request.
+		You are an expert research assistant helping to explain and clarify research findings. Follow these guidelines:
+
+		- You always answer the with markdown formatting. You will be penalized if you do not answer with markdown when it would be possible.
+		- The markdown formatting you support: headings, bold, italic, links, tables, lists, code blocks, and blockquotes.
+		- You do not support images and never include images. You will be penalized if you render images.
+		- You also support Mermaid formatting. You will be penalized if you do not render Mermaid diagrams when it would be possible.
+		- The Mermaid diagrams you support: sequenceDiagram, flowChart, classDiagram, stateDiagram, erDiagram, gantt, journey, gitGraph, pie.
+		- Reference specific findings from the research when answering.
+		- Be precise and detailed in explanations.
+		- If asked about something outside the research scope, acknowledge this and stick to what was actually researched.
+		- Feel free to make connections between different parts of the research.
+		- When speculating or making inferences beyond the direct research, clearly label these as such.
+		- If asked about sources, refer to the URLs provided in the research.
+		- Maintain a professional, analytical tone.
+		- Never include images in responses.
+		`)
+
+		// return trimPrompt(`
+		// You are a general answering assistant that can comply with any request.
 
-		You always answer the with markdown formatting. You will be penalized if you do not answer with markdown when it would be possible.
-		The markdown formatting you support: headings, bold, italic, links, tables, lists, code blocks, and blockquotes.
-		You do not support images and never include images. You will be penalized if you render images.
+		// You always answer the with markdown formatting. You will be penalized if you do not answer with markdown when it would be possible.
+		// The markdown formatting you support: headings, bold, italic, links, tables, lists, code blocks, and blockquotes.
+		// You do not support images and never include images. You will be penalized if you render images.
 
-		You also support Mermaid formatting. You will be penalized if you do not render Mermaid diagrams when it would be possible.
-		The Mermaid diagrams you support: sequenceDiagram, flowChart, classDiagram, stateDiagram, erDiagram, gantt, journey, gitGraph, pie.
-		`)
+		// You also support Mermaid formatting. You will be penalized if you do not render Mermaid diagrams when it would be possible.
+		// The Mermaid diagrams you support: sequenceDiagram, flowChart, classDiagram, stateDiagram, erDiagram, gantt, journey, gitGraph, pie.
+		// `)
 	}
 
 	/**
@@ -476,13 +492,49 @@ export class DeepResearchService {
 
 		this.inquiry.query = query
 
+		const onProgressUpdated = () => {
+			const { expectedQueries, completedQueries } = this.progress
+			this.progress.progressPercentage = Math.round((completedQueries / expectedQueries) * 100)
+			this.postMessage({ type: "research.progress", text: JSON.stringify(this.progress) })
+		}
+
+		const onGeneratedQueries = (queries: ResearchQuery[]) =>
+			this.postMessage({
+				type: "research.output",
+				text: JSON.stringify({
+					content: `Generated ${queries.length} topics to research.\n\n${queries.map(({ query }) => `- ${query}`).join("\n")}`,
+					annotations: [
+						{
+							type: "badge",
+							data: { label: "Idea", variant: "outline" },
+						},
+					],
+				}),
+			})
+
+		const onExtractedLearnings = (learnings: ResearchLearnings & { urls: string[] }) =>
+			this.postMessage({
+				type: "research.output",
+				text: JSON.stringify({
+					content: `Extracted ${learnings.learnings.length} learnings from ${learnings.urls.length} sources.\n\n${learnings.urls.map((url) => `- ${url}`).join("\n")}`,
+					annotations: [
+						{
+							type: "badge",
+							data: { label: "Learning", variant: "outline" },
+						},
+					],
+				}),
+			})
+
 		// Calculate total expected queries across all depth levels.
 		// At each level, the breadth is halved, so level 1 has full breadth,
 		// level 2 has breadth/2, level 3 has breadth/4, etc.
 		for (let i = this.depth; i > 0; i--) {
 			this.progress.expectedQueries += Math.ceil(this.breadth / Math.pow(2, this.depth - i))
 		}
 
+		onProgressUpdated()
+
 		const { learnings, visitedUrls } = await this.withLoading(
 			() =>
 				this.deepResearch({
@@ -491,37 +543,9 @@ export class DeepResearchService {
 					depth: this.depth,
 					learnings: [],
 					visitedUrls: [],
-					onProgressUpdated: () => {
-						const { expectedQueries, completedQueries } = this.progress
-						this.progress.progressPercentage = Math.round((completedQueries / expectedQueries) * 100)
-						this.postMessage({ type: "research.progress", text: JSON.stringify(this.progress) })
-					},
-					onGeneratedQueries: (queries) =>
-						this.postMessage({
-							type: "research.output",
-							text: JSON.stringify({
-								content: `Generated ${queries.length} topics to research.\n\n${queries.map(({ query }) => `- ${query}`).join("\n")}`,
-								annotations: [
-									{
-										type: "badge",
-										data: { label: "Idea", variant: "outline" },
-									},
-								],
-							}),
-						}),
-					onExtractedLearnings: (learnings) =>
-						this.postMessage({
-							type: "research.output",
-							text: JSON.stringify({
-								content: `Extracted ${learnings.learnings.length} learnings from ${learnings.urls.length} sources.\n\n${learnings.urls.map((url) => `- ${url}`).join("\n")}`,
-								annotations: [
-									{
-										type: "badge",
-										data: { label: "Learning", variant: "outline" },
-									},
-								],
-							}),
-						}),
+					onProgressUpdated,
+					onGeneratedQueries,
+					onExtractedLearnings,
 				}),
 			"Researching...",
 		)
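
For reference, the expectedQueries total that onProgressUpdated reports against halves the breadth at each additional depth level, as the unchanged comment above the loop describes. A quick standalone check of the arithmetic, with arbitrary example numbers:

// Same loop as in the diff, extracted so the arithmetic is easy to verify by hand.
function expectedQueries(breadth: number, depth: number): number {
	let total = 0
	for (let i = depth; i > 0; i--) {
		total += Math.ceil(breadth / Math.pow(2, depth - i))
	}
	return total
}

console.log(expectedQueries(4, 2)) // 4 + 2 = 6
console.log(expectedQueries(4, 3)) // 4 + 2 + 1 = 7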
