Skip to content

Commit b5a59f2

Browse files
committed
frontend/jupyter/llm: 1-shot the cell generator
1 parent 010fe33 commit b5a59f2

File tree

3 files changed

+92
-49
lines changed

3 files changed

+92
-49
lines changed

src/packages/frontend/frame-editors/llm/llm-query-dropdown.tsx

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ import { LLM_PROVIDER } from "@cocalc/util/db-schema/llm-utils";
1111
import { LLMTools } from "@cocalc/jupyter/types";
1212

1313
interface Props {
14-
llmTools?: LLMTools;
14+
llmTools?: Pick<LLMTools, "model" | "setModel">;
1515
task?: string;
1616
onClick: () => void;
1717
loading?: boolean;
@@ -87,7 +87,10 @@ export function LLMQueryDropdownButton({
8787
trigger={["click"]}
8888
icon={<Icon name="caret-down" />}
8989
onClick={onClick}
90-
menu={{ items: getItems() }}
90+
menu={{
91+
items: getItems(),
92+
style: { maxHeight: "50vh", overflow: "auto" },
93+
}}
9194
loading={loading}
9295
disabled={disabled}
9396
>

src/packages/frontend/jupyter/insert-cell/ai-cell-generator.tsx

Lines changed: 77 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import {
2222
useAsyncEffect,
2323
useFrameContext,
2424
} from "@cocalc/frontend/app-framework";
25+
import type { Message } from "@cocalc/frontend/client/types";
2526
import {
2627
LLMNameLink,
2728
Paragraph,
@@ -88,7 +89,7 @@ const EXAMPLES: readonly (readonly [string, readonly string[]])[] = [
8889
],
8990
["Perform a principal component analysis (PCA) on the dataset.", ["PCA"]],
9091
[
91-
"Conduct a time series analysis on the dataset and extapolate.",
92+
"Conduct a time series analysis on the dataset and extrapolate.",
9293
["time series"],
9394
],
9495
["Create an interactive slider for the function.", ["interactive"]],
@@ -140,7 +141,7 @@ export function AIGenerateCodeCell({
140141

141142
const prevCodeContents = getPrevCodeContents();
142143

143-
const { input } = getInput({
144+
const inputPrompt = getInput({
144145
frameActions,
145146
prompt,
146147
lang,
@@ -150,8 +151,10 @@ export function AIGenerateCodeCell({
150151
prevCodeContents,
151152
});
152153

154+
const { input } = inputPrompt;
155+
153156
useEffect(() => {
154-
if (tokens > 0 && input == "") setTokens(0);
157+
if (tokens > 0 && inputPrompt.input == "") setTokens(0);
155158
}, [input]);
156159

157160
useAsyncEffect(
@@ -164,7 +167,15 @@ export function AIGenerateCodeCell({
164167
"@cocalc/frontend/misc/llm"
165168
);
166169

167-
const tokens = numTokensUpperBound(prompt, getMaxTokens(model));
170+
const { history, system } = inputPrompt;
171+
172+
const all = [
173+
input,
174+
history.map(({ content }) => content).join(" "),
175+
system,
176+
].join(" ");
177+
178+
const tokens = numTokensUpperBound(all, getMaxTokens(model));
168179

169180
setTokens(tokens);
170181
},
@@ -265,7 +276,7 @@ export function AIGenerateCodeCell({
265276
}) {
266277
if (!prompt.trim()) return;
267278

268-
const { input, system } = getInput({
279+
const { input, history, system } = getInput({
269280
lang,
270281
kernel_name,
271282
frameActions,
@@ -292,9 +303,10 @@ export function AIGenerateCodeCell({
292303

293304
const stream = await webapp_client.openai_client.queryStream({
294305
input,
306+
history,
307+
system,
295308
project_id,
296309
path,
297-
system,
298310
tag,
299311
model,
300312
});
@@ -395,7 +407,10 @@ export function AIGenerateCodeCell({
395407
});
396408
return (
397409
<Paragraph>
398-
<Dropdown menu={{ items }} trigger={["click"]}>
410+
<Dropdown
411+
menu={{ items, style: { maxHeight: "50vh", overflow: "auto" } }}
412+
trigger={["click"]}
413+
>
399414
<Button style={{ width: "100%" }}>
400415
<Space>
401416
<Icon name="magic" />
@@ -566,6 +581,41 @@ export function AIGenerateCodeCell({
566581
);
567582
}
568583

584+
function renderPromptPreview() {
585+
if (!input?.trim()) return;
586+
587+
const { history, system } = inputPrompt;
588+
const ex = history.map(({ content }) => content).join("\n\n");
589+
const raw = [input, "Example:", ex, "System:", system].join("\n\n");
590+
591+
return (
592+
<>
593+
<Divider />
594+
<Paragraph type="secondary">
595+
A prompt to generate one or more cells based on your description and
596+
context will be sent to the <LLMNameLink model={model} /> language
597+
model.
598+
</Paragraph>
599+
<Collapse
600+
items={[
601+
{
602+
key: "1",
603+
label: (
604+
<>Click to see what will be sent to {modelToName(model)}.</>
605+
),
606+
children: (
607+
<RawPrompt
608+
input={raw}
609+
style={{ border: "none", padding: "0", margin: "0" }}
610+
/>
611+
),
612+
},
613+
]}
614+
/>
615+
</>
616+
);
617+
}
618+
569619
function renderContentDialog() {
570620
const empty = prompt.trim() == "";
571621
return (
@@ -593,32 +643,7 @@ export function AIGenerateCodeCell({
593643
</Paragraph>
594644
{renderExamples()}
595645
{empty ? undefined : renderContext()}
596-
{input?.trim() ? (
597-
<>
598-
<Divider />
599-
<Paragraph type="secondary">
600-
A prompt to generate one or more cells based on your description
601-
and context will be sent to the <LLMNameLink model={model} />{" "}
602-
language model.
603-
</Paragraph>
604-
<Collapse
605-
items={[
606-
{
607-
key: "1",
608-
label: (
609-
<>Click to see what will be sent to {modelToName(model)}.</>
610-
),
611-
children: (
612-
<RawPrompt
613-
input={input}
614-
style={{ border: "none", padding: "0", margin: "0" }}
615-
/>
616-
),
617-
},
618-
]}
619-
/>
620-
</>
621-
) : undefined}
646+
{renderPromptPreview()}
622647
<Paragraph style={{ textAlign: "center", marginTop: "30px" }}>
623648
<Space size="large">
624649
<Button onClick={() => setShowAICellGen(null)}>Cancel</Button>
@@ -694,6 +719,10 @@ interface GetInputProps {
694719
kernel_name: string;
695720
}
696721

722+
function getInputPrompt(prompt: string): string {
723+
return `The new cell should do the following:\n\n${prompt}`;
724+
}
725+
697726
function getInput({
698727
frameActions,
699728
prompt,
@@ -703,23 +732,33 @@ function getInput({
703732
}: GetInputProps): {
704733
input: string;
705734
system: string;
735+
history: Message[];
706736
} {
707737
if (!prompt?.trim()) {
708-
return { input: "", system: "" };
738+
return { input: "", system: "", history: [] };
709739
}
710740
if (frameActions.current == null) {
711741
console.warn(
712742
"Unable to create cell due to frameActions not being defined.",
713743
);
714-
return { input: "", system: "" };
744+
return { input: "", system: "", history: [] };
715745
}
716746
const prevCode = prevCodeContents
717-
? `\n\nThe context after which to insert the cells is:\n\n<context>\n${prevCodeContents}\n\</context>`
747+
? `The context after which to insert the cells is:\n\n<context>\n${prevCodeContents}\n\</context>\n\n`
718748
: "";
719749

750+
const history: Message[] = [
751+
{ role: "user", content: getInputPrompt("Show the value of foo.") },
752+
{
753+
role: "assistant",
754+
content: `This is the value of foo:\n\n\`\`\`${lang}\nprint(foo)\n\`\`\``,
755+
},
756+
];
757+
720758
return {
721-
input: `Create a new code cell for a Jupyter Notebook.\n\nKernel: "${kernel_name}".\n\nProgramming language: "${lang}".\n\The entire code cell must be in a single code block. Enclose this block in triple backticks. Do not say what the output will be. Add comments as code comments. ${prevCode}\n\nThe new cell should do the following:\n\n${prompt}`,
722-
system: `Return a single code block in the language "${lang}". Be brief.`,
759+
input: `${prevCode}${getInputPrompt(prompt)}`,
760+
history,
761+
system: `Create one or more code cells in a Jupyter Notebook.\n\nKernel: "${kernel_name}".\n\nProgramming language: "${lang}".\n\nEach code cell must be wrapped in triple backticks. Do not say what the output will be. Be brief.`,
723762
};
724763
}
725764

src/packages/frontend/project/page/home-page/ai-generate-document.tsx

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ import { Actions as CodeEditorActions } from "@cocalc/frontend/frame-editors/cod
5252
import { AI_GEN_TEXT } from "@cocalc/frontend/frame-editors/frame-tree/commands/const";
5353
import { JupyterEditorActions } from "@cocalc/frontend/frame-editors/jupyter-editor/actions";
5454
import { Actions as LatexActions } from "@cocalc/frontend/frame-editors/latex-editor/actions";
55+
import { LLMQueryDropdownButton } from "@cocalc/frontend/frame-editors/llm/llm-query-dropdown";
5556
import LLMSelector, {
5657
modelToName,
5758
} from "@cocalc/frontend/frame-editors/llm/llm-selector";
@@ -780,15 +781,15 @@ function AIGenerateDocument({
780781
{renderPromptPreview()}
781782
{
782783
<Paragraph style={{ textAlign: "center", marginTop: "15px" }}>
783-
<Button
784-
type="primary"
785-
size="large"
786-
onClick={generate}
787-
disabled={!fullPrompt || !!error || querying || !prompt?.trim()}
788-
>
789-
<Icon name="paper-plane" /> Create {docName} content using{" "}
790-
{modelToName(model)}
791-
</Button>
784+
<Space>
785+
<LLMQueryDropdownButton
786+
disabled={!fullPrompt || !!error || querying || !prompt?.trim()}
787+
loading={querying}
788+
onClick={generate}
789+
llmTools={{ model, setModel }}
790+
task={`Create ${docName} using`}
791+
/>
792+
</Space>
792793
</Paragraph>
793794
}
794795
{fullPrompt && tokens > 0 ? (

0 commit comments

Comments (0)