Skip to content

Commit a7b3b01

Browse files
committed
frontend/llm: refactor and fix OpenAI vs Custom OpenAI
1 parent 8453ad7 commit a7b3b01

File tree

8 files changed

+144
-139
lines changed

8 files changed

+144
-139
lines changed
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import { redux } from "@cocalc/frontend/app-framework";
2+
import { Text } from "@cocalc/frontend/components";
3+
4+
export function getCustomLLMGroup() {
  // Read the site branding from the "customize" store; these values label
  // the group of admin-managed ("custom") language models in LLM menus.
  const store = redux.getStore("customize");
  const siteName = store.get("site_name");
  // NOTE(review): organization_name may be unset in the store — it falls
  // back to "", which leaves the title/label ending in "managed by ".
  const orgName = store.get("organization_name") ?? "";

  // Plain-text tooltip/title and a rich JSX label describing the group.
  const label = (
    <>
      <Text strong>{siteName} language models</Text> – managed by{" "}
      {orgName}
    </>
  );

  return {
    title: `These language models on ${siteName} are managed by ${orgName}`,
    label,
  };
}

src/packages/frontend/frame-editors/llm/llm-query-dropdown.tsx

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-
77
import { modelToName } from "@cocalc/frontend/frame-editors/llm/llm-selector";
88
import { useAvailableLLMs } from "@cocalc/frontend/frame-editors/llm/use-llm-menu-options";
99
import { useProjectContext } from "@cocalc/frontend/project/context";
10-
import { LLM_PROVIDER } from "@cocalc/util/db-schema/llm-utils";
1110
import { LLMTools } from "@cocalc/jupyter/types";
11+
import { LLM_PROVIDER } from "@cocalc/util/db-schema/llm-utils";
1212

1313
interface Props {
1414
llmTools?: Pick<LLMTools, "model" | "setModel">;
@@ -26,7 +26,7 @@ export function LLMQueryDropdownButton({
2626
disabled = false,
2727
}: Props) {
2828
const { project_id } = useProjectContext();
29-
const models = useAvailableLLMs(project_id);
29+
const modelsByService = useAvailableLLMs(project_id);
3030

3131
function renderOkText() {
3232
if (llmTools == null) return <></>;
@@ -40,14 +40,17 @@ export function LLMQueryDropdownButton({
4040
function getItems(): MenuProps["items"] {
4141
const ret: MenuProps["items"] = [];
4242
let first = true;
43-
for (const [service, entry] of Object.entries(models)) {
43+
for (const [service, entry] of Object.entries(modelsByService)) {
4444
const { models } = entry;
4545
if (models.length === 0) continue;
4646

4747
if (!first) ret.push({ type: "divider" });
4848
first = false;
4949

50-
const { name, short } = LLM_PROVIDER[service];
50+
const { name, short } =
51+
service === "custom"
52+
? { name: entry.name, short: entry.desc }
53+
: LLM_PROVIDER[service];
5154
ret.push({
5255
type: "group",
5356
label: (
@@ -57,6 +60,7 @@ export function LLMQueryDropdownButton({
5760
</>
5861
),
5962
});
63+
6064
for (const model of models) {
6165
const { name, title, desc, price } = model;
6266
ret.push({

src/packages/frontend/frame-editors/llm/llm-selector.tsx

Lines changed: 44 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,9 @@ import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-
88
import {
99
ANTHROPIC_MODELS,
1010
GOOGLE_MODELS,
11+
LANGUAGE_MODEL_SERVICES,
1112
LLMServiceName,
13+
LLMServicesAvailable,
1214
LLM_DESCR,
1315
LLM_PROVIDER,
1416
LLM_USERNAMES,
@@ -27,8 +29,9 @@ import {
2729
toCustomOpenAIModel,
2830
toOllamaModel,
2931
} from "@cocalc/util/db-schema/llm-utils";
30-
import type { CustomLLMPublic } from "@cocalc/util/types/llm";
3132
import { round2up } from "@cocalc/util/misc";
33+
import type { CustomLLMPublic } from "@cocalc/util/types/llm";
34+
import { getCustomLLMGroup } from "./components";
3235

3336
type SizeType = ConfigProviderProps["componentSize"];
3437

@@ -54,36 +57,16 @@ export default function LLMSelector({
5457
// ATTN: you cannot use useProjectContext because this component is used outside a project context
5558
// when it is opened via an error in the gutter of a latex document. (I don't know why, maybe fixable)
5659
const projectsStore = redux.getStore("projects");
57-
const showOpenAI = projectsStore.hasLanguageModelEnabled(
58-
project_id,
59-
undefined,
60-
"openai",
61-
);
62-
const showGoogle = projectsStore.hasLanguageModelEnabled(
63-
project_id,
64-
undefined,
65-
"google",
66-
);
67-
const showMistral = projectsStore.hasLanguageModelEnabled(
68-
project_id,
69-
undefined,
70-
"mistralai",
71-
);
72-
const showAnthropic = projectsStore.hasLanguageModelEnabled(
73-
project_id,
74-
undefined,
75-
"anthropic",
76-
);
77-
const showOllama = projectsStore.hasLanguageModelEnabled(
78-
project_id,
79-
undefined,
80-
"ollama",
81-
);
82-
const showCustomOpenAI = projectsStore.hasLanguageModelEnabled(
83-
project_id,
84-
undefined,
85-
"custom_openai",
86-
);
60+
61+
const show = LANGUAGE_MODEL_SERVICES.reduce((cur, svc) => {
62+
cur[svc] = projectsStore.hasLanguageModelEnabled(
63+
project_id,
64+
undefined,
65+
svc,
66+
);
67+
return cur;
68+
}, {}) as LLMServicesAvailable;
69+
8770
const ollama = useTypedRedux("customize", "ollama");
8871
const custom_openai = useTypedRedux("customize", "custom_openai");
8972
const selectableLLMs = useTypedRedux("customize", "selectable_llms");
@@ -117,24 +100,29 @@ export default function LLMSelector({
117100

118101
function makeLLMGroup(
119102
ret: NonNullable<SelectProps["options"]>,
120-
service: LLMServiceName,
103+
service: LLMServiceName | "custom",
121104
options,
122105
) {
123106
// there could be "undefined" in the list of options
124107
options = options?.filter((o) => !!o) as SelectProps["options"];
125108
if (options?.length === 0) return;
126-
const info = LLM_PROVIDER[service];
127-
const label = (
128-
<>
129-
<Text strong>{info.name}</Text>{info.short}
130-
</>
131-
);
132-
const title = info.desc;
133-
ret.push({ label, title, options });
109+
110+
if (service === "custom") {
111+
const { title, label } = getCustomLLMGroup();
112+
ret.push({ label, title, options });
113+
} else {
114+
const { name, desc, short } = LLM_PROVIDER[service];
115+
const label = (
116+
<>
117+
<Text strong>{name}</Text>{short}
118+
</>
119+
);
120+
ret.push({ label, title: desc, options });
121+
}
134122
}
135123

136124
function appendOpenAI(ret: NonNullable<SelectProps["options"]>): void {
137-
if (!showOpenAI) return;
125+
if (!show.openai) return;
138126
makeLLMGroup(
139127
ret,
140128
"openai",
@@ -143,7 +131,7 @@ export default function LLMSelector({
143131
}
144132

145133
function appendGoogle(ret: NonNullable<SelectProps["options"]>): void {
146-
if (!showGoogle) return;
134+
if (!show.google) return;
147135
makeLLMGroup(
148136
ret,
149137
"google",
@@ -152,7 +140,7 @@ export default function LLMSelector({
152140
}
153141

154142
function appendMistral(ret: NonNullable<SelectProps["options"]>): void {
155-
if (!showMistral) return;
143+
if (!show.mistralai) return;
156144
makeLLMGroup(
157145
ret,
158146
"mistralai",
@@ -161,18 +149,17 @@ export default function LLMSelector({
161149
}
162150

163151
function appendAnthropic(ret: NonNullable<SelectProps["options"]>): void {
164-
if (!showAnthropic) return;
152+
if (!show.anthropic) return;
165153
makeLLMGroup(
166154
ret,
167155
"anthropic",
168156
ANTHROPIC_MODELS.map((m) => makeLLMOption(m, LLM_DESCR[m])),
169157
);
170158
}
171159

172-
function appendOllama(ret: NonNullable<SelectProps["options"]>): void {
173-
if (!showOllama || !ollama) return;
160+
function appendOllama(options: NonNullable<SelectProps["options"]>): void {
161+
if (!show.ollama || !ollama) return;
174162

175-
const options: NonNullable<SelectProps["options"]> = [];
176163
for (const [key, config] of Object.entries<CustomLLMPublic>(
177164
ollama.toJS(),
178165
)) {
@@ -200,13 +187,13 @@ export default function LLMSelector({
200187
),
201188
});
202189
}
203-
makeLLMGroup(ret, "ollama", options);
204190
}
205191

206-
function appendCustomOpenAI(ret: NonNullable<SelectProps["options"]>): void {
207-
if (!showCustomOpenAI || !custom_openai) return;
192+
function appendCustomOpenAI(
193+
options: NonNullable<SelectProps["options"]>,
194+
): void {
195+
if (!show.custom_openai || !custom_openai) return;
208196

209-
const options: NonNullable<SelectProps["options"]> = [];
210197
for (const [key, config] of Object.entries<CustomLLMPublic>(
211198
custom_openai.toJS(),
212199
)) {
@@ -234,7 +221,6 @@ export default function LLMSelector({
234221
),
235222
});
236223
}
237-
makeLLMGroup(ret, "custom_openai", options);
238224
}
239225

240226
function getOptions(): SelectProps["options"] {
@@ -243,8 +229,12 @@ export default function LLMSelector({
243229
appendGoogle(ret);
244230
appendMistral(ret);
245231
appendAnthropic(ret);
246-
appendOllama(ret);
247-
appendCustomOpenAI(ret);
232+
const custom: NonNullable<SelectProps["options"]> = [];
233+
appendOllama(custom);
234+
appendCustomOpenAI(custom);
235+
if (custom.length > 0) {
236+
makeLLMGroup(ret, "custom", custom);
237+
}
248238
return ret;
249239
}
250240

src/packages/frontend/frame-editors/llm/use-llm-menu-options.tsx

Lines changed: 52 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,20 @@ import { redux, useTypedRedux } from "@cocalc/frontend/app-framework";
44
import {
55
ANTHROPIC_MODELS,
66
GOOGLE_MODELS,
7+
LANGUAGE_MODEL_SERVICES,
78
LLMServiceName,
9+
LLMServicesAvailable,
810
LLM_DESCR,
911
LLM_PROVIDER,
1012
LanguageModel,
1113
MISTRAL_MODELS,
1214
MODELS_OPENAI,
15+
toCustomOpenAIModel,
1316
toOllamaModel,
1417
} from "@cocalc/util/db-schema/llm-utils";
15-
import { LLMModelPrice, modelToName } from "./llm-selector";
1618
import { CustomLLMPublic } from "@cocalc/util/types/llm";
19+
import { getCustomLLMGroup } from "./components";
20+
import { LLMModelPrice, modelToName } from "./llm-selector";
1721

1822
interface Model {
1923
name: LanguageModel;
@@ -27,36 +31,24 @@ export function useAvailableLLMs(project_id: string) {
2731
// ATTN: you cannot use useProjectContext because this component is used outside a project context
2832
// when it is opened via an error in the gutter of a latex document. (I don't know why, maybe fixable)
2933
const projectsStore = redux.getStore("projects");
30-
const haveOpenAI = projectsStore.hasLanguageModelEnabled(
31-
project_id,
32-
undefined,
33-
"openai",
34-
);
35-
const haveGoogle = projectsStore.hasLanguageModelEnabled(
36-
project_id,
37-
undefined,
38-
"google",
39-
);
40-
const haveMistral = projectsStore.hasLanguageModelEnabled(
41-
project_id,
42-
undefined,
43-
"mistralai",
44-
);
45-
const haveAnthropic = projectsStore.hasLanguageModelEnabled(
46-
project_id,
47-
undefined,
48-
"anthropic",
49-
);
50-
const haveOllama = projectsStore.hasLanguageModelEnabled(
51-
project_id,
52-
undefined,
53-
"ollama",
54-
);
34+
const have = LANGUAGE_MODEL_SERVICES.reduce((cur, svc) => {
35+
cur[svc] = projectsStore.hasLanguageModelEnabled(
36+
project_id,
37+
undefined,
38+
svc,
39+
);
40+
return cur;
41+
}, {}) as LLMServicesAvailable;
5542
const ollama = useTypedRedux("customize", "ollama");
43+
const custom_openai = useTypedRedux("customize", "custom_openai");
5644
const selectableLLMs = useTypedRedux("customize", "selectable_llms");
5745

5846
const providers: {
59-
[key in LLMServiceName]?: { name: string; desc: string; models: Model[] };
47+
[key in LLMServiceName | "custom"]?: {
48+
name: string | JSX.Element;
49+
desc: string;
50+
models: Model[];
51+
};
6052
} = {};
6153

6254
function add(service: LLMServiceName, models) {
@@ -77,27 +69,48 @@ export function useAvailableLLMs(project_id: string) {
7769
};
7870
}
7971

80-
if (haveOpenAI) add("openai", MODELS_OPENAI);
81-
if (haveGoogle) add("google", GOOGLE_MODELS);
82-
if (haveMistral) add("mistralai", MISTRAL_MODELS);
83-
if (haveAnthropic) add("anthropic", ANTHROPIC_MODELS);
84-
if (haveOllama && ollama) {
85-
const models: Model[] = [];
86-
for (const [key, config] of Object.entries<CustomLLMPublic>(ollama.toJS())) {
72+
if (have.openai) add("openai", MODELS_OPENAI);
73+
if (have.google) add("google", GOOGLE_MODELS);
74+
if (have.mistralai) add("mistralai", MISTRAL_MODELS);
75+
if (have.anthropic) add("anthropic", ANTHROPIC_MODELS);
76+
77+
const custom: Model[] = [];
78+
if (have.ollama && ollama) {
79+
for (const [key, config] of Object.entries<CustomLLMPublic>(
80+
ollama.toJS(),
81+
)) {
8782
const { display, desc = "" } = config;
8883
const ollamaModel = toOllamaModel(key);
89-
models.push({
84+
custom.push({
9085
name: ollamaModel,
9186
title: display,
9287
desc,
9388
price: <LLMModelPrice model={ollamaModel} />,
9489
});
9590
}
91+
}
92+
93+
if (have.custom_openai && custom_openai) {
94+
for (const [key, config] of Object.entries<CustomLLMPublic>(
95+
custom_openai.toJS(),
96+
)) {
97+
const { display, desc = "" } = config;
98+
const customOpenAIModel = toCustomOpenAIModel(key);
99+
custom.push({
100+
name: customOpenAIModel,
101+
title: display,
102+
desc,
103+
price: <LLMModelPrice model={customOpenAIModel} />,
104+
});
105+
}
106+
}
96107

97-
providers["ollama"] = {
98-
models,
99-
name: "ollama",
100-
desc: "Ollama",
108+
if (custom.length > 0) {
109+
const { title, label } = getCustomLLMGroup();
110+
providers["custom"] = {
111+
models: custom,
112+
name: label,
113+
desc: title,
101114
};
102115
}
103116

0 commit comments

Comments
 (0)