Skip to content

Commit 17c41ef

Browse files
authored
Merge pull request #7600 from sagemathinc/llm-upd-20240531
llm/npm: updating packages
2 parents fb55c84 + 077d5bf commit 17c41ef

File tree

12 files changed

+487
-479
lines changed

12 files changed

+487
-479
lines changed

src/packages/frontend/admin/llm/index.tsx

Lines changed: 72 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,12 @@ import {
99
import { Paragraph, Title } from "@cocalc/frontend/components";
1010
import { LLMModelName } from "@cocalc/frontend/components/llm-name";
1111
import {
12-
LanguageModelCore,
1312
LLMServiceName,
1413
LLM_PROVIDER,
14+
LanguageModelCore,
1515
USER_SELECTABLE_LLMS_BY_VENDOR,
1616
isCoreLanguageModel,
17+
toCustomOpenAIModel,
1718
toOllamaModel,
1819
} from "@cocalc/util/db-schema/llm-utils";
1920
import { getRandomColor, trunc_middle } from "@cocalc/util/misc";
@@ -26,6 +27,7 @@ export function TestLLMAdmin() {
2627
const globallyEnabledLLMs = customize.getEnabledLLMs();
2728
const selectableLLMs = useTypedRedux("customize", "selectable_llms");
2829
const ollama = useTypedRedux("customize", "ollama");
30+
const custom_openai = useTypedRedux("customize", "custom_openai");
2931
const [test, setTest] = useState<number | null>(0);
3032
// TODO: this is used to trigger sending queries – makes no sense that all of them disable it. fix this.
3133
const [querying, setQuerying] = useState<boolean>();
@@ -66,6 +68,70 @@ export function TestLLMAdmin() {
6668
);
6769
}
6870

71+
function renderCustomOpenAI() {
72+
return (
73+
<Col key={"custom_openai"} md={12} xs={24}>
74+
<Title level={5}>Custom OpenAI</Title>
75+
{Object.entries(custom_openai?.toJS() ?? {}).map(([key, _val]) => {
76+
const model = toCustomOpenAIModel(key);
77+
78+
return (
79+
<Row
80+
gutter={[10, 20]}
81+
style={llmStyle(model)}
82+
key={`custom_openai-${key}`}
83+
>
84+
<Col md={24}>
85+
<Space>
86+
<Value val={true} /> <LLMModelName model={model} />
87+
</Space>
88+
</Col>
89+
<Col md={24}>
90+
<TestLLM
91+
test={test}
92+
model={model}
93+
queryState={[querying, setQuerying]}
94+
/>
95+
</Col>
96+
</Row>
97+
);
98+
})}
99+
</Col>
100+
);
101+
}
102+
103+
function renderOllama() {
104+
return (
105+
<Col key={"ollama"} md={12} xs={24}>
106+
<Title level={5}>Ollama</Title>
107+
{Object.entries(ollama?.toJS() ?? {}).map(([key, _val]) => {
108+
const model = toOllamaModel(key);
109+
110+
return (
111+
<Row
112+
gutter={[10, 20]}
113+
style={llmStyle(model)}
114+
key={`ollama-${key}`}
115+
>
116+
<Col md={24}>
117+
<Space>
118+
<Value val={true} /> <LLMModelName model={model} />
119+
</Space>
120+
</Col>
121+
<Col md={24}>
122+
<TestLLM
123+
test={test}
124+
model={model}
125+
queryState={[querying, setQuerying]}
126+
/>
127+
</Col>
128+
</Row>
129+
);
130+
})}
131+
</Col>
132+
);
133+
}
134+
69135
return (
70136
<div>
71137
<Paragraph>
@@ -108,7 +174,7 @@ export function TestLLMAdmin() {
108174
<Row gutter={[10, 10]}>
109175
{Object.entries(USER_SELECTABLE_LLMS_BY_VENDOR).map(
110176
([vendor, llms]) =>
111-
vendor !== "ollama" ? (
177+
vendor !== "ollama" && vendor !== "custom_openai" ? (
112178
<Col key={vendor} md={12} xs={24}>
113179
<Title level={5}>{LLM_PROVIDER[vendor].name}</Title>
114180
{llms
@@ -117,38 +183,15 @@ export function TestLLMAdmin() {
117183
</Col>
118184
) : undefined,
119185
)}
120-
<Col key={"ollama"} md={12} xs={24}>
121-
<Title level={5}>Ollama</Title>
122-
{Object.entries(ollama?.toJS() ?? {}).map(([key, val]) => {
123-
const model = toOllamaModel(val.model);
124-
125-
return (
126-
<Row
127-
gutter={[10, 20]}
128-
style={llmStyle(model)}
129-
key={`ollama-${key}`}
130-
>
131-
<Col md={24}>
132-
<Space>
133-
<Value val={true} /> <LLMModelName model={model} />
134-
</Space>
135-
</Col>
136-
<Col md={24}>
137-
<TestLLM
138-
test={test}
139-
model={model}
140-
queryState={[querying, setQuerying]}
141-
/>
142-
</Col>
143-
</Row>
144-
);
145-
})}
146-
</Col>
186+
{renderOllama()}
187+
{renderCustomOpenAI()}
147188
</Row>
148189
</Paragraph>
149190

150191
<Title level={5}>Ollama configuration</Title>
151192
<Value val={ollama} />
193+
<Title level={5}>Custom OpenAI API</Title>
194+
<Value val={custom_openai} />
152195
</div>
153196
);
154197
}

src/packages/frontend/components/llm-name.tsx

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,6 @@ export function LLMModelName(
1919
const custom_openai = useTypedRedux("customize", "custom_openai");
2020

2121
function renderTitle() {
22-
if (isLanguageModel(model)) {
23-
return modelToName(model);
24-
}
25-
2622
if (isOllamaLLM(model)) {
2723
const om = ollama?.get(fromOllamaModel(model));
2824
if (om) {
@@ -37,6 +33,10 @@ export function LLMModelName(
3733
}
3834
}
3935

36+
if (isLanguageModel(model)) {
37+
return modelToName(model);
38+
}
39+
4040
return model;
4141
}
4242

src/packages/package.json

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,8 @@
2020
"undici@<5.28.3": "^5.28.4",
2121
"postcss@<8.4.31": "^8.4.31",
2222
"retry-request@<7.0.1": "^7.0.2",
23-
"@langchain/core": "^0.1.63",
24-
"katex@<0.16.9": "^0.16.10",
25-
"@mistralai/mistralai": "^0.2.0"
23+
"@langchain/core": "^0.2.5",
24+
"katex@<0.16.9": "^0.16.10"
2625
}
2726
}
2827
}

0 commit comments

Comments (0)