Commit bd23cda

Merge pull request #7585 from sagemathinc/llm-fix-custom-defaults
db-schema/server-settings: fix fallback for custom LLM configs
2 parents: 3a4e712 + bd3298d

1 file changed: +2 -2 lines changed

src/packages/util/db-schema/site-settings-extras.ts

Lines changed: 2 additions & 2 deletions
@@ -313,7 +313,7 @@ export const EXTRAS: SettingsExtras = {
   ollama_configuration: {
     name: "Ollama Configuration",
     desc: 'Configure Ollama endpoints. e.g. Ollama has "gemma" installed and is available at localhost:11434: `{"gemma" : {"baseUrl": "http://localhost:11434/" , cocalc: {display: "Gemma", desc: "Google\'s Gemma Model"}}`',
-    default: "",
+    default: "{}",
     multiline: 5,
     show: ollama_enabled,
     to_val: from_json,
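
For reference, a well-formed value for this setting, following the example sketched in the desc string above, could be built as below. This is an illustrative sketch, not part of the commit; the model name "gemma", the localhost URL, and the display strings are only examples.

// Hypothetical example of an ollama_configuration value, matching the shape
// described in the desc field above. Names and URL are illustrative.
const ollamaConfiguration: string = JSON.stringify({
  gemma: {
    baseUrl: "http://localhost:11434/",
    cocalc: { display: "Gemma", desc: "Google's Gemma Model" },
  },
});
// The setting is stored as a string; with to_val: from_json it is presumably
// parsed back into an object when the server reads it.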
@@ -325,7 +325,7 @@ export const EXTRAS: SettingsExtras = {
   custom_openai_configuration: {
     name: "Custom OpenAI Endpoints",
     desc: 'Configure OpenAI endpoints, queried via [@langchain/openai (Node.js)](https://js.langchain.com/v0.1/docs/integrations/llms/openai/). e.g. `{"myllm" : {"baseUrl": "http://1.2.3.4:5678/" , apiKey: "key...", cocalc: {display: "My LLM", desc: "My custom LLM"}}, "gpt-4o-high": {baseUrl: "https://api.openai.com/v1", temperature: 1, "openAIApiKey": "sk-...", "model": "gpt-4o", cocalc: {display: "High GPT-4 Omni", desc: "GPT 4 Omni with a high temperature"}}}`',
-    default: "",
+    default: "{}",
     multiline: 5,
     show: custom_openai_enabled,
     to_val: from_json,
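
Why the change: both settings are run through to_val: from_json, so the stored string is parsed as JSON when read. A minimal sketch of the difference between the old and new fallback, assuming from_json ultimately relies on standard JSON parsing (the helper's exact error handling is not shown in this diff):

// Old fallback: parsing the empty-string default fails.
JSON.parse("");    // throws SyntaxError: Unexpected end of JSON input
// New fallback: parsing yields an empty config object, i.e. "no custom models".
JSON.parse("{}");  // returns {}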
