@@ -8,7 +8,9 @@ import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-
8
8
import {
9
9
ANTHROPIC_MODELS ,
10
10
GOOGLE_MODELS ,
11
+ LANGUAGE_MODEL_SERVICES ,
11
12
LLMServiceName ,
13
+ LLMServicesAvailable ,
12
14
LLM_DESCR ,
13
15
LLM_PROVIDER ,
14
16
LLM_USERNAMES ,
@@ -27,8 +29,9 @@ import {
27
29
toCustomOpenAIModel ,
28
30
toOllamaModel ,
29
31
} from "@cocalc/util/db-schema/llm-utils" ;
30
- import type { CustomLLMPublic } from "@cocalc/util/types/llm" ;
31
32
import { round2up } from "@cocalc/util/misc" ;
33
+ import type { CustomLLMPublic } from "@cocalc/util/types/llm" ;
34
+ import { getCustomLLMGroup } from "./components" ;
32
35
33
36
// Size prop accepted by antd components (taken from ConfigProvider's
// componentSize), forwarded to the <Select> rendered by this component.
type SizeType = ConfigProviderProps["componentSize"];
34
37
@@ -54,36 +57,16 @@ export default function LLMSelector({
54
57
// ATTN: you cannot use useProjectContext because this component is used outside a project context
55
58
// when it is opened via an error in the gutter of a latex document. (I don't know why, maybe fixable)
56
59
const projectsStore = redux . getStore ( "projects" ) ;
57
- const showOpenAI = projectsStore . hasLanguageModelEnabled (
58
- project_id ,
59
- undefined ,
60
- "openai" ,
61
- ) ;
62
- const showGoogle = projectsStore . hasLanguageModelEnabled (
63
- project_id ,
64
- undefined ,
65
- "google" ,
66
- ) ;
67
- const showMistral = projectsStore . hasLanguageModelEnabled (
68
- project_id ,
69
- undefined ,
70
- "mistralai" ,
71
- ) ;
72
- const showAnthropic = projectsStore . hasLanguageModelEnabled (
73
- project_id ,
74
- undefined ,
75
- "anthropic" ,
76
- ) ;
77
- const showOllama = projectsStore . hasLanguageModelEnabled (
78
- project_id ,
79
- undefined ,
80
- "ollama" ,
81
- ) ;
82
- const showCustomOpenAI = projectsStore . hasLanguageModelEnabled (
83
- project_id ,
84
- undefined ,
85
- "custom_openai" ,
86
- ) ;
60
+
61
+ const show = LANGUAGE_MODEL_SERVICES . reduce ( ( cur , svc ) => {
62
+ cur [ svc ] = projectsStore . hasLanguageModelEnabled (
63
+ project_id ,
64
+ undefined ,
65
+ svc ,
66
+ ) ;
67
+ return cur ;
68
+ } , { } ) as LLMServicesAvailable ;
69
+
87
70
  // Site-configured custom model lists (Ollama and custom OpenAI-compatible
  // endpoints) plus the admin-selected set of selectable LLMs, all read from
  // the "customize" redux store.
  const ollama = useTypedRedux("customize", "ollama");
  const custom_openai = useTypedRedux("customize", "custom_openai");
  const selectableLLMs = useTypedRedux("customize", "selectable_llms");
@@ -117,24 +100,29 @@ export default function LLMSelector({
117
100
118
101
function makeLLMGroup (
119
102
ret : NonNullable < SelectProps [ "options" ] > ,
120
- service : LLMServiceName ,
103
+ service : LLMServiceName | "custom" ,
121
104
options ,
122
105
) {
123
106
// there could be "undefined" in the list of options
124
107
options = options ?. filter ( ( o ) => ! ! o ) as SelectProps [ "options" ] ;
125
108
if ( options ?. length === 0 ) return ;
126
- const info = LLM_PROVIDER [ service ] ;
127
- const label = (
128
- < >
129
- < Text strong > { info . name } </ Text > – { info . short }
130
- </ >
131
- ) ;
132
- const title = info . desc ;
133
- ret . push ( { label, title, options } ) ;
109
+
110
+ if ( service === "custom" ) {
111
+ const { title, label } = getCustomLLMGroup ( ) ;
112
+ ret . push ( { label, title, options } ) ;
113
+ } else {
114
+ const { name, desc, short } = LLM_PROVIDER [ service ] ;
115
+ const label = (
116
+ < >
117
+ < Text strong > { name } </ Text > – { short }
118
+ </ >
119
+ ) ;
120
+ ret . push ( { label, title : desc , options } ) ;
121
+ }
134
122
}
135
123
136
124
function appendOpenAI ( ret : NonNullable < SelectProps [ "options" ] > ) : void {
137
- if ( ! showOpenAI ) return ;
125
+ if ( ! show . openai ) return ;
138
126
makeLLMGroup (
139
127
ret ,
140
128
"openai" ,
@@ -143,7 +131,7 @@ export default function LLMSelector({
143
131
}
144
132
145
133
function appendGoogle ( ret : NonNullable < SelectProps [ "options" ] > ) : void {
146
- if ( ! showGoogle ) return ;
134
+ if ( ! show . google ) return ;
147
135
makeLLMGroup (
148
136
ret ,
149
137
"google" ,
@@ -152,7 +140,7 @@ export default function LLMSelector({
152
140
}
153
141
154
142
function appendMistral ( ret : NonNullable < SelectProps [ "options" ] > ) : void {
155
- if ( ! showMistral ) return ;
143
+ if ( ! show . mistralai ) return ;
156
144
makeLLMGroup (
157
145
ret ,
158
146
"mistralai" ,
@@ -161,18 +149,17 @@ export default function LLMSelector({
161
149
}
162
150
163
151
function appendAnthropic ( ret : NonNullable < SelectProps [ "options" ] > ) : void {
164
- if ( ! showAnthropic ) return ;
152
+ if ( ! show . anthropic ) return ;
165
153
makeLLMGroup (
166
154
ret ,
167
155
"anthropic" ,
168
156
ANTHROPIC_MODELS . map ( ( m ) => makeLLMOption ( m , LLM_DESCR [ m ] ) ) ,
169
157
) ;
170
158
}
171
159
172
- function appendOllama ( ret : NonNullable < SelectProps [ "options" ] > ) : void {
173
- if ( ! showOllama || ! ollama ) return ;
160
+ function appendOllama ( options : NonNullable < SelectProps [ "options" ] > ) : void {
161
+ if ( ! show . ollama || ! ollama ) return ;
174
162
175
- const options : NonNullable < SelectProps [ "options" ] > = [ ] ;
176
163
for ( const [ key , config ] of Object . entries < CustomLLMPublic > (
177
164
ollama . toJS ( ) ,
178
165
) ) {
@@ -200,13 +187,13 @@ export default function LLMSelector({
200
187
) ,
201
188
} ) ;
202
189
}
203
- makeLLMGroup ( ret , "ollama" , options ) ;
204
190
}
205
191
206
- function appendCustomOpenAI ( ret : NonNullable < SelectProps [ "options" ] > ) : void {
207
- if ( ! showCustomOpenAI || ! custom_openai ) return ;
192
+ function appendCustomOpenAI (
193
+ options : NonNullable < SelectProps [ "options" ] > ,
194
+ ) : void {
195
+ if ( ! show . custom_openai || ! custom_openai ) return ;
208
196
209
- const options : NonNullable < SelectProps [ "options" ] > = [ ] ;
210
197
for ( const [ key , config ] of Object . entries < CustomLLMPublic > (
211
198
custom_openai . toJS ( ) ,
212
199
) ) {
@@ -234,7 +221,6 @@ export default function LLMSelector({
234
221
) ,
235
222
} ) ;
236
223
}
237
- makeLLMGroup ( ret , "custom_openai" , options ) ;
238
224
}
239
225
240
226
function getOptions ( ) : SelectProps [ "options" ] {
@@ -243,8 +229,12 @@ export default function LLMSelector({
243
229
appendGoogle ( ret ) ;
244
230
appendMistral ( ret ) ;
245
231
appendAnthropic ( ret ) ;
246
- appendOllama ( ret ) ;
247
- appendCustomOpenAI ( ret ) ;
232
+ const custom : NonNullable < SelectProps [ "options" ] > = [ ] ;
233
+ appendOllama ( custom ) ;
234
+ appendCustomOpenAI ( custom ) ;
235
+ if ( custom . length > 0 ) {
236
+ makeLLMGroup ( ret , "custom" , custom ) ;
237
+ }
248
238
return ret ;
249
239
}
250
240
0 commit comments