|
| 1 | +import { ILLMConfig } from "../services/chat-history-manager"; |
| 2 | + |
// Status-bar / chat prefix shown to the user while CodeBuddy is working
// (completed by an action-specific suffix elsewhere — note the trailing space).
export const USER_MESSAGE = " ☕️ Hold on while CodeBuddy ";
2 | 4 | export enum OLA_ACTIONS { |
3 | 5 | comment = "CodeBuddy.commentCode", |
@@ -28,7 +30,7 @@ export enum COMMON { |
28 | 30 | } |
29 | 31 | export const GROQ_CONFIG = { |
30 | 32 | temperature: 0.1, |
31 | | - max_tokens: 8192, |
| 33 | + max_tokens: 6000, |
32 | 34 | top_p: 1, |
33 | 35 | stream: false, |
34 | 36 | stop: ["thanks"], |
@@ -361,3 +363,48 @@ const FILE_TYPE_PROMPTS: Record<string, string> = { |
361 | 363 | json: "Parse this JSON file and provide a structured analysis:", |
362 | 364 | default: "Extract all relevant information from this file:", |
363 | 365 | }; |
| 366 | + |
| 367 | +export const LLM_CONFIGS: Record<string, ILLMConfig> = { |
| 368 | + gemini: { |
| 369 | + botRole: "model", |
| 370 | + userRole: "user", |
| 371 | + formatMessage: (role, content) => ({ role, parts: [{ text: content }] }), |
| 372 | + }, |
| 373 | + groq: { |
| 374 | + botRole: "system", |
| 375 | + userRole: "user", |
| 376 | + formatMessage: (role, content) => ({ role, content }), |
| 377 | + }, |
| 378 | + anthropic: { |
| 379 | + botRole: "assistant", |
| 380 | + userRole: "user", |
| 381 | + formatMessage: (role, content) => ({ role, content }), |
| 382 | + }, |
| 383 | +}; |
| 384 | + |
| 385 | +// NOTE: a configuration like the one sketched below could be used by an orchestrator to decide where to route requests.
| 386 | +// export const LLM_CONFIGS: Record<string, LLMConfig> = { |
| 387 | +// gemini: { |
| 388 | +// botRole: "model", |
| 389 | +// userRole: "user", |
| 390 | +// chatFormat: "markdown", |
| 391 | +// multiModalModel: "gemini-pro-vision", |
| 392 | +// codeInterpreter: true, |
| 393 | +// retriever: true, |
| 394 | +// fileUpload: true, |
| 395 | +// webSearch: true, |
| 396 | +// functionCalling: true, |
| 397 | +// codeInterpreterModel: "gemini-pro-vision", |
| 398 | +// retrieverModel: "gemini-pro", |
| 399 | +// webSearchModel: "gemini-pro", |
| 400 | +// functionCallingModel: "gemini-pro", |
| 401 | +// fileUploadModel: "gemini-pro-vision", |
| 402 | +// fileUploadPrompt: (fileType: string) => { |
| 403 | +// const prompt = FILE_TYPE_PROMPTS[fileType] || FILE_TYPE_PROMPTS.default; |
| 404 | +// return `${prompt} \n\n ${fileType} file content:`; |
| 405 | +// }, |
| 406 | +// fileUploadHandler: (fileType: string) => { |
| 407 | +// return fileType === "pdf" ? "Upload PDF file" : "Upload file"; |
| 408 | +// }, |
| 409 | +// }, |
| 410 | +// }; |
0 commit comments