
Commit 2017cef

allow non-OpenAI models in ai-bot (#9315)
Signed-off-by: Mohamed Dardouri <[email protected]>
Co-authored-by: Kristina <[email protected]>
1 parent 714d9dc commit 2017cef


services/ai-bot/pod-ai-bot/src/controller.ts

Lines changed: 13 additions & 2 deletions
@@ -42,7 +42,7 @@ import { WorkspaceInfoRecord } from '@hcengineering/server-ai-bot'
 import { getAccountClient } from '@hcengineering/server-client'
 import { generateToken } from '@hcengineering/server-token'
 import { htmlToMarkup, jsonToHTML, jsonToMarkup, markupToJSON } from '@hcengineering/text'
-import { encodingForModel } from 'js-tiktoken'
+import { encodingForModel, getEncoding } from 'js-tiktoken'
 import OpenAI from 'openai'
 
 import chunter from '@hcengineering/chunter'
@@ -66,7 +66,18 @@ export class AIControl {
   readonly storageAdapter: StorageAdapter
 
   private readonly openai?: OpenAI
-  private readonly openaiEncoding = encodingForModel(config.OpenAIModel)
+
+  // Try to obtain the encoding for the configured model. If the model is not recognised by js-tiktoken
+  // (e.g. non-OpenAI models such as Together AI Llama derivatives) we gracefully fall back to the
+  // universal `cl100k_base` encoding. This prevents a runtime "Unknown model" error while still
+  // giving us a reasonable token count estimate for summaries.
+  private readonly openaiEncoding = (() => {
+    try {
+      return encodingForModel(config.OpenAIModel)
+    } catch (err) {
+      return getEncoding('cl100k_base')
+    }
+  })()
 
   constructor (
     readonly personUuid: AccountUuid,

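For context, here is a minimal standalone sketch (not part of this commit) of the fallback pattern the new `openaiEncoding` initializer relies on: js-tiktoken's `encodingForModel` throws for model names it does not recognise, so the catch falls back to the universal `cl100k_base` encoding via `getEncoding`. The helper name `encodingFor` and the model string below are hypothetical, used only for illustration.

import { encodingForModel, getEncoding, Tiktoken, TiktokenModel } from 'js-tiktoken'

// Resolve a tokenizer for any configured model name, OpenAI or not (illustrative helper)
function encodingFor (model: string): Tiktoken {
  try {
    // Throws for model names js-tiktoken does not ship an encoding for
    return encodingForModel(model as TiktokenModel)
  } catch {
    // Fall back to cl100k_base for a rough but usable token count estimate
    return getEncoding('cl100k_base')
  }
}

// Hypothetical non-OpenAI model name; any unrecognised name takes the fallback path
const enc = encodingFor('meta-llama/Llama-3-70b-chat-hf')
console.log(enc.encode('How many tokens is this message?').length)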