
Commit 9c371f7

improve chatglm support (#696, #464)
1 parent 12ef5e8 commit 9c371f7

5 files changed: +18 -116 lines changed


build.mjs

Lines changed: 0 additions & 8 deletions
@@ -250,14 +250,6 @@ async function runWebpack(isWithoutKatex, isWithoutTiktoken, minimal, callback)
          search: 'await generateAnswersWithChatGLMApi',
          replace: '//',
        },
-        {
-          search: 'chatglmTurbo',
-          replace: '//',
-        },
-        {
-          search: "'chatglmTurbo",
-          replace: '//',
-        },
      ],
    },
  }

src/config/index.mjs

Lines changed: 5 additions & 2 deletions
@@ -63,7 +63,7 @@ export const claudeApiModelKeys = [
   'claude3SonnetApi',
   'claude3OpusApi',
 ]
-export const chatglmApiModelKeys = ['chatglmTurbo']
+export const chatglmApiModelKeys = ['chatglmTurbo', 'chatglm4', 'chatglmEmohaa', 'chatglmCharGLM3']
 export const githubThirdPartyApiModelKeys = ['waylaidwandererApi']
 export const poeWebModelKeys = [
   'poeAiWebSage', //poe.com/Assistant
@@ -145,7 +145,10 @@ export const Models = {
 
   bardWebFree: { value: '', desc: 'Gemini (Web)' },
 
-  chatglmTurbo: { value: 'chatglm_turbo', desc: 'ChatGLM (ChatGLM-Turbo)' },
+  chatglmTurbo: { value: 'GLM-4-Air', desc: 'ChatGLM (GLM-4-Air)' },
+  chatglm4: { value: 'GLM-4-0520', desc: 'ChatGLM (GLM-4-0520)' },
+  chatglmEmohaa: { value: 'Emohaa', desc: 'ChatGLM (Emohaa)' },
+  chatglmCharGLM3: { value: 'CharGLM-3', desc: 'ChatGLM (CharGLM-3)' },
 
   chatgptFree35Mobile: { value: 'text-davinci-002-render-sha-mobile', desc: 'ChatGPT (Mobile)' },
   chatgptPlus4Mobile: { value: 'gpt-4-mobile', desc: 'ChatGPT (Mobile, GPT-4)' },
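The added keys only register the new models; selection presumably still goes by membership in chatglmApiModelKeys, with the wire-format model name resolved through Models[key].value. A minimal sketch of that lookup, where resolveChatglmModel is an illustrative helper and not code from this commit:

// Illustrative only: resolves the API model string for a ChatGLM config key,
// e.g. 'chatglm4' -> 'GLM-4-0520', 'chatglmEmohaa' -> 'Emohaa'.
import { Models, chatglmApiModelKeys } from './src/config/index.mjs'

function resolveChatglmModel(modelKey) {
  if (!chatglmApiModelKeys.includes(modelKey)) {
    throw new Error(`${modelKey} is not a ChatGLM API model key`)
  }
  return Models[modelKey].value
}

console.log(resolveChatglmModel('chatglmTurbo')) // 'GLM-4-Air' after this commit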

src/services/apis/chatglm-api.mjs

Lines changed: 10 additions & 103 deletions
@@ -1,39 +1,6 @@
-import { Models, getUserConfig } from '../../config/index.mjs'
-import { pushRecord, setAbortController } from './shared.mjs'
-import { isEmpty } from 'lodash-es'
+import { getUserConfig } from '../../config/index.mjs'
 import { getToken } from '../../utils/jwt-token-generator.mjs'
-import { createParser } from '../../utils/eventsource-parser.mjs'
-
-async function fetchSSE(resource, options) {
-  const { onMessage, onStart, onEnd, onError, ...fetchOptions } = options
-  const resp = await fetch(resource, fetchOptions).catch(async (err) => {
-    await onError(err)
-  })
-  if (!resp) return
-  if (!resp.ok) {
-    await onError(resp)
-    return
-  }
-
-  const parser = createParser((event) => {
-    if (event.type === 'event') {
-      onMessage(event)
-    }
-  })
-
-  let hasStarted = false
-  const reader = resp.body.getReader()
-  let result
-  while (!(result = await reader.read()).done) {
-    const chunk = result.value
-    if (!hasStarted) {
-      hasStarted = true
-      await onStart(new TextDecoder().decode(chunk))
-    }
-    parser.feed(chunk)
-  }
-  await onEnd()
-}
+import { generateAnswersWithChatgptApiCompat } from './openai-api.mjs'
 
 /**
  * @param {Runtime.Port} port
@@ -42,74 +9,14 @@ async function fetchSSE(resource, options) {
  * @param {string} modelName
  */
 export async function generateAnswersWithChatGLMApi(port, question, session, modelName) {
-  const { controller, messageListener, disconnectListener } = setAbortController(port)
+  const baseUrl = 'https://open.bigmodel.cn/api/paas/v4'
   const config = await getUserConfig()
-
-  const prompt = []
-  for (const record of session.conversationRecords.slice(-config.maxConversationContextLength)) {
-    prompt.push({ role: 'user', content: record.question })
-    prompt.push({ role: 'assistant', content: record.answer })
-  }
-  prompt.push({ role: 'user', content: question })
-
-  let answer = ''
-  await fetchSSE(
-    `https://open.bigmodel.cn/api/paas/v3/model-api/${Models[modelName].value}/sse-invoke`,
-    {
-      method: 'POST',
-      signal: controller.signal,
-      headers: {
-        'Content-Type': 'application/json; charset=UTF-8',
-        Accept: 'text/event-stream',
-        Authorization: getToken(config.chatglmApiKey),
-      },
-      body: JSON.stringify({
-        prompt: prompt,
-        // temperature: config.temperature,
-        // top_t: 0.7,
-        // request_id: string
-        // incremental: true,
-        // return_type: "json_string",
-        // ref: {"enable": "true", "search_query": "history"},
-      }),
-      onMessage(event) {
-        console.debug('sse event', event)
-
-        // Handle different types of events
-        switch (event.event) {
-          case 'add':
-            // In the case of an "add" event, append the completion to the answer
-            if (event.data) {
-              answer += event.data
-              port.postMessage({ answer: answer, done: false, session: null })
-            }
-            break
-          case 'error':
-          case 'interrupted':
-          case 'finish':
-            pushRecord(session, question, answer)
-            console.debug('conversation history', { content: session.conversationRecords })
-            port.postMessage({ answer: null, done: true, session: session })
-            break
-          default:
-            break
-        }
-      },
-      async onStart() {},
-      async onEnd() {
-        port.postMessage({ done: true })
-        port.onMessage.removeListener(messageListener)
-        port.onDisconnect.removeListener(disconnectListener)
-      },
-      async onError(resp) {
-        port.onMessage.removeListener(messageListener)
-        port.onDisconnect.removeListener(disconnectListener)
-        if (resp instanceof Error) throw resp
-        const error = await resp.json().catch(() => ({}))
-        throw new Error(
-          !isEmpty(error) ? JSON.stringify(error) : `${resp.status} ${resp.statusText}`,
-        )
-      },
-    },
+  return generateAnswersWithChatgptApiCompat(
+    baseUrl,
+    port,
+    question,
+    session,
+    getToken(config.chatglmApiKey),
+    modelName,
   )
 }
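The rewrite works because ChatGLM's open.bigmodel.cn v4 endpoint speaks the OpenAI chat-completions protocol, so the hand-rolled SSE client above can be replaced by the shared generateAnswersWithChatgptApiCompat helper. A rough, self-contained sketch of the equivalent raw request against the new base URL (non-streaming for brevity; the real streaming and header handling live in openai-api.mjs, and whether a Bearer prefix is needed depends on what that helper sends):

// Sketch only: a direct call to the OpenAI-compatible v4 route, assuming the
// same JWT produced by getToken() is accepted in the Authorization header.
import { Models, getUserConfig } from './src/config/index.mjs'
import { getToken } from './src/utils/jwt-token-generator.mjs'

async function askChatGLM(question, modelKey = 'chatglm4') {
  const config = await getUserConfig()
  const resp = await fetch('https://open.bigmodel.cn/api/paas/v4/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: getToken(config.chatglmApiKey), // or `Bearer ${token}`, matching the compat helper
    },
    body: JSON.stringify({
      model: Models[modelKey].value, // e.g. 'GLM-4-0520'
      messages: [{ role: 'user', content: question }],
      stream: false, // the extension streams via SSE; disabled here to keep the sketch short
    }),
  })
  const data = await resp.json()
  return data.choices[0].message.content
}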

src/services/apis/moonshot-api.mjs

Lines changed: 1 addition & 1 deletion
@@ -14,6 +14,6 @@ export async function generateAnswersWithMoonshotCompletionApi(
   apiKey,
   modelName,
 ) {
-  const baseUrl = 'https://api.moonshot.cn'
+  const baseUrl = 'https://api.moonshot.cn/v1'
   return generateAnswersWithChatgptApiCompat(baseUrl, port, question, session, apiKey, modelName)
 }

src/services/apis/openai-api.mjs

Lines changed: 2 additions & 2 deletions
@@ -109,7 +109,7 @@ export async function generateAnswersWithGptCompletionApi(
 export async function generateAnswersWithChatgptApi(port, question, session, apiKey, modelName) {
   const config = await getUserConfig()
   return generateAnswersWithChatgptApiCompat(
-    config.customOpenAiApiUrl,
+    config.customOpenAiApiUrl + '/v1',
     port,
     question,
     session,
@@ -144,7 +144,7 @@ export async function generateAnswersWithChatgptApiCompat(
     console.debug('conversation history', { content: session.conversationRecords })
     port.postMessage({ answer: null, done: true, session: session })
   }
-  await fetchSSE(`${baseUrl}/v1/chat/completions`, {
+  await fetchSSE(`${baseUrl}/chat/completions`, {
     method: 'POST',
     signal: controller.signal,
     headers: {
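Together with the moonshot-api.mjs change above, these two hunks move the version segment (/v1 or /v4) out of the shared helper and into each caller's base URL; generateAnswersWithChatgptApiCompat now only appends the route. A small illustration, where endpointFor is just shorthand for the template string inside the helper:

// After this commit, callers pass a fully versioned base URL and the helper
// only appends '/chat/completions'.
const endpointFor = (baseUrl) => `${baseUrl}/chat/completions`

console.log(endpointFor('https://api.moonshot.cn/v1'))
// -> https://api.moonshot.cn/v1/chat/completions
console.log(endpointFor('https://open.bigmodel.cn/api/paas/v4'))
// -> https://open.bigmodel.cn/api/paas/v4/chat/completions
// customOpenAiApiUrl gets '/v1' appended by its caller, e.g.
// 'https://api.openai.com' + '/v1' -> https://api.openai.com/v1/chat/completions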
