Commit 93160b8 (parent: 47b1df3)

fix: cannot stop when using azure openai api

4 files changed: +57 -109 lines

package-lock.json

Lines changed: 7 additions & 41 deletions (generated file; diff not rendered by default)

package.json

Lines changed: 0 additions & 2 deletions
@@ -22,8 +22,6 @@
     "@nem035/gpt-3-encoder": "^1.1.7",
     "@picocss/pico": "^1.5.7",
     "@primer/octicons-react": "^18.2.0",
-    "@vespaiach/axios-fetch-adapter": "^0.3.1",
-    "azure-openai": "^0.9.4",
     "countries-list": "^2.6.1",
     "eventsource-parser": "^0.1.0",
     "expiry-map": "^2.0.0",
Lines changed: 46 additions & 64 deletions
@@ -1,8 +1,8 @@
-import { Configuration, OpenAIApi } from 'azure-openai'
 import { getUserConfig, maxResponseTokenLength } from '../../config/index.mjs'
 import { getChatSystemPromptBase, pushRecord, setAbortController } from './shared.mjs'
 import { getConversationPairs } from '../../utils/get-conversation-pairs'
-import fetchAdapter from '@vespaiach/axios-fetch-adapter'
+import { fetchSSE } from '../../utils/fetch-sse'
+import { isEmpty } from 'lodash-es'
 
 /**
  * @param {Runtime.Port} port
@@ -17,72 +17,54 @@ export async function generateAnswersWithAzureOpenaiApi(port, question, session)
   prompt.unshift({ role: 'system', content: await getChatSystemPromptBase() })
   prompt.push({ role: 'user', content: question })
 
-  const openAiApi = new OpenAIApi(
-    new Configuration({
-      apiKey: config.azureApiKey,
-      azure: {
-        apiKey: config.azureApiKey,
-        endpoint: config.azureEndpoint,
-        deploymentName: config.azureDeploymentName,
+  let answer = ''
+  await fetchSSE(
+    `${config.azureEndpoint.replace(/\/$/, '')}/openai/deployments/${
+      config.azureDeploymentName
+    }/chat/completions?api-version=2023-03-15-preview`,
+    {
+      method: 'POST',
+      signal: controller.signal,
+      headers: {
+        'Content-Type': 'application/json',
+        'api-key': config.azureApiKey,
       },
-    }),
-  )
-
-  const response = await openAiApi
-    .createChatCompletion(
-      {
+      body: JSON.stringify({
        messages: prompt,
        stream: true,
        max_tokens: maxResponseTokenLength,
+      }),
+      onMessage(message) {
+        console.debug('sse message', message)
+        let data
+        try {
+          data = JSON.parse(message)
+        } catch (error) {
+          console.debug('json error', error)
+          return
+        }
+        if ('content' in data.choices[0].delta) {
+          answer += data.choices[0].delta.content
+          port.postMessage({ answer: answer, done: false, session: null })
+        }
+        if (data.choices[0].finish_reason === 'stop') {
+          pushRecord(session, question, answer)
+          console.debug('conversation history', { content: session.conversationRecords })
+          port.postMessage({ answer: null, done: true, session: session })
+        }
      },
-      {
-        signal: controller.signal,
-        responseType: 'stream',
-        adapter: fetchAdapter,
+      async onStart() {},
+      async onEnd() {
+        port.onMessage.removeListener(messageListener)
      },
-    )
-    .catch((err) => {
-      port.onMessage.removeListener(messageListener)
-      throw err
-    })
-
-  let chunkData = ''
-  const step = 1500
-  let length = 0
-  for await (const chunk of response.data) {
-    chunkData += chunk
-    length += 1
-    if (length % step !== 0 && !chunkData.endsWith('[DONE]')) continue
-
-    const lines = chunkData
-      .toString('utf8')
-      .split('\n')
-      .filter((line) => line.trim().startsWith('data: '))
-
-    let answer = ''
-    let message = ''
-    let data
-    for (const line of lines) {
-      message = line.replace(/^data: /, '')
-      try {
-        data = JSON.parse(message)
-      } catch (error) {
-        continue
-      }
-      if ('content' in data.choices[0].delta) answer += data.choices[0].delta.content
-    }
-    if (data) {
-      console.debug('sse message', data)
-      port.postMessage({ answer: answer, done: false, session: null })
-    }
-    if (message === '[DONE]') {
-      console.debug('sse message', '[DONE]')
-      pushRecord(session, question, answer)
-      console.debug('conversation history', { content: session.conversationRecords })
-      port.postMessage({ answer: null, done: true, session: session })
-      break
-    }
-  }
-
-  port.onMessage.removeListener(messageListener)
+      async onError(resp) {
+        port.onMessage.removeListener(messageListener)
+        if (resp instanceof Error) throw resp
+        const error = await resp.json().catch(() => ({}))
+        throw new Error(
+          !isEmpty(error) ? JSON.stringify(error) : `${resp.status} ${resp.statusText}`,
+        )
+      },
    },
  )
 }
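
What makes the stop work now: the Azure request is issued with plain fetch-style options through fetchSSE, and controller.signal (the controller presumably set up earlier via setAbortController from shared.mjs, visible in the imports) is attached directly to that request, so aborting it cancels the in-flight SSE stream. The removed path went through the azure-openai SDK with axios-fetch-adapter and then drained response.data in its own chunk loop, which, per the commit title, could not be stopped. The following is a minimal sketch, not the project's src/utils/fetch-sse implementation, of how a fetch-based SSE helper with this callback shape can honor the abort signal; the name and internals are assumptions:

// Sketch only: a fetch-based SSE reader whose request and read loop both
// honor the AbortSignal passed in via fetchOptions.signal.
async function fetchSSESketch(url, { onMessage, onStart, onEnd, onError, ...fetchOptions }) {
  let resp
  try {
    // controller.abort() rejects this call, and any pending reader.read(),
    // with an AbortError.
    resp = await fetch(url, fetchOptions)
  } catch (err) {
    await onError(err)
    return
  }
  if (!resp.ok) {
    await onError(resp)
    return
  }
  await onStart()
  const reader = resp.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  try {
    for (;;) {
      const { value, done } = await reader.read()
      if (done) break
      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n')
      buffer = lines.pop() // keep a possibly incomplete trailing line
      for (const line of lines) {
        const payload = line.trim()
        if (!payload.startsWith('data:')) continue
        onMessage(payload.slice('data:'.length).trim())
      }
    }
  } catch (err) {
    if (err.name !== 'AbortError') throw err // a user "stop" just ends the stream
  } finally {
    await onEnd()
  }
}

In the onMessage handler above, any non-JSON payload (such as an SSE [DONE] sentinel) is simply discarded by the JSON.parse catch, and completion is keyed on finish_reason === 'stop'.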

src/background/apis/openai-api.mjs

Lines changed: 4 additions & 2 deletions
@@ -125,8 +125,10 @@ export async function generateAnswersWithChatgptApi(port, question, session, api
         console.debug('json error', error)
         return
       }
-      if ('content' in data.choices[0].delta) answer += data.choices[0].delta.content
-      port.postMessage({ answer: answer, done: false, session: null })
+      if ('content' in data.choices[0].delta) {
+        answer += data.choices[0].delta.content
+        port.postMessage({ answer: answer, done: false, session: null })
+      }
     },
     async onStart() {},
     async onEnd() {
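
Why the added braces matter: streamed chat-completion chunks do not always carry new text; typically the first chunk's delta holds only the assistant role and the final chunk holds only a finish_reason. The old code appended content conditionally but called port.postMessage on every chunk, re-sending an unchanged (or initially empty) answer; with the guard, it posts only when the delta actually contains content. A hedged illustration of such a content-less chunk (field values are illustrative, not taken from this commit):

// Illustrative final streamed chunk: the delta is empty and only finish_reason
// is set, so without the guard the handler would post the same accumulated
// answer one extra time.
const finalChunk = {
  object: 'chat.completion.chunk',
  choices: [{ index: 0, delta: {}, finish_reason: 'stop' }],
}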
