Skip to content

Commit 7f3d235

Browse files
committed
feat: use axios
1 parent 7c4a1c9 commit 7f3d235

File tree

2 files changed

+47
-3
lines changed

2 files changed

+47
-3
lines changed

packages/plugins/robot/src/client/OpenAICompatibleProvider.ts

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@ import type {
77
import { BaseModelProvider, handleSSEStream } from '@opentiny/tiny-robot-kit'
88
import { toRaw } from 'vue'
99
import { formatMessages } from '../utils/common-utils'
10+
import { getMetaApi, META_SERVICE } from '@opentiny/tiny-engine-meta-register'
11+
import { processSSEStream } from '../js/utils'
1012

1113
type ProviderConfig = Omit<AIModelConfig, 'provider' | 'providerImplementation'>
1214

@@ -80,6 +82,47 @@ export class OpenAICompatibleProvider extends BaseModelProvider {
8082
/**
 * Streams a chat completion via the project's HTTP service (axios-backed) using
 * server-sent events, forwarding incremental chunks to the stream handler.
 *
 * @param request - Chat completion request; `request.options` may carry a
 *                  `signal` (AbortSignal) plus extra model options.
 * @param handler - Stream handler receiving parsed SSE chunks and errors.
 * @returns Resolves when the stream completes, is aborted, or errors out.
 */
async chatStream(request: ChatCompletionRequest, handler: StreamHandler): Promise<void> {
  const { signal, ...options } = request.options || {}

  try {
    // Unwrap Vue reactivity and normalize messages to the wire format.
    const messages = formatMessages(toRaw(request.messages))

    // Allow the pre-request hook to adjust/augment the payload before sending.
    const requestData = await this.beforeRequest({
      model: request.options?.model || this.config.defaultModel || this.defaultModel,
      messages,
      ...options,
      stream: true
    })

    // Axios progress events expose the *cumulative* responseText, so track how
    // much has already been forwarded and hand only the new tail to the parser.
    let lastResponseLength = 0
    const requestOptions = {
      method: 'POST',
      url: this.baseURL,
      data: requestData,
      headers: {
        'Content-Type': 'application/json',
        Accept: 'text/event-stream'
      },
      // Fix: forward the abort signal so callers can actually cancel the
      // in-flight request (previously only checked after the fact in `catch`).
      signal,
      onDownloadProgress: (progressEvent: { currentTarget: { responseText: string } }) => {
        const currentResponse = progressEvent.currentTarget.responseText
        const newData = currentResponse.substring(lastResponseLength)
        lastResponseLength = currentResponse.length
        processSSEStream(newData, handler)
      }
    }
    if (this.apiKey) {
      Object.assign(requestOptions.headers, { Authorization: `Bearer ${this.apiKey}` })
    }
    await getMetaApi(META_SERVICE.Http).stream(requestOptions)
  } catch (error: unknown) {
    // An intentional abort is not an error — swallow it silently.
    if (signal?.aborted) return
    handler.onError(error)
  }
}
122+
123+
async chatStreamWithFetch(request: ChatCompletionRequest, handler: StreamHandler): Promise<void> {
124+
const { signal, ...options } = request.options || {}
125+
83126
try {
84127
// 验证请求参数
85128
const messages = formatMessages(toRaw(request.messages))

packages/plugins/robot/src/client/index.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,12 @@ let provider: OpenAICompatibleProvider | null = null
1414
const { robotSettingState } = useRobot()
1515

1616
const beforeRequest = async (requestParams: any) => {
17-
const tools = (await useMcp().getLLMTools()) || []
18-
if (!requestParams.tools && tools.length) {
17+
const isAgentMode = config.apiUrl === '/app-center/api/ai/chat'
18+
const tools = (await useMcp().listTools()) || []
19+
if (!requestParams.tools && tools.length && !isAgentMode) {
1920
Object.assign(requestParams, { tools })
2021
}
21-
if (config.apiUrl === '/app-center/api/ai/chat') {
22+
if (isAgentMode) {
2223
requestParams.apiKey = robotSettingState.selectedModel.apiKey
2324
}
2425
requestParams.baseUrl = robotSettingState.selectedModel.baseUrl

0 commit comments

Comments
 (0)