packages/core/src/ai-model/service-caller (1 file changed, +24 −1)

@@ -131,6 +131,29 @@ async function createChatClient({
       [MIDSCENE_API_TYPE]: AIActionTypeValue.toString(),
     },
     dangerouslyAllowBrowser: true,
+    /**
+     * By default, the OpenAI SDK uses Node.js's built-in fetch as its fetch implementation.
+     * However, the SDK's built-in error handling does not surface the full text of the HTTP response,
+     * which prevents users from seeing the most crucial error messages, especially with third-party models.
+     */
+    fetch: async (...args) => {
+      const result = await fetch(...args);
+      if (!result.ok) {
+        const clone = result.clone();
+        try {
+          const text = await clone.text();
+          console.log(
+            `call AI model service error with status code ${result.status} and response text: ${text}`,
+          );
+        } catch (e) {
+          console.log(
+            `call AI model service error with status code ${result.status} but get response text failed.`,
+            e,
+          );
+        }
+      }
+      return result;
+    },
   });
 }
 
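For readers who want to try the same trick outside Midscene, here is a minimal, self-contained sketch of passing a custom fetch to the OpenAI Node SDK. The `loggingFetch` name, the `apiKey` source, and the standalone client construction are illustrative assumptions; only the `fetch` constructor option and the clone-then-read pattern come from the diff above.

```ts
// A minimal sketch (not Midscene's actual createChatClient): pass a custom
// fetch to the OpenAI SDK so that non-OK responses are logged in full before
// the SDK's own error handling discards the response body.
import OpenAI from 'openai';

const loggingFetch: typeof fetch = async (...args) => {
  const result = await fetch(...args);
  if (!result.ok) {
    // Clone first: a Response body can only be read once, and the SDK still
    // needs to consume the original.
    const text = await result
      .clone()
      .text()
      .catch(() => '<failed to read response text>');
    console.log(
      `call AI model service error with status code ${result.status} and response text: ${text}`,
    );
  }
  return result;
};

const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY, // placeholder credential source
  fetch: loggingFetch,
});
```

The clone-then-read step matters because a `Response` body is a one-shot stream: reading it directly in the wrapper would leave the SDK with nothing to parse.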
@@ -468,7 +491,7 @@ export async function callAI(
     isStreamed: !!isStreaming,
   };
 } catch (e: any) {
-  console.error('call AI error', e);
+  console.error('call AI model service error', e);
   const newError = new Error(
     `failed to call ${isStreaming ? 'streaming ' : ''}AI model service: ${e.message}. Trouble shooting: https://midscenejs.com/model-provider.html`,
     {
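The second hunk is truncated right after the `Error` constructor's options object opens, so the full wrapping logic is not visible here. As a rough, hedged sketch of that pattern, assuming the options object carries `{ cause: e }` and the wrapped error is rethrown (neither is shown in the diff, and `callModelService` is a hypothetical stand-in), the catch block would look something like this:

```ts
// Hedged sketch only: `callModelService` is a hypothetical stand-in, and the
// `{ cause: e }` option plus the rethrow are assumptions, since the diff above
// is cut off before the Error options object closes.
async function callWithDiagnostics<T>(
  callModelService: () => Promise<T>,
  isStreaming: boolean,
): Promise<T> {
  try {
    return await callModelService();
  } catch (e: any) {
    console.error('call AI model service error', e);
    throw new Error(
      `failed to call ${isStreaming ? 'streaming ' : ''}AI model service: ${e.message}. Trouble shooting: https://midscenejs.com/model-provider.html`,
      { cause: e }, // assumed: keep the original error reachable via `cause`
    );
  }
}
```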