Commit 3e35e36

Revert "chore(core): log full error response when an error occurs at calling AI model service(#1269)" (#1274)
1 parent 08abf2d commit 3e35e36

File tree

  • packages/core/src/ai-model/service-caller

1 file changed: +1 -24 lines changed

packages/core/src/ai-model/service-caller/index.ts

Lines changed: 1 addition & 24 deletions
@@ -131,29 +131,6 @@ async function createChatClient({
       [MIDSCENE_API_TYPE]: AIActionTypeValue.toString(),
     },
     dangerouslyAllowBrowser: true,
-    /**
-     * By default, OpenAI uses the built-in fetch function of Node.js as the implementation of fetch.
-     * However, the built-in error handling logic of OpenAI does not throw out all the text in the HTTP response.
-     * This will prevent users from seeing the most crucial error messages, especially when using third-party models.
-     */
-    fetch: async (...args) => {
-      const result = await fetch(...args);
-      if (!result.ok) {
-        const clone = result.clone();
-        try {
-          const text = await clone.text();
-          console.log(
-            `call AI model service error with status code ${result.status} and response text: ${text}`,
-          );
-        } catch (e) {
-          console.log(
-            `call AI model service error with status code ${result.status} but get response text failed.`,
-            e,
-          );
-        }
-      }
-      return result;
-    },
   });
 }

@@ -491,7 +468,7 @@ export async function callAI(
       isStreamed: !!isStreaming,
     };
   } catch (e: any) {
-    console.error('call AI model service error', e);
+    console.error(' call AI error', e);
     const newError = new Error(
       `failed to call ${isStreaming ? 'streaming ' : ''}AI model service: ${e.message}. Trouble shooting: https://midscenejs.com/model-provider.html`,
       {
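
For reference, below is a minimal standalone sketch of the pattern this commit reverts: the openai Node SDK accepts a custom fetch implementation, so a wrapper can log the full body of a failed HTTP response before the SDK's own error handling discards it. The client options and the OPENAI_API_KEY environment variable are illustrative only, not the project's actual configuration.

import OpenAI from 'openai';

// Sketch only: wrap fetch so non-2xx responses are logged with their full body.
const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY, // illustrative; not the project's real config
  fetch: async (...args: Parameters<typeof fetch>) => {
    const result = await fetch(...args);
    if (!result.ok) {
      // Clone the response so the SDK can still consume the original body stream.
      const text = await result
        .clone()
        .text()
        .catch(() => '<failed to read response body>');
      console.log(`AI model service error ${result.status}: ${text}`);
    }
    return result;
  },
});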
