Skip to content

Commit b08ce56

Browse files
authored
Merge pull request ChatGPTNextWeb#5819 from ConnectAI-E/fix-gemini-summary
Fix gemini summary
2 parents a392daa + b41c012 commit b08ce56

File tree

1 file changed

+9
-3
lines changed

1 file changed

+9
-3
lines changed

app/client/platforms/google.ts

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
2929
import { fetch } from "@/app/utils/stream";
3030

3131
export class GeminiProApi implements LLMApi {
32-
path(path: string): string {
32+
path(path: string, shouldStream = false): string {
3333
const accessStore = useAccessStore.getState();
3434

3535
let baseUrl = "";
@@ -51,15 +51,18 @@ export class GeminiProApi implements LLMApi {
5151
console.log("[Proxy Endpoint] ", baseUrl, path);
5252

5353
let chatPath = [baseUrl, path].join("/");
54+
if (shouldStream) {
55+
chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
56+
}
5457

55-
chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
5658
return chatPath;
5759
}
5860
extractMessage(res: any) {
5961
console.log("[Response] gemini-pro response: ", res);
6062

6163
return (
6264
res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
65+
res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
6366
res?.error?.message ||
6467
""
6568
);
@@ -166,7 +169,10 @@ export class GeminiProApi implements LLMApi {
166169
options.onController?.(controller);
167170
try {
168171
// https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
169-
const chatPath = this.path(Google.ChatPath(modelConfig.model));
172+
const chatPath = this.path(
173+
Google.ChatPath(modelConfig.model),
174+
shouldStream,
175+
);
170176

171177
const chatPayload = {
172178
method: "POST",

0 commit comments

Comments (0)