From 7eda14f13882be635c9e6e5b8077617df8c5339b Mon Sep 17 00:00:00 2001
From: DDMeaqua
Date: Wed, 13 Nov 2024 14:24:44 +0800
Subject: [PATCH 1/3] fix: [#5308] gemini conversation summary
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/client/platforms/google.ts | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 53ff00aeed0..d5aa93af196 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -51,8 +51,10 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);

     let chatPath = [baseUrl, path].join("/");
+    if (!chatPath.includes("gemini-pro")) {
+      chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
+    }

-    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     return chatPath;
   }
   extractMessage(res: any) {
@@ -60,6 +62,7 @@ export class GeminiProApi implements LLMApi {

     return (
       res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
+      res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
       res?.error?.message ||
       ""
     );
@@ -167,6 +170,7 @@ export class GeminiProApi implements LLMApi {
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
       const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      console.log("[Chat Path] ", chatPath);

       const chatPayload = {
         method: "POST",

From 0628ddfc6f36479650d50281e3fa0ba1a847f777 Mon Sep 17 00:00:00 2001
From: DDMeaqua
Date: Wed, 13 Nov 2024 14:27:41 +0800
Subject: [PATCH 2/3] chore: update

---
 app/client/platforms/google.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index d5aa93af196..30f35359e89 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -170,7 +170,6 @@ export class GeminiProApi implements LLMApi {
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
       const chatPath = this.path(Google.ChatPath(modelConfig.model));
-      console.log("[Chat Path] ", chatPath);

       const chatPayload = {
         method: "POST",

From b41c012d27d5495bec12f6aa6f9537ebb6873083 Mon Sep 17 00:00:00 2001
From: DDMeaqua
Date: Wed, 13 Nov 2024 15:12:46 +0800
Subject: [PATCH 3/3] chore: shouldStream

---
 app/client/platforms/google.ts | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 30f35359e89..a7bce4fc2d0 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";

 export class GeminiProApi implements LLMApi {
-  path(path: string): string {
+  path(path: string, shouldStream = false): string {
     const accessStore = useAccessStore.getState();

     let baseUrl = "";
@@ -51,7 +51,7 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);

     let chatPath = [baseUrl, path].join("/");
-    if (!chatPath.includes("gemini-pro")) {
+    if (shouldStream) {
       chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     }

@@ -169,7 +169,10 @@ export class GeminiProApi implements LLMApi {
     options.onController?.(controller);
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
-      const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      const chatPath = this.path(
+        Google.ChatPath(modelConfig.model),
+        shouldStream,
+      );

       const chatPayload = {
         method: "POST",