diff --git a/README.md b/README.md
index be5e91d65c7..5887369ff40 100644
--- a/README.md
+++ b/README.md
@@ -100,6 +100,7 @@ For enterprise inquiries, please contact: **business@nextchat.dev**
## What's New
+- 🚀 v2.15.4 The desktop app now calls LLM APIs through Tauri's native fetch, for better security! [#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 Now supports Plugins! Read this: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Now supports Artifacts & SD
- 🚀 v2.10.1 support Google Gemini Pro model.
@@ -137,6 +138,7 @@ For enterprise inquiries, please contact: **business@nextchat.dev**
## What's New
+- 🚀 v2.15.4 The desktop client can now call LLM APIs directly through Tauri, which is more secure! [#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 Plugins are now supported! Learn more: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Artifacts & SD are now supported.
- 🚀 v2.10.1 The Gemini Pro model is now supported.
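Taken together, the changes below remove the hosted `DEFAULT_API_HOST` relay from the desktop build: when running inside Tauri, each platform client and `app/store/access.ts` now point at the provider's own base URL (`OPENAI_BASE_URL`, `ANTHROPIC_BASE_URL`, and so on) and stream through a native command, while the web build keeps using the Next.js `/api` proxy paths. A minimal sketch of that selection rule, assuming `ApiPath.OpenAI` resolves to `"/api/openai"` (its value is not shown in this diff) and using an illustrative helper name:

```ts
// A minimal sketch (not the project's exact code) of the URL-selection rule
// this PR applies across the platform clients and app/store/access.ts.
const OPENAI_BASE_URL = "https://api.openai.com";
const ApiPath = { OpenAI: "/api/openai" } as const; // assumed value, for illustration

function resolveOpenAIBaseUrl(isApp: boolean, userBaseUrl = ""): string {
  if (userBaseUrl.trim().length > 0) return userBaseUrl; // a user override always wins
  // desktop (Tauri) builds talk to the provider directly; web builds use the proxy route
  return isApp ? OPENAI_BASE_URL : ApiPath.OpenAI;
}

console.log(resolveOpenAIBaseUrl(true));  // "https://api.openai.com"
console.log(resolveOpenAIBaseUrl(false)); // "/api/openai"
```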
diff --git a/app/client/platforms/alibaba.ts b/app/client/platforms/alibaba.ts
index 4ade9ebb98f..86229a14705 100644
--- a/app/client/platforms/alibaba.ts
+++ b/app/client/platforms/alibaba.ts
@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -178,6 +179,7 @@ export class QwenApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+ fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
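The same one-line change recurs in every streaming client in this PR (Alibaba above, then Baidu, ByteDance, Google, iFlytek, and Tencent below): the Tauri-aware `fetch` from `app/utils/stream` is handed to `fetchEventSource` via its `fetch` option, so SSE requests also use the native transport in the desktop build. A reduced sketch of that wiring; `streamChat`, the headers, and the handlers are simplified for illustration, while the `fetch: ... as any` cast mirrors the diff:

```ts
import {
  fetchEventSource,
  EventStreamContentType,
} from "@fortaine/fetch-event-source";
import { fetch as tauriFetch } from "@/app/utils/stream"; // module added in this PR

function streamChat(chatPath: string, body: unknown, controller: AbortController) {
  return fetchEventSource(chatPath, {
    fetch: tauriFetch as any, // the library expects typeof window.fetch
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
    signal: controller.signal,
    async onopen(res) {
      const contentType = res.headers.get("content-type");
      if (!res.ok || !contentType?.includes(EventStreamContentType)) {
        throw new Error(`unexpected response: ${res.status}`);
      }
    },
    onmessage(msg) {
      console.log("SSE chunk:", msg.data);
    },
    onerror(err) {
      controller.abort();
      throw err; // rethrow to stop automatic retries
    },
  });
}
```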
diff --git a/app/client/platforms/anthropic.ts b/app/client/platforms/anthropic.ts
index 7826838a61e..1a83bd53aa1 100644
--- a/app/client/platforms/anthropic.ts
+++ b/app/client/platforms/anthropic.ts
@@ -8,7 +8,7 @@ import {
ChatMessageTool,
} from "@/app/store";
import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
+import { ANTHROPIC_BASE_URL } from "@/app/constant";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
@@ -388,9 +388,7 @@ export class ClaudeApi implements LLMApi {
if (baseUrl.trim().length === 0) {
const isApp = !!getClientConfig()?.isApp;
- baseUrl = isApp
- ? DEFAULT_API_HOST + "/api/proxy/anthropic"
- : ApiPath.Anthropic;
+ baseUrl = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
}
if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
diff --git a/app/client/platforms/baidu.ts b/app/client/platforms/baidu.ts
index c360417c602..2511a696b9b 100644
--- a/app/client/platforms/baidu.ts
+++ b/app/client/platforms/baidu.ts
@@ -24,6 +24,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -197,6 +198,7 @@ export class ErnieApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+ fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
diff --git a/app/client/platforms/bytedance.ts b/app/client/platforms/bytedance.ts
index a6e2d426ee3..000a9e278db 100644
--- a/app/client/platforms/bytedance.ts
+++ b/app/client/platforms/bytedance.ts
@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -165,6 +166,7 @@ export class DoubaoApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+ fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 3c2607271bf..dcf300a0f1b 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -9,7 +9,7 @@ import {
} from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
+import { GEMINI_BASE_URL } from "@/app/constant";
import Locale from "../../locales";
import {
EventStreamContentType,
@@ -22,6 +22,7 @@ import {
isVisionModel,
} from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";
+import { fetch } from "@/app/utils/stream";
export class GeminiProApi implements LLMApi {
path(path: string): string {
@@ -34,7 +35,7 @@ export class GeminiProApi implements LLMApi {
const isApp = !!getClientConfig()?.isApp;
if (baseUrl.length === 0) {
- baseUrl = isApp ? DEFAULT_API_HOST + `/api/proxy/google` : ApiPath.Google;
+ baseUrl = isApp ? GEMINI_BASE_URL : ApiPath.Google;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
@@ -213,6 +214,7 @@ export class GeminiProApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+ fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
diff --git a/app/client/platforms/iflytek.ts b/app/client/platforms/iflytek.ts
index 3931672e661..55a39d0ccca 100644
--- a/app/client/platforms/iflytek.ts
+++ b/app/client/platforms/iflytek.ts
@@ -1,7 +1,7 @@
"use client";
import {
ApiPath,
- DEFAULT_API_HOST,
+ IFLYTEK_BASE_URL,
Iflytek,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -22,6 +22,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
+import { fetch } from "@/app/utils/stream";
import { RequestPayload } from "./openai";
@@ -40,7 +41,7 @@ export class SparkApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Iflytek;
- baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+ baseUrl = isApp ? IFLYTEK_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {
@@ -149,6 +150,7 @@ export class SparkApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+ fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
diff --git a/app/client/platforms/moonshot.ts b/app/client/platforms/moonshot.ts
index 6b197974571..e0ef3494fe7 100644
--- a/app/client/platforms/moonshot.ts
+++ b/app/client/platforms/moonshot.ts
@@ -2,7 +2,7 @@
// Azure and OpenAI use the same models, so they share the same LLMApi.
import {
ApiPath,
- DEFAULT_API_HOST,
+ MOONSHOT_BASE_URL,
Moonshot,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -40,7 +40,7 @@ export class MoonshotApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Moonshot;
- baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+ baseUrl = isApp ? MOONSHOT_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 0a8d6203ae5..a2263361143 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -2,7 +2,7 @@
// Azure and OpenAI use the same models, so they share the same LLMApi.
import {
ApiPath,
- DEFAULT_API_HOST,
+ OPENAI_BASE_URL,
DEFAULT_MODELS,
OpenaiPath,
Azure,
@@ -98,7 +98,7 @@ export class ChatGPTApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
- baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+ baseUrl = isApp ? OPENAI_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {
diff --git a/app/client/platforms/tencent.ts b/app/client/platforms/tencent.ts
index 3e8f1a45957..3610fac0a48 100644
--- a/app/client/platforms/tencent.ts
+++ b/app/client/platforms/tencent.ts
@@ -1,5 +1,5 @@
"use client";
-import { ApiPath, DEFAULT_API_HOST, REQUEST_TIMEOUT_MS } from "@/app/constant";
+import { ApiPath, TENCENT_BASE_URL, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
@@ -22,6 +22,7 @@ import mapKeys from "lodash-es/mapKeys";
import mapValues from "lodash-es/mapValues";
import isArray from "lodash-es/isArray";
import isObject from "lodash-es/isObject";
+import { fetch } from "@/app/utils/stream";
export interface OpenAIListModelResponse {
object: string;
@@ -70,9 +71,7 @@ export class HunyuanApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
- baseUrl = isApp
- ? DEFAULT_API_HOST + "/api/proxy/tencent"
- : ApiPath.Tencent;
+ baseUrl = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
}
if (baseUrl.endsWith("/")) {
@@ -179,6 +178,7 @@ export class HunyuanApi implements LLMApi {
controller.signal.onabort = finish;
fetchEventSource(chatPath, {
+ fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index 3d519dee722..b45d36f9587 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -1815,6 +1815,7 @@ function _Chat() {
{message?.tools?.map((tool) => (
{tool.isError === false ? (
diff --git a/app/constant.ts b/app/constant.ts
index a54a973daa6..a06b8f05062 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -11,7 +11,6 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
export const STABILITY_BASE_URL = "https://api.stability.ai";
-export const DEFAULT_API_HOST = "https://api.nextchat.dev";
export const OPENAI_BASE_URL = "https://api.openai.com";
export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
diff --git a/app/store/access.ts b/app/store/access.ts
index 9fcd227e7c0..dec3a725886 100644
--- a/app/store/access.ts
+++ b/app/store/access.ts
@@ -1,9 +1,18 @@
import {
- ApiPath,
- DEFAULT_API_HOST,
GoogleSafetySettingsThreshold,
ServiceProvider,
StoreKey,
+ ApiPath,
+ OPENAI_BASE_URL,
+ ANTHROPIC_BASE_URL,
+ GEMINI_BASE_URL,
+ BAIDU_BASE_URL,
+ BYTEDANCE_BASE_URL,
+ ALIBABA_BASE_URL,
+ TENCENT_BASE_URL,
+ MOONSHOT_BASE_URL,
+ STABILITY_BASE_URL,
+ IFLYTEK_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@@ -15,45 +24,25 @@ let fetchState = 0; // 0 not fetch, 1 fetching, 2 done
const isApp = getClientConfig()?.buildMode === "export";
-const DEFAULT_OPENAI_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/openai"
- : ApiPath.OpenAI;
+const DEFAULT_OPENAI_URL = isApp ? OPENAI_BASE_URL : ApiPath.OpenAI;
-const DEFAULT_GOOGLE_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/google"
- : ApiPath.Google;
+const DEFAULT_GOOGLE_URL = isApp ? GEMINI_BASE_URL : ApiPath.Google;
-const DEFAULT_ANTHROPIC_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/anthropic"
- : ApiPath.Anthropic;
+const DEFAULT_ANTHROPIC_URL = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
-const DEFAULT_BAIDU_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/baidu"
- : ApiPath.Baidu;
+const DEFAULT_BAIDU_URL = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;
-const DEFAULT_BYTEDANCE_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/bytedance"
- : ApiPath.ByteDance;
+const DEFAULT_BYTEDANCE_URL = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;
-const DEFAULT_ALIBABA_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/alibaba"
- : ApiPath.Alibaba;
+const DEFAULT_ALIBABA_URL = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;
-const DEFAULT_TENCENT_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/tencent"
- : ApiPath.Tencent;
+const DEFAULT_TENCENT_URL = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
-const DEFAULT_MOONSHOT_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/moonshot"
- : ApiPath.Moonshot;
+const DEFAULT_MOONSHOT_URL = isApp ? MOONSHOT_BASE_URL : ApiPath.Moonshot;
-const DEFAULT_STABILITY_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/stability"
- : ApiPath.Stability;
+const DEFAULT_STABILITY_URL = isApp ? STABILITY_BASE_URL : ApiPath.Stability;
-const DEFAULT_IFLYTEK_URL = isApp
- ? DEFAULT_API_HOST + "/api/proxy/iflytek"
- : ApiPath.Iflytek;
+const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;
const DEFAULT_ACCESS_STATE = {
accessCode: "",
diff --git a/app/store/chat.ts b/app/store/chat.ts
index 968d8cb6422..931cad76883 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -37,6 +37,7 @@ export type ChatMessageTool = {
};
content?: string;
isError?: boolean;
+ errorMsg?: string;
};
export type ChatMessage = RequestMessage & {
diff --git a/app/store/plugin.ts b/app/store/plugin.ts
index 40abdc8d9f4..b3d9f6d8ce6 100644
--- a/app/store/plugin.ts
+++ b/app/store/plugin.ts
@@ -7,7 +7,7 @@ import yaml from "js-yaml";
import { adapter, getOperationId } from "../utils";
import { useAccessStore } from "./access";
-const isApp = getClientConfig()?.isApp;
+const isApp = getClientConfig()?.isApp !== false;
export type Plugin = {
id: string;
diff --git a/app/store/sync.ts b/app/store/sync.ts
index 9db60d5f410..8477c1e4ba7 100644
--- a/app/store/sync.ts
+++ b/app/store/sync.ts
@@ -12,7 +12,6 @@ import { downloadAs, readFromFile } from "../utils";
import { showToast } from "../components/ui-lib";
import Locale from "../locales";
import { createSyncClient, ProviderType } from "../utils/cloud";
-import { corsPath } from "../utils/cors";
export interface WebDavConfig {
server: string;
@@ -26,7 +25,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
const DEFAULT_SYNC_STATE = {
provider: ProviderType.WebDAV,
useProxy: true,
- proxyUrl: corsPath(ApiPath.Cors),
+ proxyUrl: ApiPath.Cors as string,
webdav: {
endpoint: "",
diff --git a/app/utils.ts b/app/utils.ts
index 6b2f65952c7..b3d27cbce2c 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -2,8 +2,9 @@ import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib";
import Locale from "./locales";
import { RequestMessage } from "./client/api";
-import { ServiceProvider, REQUEST_TIMEOUT_MS } from "./constant";
-import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
+import { ServiceProvider } from "./constant";
+// import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
+import { fetch as tauriStreamFetch } from "./utils/stream";
export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language
@@ -292,30 +293,23 @@ export function fetch(
options?: Record<string, unknown>,
): Promise<any> {
if (window.__TAURI__) {
- const payload = options?.body || options?.data;
- return tauriFetch(url, {
- ...options,
- body:
- payload &&
- ({
- type: "Text",
- payload,
- } as any),
- timeout: ((options?.timeout as number) || REQUEST_TIMEOUT_MS) / 1000,
- responseType:
- options?.responseType == "text" ? ResponseType.Text : ResponseType.JSON,
- } as any);
+ return tauriStreamFetch(url, options);
}
return window.fetch(url, options);
}
export function adapter(config: Record<string, unknown>) {
- const { baseURL, url, params, ...rest } = config;
+ const { baseURL, url, params, data: body, ...rest } = config;
const path = baseURL ? `${baseURL}${url}` : url;
const fetchUrl = params
? `${path}?${new URLSearchParams(params as any).toString()}`
: path;
- return fetch(fetchUrl as string, { ...rest, responseType: "text" });
+ return fetch(fetchUrl as string, { ...rest, body }).then((res) => {
+ const { status, headers, statusText } = res;
+ return res
+ .text()
+ .then((data: string) => ({ status, statusText, headers, data }));
+ });
}
export function safeLocalStorage(): {
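With the `@tauri-apps/api/http` call gone, the axios-style `adapter` used by plugins now issues a plain fetch and reshapes the `Response` into the `{ status, statusText, headers, data }` object axios-like callers expect, with `data` returned as raw text. A small self-contained sketch of that flow; the `AdapterConfig` type and `adapterSketch` name are assumptions for illustration:

```ts
type AdapterConfig = {
  baseURL?: string;
  url: string;
  params?: Record<string, string>;
  data?: string;
  method?: string;
  headers?: Record<string, string>;
};

async function adapterSketch(config: AdapterConfig) {
  const { baseURL, url, params, data: body, ...rest } = config;
  const path = baseURL ? `${baseURL}${url}` : url;
  const fetchUrl = params ? `${path}?${new URLSearchParams(params)}` : path;
  const res = await fetch(fetchUrl, { ...rest, body });
  const { status, statusText, headers } = res;
  // callers get text back and decide how to parse `data` themselves
  return { status, statusText, headers, data: await res.text() };
}
```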
diff --git a/app/utils/chat.ts b/app/utils/chat.ts
index 7f3bb23c58e..46f23263895 100644
--- a/app/utils/chat.ts
+++ b/app/utils/chat.ts
@@ -10,6 +10,7 @@ import {
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "./format";
+import { fetch as tauriFetch } from "./stream";
export function compressImage(file: Blob, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
@@ -221,7 +222,7 @@ export function stream(
),
)
.then((res) => {
- const content = JSON.stringify(res.data);
+ let content = res.data || res?.statusText;
if (res.status >= 300) {
return Promise.reject(content);
}
@@ -236,7 +237,11 @@ export function stream(
return content;
})
.catch((e) => {
- options?.onAfterTool?.({ ...tool, isError: true });
+ options?.onAfterTool?.({
+ ...tool,
+ isError: true,
+ errorMsg: e.toString(),
+ });
return e.toString();
})
.then((content) => ({
@@ -287,6 +292,7 @@ export function stream(
REQUEST_TIMEOUT_MS,
);
fetchEventSource(chatPath, {
+ fetch: tauriFetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
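Together with the new `errorMsg` field on `ChatMessageTool` (app/store/chat.ts above), the `catch` branch here keeps the failure text on the tool record so the UI can surface it, for example as a tooltip. A hedged sketch of that flow; `runTool`, `exec`, and the callback wiring are simplified stand-ins, not the project's stream() helper:

```ts
type ChatMessageTool = {
  id: string;
  content?: string;
  isError?: boolean;
  errorMsg?: string;
};

async function runTool(
  tool: ChatMessageTool,
  exec: () => Promise<string>,
  onAfterTool: (t: ChatMessageTool) => void,
): Promise<string> {
  try {
    const content = await exec();
    onAfterTool({ ...tool, content, isError: false });
    return content;
  } catch (e: any) {
    // keep the failure text so the UI can show why the tool call failed
    onAfterTool({ ...tool, isError: true, errorMsg: e.toString() });
    return e.toString();
  }
}
```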
diff --git a/app/utils/cors.ts b/app/utils/cors.ts
deleted file mode 100644
index f5e5ce6f0a2..00000000000
--- a/app/utils/cors.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import { getClientConfig } from "../config/client";
-import { DEFAULT_API_HOST } from "../constant";
-
-export function corsPath(path: string) {
- const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";
-
- if (baseUrl === "" && path === "") {
- return "";
- }
- if (!path.startsWith("/")) {
- path = "/" + path;
- }
-
- if (!path.endsWith("/")) {
- path += "/";
- }
-
- return `${baseUrl}${path}`;
-}
diff --git a/app/utils/stream.ts b/app/utils/stream.ts
new file mode 100644
index 00000000000..313ec4dd5b8
--- /dev/null
+++ b/app/utils/stream.ts
@@ -0,0 +1,104 @@
+// use a Tauri command to send the request
+// see src-tauri/src/stream.rs and src-tauri/src/main.rs
+// 1. invoke('stream_fetch', {url, method, headers, body}) to get the response status and headers.
+// 2. listen to the `stream-response` event repeatedly to receive the body chunks.
+
+type ResponseEvent = {
+ id: number;
+ payload: {
+ request_id: number;
+ status?: number;
+ chunk?: number[];
+ };
+};
+
+type StreamResponse = {
+ request_id: number;
+ status: number;
+ status_text: string;
+ headers: Record<string, string>;
+};
+
+export function fetch(url: string, options?: RequestInit): Promise<any> {
+ if (window.__TAURI__) {
+ const {
+ signal,
+ method = "GET",
+ headers: _headers = {},
+ body = [],
+ } = options || {};
+ let unlisten: Function | undefined;
+ let request_id = 0;
+ const ts = new TransformStream();
+ const writer = ts.writable.getWriter();
+
+ let closed = false;
+ const close = () => {
+ if (closed) return;
+ closed = true;
+ unlisten && unlisten();
+ writer.ready.then(() => {
+ writer.close().catch((e) => console.error(e));
+ });
+ };
+
+ if (signal) {
+ signal.addEventListener("abort", () => close());
+ }
+ // @ts-ignore 2. listen for response events repeatedly and write each chunk to Response.body
+ window.__TAURI__.event
+ .listen("stream-response", (e: ResponseEvent) => {
+ const { request_id: rid, chunk, status } = e?.payload || {};
+ if (request_id != rid) {
+ return;
+ }
+ if (chunk) {
+ writer.ready.then(() => {
+ writer.write(new Uint8Array(chunk));
+ });
+ } else if (status === 0) {
+ // end of body
+ close();
+ }
+ })
+ .then((u: Function) => (unlisten = u));
+
+ const headers: Record<string, string> = {
+ Accept: "application/json, text/plain, */*",
+ "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
+ "User-Agent": navigator.userAgent,
+ };
+ for (const item of new Headers(_headers || {})) {
+ headers[item[0]] = item[1];
+ }
+ return window.__TAURI__
+ .invoke("stream_fetch", {
+ method: method.toUpperCase(),
+ url,
+ headers,
+ // TODO FormData
+ body:
+ typeof body === "string"
+ ? Array.from(new TextEncoder().encode(body))
+ : [],
+ })
+ .then((res: StreamResponse) => {
+ request_id = res.request_id;
+ const { status, status_text: statusText, headers } = res;
+ const response = new Response(ts.readable, {
+ status,
+ statusText,
+ headers,
+ });
+ if (status >= 300) {
+ setTimeout(close, 100);
+ }
+ return response;
+ })
+ .catch((e) => {
+ console.error("stream error", e);
+ throw e;
+ });
+ }
+ return window.fetch(url, options);
+}
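Because the Tauri branch returns a standard `Response` whose body is fed by a `TransformStream`, callers consume it exactly like a streaming browser fetch. A usage sketch assuming the desktop build (where `window.__TAURI__` exists); `readStream` is a hypothetical consumer, not project code:

```ts
import { fetch as tauriFetch } from "@/app/utils/stream";

async function readStream(url: string) {
  const res = await tauriFetch(url, { method: "GET" });
  console.log(res.status, res.statusText);

  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break; // the Rust side emits status 0 to close the stream
    console.log("chunk:", decoder.decode(value, { stream: true }));
  }
}
```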
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index 47d12e1190b..c9baffc0acc 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -348,9 +348,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
[[package]]
name = "bytes"
-version = "1.4.0"
+version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
+checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"
dependencies = [
"serde",
]
@@ -942,9 +942,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]]
name = "form_urlencoded"
-version = "1.1.0"
+version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
dependencies = [
"percent-encoding",
]
@@ -970,9 +970,9 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
+checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
[[package]]
name = "futures-executor"
@@ -987,9 +987,9 @@ dependencies = [
[[package]]
name = "futures-io"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
+checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
[[package]]
name = "futures-lite"
@@ -1008,9 +1008,9 @@ dependencies = [
[[package]]
name = "futures-macro"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
+checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
@@ -1019,21 +1019,21 @@ dependencies = [
[[package]]
name = "futures-sink"
-version = "0.3.29"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817"
+checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
[[package]]
name = "futures-task"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65"
+checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
[[package]]
name = "futures-util"
-version = "0.3.28"
+version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
+checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
dependencies = [
"futures-core",
"futures-io",
@@ -1555,9 +1555,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
-version = "0.3.0"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -1986,6 +1986,10 @@ checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
name = "nextchat"
version = "0.1.0"
dependencies = [
+ "bytes",
+ "futures-util",
+ "percent-encoding",
+ "reqwest",
"serde",
"serde_json",
"tauri",
@@ -2281,9 +2285,9 @@ checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd"
[[package]]
name = "percent-encoding"
-version = "2.2.0"
+version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "phf"
@@ -2545,9 +2549,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
[[package]]
name = "proc-macro2"
-version = "1.0.58"
+version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa1fb82fc0c281dd9671101b66b771ebbe1eaf967b96ac8740dcba4b70005ca8"
+checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = [
"unicode-ident",
]
@@ -3889,9 +3893,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"
[[package]]
name = "url"
-version = "2.3.1"
+version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
dependencies = [
"form_urlencoded",
"idna",
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index 387584491ba..8a11c3b6f98 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -37,6 +37,10 @@ tauri = { version = "1.5.4", features = [ "http-all",
"window-unminimize",
] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
+percent-encoding = "2.3.1"
+reqwest = "0.11.18"
+futures-util = "0.3.30"
+bytes = "1.7.2"
[features]
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs
index ed3ec32f37b..d04969c043b 100644
--- a/src-tauri/src/main.rs
+++ b/src-tauri/src/main.rs
@@ -1,8 +1,11 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
+mod stream;
+
fn main() {
tauri::Builder::default()
+ .invoke_handler(tauri::generate_handler![stream::stream_fetch])
.plugin(tauri_plugin_window_state::Builder::default().build())
.run(tauri::generate_context!())
.expect("error while running tauri application");
diff --git a/src-tauri/src/stream.rs b/src-tauri/src/stream.rs
new file mode 100644
index 00000000000..d2c0726b0b4
--- /dev/null
+++ b/src-tauri/src/stream.rs
@@ -0,0 +1,134 @@
+//
+//
+
+use std::time::Duration;
+use std::error::Error;
+use std::sync::atomic::{AtomicU32, Ordering};
+use std::collections::HashMap;
+use futures_util::{StreamExt};
+use reqwest::Client;
+use reqwest::header::{HeaderName, HeaderMap};
+
+static REQUEST_COUNTER: AtomicU32 = AtomicU32::new(0);
+
+#[derive(Debug, Clone, serde::Serialize)]
+pub struct StreamResponse {
+ request_id: u32,
+ status: u16,
+ status_text: String,
+ headers: HashMap<String, String>
+}
+
+#[derive(Clone, serde::Serialize)]
+pub struct EndPayload {
+ request_id: u32,
+ status: u16,
+}
+
+#[derive(Clone, serde::Serialize)]
+pub struct ChunkPayload {
+ request_id: u32,
+ chunk: bytes::Bytes,
+}
+
+#[tauri::command]
+pub async fn stream_fetch(
+ window: tauri::Window,
+ method: String,
+ url: String,
+ headers: HashMap<String, String>,
+ body: Vec<u8>,
+) -> Result<StreamResponse, String> {
+
+ let event_name = "stream-response";
+ let request_id = REQUEST_COUNTER.fetch_add(1, Ordering::SeqCst);
+
+ let mut _headers = HeaderMap::new();
+ for (key, value) in &headers {
+ _headers.insert(key.parse::<HeaderName>().unwrap(), value.parse().unwrap());
+ }
+
+ // println!("method: {:?}", method);
+ // println!("url: {:?}", url);
+ // println!("headers: {:?}", headers);
+ // println!("headers: {:?}", _headers);
+
+ let method = method.parse::<reqwest::Method>().map_err(|err| format!("failed to parse method: {}", err))?;
+ let client = Client::builder()
+ .default_headers(_headers)
+ .redirect(reqwest::redirect::Policy::limited(3))
+ .connect_timeout(Duration::new(3, 0))
+ .build()
+ .map_err(|err| format!("failed to generate client: {}", err))?;
+
+ let mut request = client.request(
+ method.clone(),
+ url.parse::<reqwest::Url>().map_err(|err| format!("failed to parse url: {}", err))?
+ );
+
+ if method == reqwest::Method::POST || method == reqwest::Method::PUT || method == reqwest::Method::PATCH {
+ let body = bytes::Bytes::from(body);
+ // println!("body: {:?}", body);
+ request = request.body(body);
+ }
+
+ // println!("client: {:?}", client);
+ // println!("request: {:?}", request);
+
+ let response_future = request.send();
+
+ let res = response_future.await;
+ let response = match res {
+ Ok(res) => {
+ // get response and emit to client
+ let mut headers = HashMap::new();
+ for (name, value) in res.headers() {
+ headers.insert(
+ name.as_str().to_string(),
+ std::str::from_utf8(value.as_bytes()).unwrap().to_string()
+ );
+ }
+ let status = res.status().as_u16();
+
+ tauri::async_runtime::spawn(async move {
+ let mut stream = res.bytes_stream();
+
+ while let Some(chunk) = stream.next().await {
+ match chunk {
+ Ok(bytes) => {
+ // println!("chunk: {:?}", bytes);
+ if let Err(e) = window.emit(event_name, ChunkPayload{ request_id, chunk: bytes }) {
+ println!("Failed to emit chunk payload: {:?}", e);
+ }
+ }
+ Err(err) => {
+ println!("Error chunk: {:?}", err);
+ }
+ }
+ }
+ if let Err(e) = window.emit(event_name, EndPayload{ request_id, status: 0 }) {
+ println!("Failed to emit end payload: {:?}", e);
+ }
+ });
+
+ StreamResponse {
+ request_id,
+ status,
+ status_text: "OK".to_string(),
+ headers,
+ }
+ }
+ Err(err) => {
+ println!("Error response: {:?}", err.source().expect("REASON").to_string());
+ StreamResponse {
+ request_id,
+ status: 599,
+ status_text: err.source().expect("REASON").to_string(),
+ headers: HashMap::new(),
+ }
+ }
+ };
+ // println!("Response: {:?}", response);
+ Ok(response)
+}
+
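From the webview's side, the command above is just another `invoke` call: it resolves with the `StreamResponse` struct (its fields keep their snake_case names) and then emits `stream-response` events whose `chunk` arrives as a plain byte array, with `status: 0` marking end of body. A hedged sketch of calling it directly with the Tauri v1 JS API, outside the wrapper in app/utils/stream.ts; `fetchViaCommand` is hypothetical and request-id filtering is omitted for brevity:

```ts
import { invoke } from "@tauri-apps/api/tauri";
import { listen } from "@tauri-apps/api/event";

type StreamResponse = {
  request_id: number;
  status: number;
  status_text: string;
  headers: Record<string, string>;
};

async function fetchViaCommand(url: string) {
  // listen first so no chunks are missed; a real caller should match request_id
  const unlisten = await listen<{ request_id: number; status?: number; chunk?: number[] }>(
    "stream-response",
    (event) => {
      const { chunk, status } = event.payload;
      if (chunk) console.log("got", chunk.length, "bytes");
      else if (status === 0) unlisten(); // end-of-body marker from stream.rs
    },
  );

  const res = await invoke<StreamResponse>("stream_fetch", {
    method: "GET",
    url,
    headers: { Accept: "*/*" },
    body: [], // Vec<u8> on the Rust side; empty for GET
  });
  console.log(res.status, res.status_text, res.headers);
}
```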
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index eb0d411cba6..cc137ee8afd 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -9,7 +9,7 @@
},
"package": {
"productName": "NextChat",
- "version": "2.15.3"
+ "version": "2.15.4"
},
"tauri": {
"allowlist": {