Merge pull request #5565 from ConnectAI-E/feature/using-tauri-fetch
Feat: using Tauri fetch API in App
lloydzhou authored Oct 9, 2024
2 parents 8ad63a6 + 6c1cbe1 commit 5b4d423
Showing 25 changed files with 344 additions and 115 deletions.
2 changes: 2 additions & 0 deletions README.md
@@ -100,6 +100,7 @@ For enterprise inquiries, please contact: **[email protected]**

## What's New

- 🚀 v2.15.4 The app now supports calling LLM APIs through Tauri's fetch for better security! [#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 Now supports Plugins! Read this: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Now supports Artifacts & SD
- 🚀 v2.10.1 support Google Gemini Pro model.
@@ -137,6 +138,7 @@ For enterprise inquiries, please contact: **[email protected]**

## 最新动态 (What's New)

- 🚀 v2.15.4 The client now supports calling LLM APIs directly through Tauri, which is more secure! [#5379](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5379)
- 🚀 v2.15.0 Plugins are now supported! Learn more: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Artifacts & SD are now supported.
- 🚀 v2.10.1 The Gemini Pro model is now supported.
2 changes: 2 additions & 0 deletions app/client/platforms/alibaba.ts
@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;
@@ -178,6 +179,7 @@ export class QwenApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
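Each platform client below repeats the same two-line change shown here for Alibaba/Qwen: import the Tauri-aware `fetch` from `app/utils/stream` and pass it to `fetchEventSource`, so SSE streaming goes through Tauri in the desktop build and through `window.fetch` on the web. A minimal sketch of that wiring, assuming the payload shape and handlers roughly mirror the clients in this diff:

```ts
import { fetchEventSource } from "@microsoft/fetch-event-source";
import { fetch } from "@/app/utils/stream"; // Tauri-aware fetch added by this PR

async function streamChat(
  chatPath: string,
  body: unknown,
  headers: Record<string, string>,
) {
  const controller = new AbortController();
  await fetchEventSource(chatPath, {
    // Override the library's default window.fetch with the Tauri-aware one,
    // so the desktop app streams through Tauri instead of the webview fetch.
    fetch: fetch as any,
    method: "POST",
    body: JSON.stringify(body),
    headers,
    signal: controller.signal,
    async onopen(res) {
      // The platform clients clear their request timeout here.
      console.log("[SSE] opened with status", res.status);
    },
    onmessage(msg) {
      console.log("[SSE] chunk", msg.data); // incremental model output
    },
    onerror(err) {
      controller.abort();
      throw err; // rethrow so fetchEventSource stops retrying
    },
  });
}
```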
6 changes: 2 additions & 4 deletions app/client/platforms/anthropic.ts
@@ -8,7 +8,7 @@ import {
ChatMessageTool,
} from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { ANTHROPIC_BASE_URL } from "@/app/constant";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
@@ -388,9 +388,7 @@ export class ClaudeApi implements LLMApi {
if (baseUrl.trim().length === 0) {
const isApp = !!getClientConfig()?.isApp;

baseUrl = isApp
? DEFAULT_API_HOST + "/api/proxy/anthropic"
: ApiPath.Anthropic;
baseUrl = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
}

if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
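This base-URL change is the heart of the PR: with `DEFAULT_API_HOST` removed (see `app/constant.ts` further down), desktop builds call each provider's endpoint directly instead of routing through the hosted `/api/proxy/...` host, while web builds keep using the relative `ApiPath` proxy route. A condensed sketch of the resolution logic; the `"/api/anthropic"` value standing in for `ApiPath.Anthropic` is an assumption:

```ts
const ANTHROPIC_BASE_URL = "https://api.anthropic.com"; // from app/constant.ts

function resolveAnthropicBaseUrl(configured: string, isApp: boolean): string {
  let baseUrl = configured.trim();
  if (baseUrl.length === 0) {
    // Desktop app: talk to Anthropic directly via the Tauri fetch.
    // Web build: go through the app's own proxy route.
    baseUrl = isApp ? ANTHROPIC_BASE_URL : "/api/anthropic";
  }
  if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
    baseUrl = "https://" + baseUrl;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }
  return baseUrl;
}
```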
2 changes: 2 additions & 0 deletions app/client/platforms/baidu.ts
@@ -24,6 +24,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;
@@ -197,6 +198,7 @@ export class ErnieApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
2 changes: 2 additions & 0 deletions app/client/platforms/bytedance.ts
@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;
@@ -165,6 +166,7 @@ export class DoubaoApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
6 changes: 4 additions & 2 deletions app/client/platforms/google.ts
@@ -9,7 +9,7 @@ import {
} from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { GEMINI_BASE_URL } from "@/app/constant";
import Locale from "../../locales";
import {
EventStreamContentType,
@@ -22,6 +22,7 @@ import {
isVisionModel,
} from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";
import { fetch } from "@/app/utils/stream";

export class GeminiProApi implements LLMApi {
path(path: string): string {
@@ -34,7 +35,7 @@ export class GeminiProApi implements LLMApi {

const isApp = !!getClientConfig()?.isApp;
if (baseUrl.length === 0) {
baseUrl = isApp ? DEFAULT_API_HOST + `/api/proxy/google` : ApiPath.Google;
baseUrl = isApp ? GEMINI_BASE_URL : ApiPath.Google;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
@@ -213,6 +214,7 @@ export class GeminiProApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
6 changes: 4 additions & 2 deletions app/client/platforms/iflytek.ts
@@ -1,7 +1,7 @@
"use client";
import {
ApiPath,
DEFAULT_API_HOST,
IFLYTEK_BASE_URL,
Iflytek,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -22,6 +22,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

import { RequestPayload } from "./openai";

@@ -40,7 +41,7 @@ export class SparkApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Iflytek;
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
baseUrl = isApp ? IFLYTEK_BASE_URL : apiPath;
}

if (baseUrl.endsWith("/")) {
@@ -149,6 +150,7 @@ export class SparkApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
4 changes: 2 additions & 2 deletions app/client/platforms/moonshot.ts
@@ -2,7 +2,7 @@
// Azure and OpenAI use the same models, so they share the same LLMApi.
import {
ApiPath,
DEFAULT_API_HOST,
MOONSHOT_BASE_URL,
Moonshot,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -40,7 +40,7 @@ export class MoonshotApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Moonshot;
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
baseUrl = isApp ? MOONSHOT_BASE_URL : apiPath;
}

if (baseUrl.endsWith("/")) {
4 changes: 2 additions & 2 deletions app/client/platforms/openai.ts
@@ -2,7 +2,7 @@
// Azure and OpenAI use the same models, so they share the same LLMApi.
import {
ApiPath,
DEFAULT_API_HOST,
OPENAI_BASE_URL,
DEFAULT_MODELS,
OpenaiPath,
Azure,
@@ -98,7 +98,7 @@ export class ChatGPTApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
baseUrl = isApp ? OPENAI_BASE_URL : apiPath;
}

if (baseUrl.endsWith("/")) {
8 changes: 4 additions & 4 deletions app/client/platforms/tencent.ts
@@ -1,5 +1,5 @@
"use client";
import { ApiPath, DEFAULT_API_HOST, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ApiPath, TENCENT_BASE_URL, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

import {
@@ -22,6 +22,7 @@ import mapKeys from "lodash-es/mapKeys";
import mapValues from "lodash-es/mapValues";
import isArray from "lodash-es/isArray";
import isObject from "lodash-es/isObject";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;
@@ -70,9 +71,7 @@ export class HunyuanApi implements LLMApi {

if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
baseUrl = isApp
? DEFAULT_API_HOST + "/api/proxy/tencent"
: ApiPath.Tencent;
baseUrl = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
}

if (baseUrl.endsWith("/")) {
@@ -179,6 +178,7 @@ export class HunyuanApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
1 change: 1 addition & 0 deletions app/components/chat.tsx
@@ -1815,6 +1815,7 @@ function _Chat() {
{message?.tools?.map((tool) => (
<div
key={tool.id}
title={tool?.errorMsg}
className={styles["chat-message-tool"]}
>
{tool.isError === false ? (
1 change: 0 additions & 1 deletion app/constant.ts
@@ -11,7 +11,6 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

export const STABILITY_BASE_URL = "https://api.stability.ai";

export const DEFAULT_API_HOST = "https://api.nextchat.dev";
export const OPENAI_BASE_URL = "https://api.openai.com";
export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";

53 changes: 21 additions & 32 deletions app/store/access.ts
@@ -1,9 +1,18 @@
import {
ApiPath,
DEFAULT_API_HOST,
GoogleSafetySettingsThreshold,
ServiceProvider,
StoreKey,
ApiPath,
OPENAI_BASE_URL,
ANTHROPIC_BASE_URL,
GEMINI_BASE_URL,
BAIDU_BASE_URL,
BYTEDANCE_BASE_URL,
ALIBABA_BASE_URL,
TENCENT_BASE_URL,
MOONSHOT_BASE_URL,
STABILITY_BASE_URL,
IFLYTEK_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@@ -15,45 +24,25 @@ let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

const isApp = getClientConfig()?.buildMode === "export";

const DEFAULT_OPENAI_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;
const DEFAULT_OPENAI_URL = isApp ? OPENAI_BASE_URL : ApiPath.OpenAI;

const DEFAULT_GOOGLE_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/google"
: ApiPath.Google;
const DEFAULT_GOOGLE_URL = isApp ? GEMINI_BASE_URL : ApiPath.Google;

const DEFAULT_ANTHROPIC_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/anthropic"
: ApiPath.Anthropic;
const DEFAULT_ANTHROPIC_URL = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;

const DEFAULT_BAIDU_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/baidu"
: ApiPath.Baidu;
const DEFAULT_BAIDU_URL = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;

const DEFAULT_BYTEDANCE_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/bytedance"
: ApiPath.ByteDance;
const DEFAULT_BYTEDANCE_URL = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;

const DEFAULT_ALIBABA_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/alibaba"
: ApiPath.Alibaba;
const DEFAULT_ALIBABA_URL = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;

const DEFAULT_TENCENT_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/tencent"
: ApiPath.Tencent;
const DEFAULT_TENCENT_URL = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;

const DEFAULT_MOONSHOT_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/moonshot"
: ApiPath.Moonshot;
const DEFAULT_MOONSHOT_URL = isApp ? MOONSHOT_BASE_URL : ApiPath.Moonshot;

const DEFAULT_STABILITY_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/stability"
: ApiPath.Stability;
const DEFAULT_STABILITY_URL = isApp ? STABILITY_BASE_URL : ApiPath.Stability;

const DEFAULT_IFLYTEK_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/iflytek"
: ApiPath.Iflytek;
const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;

const DEFAULT_ACCESS_STATE = {
accessCode: "",
1 change: 1 addition & 0 deletions app/store/chat.ts
@@ -37,6 +37,7 @@ export type ChatMessageTool = {
};
content?: string;
isError?: boolean;
errorMsg?: string;
};

export type ChatMessage = RequestMessage & {
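The new optional `errorMsg` field pairs with the `title={tool?.errorMsg}` attribute added to `app/components/chat.tsx` above: hovering a failed tool call now reveals the error text. A sketch of the type plus a hypothetical spot where the field gets populated (the nested `function` shape and the handler are assumptions, not the store's exact code):

```ts
export type ChatMessageTool = {
  id: string;
  type?: string;
  function?: {
    name: string;
    arguments?: string;
  };
  content?: string;
  isError?: boolean;
  errorMsg?: string; // surfaced as a hover tooltip on the failed tool entry
};

// Hypothetical error handling around a tool invocation.
async function runTool(tool: ChatMessageTool, invoke: () => Promise<string>) {
  try {
    tool.content = await invoke();
    tool.isError = false;
  } catch (e) {
    tool.isError = true;
    tool.errorMsg = e instanceof Error ? e.message : String(e);
  }
}
```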
2 changes: 1 addition & 1 deletion app/store/plugin.ts
@@ -7,7 +7,7 @@ import yaml from "js-yaml";
import { adapter, getOperationId } from "../utils";
import { useAccessStore } from "./access";

const isApp = getClientConfig()?.isApp;
const isApp = getClientConfig()?.isApp !== false;

export type Plugin = {
id: string;
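The tightened guard treats a missing client config the same as the desktop app: only an explicit `isApp === false` now selects the web path. A quick behavior sketch of the changed expression (the motivation is an inference from the code, not stated in the PR):

```ts
import { getClientConfig } from "../config/client";

const isApp = getClientConfig()?.isApp !== false;

// getClientConfig() === undefined      -> isApp === true
// getClientConfig()?.isApp === true    -> isApp === true
// getClientConfig()?.isApp === false   -> isApp === false
//
// Previously, `const isApp = getClientConfig()?.isApp;` left the first case
// undefined, which behaved as falsy wherever isApp was used as a boolean.
```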
3 changes: 1 addition & 2 deletions app/store/sync.ts
@@ -12,7 +12,6 @@ import { downloadAs, readFromFile } from "../utils";
import { showToast } from "../components/ui-lib";
import Locale from "../locales";
import { createSyncClient, ProviderType } from "../utils/cloud";
import { corsPath } from "../utils/cors";

export interface WebDavConfig {
server: string;
@@ -26,7 +25,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
const DEFAULT_SYNC_STATE = {
provider: ProviderType.WebDAV,
useProxy: true,
proxyUrl: corsPath(ApiPath.Cors),
proxyUrl: ApiPath.Cors as string,

webdav: {
endpoint: "",
28 changes: 11 additions & 17 deletions app/utils.ts
@@ -2,8 +2,9 @@ import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib";
import Locale from "./locales";
import { RequestMessage } from "./client/api";
import { ServiceProvider, REQUEST_TIMEOUT_MS } from "./constant";
import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
import { ServiceProvider } from "./constant";
// import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
import { fetch as tauriStreamFetch } from "./utils/stream";

export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language
@@ -292,30 +293,23 @@ export function fetch(
options?: Record<string, unknown>,
): Promise<any> {
if (window.__TAURI__) {
const payload = options?.body || options?.data;
return tauriFetch(url, {
...options,
body:
payload &&
({
type: "Text",
payload,
} as any),
timeout: ((options?.timeout as number) || REQUEST_TIMEOUT_MS) / 1000,
responseType:
options?.responseType == "text" ? ResponseType.Text : ResponseType.JSON,
} as any);
return tauriStreamFetch(url, options);
}
return window.fetch(url, options);
}

export function adapter(config: Record<string, unknown>) {
const { baseURL, url, params, ...rest } = config;
const { baseURL, url, params, data: body, ...rest } = config;
const path = baseURL ? `${baseURL}${url}` : url;
const fetchUrl = params
? `${path}?${new URLSearchParams(params as any).toString()}`
: path;
return fetch(fetchUrl as string, { ...rest, responseType: "text" });
return fetch(fetchUrl as string, { ...rest, body }).then((res) => {
const { status, headers, statusText } = res;
return res
.text()
.then((data: string) => ({ status, statusText, headers, data }));
});
}

export function safeLocalStorage(): {
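Together, the rewritten `fetch` wrapper and `adapter` mean plugin requests now run through the streaming Tauri fetch in the desktop app and resolve with an axios-like `{ status, statusText, headers, data }` object. An illustrative call of the `adapter` defined above; the endpoint and token are placeholders, and passing `method`/`headers` through via the `...rest` spread is assumed from the surrounding code:

```ts
const response = await adapter({
  baseURL: "https://api.example.com", // hypothetical endpoint
  url: "/v1/models",
  method: "GET",
  params: { page: "1" }, // appended as ?page=1
  headers: { Authorization: "Bearer <token>" },
});

// Desktop app: the request went through tauriStreamFetch; browser: window.fetch.
// Either way the resolved value is axios-shaped:
console.log(response.status, response.statusText);
console.log(response.data); // raw response body as text
```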