diff --git a/README.md b/README.md
index 633124ec751..d496d68edfc 100644
--- a/README.md
+++ b/README.md
@@ -245,13 +245,17 @@ To control custom models, use `+` to add a custom model, use `-` to hide a model
 Use `-all` to disable all default models, `+all` to enable all default models.
 
-### `WHITE_WEBDEV_ENDPOINTS` (可选)
+### `WHITE_WEBDEV_ENDPOINTS` (optional)
 
 You can use this option if you want to increase the number of webdav service addresses you are allowed to access, as required by the format:
 
 - Each address must be a complete endpoint
 > `https://xxxx/yyy`
 - Multiple addresses are connected by ', '
 
+### `DEFAULT_INPUT_TEMPLATE` (optional)
+
+Customize the default template used to initialize the User Input Preprocessing configuration item in Settings.
+
 ## Requirements
 
 NodeJS >= 18, Docker >= 20
diff --git a/README_CN.md b/README_CN.md
index 10b5fd03596..6811102b675 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -156,6 +156,9 @@ anthropic claude Api Url.
 
 用来控制模型列表，使用 `+` 增加一个模型，使用 `-` 来隐藏一个模型，使用 `模型名=展示名` 来自定义模型的展示名，用英文逗号隔开。
 
+### `DEFAULT_INPUT_TEMPLATE` (可选)
+自定义默认的 template，用于初始化『设置』中的『用户输入预处理』配置项
+
 ## 开发
 
 点击下方按钮，开始二次开发：
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index c8a79870c86..06119250465 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -59,9 +59,10 @@ import {
   getMessageTextContent,
   getMessageImages,
   isVisionModel,
-  compressImage,
 } from "../utils";
 
+import { compressImage } from "@/app/utils/chat";
+
 import dynamic from "next/dynamic";
 
 import { ChatControllerPool } from "../client/controller";
diff --git a/app/config/build.ts b/app/config/build.ts
index 7a93ad02cd7..b2b1ad49da1 100644
--- a/app/config/build.ts
+++ b/app/config/build.ts
@@ -1,4 +1,5 @@
 import tauriConfig from "../../src-tauri/tauri.conf.json";
+import { DEFAULT_INPUT_TEMPLATE } from "../constant";
 
 export const getBuildConfig = () => {
   if (typeof process === "undefined") {
@@ -38,6 +39,7 @@ export const getBuildConfig = () => {
     ...commitInfo,
     buildMode,
     isApp,
+    template: process.env.DEFAULT_INPUT_TEMPLATE ?? DEFAULT_INPUT_TEMPLATE,
   };
 };
diff --git a/app/config/server.ts b/app/config/server.ts
index b5d754ddedd..b7c85ce6a5f 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -34,6 +34,9 @@ declare global {
 
       // google tag manager
       GTM_ID?: string;
+
+      // custom template for preprocessing user input
+      DEFAULT_INPUT_TEMPLATE?: string;
     }
   }
 }
diff --git a/app/store/chat.ts b/app/store/chat.ts
index a5412eaa914..27a7114a3b5 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -428,14 +428,13 @@ export const useChatStore = createPersistStore(
     getMemoryPrompt() {
       const session = get().currentSession();
 
-      return {
-        role: "system",
-        content:
-          session.memoryPrompt.length > 0
-            ? Locale.Store.Prompt.History(session.memoryPrompt)
-            : "",
-        date: "",
-      } as ChatMessage;
+      if (session.memoryPrompt.length) {
+        return {
+          role: "system",
+          content: Locale.Store.Prompt.History(session.memoryPrompt),
+          date: "",
+        } as ChatMessage;
+      }
     },
 
     getMessagesWithMemory() {
@@ -471,16 +470,15 @@ export const useChatStore = createPersistStore(
           systemPrompts.at(0)?.content ?? "empty",
         );
       }
-
+      const memoryPrompt = get().getMemoryPrompt();
       // long term memory
       const shouldSendLongTermMemory =
         modelConfig.sendMemory &&
         session.memoryPrompt &&
         session.memoryPrompt.length > 0 &&
         session.lastSummarizeIndex > clearContextIndex;
-      const longTermMemoryPrompts = shouldSendLongTermMemory
-        ? [get().getMemoryPrompt()]
-        : [];
+      const longTermMemoryPrompts =
+        shouldSendLongTermMemory && memoryPrompt ? [memoryPrompt] : [];
       const longTermMemoryStartIndex = session.lastSummarizeIndex;
 
       // short term memory
@@ -605,9 +603,11 @@ export const useChatStore = createPersistStore(
           Math.max(0, n - modelConfig.historyMessageCount),
         );
       }
-
-      // add memory prompt
-      toBeSummarizedMsgs.unshift(get().getMemoryPrompt());
+      const memoryPrompt = get().getMemoryPrompt();
+      if (memoryPrompt) {
+        // add memory prompt
+        toBeSummarizedMsgs.unshift(memoryPrompt);
+      }
 
       const lastSummarizeIndex = session.messages.length;
 
diff --git a/app/store/config.ts b/app/store/config.ts
index 6f2f558a042..94cfcd8ecaa 100644
--- a/app/store/config.ts
+++ b/app/store/config.ts
@@ -1,5 +1,4 @@
 import { LLMModel } from "../client/api";
-import { isMacOS } from "../utils";
 import { getClientConfig } from "../config/client";
 import {
   DEFAULT_INPUT_TEMPLATE,
@@ -25,6 +24,8 @@ export enum Theme {
   Light = "light",
 }
 
+const config = getClientConfig();
+
 export const DEFAULT_CONFIG = {
   lastUpdate: Date.now(), // timestamp, to merge state
 
@@ -32,7 +33,7 @@ export const DEFAULT_CONFIG = {
   avatar: "1f603",
   fontSize: 14,
   theme: Theme.Auto as Theme,
-  tightBorder: !!getClientConfig()?.isApp,
+  tightBorder: !!config?.isApp,
   sendPreviewBubble: true,
   enableAutoGenerateTitle: true,
   sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
@@ -56,7 +57,7 @@ export const DEFAULT_CONFIG = {
     historyMessageCount: 4,
     compressMessageLengthThreshold: 1000,
     enableInjectSystemPrompts: true,
-    template: DEFAULT_INPUT_TEMPLATE,
+    template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
   },
 };
 
@@ -132,7 +133,7 @@ export const useAppConfig = createPersistStore(
   }),
   {
     name: StoreKey.Config,
-    version: 3.8,
+    version: 3.9,
 
     migrate(persistedState, version) {
       const state = persistedState as ChatConfig;
@@ -163,6 +164,13 @@ export const useAppConfig = createPersistStore(
         state.lastUpdate = Date.now();
       }
 
+      if (version < 3.9) {
+        state.modelConfig.template =
+          state.modelConfig.template !== DEFAULT_INPUT_TEMPLATE
+            ? state.modelConfig.template
+            : config?.template ?? DEFAULT_INPUT_TEMPLATE;
+      }
+
       return state as any;
     },
   },
diff --git a/app/utils.ts b/app/utils.ts
index 64e3c5406d1..8f7adc7e2a2 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -83,48 +83,6 @@ export async function downloadAs(text: string, filename: string) {
   }
 }
 
-export function compressImage(file: File, maxSize: number): Promise<string> {
-  return new Promise((resolve, reject) => {
-    const reader = new FileReader();
-    reader.onload = (readerEvent: any) => {
-      const image = new Image();
-      image.onload = () => {
-        let canvas = document.createElement("canvas");
-        let ctx = canvas.getContext("2d");
-        let width = image.width;
-        let height = image.height;
-        let quality = 0.9;
-        let dataUrl;
-
-        do {
-          canvas.width = width;
-          canvas.height = height;
-          ctx?.clearRect(0, 0, canvas.width, canvas.height);
-          ctx?.drawImage(image, 0, 0, width, height);
-          dataUrl = canvas.toDataURL("image/jpeg", quality);
-
-          if (dataUrl.length < maxSize) break;
-
-          if (quality > 0.5) {
-            // Prioritize quality reduction
-            quality -= 0.1;
-          } else {
-            // Then reduce the size
-            width *= 0.9;
-            height *= 0.9;
-          }
-        } while (dataUrl.length > maxSize);
-
-        resolve(dataUrl);
-      };
-      image.onerror = reject;
-      image.src = readerEvent.target.result;
-    };
-    reader.onerror = reject;
-    reader.readAsDataURL(file);
-  });
-}
-
 export function readFromFile() {
   return new Promise((res, rej) => {
     const fileInput = document.createElement("input");
diff --git a/app/utils/chat.ts b/app/utils/chat.ts
new file mode 100644
index 00000000000..991d06b7320
--- /dev/null
+++ b/app/utils/chat.ts
@@ -0,0 +1,54 @@
+import heic2any from "heic2any";
+
+export function compressImage(file: File, maxSize: number): Promise<string> {
+  return new Promise((resolve, reject) => {
+    const reader = new FileReader();
+    reader.onload = (readerEvent: any) => {
+      const image = new Image();
+      image.onload = () => {
+        let canvas = document.createElement("canvas");
+        let ctx = canvas.getContext("2d");
+        let width = image.width;
+        let height = image.height;
+        let quality = 0.9;
+        let dataUrl;
+
+        do {
+          canvas.width = width;
+          canvas.height = height;
+          ctx?.clearRect(0, 0, canvas.width, canvas.height);
+          ctx?.drawImage(image, 0, 0, width, height);
+          dataUrl = canvas.toDataURL("image/jpeg", quality);
+
+          if (dataUrl.length < maxSize) break;
+
+          if (quality > 0.5) {
+            // Prioritize quality reduction
+            quality -= 0.1;
+          } else {
+            // Then reduce the size
+            width *= 0.9;
+            height *= 0.9;
+          }
+        } while (dataUrl.length > maxSize);
+
+        resolve(dataUrl);
+      };
+      image.onerror = reject;
+      image.src = readerEvent.target.result;
+    };
+    reader.onerror = reject;
+
+    if (file.type.includes("heic")) {
+      heic2any({ blob: file, toType: "image/jpeg" })
+        .then((blob) => {
+          reader.readAsDataURL(blob as Blob);
+        })
+        .catch((e) => {
+          reject(e);
+        });
+    } else {
+      reader.readAsDataURL(file);
+    }
+  });
+}
diff --git a/app/utils/cloud/upstash.ts b/app/utils/cloud/upstash.ts
index bf6147bd467..8d84adbde3d 100644
--- a/app/utils/cloud/upstash.ts
+++ b/app/utils/cloud/upstash.ts
@@ -93,14 +93,17 @@ export function createUpstashClient(store: SyncStore) {
       }
 
       let url;
-      if (proxyUrl.length > 0 || proxyUrl === "/") {
-        let u = new URL(proxyUrl + "/api/upstash/" + path);
+      const pathPrefix = "/api/upstash/";
+
+      try {
+        let u = new URL(proxyUrl + pathPrefix + path);
         // add query params
         u.searchParams.append("endpoint", config.endpoint);
         url = u.toString();
-      } else {
-        url = "/api/upstash/" + path + "?endpoint=" + config.endpoint;
+      } catch (e) {
+        url = pathPrefix + path + "?endpoint=" + config.endpoint;
       }
+
       return url;
     },
   };
diff --git a/package.json b/package.json
index e64730eac0d..4d06b0b14e4 100644
--- a/package.json
+++ b/package.json
@@ -24,6 +24,7 @@
     "@vercel/speed-insights": "^1.0.2",
     "emoji-picker-react": "^4.9.2",
     "fuse.js": "^7.0.0",
+    "heic2any": "^0.0.4",
    "html-to-image": "^1.11.11",
     "mermaid": "^10.6.1",
     "nanoid": "^5.0.3",
diff --git a/yarn.lock b/yarn.lock
index 09270d14fb0..72df8cafc54 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3669,6 +3669,11 @@ heap@^0.2.6:
   resolved "https://registry.npmmirror.com/heap/-/heap-0.2.7.tgz#1e6adf711d3f27ce35a81fe3b7bd576c2260a8fc"
   integrity sha512-2bsegYkkHO+h/9MGbn6KWcE45cHZgPANo5LXF7EvWdT0yT2EguSVO1nDgU5c8+ZOPwp2vMNa7YFsJhVcDR9Sdg==
 
+heic2any@^0.0.4:
+  version "0.0.4"
+  resolved "https://registry.npmmirror.com/heic2any/-/heic2any-0.0.4.tgz#eddb8e6fec53c8583a6e18b65069bb5e8d19028a"
+  integrity sha512-3lLnZiDELfabVH87htnRolZ2iehX9zwpRyGNz22GKXIu0fznlblf0/ftppXKNqS26dqFSeqfIBhAmAj/uSp0cA==
+
 highlight.js@~11.7.0:
   version "11.7.0"
   resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-11.7.0.tgz#3ff0165bc843f8c9bce1fd89e2fda9143d24b11e"
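
The `DEFAULT_INPUT_TEMPLATE` changes thread a build-time environment variable through `getBuildConfig()` into the persisted model config, with a `version < 3.9` migration that only overwrites templates the user never customized. Below is a minimal sketch of that migration rule, not code from the patch; the `"{{input}}"` stock default is an assumption about `app/constant.ts`.

```ts
// Sketch of the v3.9 template migration logic (illustrative, not part of the patch).
const DEFAULT_INPUT_TEMPLATE = "{{input}}"; // assumed stock default from app/constant.ts

function migrateTemplate(
  persistedTemplate: string,
  buildTimeTemplate?: string, // process.env.DEFAULT_INPUT_TEMPLATE baked in at build time
): string {
  // A template the user already customised is preserved; only the stock
  // default is swapped for the build-time template, when one is provided.
  return persistedTemplate !== DEFAULT_INPUT_TEMPLATE
    ? persistedTemplate
    : buildTimeTemplate ?? DEFAULT_INPUT_TEMPLATE;
}

// An untouched default adopts the env-provided template:
console.log(migrateTemplate("{{input}}", "Reply in English.\n{{input}}"));
// A customised template survives the migration unchanged:
console.log(migrateTemplate("My prefix: {{input}}", "Reply in English.\n{{input}}"));
```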
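The relocated `compressImage` in `app/utils/chat.ts` routes HEIC files through `heic2any` before drawing them to a canvas and iteratively lowering JPEG quality, then dimensions, until the data URL fits `maxSize`. A minimal usage sketch, assuming a plain file input on the page; the element id and size limit are illustrative.

```ts
import { compressImage } from "@/app/utils/chat";

// Illustrative wiring of compressImage to a file input (not part of the patch).
async function onImagePicked(event: Event) {
  const input = event.target as HTMLInputElement;
  const file = input.files?.[0];
  if (!file) return;

  // 256 KB ceiling for the resulting data URL; HEIC photos are converted
  // to JPEG by heic2any inside compressImage before the canvas resize loop.
  const dataUrl = await compressImage(file, 256 * 1024);
  console.log("compressed data URL length:", dataUrl.length);
}

document
  .querySelector<HTMLInputElement>("#image-input")
  ?.addEventListener("change", onImagePicked);
```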
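The upstash client change replaces the `proxyUrl` branching with a `try`/`catch` around `new URL(...)`: an absolute proxy URL parses and keeps its origin, while an empty or relative value throws and falls back to a same-origin `/api/upstash/` path. A standalone sketch of that behaviour, with illustrative names and endpoint values:

```ts
// Behavioural sketch of the new URL-building fallback (names are illustrative).
function buildUpstashUrl(path: string, proxyUrl: string, endpoint: string): string {
  const pathPrefix = "/api/upstash/";
  try {
    // Succeeds for absolute proxy URLs such as "https://proxy.example.com".
    const u = new URL(proxyUrl + pathPrefix + path);
    u.searchParams.append("endpoint", endpoint); // endpoint is percent-encoded here
    return u.toString();
  } catch {
    // new URL() throws for "" or a bare "/", so fall back to a relative path.
    return pathPrefix + path + "?endpoint=" + endpoint;
  }
}

// No proxy configured: relative fallback path with the endpoint appended.
console.log(buildUpstashUrl("get/backup", "", "https://usw1-example.upstash.io"));
// Absolute proxy configured: full URL on the proxy origin.
console.log(
  buildUpstashUrl("get/backup", "https://proxy.example.com", "https://usw1-example.upstash.io"),
);
```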