diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx
index 2084cbb37..5db66537c 100644
--- a/app/components/chat/BaseChat.tsx
+++ b/app/components/chat/BaseChat.tsx
@@ -119,6 +119,9 @@ export const BaseChat = React.forwardRef(
   useEffect(() => {
     // Load API keys from cookies on component mount
+
+    let parsedApiKeys: Record<string, string> | undefined = {};
+
     try {
       const storedApiKeys = Cookies.get('apiKeys');
@@ -127,6 +130,7 @@ export const BaseChat = React.forwardRef(
 
         if (typeof parsedKeys === 'object' && parsedKeys !== null) {
           setApiKeys(parsedKeys);
+          parsedApiKeys = parsedKeys;
         }
       }
     } catch (error) {
@@ -155,7 +159,7 @@ export const BaseChat = React.forwardRef(
       Cookies.remove('providers');
     }
 
-    initializeModelList(providerSettings).then((modelList) => {
+    initializeModelList({ apiKeys: parsedApiKeys, providerSettings }).then((modelList) => {
       setModelList(modelList);
     });
diff --git a/app/components/settings/debug/DebugTab.tsx b/app/components/settings/debug/DebugTab.tsx
index cf2341b55..b0cde7dd7 100644
--- a/app/components/settings/debug/DebugTab.tsx
+++ b/app/components/settings/debug/DebugTab.tsx
@@ -2,6 +2,7 @@ import React, { useCallback, useEffect, useState } from 'react';
 import { useSettings } from '~/lib/hooks/useSettings';
 import commit from '~/commit.json';
 import { toast } from 'react-toastify';
+import { providerBaseUrlEnvKeys } from '~/utils/constants';
 
 interface ProviderStatus {
   name: string;
@@ -236,7 +237,7 @@ const checkProviderStatus = async (url: string | null, providerName: string): Pr
   }
 
   // Try different endpoints based on provider
-  const checkUrls = [`${url}/api/health`, `${url}/v1/models`];
+  const checkUrls = [`${url}/api/health`, url.endsWith('v1') ? `${url}/models` : `${url}/v1/models`];
   console.log(`[Debug] Checking additional endpoints:`, checkUrls);
 
   const results = await Promise.all(
@@ -321,14 +322,16 @@ export default function DebugTab() {
       .filter(([, provider]) => LOCAL_PROVIDERS.includes(provider.name))
       .map(async ([, provider]) => {
         const envVarName =
-          provider.name.toLowerCase() === 'ollama'
-            ? 'OLLAMA_API_BASE_URL'
-            : provider.name.toLowerCase() === 'lmstudio'
-              ? 'LMSTUDIO_API_BASE_URL'
-              : `REACT_APP_${provider.name.toUpperCase()}_URL`;
+          providerBaseUrlEnvKeys[provider.name].baseUrlKey || `REACT_APP_${provider.name.toUpperCase()}_URL`;
 
         // Access environment variables through import.meta.env
-        const url = import.meta.env[envVarName] || provider.settings.baseUrl || null; // Ensure baseUrl is used
+        let settingsUrl = provider.settings.baseUrl;
+
+        if (settingsUrl && settingsUrl.trim().length === 0) {
+          settingsUrl = undefined;
+        }
+
+        const url = settingsUrl || import.meta.env[envVarName] || null; // Ensure baseUrl is used
         console.log(`[Debug] Using URL for ${provider.name}:`, url, `(from ${envVarName})`);
 
         const status = await checkProviderStatus(url, provider.name);
diff --git a/app/components/settings/providers/ProvidersTab.tsx b/app/components/settings/providers/ProvidersTab.tsx
index 281b4c80d..20e66efea 100644
--- a/app/components/settings/providers/ProvidersTab.tsx
+++ b/app/components/settings/providers/ProvidersTab.tsx
@@ -7,6 +7,7 @@ import { logStore } from '~/lib/stores/logs';
 
 // Import a default fallback icon
 import DefaultIcon from '/icons/Default.svg'; // Adjust the path as necessary
+import { providerBaseUrlEnvKeys } from '~/utils/constants';
 
 export default function ProvidersTab() {
   const { providers, updateProviderSettings, isLocalModel } = useSettings();
@@ -47,60 +48,77 @@ export default function ProvidersTab() {
         className="w-full bg-white dark:bg-bolt-elements-background-depth-4 relative px-2 py-1.5 rounded-md focus:outline-none placeholder-bolt-elements-textTertiary text-bolt-elements-textPrimary dark:text-bolt-elements-textPrimary border border-bolt-elements-borderColor"
       />
 
-      {filteredProviders.map((provider) => (
-
-
- { - // Fallback to default icon on error - e.currentTarget.src = DefaultIcon; + {filteredProviders.map((provider) => { + const envBaseUrlKey = providerBaseUrlEnvKeys[provider.name].baseUrlKey; + const envBaseUrl = envBaseUrlKey ? import.meta.env[envBaseUrlKey] : undefined; + + return ( +
+
+
+ { + // Fallback to default icon on error + e.currentTarget.src = DefaultIcon; + }} + alt={`${provider.name} icon`} + className="w-6 h-6 dark:invert" + /> + {provider.name} +
+ { + updateProviderSettings(provider.name, { ...provider.settings, enabled }); + + if (enabled) { + logStore.logProvider(`Provider ${provider.name} enabled`, { provider: provider.name }); + } else { + logStore.logProvider(`Provider ${provider.name} disabled`, { provider: provider.name }); + } }} - alt={`${provider.name} icon`} - className="w-6 h-6 dark:invert" /> - {provider.name}
- { - updateProviderSettings(provider.name, { ...provider.settings, enabled }); + {/* Base URL input for configurable providers */} + {URL_CONFIGURABLE_PROVIDERS.includes(provider.name) && provider.settings.enabled && ( +
+ {envBaseUrl && ( + + )} + + { + let newBaseUrl: string | undefined = e.target.value; + + if (newBaseUrl && newBaseUrl.trim().length === 0) { + newBaseUrl = undefined; + } - if (enabled) { - logStore.logProvider(`Provider ${provider.name} enabled`, { provider: provider.name }); - } else { - logStore.logProvider(`Provider ${provider.name} disabled`, { provider: provider.name }); - } - }} - /> + updateProviderSettings(provider.name, { ...provider.settings, baseUrl: newBaseUrl }); + logStore.logProvider(`Base URL updated for ${provider.name}`, { + provider: provider.name, + baseUrl: newBaseUrl, + }); + }} + placeholder={`Enter ${provider.name} base URL`} + className="w-full bg-white dark:bg-bolt-elements-background-depth-4 relative px-2 py-1.5 rounded-md focus:outline-none placeholder-bolt-elements-textTertiary text-bolt-elements-textPrimary dark:text-bolt-elements-textPrimary border border-bolt-elements-borderColor" + /> +
+ )}
- {/* Base URL input for configurable providers */} - {URL_CONFIGURABLE_PROVIDERS.includes(provider.name) && provider.settings.enabled && ( -
- - { - const newBaseUrl = e.target.value; - updateProviderSettings(provider.name, { ...provider.settings, baseUrl: newBaseUrl }); - logStore.logProvider(`Base URL updated for ${provider.name}`, { - provider: provider.name, - baseUrl: newBaseUrl, - }); - }} - placeholder={`Enter ${provider.name} base URL`} - className="w-full bg-white dark:bg-bolt-elements-background-depth-4 relative px-2 py-1.5 rounded-md focus:outline-none placeholder-bolt-elements-textTertiary text-bolt-elements-textPrimary dark:text-bolt-elements-textPrimary border border-bolt-elements-borderColor" - /> -
- )} -
- ))} + ); + })}
   );
 }
diff --git a/app/entry.server.tsx b/app/entry.server.tsx
index a44917f02..5e92d21ec 100644
--- a/app/entry.server.tsx
+++ b/app/entry.server.tsx
@@ -14,7 +14,7 @@ export default async function handleRequest(
   remixContext: EntryContext,
   _loadContext: AppLoadContext,
 ) {
-  await initializeModelList();
+  await initializeModelList({});
 
   const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
     signal: request.signal,
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index e82d08eb8..83b46462f 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -3,6 +3,8 @@
  * Preventing TS checks with files presented in the video for a better presentation.
  */
 import { env } from 'node:process';
+import type { IProviderSetting } from '~/types/model';
+import { getProviderBaseUrlAndKey } from '~/utils/constants';
 
 export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
   /**
@@ -15,7 +17,20 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
     return userApiKeys[provider];
   }
 
-  // Fall back to environment variables
+  const { apiKey } = getProviderBaseUrlAndKey({
+    provider,
+    apiKeys: userApiKeys,
+    providerSettings: undefined,
+    serverEnv: cloudflareEnv as any,
+    defaultBaseUrlKey: '',
+    defaultApiTokenKey: '',
+  });
+
+  if (apiKey) {
+    return apiKey;
+  }
+
+  // Fall back to hardcoded environment variables names
   switch (provider) {
     case 'Anthropic':
       return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
@@ -50,16 +65,43 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
   }
 }
 
-export function getBaseURL(cloudflareEnv: Env, provider: string) {
+export function getBaseURL(cloudflareEnv: Env, provider: string, providerSettings?: Record<string, IProviderSetting>) {
+  const { baseUrl } = getProviderBaseUrlAndKey({
+    provider,
+    apiKeys: {},
+    providerSettings,
+    serverEnv: cloudflareEnv as any,
+    defaultBaseUrlKey: '',
+    defaultApiTokenKey: '',
+  });
+
+  if (baseUrl) {
+    return baseUrl;
+  }
+
+  let settingBaseUrl = providerSettings?.[provider].baseUrl;
+
+  if (settingBaseUrl && settingBaseUrl.length == 0) {
+    settingBaseUrl = undefined;
+  }
+
   switch (provider) {
     case 'Together':
-      return env.TOGETHER_API_BASE_URL || cloudflareEnv.TOGETHER_API_BASE_URL || 'https://api.together.xyz/v1';
+      return (
+        settingBaseUrl ||
+        env.TOGETHER_API_BASE_URL ||
+        cloudflareEnv.TOGETHER_API_BASE_URL ||
+        'https://api.together.xyz/v1'
+      );
     case 'OpenAILike':
-      return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
+      return settingBaseUrl || env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     case 'LMStudio':
-      return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+      return (
+        settingBaseUrl || env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234'
+      );
     case 'Ollama': {
-      let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+      let baseUrl =
+        settingBaseUrl || env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
 
       if (env.RUNNING_IN_DOCKER === 'true') {
         baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 1a5aab7eb..308e27d45 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -140,7 +140,7 @@ export function getPerplexityModel(apiKey: OptionalApiKey, model: string) {
 export function getModel(
   provider: string,
   model: string,
-  env: Env,
+  serverEnv: Env,
   apiKeys?: Record<string, string>,
   providerSettings?: Record<string, IProviderSetting>,
 ) {
@@ -148,9 +148,12 @@ export function getModel(
    * let apiKey; // Declare first
    * let baseURL;
    */
+  // console.log({provider,model});
 
-  const apiKey = getAPIKey(env, provider, apiKeys); // Then assign
-  const baseURL = providerSettings?.[provider].baseUrl || getBaseURL(env, provider);
+  const apiKey = getAPIKey(serverEnv, provider, apiKeys); // Then assign
+  const baseURL = getBaseURL(serverEnv, provider, providerSettings);
+
+  // console.log({apiKey,baseURL});
 
   switch (provider) {
     case 'Anthropic':
diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts
index 74cdd9d4f..6bbf56851 100644
--- a/app/lib/.server/llm/stream-text.ts
+++ b/app/lib/.server/llm/stream-text.ts
@@ -151,10 +151,13 @@ export async function streamText(props: {
   providerSettings?: Record<string, IProviderSetting>;
   promptId?: string;
 }) {
-  const { messages, env, options, apiKeys, files, providerSettings, promptId } = props;
+  const { messages, env: serverEnv, options, apiKeys, files, providerSettings, promptId } = props;
+
+  // console.log({serverEnv});
+
   let currentModel = DEFAULT_MODEL;
   let currentProvider = DEFAULT_PROVIDER.name;
-  const MODEL_LIST = await getModelList(apiKeys || {}, providerSettings);
+  const MODEL_LIST = await getModelList({ apiKeys, providerSettings, serverEnv: serverEnv as any });
   const processedMessages = messages.map((message) => {
     if (message.role === 'user') {
       const { model, provider, content } = extractPropertiesFromMessage(message);
@@ -196,7 +199,7 @@ export async function streamText(props: {
   }
 
   return _streamText({
-    model: getModel(currentProvider, currentModel, env, apiKeys, providerSettings) as any,
+    model: getModel(currentProvider, currentModel, serverEnv, apiKeys, providerSettings) as any,
     system: systemPrompt,
     maxTokens: dynamicMaxTokens,
     messages: convertToCoreMessages(processedMessages as any),
diff --git a/app/lib/hooks/useEditChatDescription.ts b/app/lib/hooks/useEditChatDescription.ts
index 5230d6cf9..25147a020 100644
--- a/app/lib/hooks/useEditChatDescription.ts
+++ b/app/lib/hooks/useEditChatDescription.ts
@@ -92,6 +92,7 @@ export function useEditChatDescription({
     }
 
     const lengthValid = trimmedDesc.length > 0 && trimmedDesc.length <= 100;
+
     // Allow letters, numbers, spaces, and common punctuation but exclude characters that could cause issues
     const characterValid = /^[a-zA-Z0-9\s\-_.,!?()[\]{}'"]+$/.test(trimmedDesc);
diff --git a/app/types/model.ts b/app/types/model.ts
index 3bfbfde92..b449363b3 100644
--- a/app/types/model.ts
+++ b/app/types/model.ts
@@ -3,7 +3,12 @@ import type { ModelInfo } from '~/utils/types';
 export type ProviderInfo = {
   staticModels: ModelInfo[];
   name: string;
-  getDynamicModels?: (apiKeys?: Record<string, string>, providerSettings?: IProviderSetting) => Promise<ModelInfo[]>;
+  getDynamicModels?: (
+    providerName: string,
+    apiKeys?: Record<string, string>,
+    providerSettings?: IProviderSetting,
+    serverEnv?: Record<string, string>,
+  ) => Promise<ModelInfo[]>;
   getApiKeyLink?: string;
   labelForGetApiKey?: string;
   icon?: string;
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index 642599541..b80b3c8fe 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -220,7 +220,6 @@ const PROVIDER_LIST: ProviderInfo[] = [
     ],
     getApiKeyLink: 'https://huggingface.co/settings/tokens',
   },
-
   {
     name: 'OpenAI',
     staticModels: [
@@ -319,44 +318,130 @@ const PROVIDER_LIST: ProviderInfo[] = [
   },
 ];
 
+export const providerBaseUrlEnvKeys: Record<string, { baseUrlKey?: string; apiTokenKey?: string }> = {
+  Anthropic: {
+    apiTokenKey: 'ANTHROPIC_API_KEY',
+  },
+  OpenAI: {
+    apiTokenKey: 'OPENAI_API_KEY',
+  },
+  Groq: {
+    apiTokenKey: 'GROQ_API_KEY',
+  },
+  HuggingFace: {
+    apiTokenKey: 'HuggingFace_API_KEY',
+  },
+  OpenRouter: {
+    apiTokenKey: 'OPEN_ROUTER_API_KEY',
+  },
+  Google: {
+    apiTokenKey: 'GOOGLE_GENERATIVE_AI_API_KEY',
+  },
+  OpenAILike: {
+    baseUrlKey: 'OPENAI_LIKE_API_BASE_URL',
+    apiTokenKey: 'OPENAI_LIKE_API_KEY',
+  },
+  Together: {
+    baseUrlKey: 'TOGETHER_API_BASE_URL',
+    apiTokenKey: 'TOGETHER_API_KEY',
+  },
+  Deepseek: {
+    apiTokenKey: 'DEEPSEEK_API_KEY',
+  },
+  Mistral: {
+    apiTokenKey: 'MISTRAL_API_KEY',
+  },
+  LMStudio: {
+    baseUrlKey: 'LMSTUDIO_API_BASE_URL',
+  },
+  xAI: {
+    apiTokenKey: 'XAI_API_KEY',
+  },
+  Cohere: {
+    apiTokenKey: 'COHERE_API_KEY',
+  },
+  Perplexity: {
+    apiTokenKey: 'PERPLEXITY_API_KEY',
+  },
+  Ollama: {
+    baseUrlKey: 'OLLAMA_API_BASE_URL',
+  },
+};
+
+export const getProviderBaseUrlAndKey = (options: {
+  provider: string;
+  apiKeys?: Record<string, string>;
+  providerSettings?: IProviderSetting;
+  serverEnv?: Record<string, string>;
+  defaultBaseUrlKey: string;
+  defaultApiTokenKey: string;
+}) => {
+  const { provider, apiKeys, providerSettings, serverEnv, defaultBaseUrlKey, defaultApiTokenKey } = options;
+  let settingsBaseUrl = providerSettings?.baseUrl;
+
+  if (settingsBaseUrl && settingsBaseUrl.length == 0) {
+    settingsBaseUrl = undefined;
+  }
+
+  const baseUrlKey = providerBaseUrlEnvKeys[provider]?.baseUrlKey || defaultBaseUrlKey;
+  const baseUrl = settingsBaseUrl || serverEnv?.[baseUrlKey] || process.env[baseUrlKey] || import.meta.env[baseUrlKey];
+
+  const apiTokenKey = providerBaseUrlEnvKeys[provider]?.apiTokenKey || defaultApiTokenKey;
+  const apiKey =
+    apiKeys?.[provider] || serverEnv?.[apiTokenKey] || process.env[apiTokenKey] || import.meta.env[apiTokenKey];
+
+  return {
+    baseUrl,
+    apiKey,
+  };
+};
 export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
 
 const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat();
 
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
-export async function getModelList(
-  apiKeys: Record<string, string>,
-  providerSettings?: Record<string, IProviderSetting>,
-) {
+export async function getModelList(options: {
+  apiKeys?: Record<string, string>;
+  providerSettings?: Record<string, IProviderSetting>;
+  serverEnv?: Record<string, string>;
+}) {
+  const { apiKeys, providerSettings, serverEnv } = options;
+
   MODEL_LIST = [
     ...(
       await Promise.all(
         PROVIDER_LIST.filter(
           (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
-        ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
+        ).map((p) => p.getDynamicModels(p.name, apiKeys, providerSettings?.[p.name], serverEnv)),
       )
     ).flat(),
     ...staticModels,
   ];
+
   return MODEL_LIST;
 }
 
-async function getTogetherModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
+async function getTogetherModels(
+  name: string,
+  apiKeys?: Record<string, string>,
+  settings?: IProviderSetting,
+  serverEnv: Record<string, string> = {},
+): Promise<ModelInfo[]> {
   try {
-    const baseUrl = settings?.baseUrl || import.meta.env.TOGETHER_API_BASE_URL || '';
-    const provider = 'Together';
+    const { baseUrl, apiKey } = getProviderBaseUrlAndKey({
+      provider: name,
+      apiKeys,
+      providerSettings: settings,
+      serverEnv,
+      defaultBaseUrlKey: 'TOGETHER_API_BASE_URL',
+      defaultApiTokenKey: 'TOGETHER_API_KEY',
+    });
 
     if (!baseUrl) {
       return [];
     }
 
-    let apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
-
-    if (apiKeys && apiKeys[provider]) {
-      apiKey = apiKeys[provider];
-    }
-
     if (!apiKey) {
       return [];
     }
@@ -374,7 +459,7 @@ async function getTogetherModels(apiKeys?: Record, settings?: IP
       label: `${m.display_name} - in:$${m.pricing.input.toFixed(
         2,
       )} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
-      provider,
+      provider: name,
       maxTokenAllowed: 8000,
     }));
   } catch (e) {
@@ -383,24 +468,40 @@ async function getTogetherModels(apiKeys?: Record, settings?: IP
   }
 }
 
-const getOllamaBaseUrl = (settings?: IProviderSetting) => {
-  const defaultBaseUrl = settings?.baseUrl || import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
+const getOllamaBaseUrl = (name: string, settings?: IProviderSetting, serverEnv: Record<string, string> = {}) => {
+  const { baseUrl } = getProviderBaseUrlAndKey({
+    provider: name,
+    providerSettings: settings,
+    serverEnv,
+    defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
+    defaultApiTokenKey: '',
+  });
 
   // Check if we're in the browser
   if (typeof window !== 'undefined') {
     // Frontend always uses localhost
-    return defaultBaseUrl;
+    return baseUrl;
   }
 
   // Backend: Check if we're running in Docker
   const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
 
-  return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
+  return isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
 };
 
-async function getOllamaModels(apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
+async function getOllamaModels(
+  name: string,
+  _apiKeys?: Record<string, string>,
+  settings?: IProviderSetting,
+  serverEnv: Record<string, string> = {},
+): Promise<ModelInfo[]> {
   try {
-    const baseUrl = getOllamaBaseUrl(settings);
+    const baseUrl = getOllamaBaseUrl(name, settings, serverEnv);
+
+    if (!baseUrl) {
+      return [];
+    }
+
     const response = await fetch(`${baseUrl}/api/tags`);
     const data = (await response.json()) as OllamaApiResponse;
@@ -419,22 +520,25 @@ async function getOllamaModels(apiKeys?: Record, settings?: IPro
 }
 
 async function getOpenAILikeModels(
+  name: string,
   apiKeys?: Record<string, string>,
   settings?: IProviderSetting,
+  serverEnv: Record<string, string> = {},
 ): Promise<ModelInfo[]> {
   try {
-    const baseUrl = settings?.baseUrl || import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
+    const { baseUrl, apiKey } = getProviderBaseUrlAndKey({
+      provider: name,
+      apiKeys,
+      providerSettings: settings,
+      serverEnv,
+      defaultBaseUrlKey: 'OPENAI_LIKE_API_BASE_URL',
+      defaultApiTokenKey: 'OPENAI_LIKE_API_KEY',
+    });
 
     if (!baseUrl) {
       return [];
     }
 
-    let apiKey = '';
-
-    if (apiKeys && apiKeys.OpenAILike) {
-      apiKey = apiKeys.OpenAILike;
-    }
-
     const response = await fetch(`${baseUrl}/models`, {
       headers: {
         Authorization: `Bearer ${apiKey}`,
@@ -445,7 +549,7 @@ async function getOpenAILikeModels(
     return res.data.map((model: any) => ({
       name: model.id,
       label: model.id,
-      provider: 'OpenAILike',
+      provider: name,
     }));
   } catch (e) {
     console.error('Error getting OpenAILike models:', e);
@@ -486,9 +590,26 @@ async function getOpenRouterModels(): Promise {
   }));
 }
 
-async function getLMStudioModels(_apiKeys?: Record<string, string>, settings?: IProviderSetting): Promise<ModelInfo[]> {
+async function getLMStudioModels(
+  name: string,
+  apiKeys?: Record<string, string>,
+  settings?: IProviderSetting,
+  serverEnv: Record<string, string> = {},
+): Promise<ModelInfo[]> {
   try {
-    const baseUrl = settings?.baseUrl || import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+    const { baseUrl } = getProviderBaseUrlAndKey({
+      provider: name,
+      apiKeys,
+      providerSettings: settings,
+      serverEnv,
+      defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
+      defaultApiTokenKey: '',
+    });
+
+    if (!baseUrl) {
+      return [];
+    }
+
     const response = await fetch(`${baseUrl}/v1/models`);
     const data = (await response.json()) as any;
@@ -503,29 +624,37 @@ async function getLMStudioModels(_apiKeys?: Record, settings?: I
   }
 }
 
-async function initializeModelList(providerSettings?: Record<string, IProviderSetting>): Promise<ModelInfo[]> {
-  let apiKeys: Record<string, string> = {};
+async function initializeModelList(options: {
+  env?: Record<string, string>;
+  providerSettings?: Record<string, IProviderSetting>;
+  apiKeys?: Record<string, string>;
+}): Promise<ModelInfo[]> {
+  const { providerSettings, apiKeys: providedApiKeys, env } = options;
+  let apiKeys: Record<string, string> = providedApiKeys || {};
 
-  try {
-    const storedApiKeys = Cookies.get('apiKeys');
+  if (!providedApiKeys) {
+    try {
+      const storedApiKeys = Cookies.get('apiKeys');
 
-    if (storedApiKeys) {
-      const parsedKeys = JSON.parse(storedApiKeys);
+      if (storedApiKeys) {
+        const parsedKeys = JSON.parse(storedApiKeys);
 
-      if (typeof parsedKeys === 'object' && parsedKeys !== null) {
-        apiKeys = parsedKeys;
+        if (typeof parsedKeys === 'object' && parsedKeys !== null) {
+          apiKeys = parsedKeys;
+        }
       }
+    } catch (error: any) {
+      logStore.logError('Failed to fetch API keys from cookies', error);
+      logger.warn(`Failed to fetch apikeys from cookies: ${error?.message}`);
     }
-  } catch (error: any) {
-    logStore.logError('Failed to fetch API keys from cookies', error);
-    logger.warn(`Failed to fetch apikeys from cookies: ${error?.message}`);
   }
+
   MODEL_LIST = [
     ...(
       await Promise.all(
         PROVIDER_LIST.filter(
           (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
-        ).map((p) => p.getDynamicModels(apiKeys, providerSettings?.[p.name])),
+        ).map((p) => p.getDynamicModels(p.name, apiKeys, providerSettings?.[p.name], env)),
       )
     ).flat(),
     ...staticModels,
@@ -534,6 +663,7 @@ async function initializeModelList(providerSettings?: Record
     chrome129IssuePlugin(),
     config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
   ],
-  envPrefix: ["VITE_", "OPENAI_LIKE_API_", "OLLAMA_API_BASE_URL", "LMSTUDIO_API_BASE_URL","TOGETHER_API_BASE_URL"],
+  envPrefix: ["VITE_","OPENAI_LIKE_API_BASE_URL", "OLLAMA_API_BASE_URL", "LMSTUDIO_API_BASE_URL","TOGETHER_API_BASE_URL"],
   css: {
     preprocessorOptions: {
       scss: {
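// ---------------------------------------------------------------------------
// Usage sketch (illustration only, not part of the patch above): how the new
// options-object APIs introduced in app/utils/constants.ts are expected to be
// called. The concrete provider, key, and env values below are placeholders.
// ---------------------------------------------------------------------------
import { getModelList, getProviderBaseUrlAndKey } from '~/utils/constants';

async function example(serverEnv: Record<string, string>) {
  // Resolve a provider's base URL and API key. Resolution order is:
  // provider settings -> serverEnv -> process.env -> import.meta.env, using the
  // variable names registered in providerBaseUrlEnvKeys (or the supplied defaults).
  const { baseUrl, apiKey } = getProviderBaseUrlAndKey({
    provider: 'Together',
    apiKeys: { Together: 'tk-placeholder' }, // e.g. parsed from the 'apiKeys' cookie
    providerSettings: undefined,
    serverEnv,
    defaultBaseUrlKey: 'TOGETHER_API_BASE_URL',
    defaultApiTokenKey: 'TOGETHER_API_KEY',
  });
  console.log(baseUrl, apiKey ? 'key found' : 'no key');

  // Dynamic model fetchers now receive the provider name and serverEnv as well,
  // so the same call works both server-side (Cloudflare env) and in the browser.
  const models = await getModelList({
    apiKeys: { Together: 'tk-placeholder' },
    providerSettings: undefined,
    serverEnv,
  });

  return models;
}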