diff --git a/.env.example b/.env.example
index 7306f3617..b76965aa0 100644
--- a/.env.example
+++ b/.env.example
@@ -16,6 +16,14 @@ HuggingFace_API_KEY=
 # You only need this environment variable set if you want to use GPT models
 OPENAI_API_KEY=
 
+# Get your Azure Open AI API Key by following these instructions -
+# https://docs.microsoft.com/en-us/azure/cognitive-services/openai/quickstarts
+# You only need this environment variable set if you want to use Azure OpenAI models
+AZURE_OPENAI_API_KEY=
+AZURE_OPENAI_API_BASE_URL=
+AZURE_OPENAI_RESOURCE_NAME=
+AZURE_OPENAI_API_VERSION=
+
 # Get your Anthropic API Key in your account settings -
 # https://console.anthropic.com/settings/keys
 # You only need this environment variable set if you want to use Claude models
@@ -74,7 +82,7 @@ XAI_API_KEY=
 VITE_LOG_LEVEL=debug
 
 # Example Context Values for qwen2.5-coder:32b
-# 
+#
 # DEFAULT_NUM_CTX=32768 # Consumes 36GB of VRAM
 # DEFAULT_NUM_CTX=24576 # Consumes 32GB of VRAM
 # DEFAULT_NUM_CTX=12288 # Consumes 26GB of VRAM
diff --git a/Dockerfile b/Dockerfile
index 06541d303..a69590e54 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -41,7 +41,11 @@ ENV WRANGLER_SEND_METRICS=false \
     TOGETHER_API_KEY=${TOGETHER_API_KEY} \
     TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
     VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
-    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
+    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX} \
+    AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY} \
+    AZURE_OPENAI_API_BASE_URL=${AZURE_OPENAI_API_BASE_URL} \
+    AZURE_OPENAI_RESOURCE_NAME=${AZURE_OPENAI_RESOURCE_NAME} \
+    AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION}
 
 # Pre-configure wrangler to disable metrics
 RUN mkdir -p /root/.config/.wrangler && \
@@ -56,8 +60,12 @@ FROM base AS bolt-ai-development
 
 # Define the same environment variables for development
 ARG GROQ_API_KEY
-ARG HuggingFace 
+ARG HuggingFace
 ARG OPENAI_API_KEY
+ARG AZURE_OPENAI_API_KEY
+ARG AZURE_OPENAI_API_BASE_URL
+ARG AZURE_OPENAI_RESOURCE_NAME
+ARG AZURE_OPENAI_API_VERSION
 ARG ANTHROPIC_API_KEY
 ARG OPEN_ROUTER_API_KEY
 ARG GOOGLE_GENERATIVE_AI_API_KEY
@@ -77,7 +85,11 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
     TOGETHER_API_KEY=${TOGETHER_API_KEY} \
     TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
     VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
-    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
+    DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX} \
+    AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY} \
+    AZURE_OPENAI_API_BASE_URL=${AZURE_OPENAI_API_BASE_URL} \
+    AZURE_OPENAI_RESOURCE_NAME=${AZURE_OPENAI_RESOURCE_NAME} \
+    AZURE_OPENAI_API_VERSION=${AZURE_OPENAI_API_VERSION}
 
 RUN mkdir -p ${WORKDIR}/run
 CMD pnpm run dev --host
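The four AZURE_OPENAI_* variables above are only declared in .env.example and threaded through the Dockerfile; the provider code that actually reads them is not part of this diff. A minimal TypeScript sketch of how they might be consumed follows — the helper name, the derived-URL convention, and the fallback API version are assumptions, not code from this change:

```ts
// Hypothetical helper (not part of this diff): resolves an Azure OpenAI endpoint
// from the environment variables introduced in .env.example and the Dockerfile.
interface AzureOpenAIConfig {
  apiKey: string;
  baseUrl: string;
  apiVersion: string;
}

export function getAzureOpenAIConfig(env: Record<string, string | undefined>): AzureOpenAIConfig {
  const apiKey = env.AZURE_OPENAI_API_KEY;
  const resourceName = env.AZURE_OPENAI_RESOURCE_NAME;

  // Prefer an explicit base URL, otherwise derive one from the resource name
  // using Azure's usual https://<resource>.openai.azure.com convention.
  const baseUrl = env.AZURE_OPENAI_API_BASE_URL || (resourceName ? `https://${resourceName}.openai.azure.com` : '');

  if (!apiKey || !baseUrl) {
    throw new Error('Azure OpenAI is not configured: set AZURE_OPENAI_API_KEY plus a base URL or resource name.');
  }

  // The default version here is an assumption; AZURE_OPENAI_API_VERSION takes precedence when provided.
  return { apiKey, baseUrl, apiVersion: env.AZURE_OPENAI_API_VERSION || '2024-02-15-preview' };
}
```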
diff --git a/app/components/chat/BaseChat.module.scss b/app/components/chat/BaseChat.module.scss
index 4908e34e0..77bfa74ee 100644
--- a/app/components/chat/BaseChat.module.scss
+++ b/app/components/chat/BaseChat.module.scss
@@ -12,6 +12,14 @@
       opacity: 0;
     }
   }
+
+  &[data-chat-started='true'] {
+    --textarea-max-height: 400px;
+  }
+
+  &[data-chat-started='false'] {
+    --textarea-max-height: 200px;
+  }
 }
 
 .Chat {
@@ -31,9 +39,10 @@
 .PromptEffectLine {
   width: calc(100% - var(--prompt-container-offset) + var(--prompt-line-stroke-width));
   height: calc(100% - var(--prompt-container-offset) + var(--prompt-line-stroke-width));
-  x: calc(var(--prompt-container-offset) / 2 - var(--prompt-line-stroke-width) / 2);
-  y: calc(var(--prompt-container-offset) / 2 - var(--prompt-line-stroke-width) / 2);
-  rx: calc(8px - var(--prompt-line-stroke-width));
+  transform: translate(
+    calc(var(--prompt-container-offset) / 2 - var(--prompt-line-stroke-width) / 2),
+    calc(var(--prompt-container-offset) / 2 - var(--prompt-line-stroke-width) / 2)
+  );
   fill: transparent;
   stroke-width: var(--prompt-line-stroke-width);
   stroke: url(#line-gradient);
@@ -45,3 +54,16 @@
   fill: url(#shine-gradient);
   mix-blend-mode: overlay;
 }
+
+.chatTextarea {
+  min-height: 76px;
+  max-height: var(--textarea-max-height, 400px);
+
+  &[data-drag-active="true"] {
+    border: 2px solid #1488fc;
+  }
+
+  &[data-drag-active="false"] {
+    border: 1px solid var(--bolt-elements-borderColor);
+  }
+}
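These SCSS rules are driven entirely by data attributes: `data-chat-started` on the chat root selects the `--textarea-max-height` value, and `data-drag-active` on the textarea switches between the drag-highlight and default borders, so the component no longer needs inline styles. A rough, self-contained sketch of markup that exercises them — the component name and the focus-based `chatStarted` toggle are illustrative assumptions, not code from this PR:

```tsx
import { useState } from 'react';
import styles from './BaseChat.module.scss';

// Illustrative only: shows how the data attributes defined in the SCSS above are
// expected to be toggled from markup instead of mutating inline styles.
export function ChatTextareaSketch() {
  const [chatStarted, setChatStarted] = useState(false); // stand-in for the real chat state
  const [dragActive, setDragActive] = useState(false);

  // data-chat-started picks --textarea-max-height (400px vs 200px);
  // data-drag-active switches between the 2px #1488fc border and the default 1px border.
  return (
    <div className={styles.BaseChat} data-chat-started={chatStarted}>
      <textarea
        className={styles.chatTextarea}
        data-drag-active={dragActive}
        onDragOver={(e) => {
          e.preventDefault();
          setDragActive(true);
        }}
        onDragLeave={() => setDragActive(false)}
        onDrop={(e) => {
          e.preventDefault();
          setDragActive(false);
        }}
        onFocus={() => setChatStarted(true)}
      />
    </div>
  );
}
```

Passing booleans works here because React stringifies `data-*` attribute values to "true"/"false" rather than omitting them, which is exactly what the attribute selectors in the SCSS match on.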
diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx
index a77932c74..253ddb874 100644
--- a/app/components/chat/BaseChat.tsx
+++ b/app/components/chat/BaseChat.tsx
@@ -27,8 +27,6 @@ import { ModelSelector } from '~/components/chat/ModelSelector';
 import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
 import type { IProviderSetting, ProviderInfo } from '~/types/model';
 
-const TEXTAREA_MIN_HEIGHT = 76;
-
 interface BaseChatProps {
   textareaRef?: React.RefObject<HTMLTextAreaElement> | undefined;
   messageRef?: RefCallback<HTMLDivElement> | undefined;
@@ -89,7 +87,6 @@ export const BaseChat = React.forwardRef(
     },
     ref,
   ) => {
-    const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
     const [apiKeys, setApiKeys] = useState<Record<string, string>>(() => {
       const savedKeys = Cookies.get('apiKeys');
 
@@ -278,6 +275,7 @@ export const BaseChat = React.forwardRef(
         ref={ref}
         className={classNames(styles.BaseChat, 'relative flex h-full w-full overflow-hidden')}
         data-chat-visible={showChat}
+        data-chat-started={chatStarted}
       >
         <ClientOnly>{() => <Menu />}</ClientOnly>
@@ -340,7 +338,12 @@ export const BaseChat = React.forwardRef(
@@ -387,22 +390,23 @@ export const BaseChat = React.forwardRef(
                   'w-full pl-4 pt-4 pr-16 focus:outline-none resize-none text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary bg-transparent text-sm',
                   'transition-all duration-200',
                   'hover:border-bolt-elements-focus',
+                  styles.chatTextarea,
                 )}
                 onDragEnter={(e) => {
                   e.preventDefault();
-                  e.currentTarget.style.border = '2px solid #1488fc';
+                  e.currentTarget.dataset.dragActive = 'true';
                 }}
                 onDragOver={(e) => {
                   e.preventDefault();
-                  e.currentTarget.style.border = '2px solid #1488fc';
+                  e.currentTarget.dataset.dragActive = 'true';
                 }}
                 onDragLeave={(e) => {
                   e.preventDefault();
-                  e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
+                  e.currentTarget.dataset.dragActive = 'false';
                 }}
                 onDrop={(e) => {
                   e.preventDefault();
-                  e.currentTarget.style.border = '1px solid var(--bolt-elements-borderColor)';
+                  e.currentTarget.dataset.dragActive = 'false';
 
                   const files = Array.from(e.dataTransfer.files);
                   files.forEach((file) => {
@@ -418,6 +422,7 @@ export const BaseChat = React.forwardRef(
                     }
                   });
                 }}
+                data-drag-active="false"
                 onKeyDown={(event) => {
                   if (event.key === 'Enter') {
                     if (event.shiftKey) {
@@ -439,10 +444,6 @@ export const BaseChat = React.forwardRef(
                   handleInputChange?.(event);
                 }}
                 onPaste={handlePaste}
-                style={{
-                  minHeight: TEXTAREA_MIN_HEIGHT,
-                  maxHeight: TEXTAREA_MAX_HEIGHT,
-                }}
                 placeholder="How can Bolt help you today?"
                 translate="no"
               />
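With TEXTAREA_MIN_HEIGHT, TEXTAREA_MAX_HEIGHT, and the inline style prop removed, the height limits now live only in `.chatTextarea` and the `--textarea-max-height` variable. If any other logic (for example a scrollHeight-based auto-resize, which this diff does not show) still needs the numeric limit, it could read it back from the computed style rather than re-hardcoding 200/400 — a sketch under that assumption:

```ts
// Sketch only: recover the CSS-defined max height at runtime instead of duplicating
// the 200px/400px values that now live in BaseChat.module.scss.
function getTextareaMaxHeight(textarea: HTMLTextAreaElement): number {
  const maxHeight = window.getComputedStyle(textarea).maxHeight; // e.g. "400px"
  const parsed = Number.parseFloat(maxHeight);

  // Fall back to the SCSS fallback value (400) if max-height resolves to "none".
  return Number.isFinite(parsed) ? parsed : 400;
}

// Assumed auto-resize pattern, not code from this PR.
function autoResize(textarea: HTMLTextAreaElement): void {
  textarea.style.height = 'auto';
  textarea.style.height = `${Math.min(textarea.scrollHeight, getTextareaMaxHeight(textarea))}px`;
}
```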
diff --git a/app/components/chat/Chat.client.tsx b/app/components/chat/Chat.client.tsx
index cd651cb12..a0c3e5859 100644
--- a/app/components/chat/Chat.client.tsx
+++ b/app/components/chat/Chat.client.tsx
@@ -48,7 +48,12 @@ export function Chat() {
diff --git a/app/components/chat/ModelSelector.tsx b/app/components/chat/ModelSelector.tsx
index bd41eb4d4..421d66726 100644
--- a/app/components/chat/ModelSelector.tsx
+++ b/app/components/chat/ModelSelector.tsx
@@ -1,7 +1,7 @@
 import type { ProviderInfo } from '~/types/model';
 import type { ModelInfo } from '~/utils/types';
-import { useEffect, useState } from 'react';
-import Cookies from 'js-cookie';
+import { useEffect } from 'react';
+import { useSettings } from '~/lib/hooks/useSettings';
 
 interface ModelSelectorProps {
   model?: string;
@@ -19,65 +19,29 @@ export const ModelSelector = ({
   provider,
   setProvider,
   modelList,
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
   providerList,
 }: ModelSelectorProps) => {
-  // Load enabled providers from cookies
-  const [enabledProviders, setEnabledProviders] = useState(() => {
-    const savedProviders = Cookies.get('providers');
+  const { activeProviders } = useSettings();
 
-    if (savedProviders) {
-      try {
-        const parsedProviders = JSON.parse(savedProviders);
-        return providerList.filter((p) => parsedProviders[p.name]);
-      } catch (error) {
-        console.error('Failed to parse providers from cookies:', error);
-        return providerList;
-      }
-    }
-
-    return providerList;
-  });
-
-  // Update enabled providers when cookies change
   useEffect(() => {
-    // Function to update providers from cookies
-    const updateProvidersFromCookies = () => {
-      const savedProviders = Cookies.get('providers');
-
-      if (savedProviders) {
-        try {
-          const parsedProviders = JSON.parse(savedProviders);
-          const newEnabledProviders = providerList.filter((p) => parsedProviders[p.name]);
-          setEnabledProviders(newEnabledProviders);
-
-          // If current provider is disabled, switch to first enabled provider
-          if (provider && !parsedProviders[provider.name] && newEnabledProviders.length > 0) {
-            const firstEnabledProvider = newEnabledProviders[0];
-            setProvider?.(firstEnabledProvider);
+    // If current provider is disabled or not in active providers, switch to first active provider
+    if ((provider && !activeProviders.find((p) => p.name === provider.name)) || !provider) {
+      if (activeProviders.length > 0) {
+        const firstEnabledProvider = activeProviders[0];
+        setProvider?.(firstEnabledProvider);
 
-            // Also update the model to the first available one for the new provider
-            const firstModel = modelList.find((m) => m.provider === firstEnabledProvider.name);
+        // Also update the model to the first available one for the new provider
+        const firstModel = modelList.find((m) => m.provider === firstEnabledProvider.name);
 
-            if (firstModel) {
-              setModel?.(firstModel.name);
-            }
-          }
-        } catch (error) {
-          console.error('Failed to parse providers from cookies:', error);
+        if (firstModel) {
+          setModel?.(firstModel.name);
         }
       }
-    };
-
-    // Initial update
-    updateProvidersFromCookies();
-
-    // Set up an interval to check for cookie changes
-    const interval = setInterval(updateProvidersFromCookies, 1000);
-
-    return () => clearInterval(interval);
-  }, [providerList, provider, setProvider, modelList, setModel]);
+    }
+  }, [activeProviders, provider, setProvider, modelList, setModel]);
 
-  if (enabledProviders.length === 0) {
+  if (activeProviders.length === 0) {
     return (
@@ -91,15 +55,16 @@ export const ModelSelector = ({
   return (
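ModelSelector now leans on the `useSettings` hook and its `activeProviders` list instead of polling the `providers` cookie on an interval; that hook is not shown in this diff. A rough sketch of the derivation ModelSelector appears to rely on — the function name, the toggle shape, and the "enabled by default" rule are assumptions, not code from this change:

```ts
import type { ProviderInfo } from '~/types/model';

// Assumed shape only: the real useSettings hook (in ~/lib/hooks/useSettings) is not part of this diff.
type ProviderToggles = Record<string, { enabled: boolean }>;

// Illustrative derivation of `activeProviders`: the configured providers whose toggle is on,
// kept in their original order so that activeProviders[0] is a stable fallback choice.
export function deriveActiveProviders(allProviders: ProviderInfo[], toggles: ProviderToggles): ProviderInfo[] {
  return allProviders.filter((p) => toggles[p.name]?.enabled ?? true);
}
```

Preserving the original order is what makes the new useEffect's `activeProviders[0]` fallback deterministic when the currently selected provider is disabled in settings.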