Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix: provider/model selection error — Anthropic remained the default provider even after being disabled. #693

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 50 additions & 0 deletions app/components/chat/AudioLevelIndicator.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import React from 'react';
import { classNames } from '~/utils/classNames';

interface AudioLevelIndicatorProps {
  level: number; // audio level from 0 to 1; out-of-range or non-finite values are clamped
  size?: 'sm' | 'md' | 'lg';
  className?: string;
}

/**
 * Visual audio-level meter rendered as a row of 5 vertical bars.
 * Bars light up from left to right in proportion to `level`, with a
 * staircase height profile (20% … 100%) like a classic VU meter.
 */
export const AudioLevelIndicator: React.FC<AudioLevelIndicatorProps> = ({ level, size = 'md', className }) => {
  const bars = 5;

  // Sanitize the input: a noisy analyser can report values outside [0, 1]
  // (or NaN before the stream starts). Without this, activeBarCount could be
  // negative or exceed `bars`, and NaN would silently disable every bar.
  const safeLevel = Number.isFinite(level) ? Math.min(1, Math.max(0, level)) : 0;
  const activeBarCount = Math.floor(safeLevel * bars);

  // Container height and inter-bar gap per size variant.
  const sizeClasses = {
    sm: 'h-2 gap-0.5',
    md: 'h-3 gap-1',
    lg: 'h-4 gap-1',
  };

  // Individual bar width per size variant.
  const barSizeClasses = {
    sm: 'w-0.5',
    md: 'w-1',
    lg: 'w-1.5',
  };

  // Fixed height classes for each bar (staircase profile).
  const barHeights = [
    'h-[20%]', // Bar 1
    'h-[40%]', // Bar 2
    'h-[60%]', // Bar 3
    'h-[80%]', // Bar 4
    'h-[100%]', // Bar 5
  ];

  return (
    <div className={classNames('flex items-end transition-all duration-100', sizeClasses[size], className)}>
      {Array.from({ length: bars }).map((_, i) => (
        <div
          key={i}
          className={classNames(
            'transition-all duration-100',
            barSizeClasses[size],
            barHeights[i],
            i < activeBarCount ? 'bg-bolt-elements-item-contentAccent' : 'bg-gray-300 dark:bg-gray-600',
          )}
        />
      ))}
    </div>
  );
};
142 changes: 42 additions & 100 deletions app/components/chat/BaseChat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
* Preventing TS checks with files presented in the video for a better presentation.
*/
import type { Message } from 'ai';
import React, { type RefCallback, useEffect, useState } from 'react';
import React, { type RefCallback, useEffect, useState, useCallback } from 'react';
import { ClientOnly } from 'remix-utils/client-only';
import { Menu } from '~/components/sidebar/Menu.client';
import { IconButton } from '~/components/ui/IconButton';
Expand All @@ -15,6 +15,8 @@ import { SendButton } from './SendButton.client';
import { APIKeyManager } from './APIKeyManager';
import Cookies from 'js-cookie';
import * as Tooltip from '@radix-ui/react-tooltip';
import { useSpeechRecognition, SUPPORTED_LANGUAGES } from './hooks/useSpeechRecognition';
import { toast } from 'sonner';

import styles from './BaseChat.module.scss';
import { ExportChatButton } from '~/components/chat/chatExportAndImport/ExportChatButton';
Expand Down Expand Up @@ -106,117 +108,52 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
});
const [modelList, setModelList] = useState(MODEL_LIST);
const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
const [isListening, setIsListening] = useState(false);
const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
const [transcript, setTranscript] = useState('');

useEffect(() => {
console.log(transcript);
}, [transcript]);

useEffect(() => {
// Load API keys from cookies on component mount
try {
const storedApiKeys = Cookies.get('apiKeys');

if (storedApiKeys) {
const parsedKeys = JSON.parse(storedApiKeys);

if (typeof parsedKeys === 'object' && parsedKeys !== null) {
setApiKeys(parsedKeys);
}
const loadModels = async () => {
try {
const updatedModelList = await initializeModelList();
setModelList(updatedModelList);
} catch (error) {
console.error('Failed to initialize model list:', error);
}
} catch (error) {
console.error('Error loading API keys from cookies:', error);

// Clear invalid cookie data
Cookies.remove('apiKeys');
}

let providerSettings: Record<string, IProviderSetting> | undefined = undefined;

try {
const savedProviderSettings = Cookies.get('providers');
};

if (savedProviderSettings) {
const parsedProviderSettings = JSON.parse(savedProviderSettings);
loadModels();
}, [apiKeys]);

if (typeof parsedProviderSettings === 'object' && parsedProviderSettings !== null) {
providerSettings = parsedProviderSettings;
}
const handleTranscriptChange = useCallback(
(transcript: string) => {
if (handleInputChange) {
const syntheticEvent = {
target: { value: transcript },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
} catch (error) {
console.error('Error loading Provider Settings from cookies:', error);

// Clear invalid cookie data
Cookies.remove('providers');
}

initializeModelList(providerSettings).then((modelList) => {
setModelList(modelList);
});

if (typeof window !== 'undefined' && ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window)) {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const recognition = new SpeechRecognition();
recognition.continuous = true;
recognition.interimResults = true;

recognition.onresult = (event) => {
const transcript = Array.from(event.results)
.map((result) => result[0])
.map((result) => result.transcript)
.join('');

setTranscript(transcript);

if (handleInputChange) {
const syntheticEvent = {
target: { value: transcript },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
};

recognition.onerror = (event) => {
console.error('Speech recognition error:', event.error);
setIsListening(false);
};

setRecognition(recognition);
}
}, []);

const startListening = () => {
if (recognition) {
recognition.start();
setIsListening(true);
}
};
},
[handleInputChange],
);

const stopListening = () => {
if (recognition) {
recognition.stop();
setIsListening(false);
}
};
const {
isListening,
startListening,
stopListening,
permissionState,
audioLevel,
currentLanguage,
supportedLanguages,
changeLanguage,
} = useSpeechRecognition({
onTranscriptChange: handleTranscriptChange,
language: 'en-US',
});

const handleSendMessage = (event: React.UIEvent, messageInput?: string) => {
if (sendMessage) {
sendMessage(event, messageInput);

if (recognition) {
recognition.abort(); // Stop current recognition
setTranscript(''); // Clear transcript
setIsListening(false);

// Clear the input by triggering handleInputChange with empty value
if (handleInputChange) {
const syntheticEvent = {
target: { value: '' },
} as React.ChangeEvent<HTMLTextAreaElement>;
handleInputChange(syntheticEvent);
}
if (isListening) {
stopListening();
}
}
};
Expand Down Expand Up @@ -500,6 +437,11 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
onStart={startListening}
onStop={stopListening}
disabled={isStreaming}
permissionState={permissionState}
audioLevel={audioLevel}
currentLanguage={currentLanguage}
supportedLanguages={supportedLanguages}
onLanguageChange={changeLanguage}
/>
{chatStarted && <ClientOnly>{() => <ExportChatButton exportChat={exportChat} />}</ClientOnly>}
<IconButton
Expand Down
15 changes: 14 additions & 1 deletion app/components/chat/Chat.client.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -98,9 +98,22 @@ export const ChatImpl = memo(
const savedModel = Cookies.get('selectedModel');
return savedModel || DEFAULT_MODEL;
});

const [provider, setProvider] = useState(() => {
const savedProvider = Cookies.get('selectedProvider');
return PROVIDER_LIST.find((p) => p.name === savedProvider) || DEFAULT_PROVIDER;
const enabledProviders = PROVIDER_LIST.filter((p) => activeProviders[p.name]);

// First try to find the saved provider among enabled providers
if (savedProvider) {
const savedProviderInfo = enabledProviders.find((p) => p.name === savedProvider);

if (savedProviderInfo) {
return savedProviderInfo;
}
}

// If no saved provider or it's not enabled, return first enabled provider or default
return enabledProviders[0] || DEFAULT_PROVIDER;
});

const { showChat } = useStore(chatStore);
Expand Down
21 changes: 19 additions & 2 deletions app/components/chat/GitCloneButton.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -38,18 +38,25 @@ interface GitCloneButtonProps {
export default function GitCloneButton({ importChat }: GitCloneButtonProps) {
const { ready, gitClone } = useGit();
const onClick = async (_e: any) => {
console.log('Git clone button clicked');

if (!ready) {
console.log('Git not ready');
return;
}

const repoUrl = prompt('Enter the Git url');
console.log('Got repo URL:', repoUrl);

if (repoUrl) {
console.log('Cloning repo...');

const { workdir, data } = await gitClone(repoUrl);
console.log('Cloned repo to:', workdir);

if (importChat) {
const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
console.log(filePaths);
console.log('Filtered file paths:', filePaths);

const textDecoder = new TextDecoder('utf-8');

Expand All @@ -63,14 +70,20 @@ export default function GitCloneButton({ importChat }: GitCloneButtonProps) {
};
})
.filter((f) => f.content);
console.log('Converted file contents:', fileContents.length, 'files');

// Detect and create commands message
console.log('Detecting project commands...');

const commands = await detectProjectCommands(fileContents);
console.log('Detected commands:', commands);

const commandsMessage = createCommandsMessage(commands);
console.log('Created commands message:', commandsMessage);

// Create files message
const filesMessage: Message = {
role: 'assistant',
role: 'assistant' as const,
content: `Cloning the repo ${repoUrl} into ${workdir}
<boltArtifact id="imported-files" title="Git Cloned Files" type="bundled">
${fileContents
Expand All @@ -85,14 +98,18 @@ ${file.content}
id: generateId(),
createdAt: new Date(),
};
console.log('Created files message');

const messages = [filesMessage];

if (commandsMessage) {
messages.push(commandsMessage);
console.log('Added commands message');
}

console.log('Importing chat...');
await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
console.log('Chat imported');
}
}
};
Expand Down
Loading