add in migration code for chat histories
samlhuillier committed Aug 25, 2024
1 parent 5626796 commit 95d05e0
Showing 13 changed files with 51 additions and 33 deletions.
4 changes: 2 additions & 2 deletions electron/main/electron-store/storeConfig.ts
@@ -9,7 +9,7 @@ export interface LLMAPIConfig {
   apiKey?: string
 }
 
-export interface LLM {
+export interface LLMConfig {
   modelName: string
   apiName: string
   contextLength?: number
@@ -48,7 +48,7 @@ export interface StoreSchema {
     vaultDirectories: string[]
     directoryFromPreviousSession?: string
   }
-  LLMs: LLM[]
+  LLMs: LLMConfig[]
   LLMAPIs: LLMAPIConfig[]
   embeddingModels: {
     [modelAlias: string]: EmbeddingModelConfig
25 changes: 25 additions & 0 deletions electron/main/electron-store/storeSchemaMigrator.ts
@@ -62,12 +62,37 @@ export function setupDefaultStoreValues(store: Store<StoreSchema>) {
   setupDefaultLLMAPIs(store)
 }
 
+function ensureChatHistoryIsCorrectProperty(store: Store<StoreSchema>) {
+  const chatHistories = store.get(StoreKeys.ChatHistories)
+  if (!chatHistories) {
+    return
+  }
+
+  Object.keys(chatHistories).forEach((vaultDir) => {
+    const chats = chatHistories[vaultDir]
+    chats.map((chat) => {
+      const outputChat = chat
+      if (chat.displayableChatHistory) {
+        outputChat.messages = chat.displayableChatHistory
+        delete outputChat.displayableChatHistory
+      }
+      return outputChat
+    })
+    chatHistories[vaultDir] = chats
+  })
+
+  store.set(StoreKeys.ChatHistories, chatHistories)
+}
+
 export const initializeAndMaybeMigrateStore = (store: Store<StoreSchema>) => {
   const storeSchemaVersion = store.get(StoreKeys.SchemaVersion)
   if (storeSchemaVersion !== currentSchemaVersion) {
     store.set(StoreKeys.SchemaVersion, currentSchemaVersion)
     store.set(StoreKeys.LLMAPIs, [])
     store.set(StoreKeys.DefaultLLM, '')
   }
 
+  ensureChatHistoryIsCorrectProperty(store)
+
   setupDefaultStoreValues(store)
 }
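In effect, the migration renames the persisted displayableChatHistory key to messages on every saved chat in every vault. Note that although the return value of chats.map(...) is discarded, outputChat aliases chat, so each chat object is mutated in place and the final store.set persists the change. A minimal sketch of the shape change (field names come from the diff; the sample data is hypothetical):

// Persisted shape before this commit (sample data is hypothetical):
const legacyChat = {
  id: 'chat-1',
  displayableChatHistory: [{ role: 'user', content: 'hello' }],
}

// Equivalent shape after ensureChatHistoryIsCorrectProperty has run:
const migratedChat = {
  id: 'chat-1',
  messages: [{ role: 'user', content: 'hello' }],
}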
4 changes: 2 additions & 2 deletions electron/main/llm/ipcHandlers.ts
@@ -2,7 +2,7 @@ import { ipcMain } from 'electron'
 import Store from 'electron-store'
 import { ProgressResponse } from 'ollama'
 
-import { LLM, LLMAPIConfig, StoreKeys, StoreSchema } from '../electron-store/storeConfig'
+import { LLMConfig, LLMAPIConfig, StoreKeys, StoreSchema } from '../electron-store/storeConfig'
 
 import { addOrUpdateLLMAPIInStore, removeLLM, getLLMConfigs, addOrUpdateLLMInStore } from './llmConfig'
 import AnthropicModelSessionService from './models/Anthropic'
@@ -25,7 +25,7 @@ export const registerLLMSessionHandlers = (store: Store<StoreSchema>) => {
 
   ipcMain.handle('get-llm-api-configs', async () => store.get(StoreKeys.LLMAPIs))
 
-  ipcMain.handle('add-or-update-llm-config', async (event, llmConfig: LLM) => {
+  ipcMain.handle('add-or-update-llm-config', async (event, llmConfig: LLMConfig) => {
     await addOrUpdateLLMInStore(store, llmConfig)
   })
10 changes: 5 additions & 5 deletions electron/main/llm/llmConfig.ts
@@ -1,6 +1,6 @@
 import Store from 'electron-store'
 
-import { LLM, LLMAPIConfig, StoreKeys, StoreSchema } from '../electron-store/storeConfig'
+import { LLMConfig, LLMAPIConfig, StoreKeys, StoreSchema } from '../electron-store/storeConfig'
 
 import OllamaService from './models/Ollama'
 
@@ -18,7 +18,7 @@ export async function addOrUpdateLLMAPIInStore(store: Store<StoreSchema>, newAPI
   }
 }
 
-export async function addOrUpdateLLMInStore(store: Store<StoreSchema>, newLLM: LLM): Promise<void> {
+export async function addOrUpdateLLMInStore(store: Store<StoreSchema>, newLLM: LLMConfig): Promise<void> {
   const existingLLMs = store.get(StoreKeys.LLMs) || []
 
   const foundLLM = existingLLMs.find((llm) => llm.modelName === newLLM.modelName)
@@ -32,7 +32,7 @@ export async function addOrUpdateLLMInStore(store: Store<StoreSchema>, newLLM: L
   }
 }
 
-export async function getLLMConfigs(store: Store<StoreSchema>, ollamaSession: OllamaService): Promise<LLM[]> {
+export async function getLLMConfigs(store: Store<StoreSchema>, ollamaSession: OllamaService): Promise<LLMConfig[]> {
   const llmConfigsFromStore = store.get(StoreKeys.LLMs)
   const ollamaLLMConfigs = await ollamaSession.getAvailableModels()
 
@@ -43,11 +43,11 @@ export async function getLLMConfig(
   store: Store<StoreSchema>,
   ollamaSession: OllamaService,
   modelName: string,
-): Promise<LLM | undefined> {
+): Promise<LLMConfig | undefined> {
   const llmConfigs = await getLLMConfigs(store, ollamaSession)
 
   if (llmConfigs) {
-    return llmConfigs.find((model: LLM) => model.modelName === modelName)
+    return llmConfigs.find((model: LLMConfig) => model.modelName === modelName)
   }
   return undefined
 }
6 changes: 3 additions & 3 deletions electron/main/llm/models/Ollama.ts
@@ -7,7 +7,7 @@ import * as os from 'os'
 import * as path from 'path'
 
 import { app } from 'electron'
-import { LLM, LLMAPIConfig } from 'electron/main/electron-store/storeConfig'
+import { LLMConfig, LLMAPIConfig } from 'electron/main/electron-store/storeConfig'
 import { Tiktoken, TiktokenModel, encodingForModel } from 'js-tiktoken'
 import { ModelResponse, ProgressResponse, Ollama } from 'ollama'
 
@@ -173,11 +173,11 @@ class OllamaService {
     this.childProcess = null
   }
 
-  public getAvailableModels = async (): Promise<LLM[]> => {
+  public getAvailableModels = async (): Promise<LLMConfig[]> => {
     const ollamaModelsResponse = await this.client.list()
 
     const output = ollamaModelsResponse.models.map(
-      (model: ModelResponse): LLM => ({
+      (model: ModelResponse): LLMConfig => ({
         modelName: model.name,
         contextLength: 4096,
         apiName: defaultOllamaAPI.name,
6 changes: 3 additions & 3 deletions electron/preload/index.ts
@@ -3,7 +3,7 @@ import {
   EmbeddingModelConfig,
   EmbeddingModelWithLocalPath,
   EmbeddingModelWithRepo,
-  LLM,
+  LLMConfig,
   LLMAPIConfig,
   LLMGenerationParameters,
   Tab,
@@ -133,9 +133,9 @@ const llm = {
     (llmName: string, llmConfig: LLMAPIConfig, isJSONMode: boolean, chatHistory: Chat) => Promise<string>
   >('streaming-llm-response'),
 
-  getLLMConfigs: createIPCHandler<() => Promise<LLM[]>>('get-llm-configs'),
+  getLLMConfigs: createIPCHandler<() => Promise<LLMConfig[]>>('get-llm-configs'),
   getLLMAPIConfigs: createIPCHandler<() => Promise<LLMAPIConfig[]>>('get-llm-api-configs'),
-  addOrUpdateLLMConfig: createIPCHandler<(model: LLM) => Promise<void>>('add-or-update-llm-config'),
+  addOrUpdateLLMConfig: createIPCHandler<(model: LLMConfig) => Promise<void>>('add-or-update-llm-config'),
   addOrUpdateLLMAPIConfig:
     createIPCHandler<(modelConfig: LLMAPIConfig) => Promise<void>>('add-or-update-llm-api-config'),
   removeLLM: createIPCHandler<(modelNameToDelete: string) => Promise<void>>('remove-llm'),
2 changes: 1 addition & 1 deletion src/components/Chat/ChatInterface.tsx
@@ -83,7 +83,7 @@ const ChatInterface: React.FC<ChatInterfaceProps> = ({
     >
       <div className="relative mt-4 flex size-full flex-col items-center gap-3 overflow-x-hidden p-10 pt-0">
         <div className="w-full max-w-3xl">
-          {currentChatHistory && currentChatHistory.messages.length > 0 ? (
+          {currentChatHistory && currentChatHistory.messages && currentChatHistory.messages.length > 0 ? (
             // Display chat history if it exists
             currentChatHistory.messages
               .filter((msg) => msg.role !== 'system')
4 changes: 0 additions & 4 deletions src/components/Chat/ChatWrapper.tsx
@@ -107,10 +107,6 @@ const ChatWrapper: React.FC<ChatWrapperProps> = ({
         return {
           id: prev!.id,
           messages: newDisplayableHistory,
-          openAIChatHistory: newDisplayableHistory.map((message) => ({
-            role: message.role,
-            content: message.content,
-          })),
         }
       })
     },
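With messages now the single persisted source of truth, the duplicated openAIChatHistory projection is dropped here (and again in the WritingAssistant change below). Any caller that still needs the OpenAI-style payload can derive it on demand; a minimal sketch, with a hypothetical helper name:

// Derive the OpenAI-style message list from `messages` instead of
// persisting a second copy (helper name is hypothetical):
const toOpenAIChatHistory = (messages: ReorChatMessage[]) =>
  messages.map(({ role, content }) => ({ role, content }))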
5 changes: 3 additions & 2 deletions src/components/Chat/chatUtils.ts
@@ -11,6 +11,7 @@ export type ReorChatMessage = CoreMessage & {
 }
 
 export type Chat = {
+  [x: string]: any
   id: string
   messages: ReorChatMessage[]
 }
@@ -120,13 +121,13 @@ export const resolveRAGContext = async (query: string, chatFilters: ChatFilters)
 }
 
 export const getChatHistoryContext = (chatHistory: Chat | undefined): DBQueryResult[] => {
-  if (!chatHistory) return []
+  if (!chatHistory || !chatHistory.messages) return []
   const contextForChat = chatHistory.messages.map((message) => message.context).flat()
   return contextForChat as DBQueryResult[]
 }
 
 export const getDisplayableChatName = (chat: Chat): string => {
-  if (chat.messages.length === 0 || !chat.messages[chat.messages.length - 1].content) {
+  if (!chat.messages || chat.messages.length === 0 || !chat.messages[chat.messages.length - 1].content) {
     return 'Empty Chat'
   }
 
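The new index signature on Chat lets the migration read and delete the legacy displayableChatHistory key without a type error, while the added guards keep not-yet-migrated chats from crashing the UI. A sketch of the degraded behavior (the sample object is hypothetical):

// A chat persisted before the migration has no `messages` array yet:
const unmigratedChat = { id: 'old-chat', displayableChatHistory: [] } as Chat

// With the guard, this returns 'Empty Chat' instead of throwing a
// TypeError on `chat.messages.length`:
getDisplayableChatName(unmigratedChat)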
4 changes: 2 additions & 2 deletions src/components/Settings/LLMSettings/DefaultLLMSelector.tsx
@@ -1,13 +1,13 @@
 import React from 'react'
 
-import { LLM } from 'electron/main/electron-store/storeConfig'
+import { LLMConfig } from 'electron/main/electron-store/storeConfig'
 import posthog from 'posthog-js'
 
 import CustomSelect from '../../Common/Select'
 
 interface DefaultLLMSelectorProps {
   onModelChange: (model: string) => void
-  llmConfigs: LLM[]
+  llmConfigs: LLMConfig[]
   defaultLLM: string
   setDefaultLLM: (model: string) => void
 }
4 changes: 2 additions & 2 deletions src/components/Settings/LLMSettings/hooks/useLLMConfigs.tsx
@@ -1,9 +1,9 @@
 import { useState, useEffect } from 'react'
 
-import { LLM } from 'electron/main/electron-store/storeConfig'
+import { LLMConfig } from 'electron/main/electron-store/storeConfig'
 
 const useLLMConfigs = () => {
-  const [llmConfigs, setLLMConfigs] = useState<LLM[]>([])
+  const [llmConfigs, setLLMConfigs] = useState<LLMConfig[]>([])
   const [defaultLLM, setDefaultLLM] = useState<string>('')
 
   const fetchAndUpdateModelConfigs = async () => {
6 changes: 3 additions & 3 deletions
@@ -2,13 +2,13 @@ import React, { useState } from 'react'
 
 import { Button } from '@material-tailwind/react'
 
-import { APIInterface, LLM, LLMAPIConfig } from 'electron/main/electron-store/storeConfig'
+import { APIInterface, LLMConfig, LLMAPIConfig } from 'electron/main/electron-store/storeConfig'
 import ReorModal from '../../../Common/Modal'
 
 export const openAIDefaultAPIName = 'OpenAI'
 export const anthropicDefaultAPIName = 'Anthropic'
 
-export const openAIDefaultModels: LLM[] = [
+export const openAIDefaultModels: LLMConfig[] = [
   {
     contextLength: 128000,
     modelName: 'gpt-4o',
@@ -31,7 +31,7 @@ export const openAIDefaultModels: LLM[] = [
   },
 ]
 
-export const anthropicDefaultModels: LLM[] = [
+export const anthropicDefaultModels: LLMConfig[] = [
   {
     contextLength: 180000,
     modelName: 'claude-3-5-sonnet-20240620',
4 changes: 0 additions & 4 deletions
@@ -265,10 +265,6 @@ const WritingAssistant: React.FC<WritingAssistantProps> = ({
       return {
         id: prev!.id,
         messages: newDisplayableHistory,
-        openAIChatHistory: newDisplayableHistory.map((message) => ({
-          role: message.role,
-          content: message.content,
-        })),
       }
     })
   }
