diff --git a/.env.example b/.env.example index 5d766dc..5820dd3 100644 --- a/.env.example +++ b/.env.example @@ -23,6 +23,12 @@ OPENAI_API_KEY="" # sk-90... # You can get this at aistudio.google.com GOOGLE_API_KEY="" # AIz... +# ------------------------------ +# Obligatory if you're using Anthropic's models and want to use tool calling: +# ------------------------------ +# You can get this at https://console.anthropic.com/ +ANTHROPIC_API_KEY="" # sk-... + # ------------------------------ # Obligatory if you're using one of OpenRouter models: # ------------------------------ diff --git a/src/clients/open-router.ts b/src/clients/open-router.ts index 0511663..5239ab0 100644 --- a/src/clients/open-router.ts +++ b/src/clients/open-router.ts @@ -14,6 +14,7 @@ import { ConversationSummaryMemory, } from "langchain/memory"; import { + ANTHROPIC_API_KEY, DEFAULT_MODEL, GOOGLE_API_KEY, MODEL_TEMPERATURE, @@ -29,10 +30,12 @@ import { getOpenRouterMemoryFor, } from "../crud/conversation"; import { + anthropicToolCallingModels, googleToolCallingModels, openAIToolCallingModels, } from "./tools/tool-calling-models"; import { tools } from "./tools/tools-openrouter"; +import { ChatAnthropic } from "@langchain/anthropic"; function parseMessageHistory( rawHistory: { [key: string]: string }[] @@ -160,6 +163,29 @@ export async function createExecutorForOpenRouter( prompt, }); } + // Anthropic LLM with Tool Calling Agent + else if ( + anthropicToolCallingModels.includes(llmModel) && + ANTHROPIC_API_KEY !== "" + ) { + console.log("Using Anthropic LLM"); + prompt = await pull( + "luisotee/wa-assistant-tool-calling" + ); + + llm = new ChatAnthropic({ + modelName: llmModel, + streaming: true, + temperature: MODEL_TEMPERATURE, + apiKey: ANTHROPIC_API_KEY, + }); + + agent = await createToolCallingAgent({ + llm, + tools, + prompt, + }); + } // OpenRouter LLMs without Tool Calling Agent, with Structured Agent else { console.log("Using OpenRouter LLM"); diff --git 
a/src/clients/tools/tool-calling-models.ts b/src/clients/tools/tool-calling-models.ts index c7b4981..3043072 100644 --- a/src/clients/tools/tool-calling-models.ts +++ b/src/clients/tools/tool-calling-models.ts @@ -6,3 +6,10 @@ export const openAIToolCallingModels = [ ]; export const googleToolCallingModels = ["gemini-1.5-pro", "gemini-1.5-flash"]; + +export const anthropicToolCallingModels = [ + "claude-3-5-sonnet-20240620", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", +]; diff --git a/src/constants.ts b/src/constants.ts index 99db14e..56655bd 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -64,3 +64,4 @@ export const MODEL_TEMPERATURE = parseFloat( ); export const ENABLE_GOOGLE_ROUTES = process.env.ENABLE_GOOGLE_ROUTES as string; export const GOOGLE_API_KEY = process.env.GOOGLE_API_KEY as string; +export const ANTHROPIC_API_KEY = process.env.ANTHROPIC_API_KEY as string; diff --git a/src/handlers/command/change-llm.ts b/src/handlers/command/change-llm.ts index 3ec0fc8..418e31e 100644 --- a/src/handlers/command/change-llm.ts +++ b/src/handlers/command/change-llm.ts @@ -12,7 +12,9 @@ const LLM_OPTIONS = { "5": "o1-mini", "6": "gemini-1.5-pro", "7": "gemini-1.5-flash", - "8": "anthropic/claude-3.5-sonnet", + "8": "claude-3-5-sonnet-20240620", + "9": "claude-3-opus-20240229", + "10": "claude-3-haiku-20240307", }; export async function handleChangeLLM(message: Message, args: string) { @@ -38,13 +40,15 @@ export async function handleChangeLLM(message: Message, args: string) { *${CMD_PREFIX}change 3* for _gpt-4o_ (OpenAI API) *${CMD_PREFIX}change 4* for _o1-preview_ (OpenAI API) *${CMD_PREFIX}change 5* for _o1-mini_ (OpenAI API) - *${CMD_PREFIX}change 6* for _gemini-pro-1.5_ (Google API) + *${CMD_PREFIX}change 6* for _gemini-1.5-pro_ (Google API) *${CMD_PREFIX}change 7* for _gemini-1.5-flash_ (Google API) - *${CMD_PREFIX}change 8* for _anthropic/claude-3.5-sonnet_ (OpenRouter API) + *${CMD_PREFIX}change 8* for 
_claude-3-5-sonnet_ (Anthropic API) + *${CMD_PREFIX}change 9* for _claude-3-opus_ (Anthropic API) + *${CMD_PREFIX}change 10* for _claude-3-haiku_ (Anthropic API) - You can also type the name of your desired model, like *${CMD_PREFIX}change mistralai/mixtral-8x7b-instruct* + You can also type the name of any model supported by OpenRouter, like *${CMD_PREFIX}change mistralai/mixtral-8x7b-instruct* - See the list of available models at OpenRouter docs in https://openrouter.ai/docs#models. + See the list of available models in the OpenRouter docs at https://openrouter.ai/docs#models. `, chat.id._serialized, { linkPreview: false }