From 273139ed5f3cb298b8971825ee23a44e50b5d31e Mon Sep 17 00:00:00 2001
From: Muspi Merol
Date: Tue, 14 May 2024 03:36:25 +0800
Subject: [PATCH] chore: support model `gpt-4o-2024-05-13` and update deps

---
 pyproject.toml            |  8 ++++----
 src/routes/run.py         | 31 +------------------------------
 src/utils/llm/__init__.py | 31 +++++++++++++++++++++++++++++++
 3 files changed, 36 insertions(+), 34 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index f119e887..16cf632d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,21 +4,21 @@ requires-python = ">=3.10,<3.13"
 readme = "README.md"
 license = { text = "MIT" }
 dependencies = [
-    "fastapi~=0.110.3",
+    "fastapi-slim~=0.111.0",
     "uvicorn[standard]~=0.29.0",
     "promplate[all]~=0.3.4.4",
     "promplate-trace[langfuse,langsmith]==0.3.0dev2",
     "python-box~=7.1.1",
     "pydantic-settings~=2.2.1",
     "httpx[http2]~=0.27.0",
-    "promptools[stream,validation]~=0.1.3.2",
+    "promptools[stream,validation]~=0.1.3.3",
     "fake-useragent~=1.5.1",
     "html2text~=2024.2.26",
     "beautifulsoup4~=4.12.3",
     "rich~=13.7.1",
     "zhipuai~=2.0.1",
-    "anthropic~=0.25.7",
-    "dashscope~=1.17.1",
+    "anthropic~=0.25.8",
+    "dashscope~=1.19.0",
 ]
 
 [tool.pdm.dev-dependencies]
diff --git a/src/routes/run.py b/src/routes/run.py
index e643b53b..7a628a72 100644
--- a/src/routes/run.py
+++ b/src/routes/run.py
@@ -11,7 +11,7 @@
 from ..logic import get_node
 from ..logic.tools import tool_map
 from ..utils.config import env
-from ..utils.llm import find_llm
+from ..utils.llm import Model, find_llm
 from .sse import non_duplicated_event_stream
 
 run_router = APIRouter(tags=["call"])
@@ -23,35 +23,6 @@ class Msg(BaseModel):
     name: Annotated[Literal.__getitem__(tuple(tool_map)), str] | None = None  # type: ignore
 
 
-Model = Literal[
-    "gpt-3.5-turbo-0301",
-    "gpt-3.5-turbo-0613",
-    "gpt-3.5-turbo-1106",
-    "gpt-3.5-turbo-0125",
-    "gpt-4-1106-preview",
-    "gpt-4-0125-preview",
-    "gpt-4-turbo-2024-04-09",
-    "chatglm_turbo",
-    "claude-instant-1.2",
-    "claude-2.1",
-    "claude-3-opus-20240229",
-    "claude-3-sonnet-20240229",
-    "claude-3-haiku-20240307",
-    "gemma-7b-it",
-    "llama3-8b-8192",
-    "llama3-70b-8192",
-    "llama2-70b-4096",
-    "mixtral-8x7b-32768",
-    "nous-hermes-2-mixtral-8x7b-dpo",
-    "qwen-turbo",
-    "qwen-plus",
-    "qwen-max",
-    "abab5.5s-chat",
-    "abab5.5-chat",
-    "abab6-chat",
-]
-
-
 run_config_fields = {"model", "temperature", "stop", "stop_sequences"}
 
 
diff --git a/src/utils/llm/__init__.py b/src/utils/llm/__init__.py
index 5b7dcd37..e3970383 100644
--- a/src/utils/llm/__init__.py
+++ b/src/utils/llm/__init__.py
@@ -1,3 +1,5 @@
+from typing import Literal
+
 from .anthropic import anthropic
 from .chatglm import glm
 from .dispatch import find_llm
@@ -6,3 +8,32 @@
 from .octoai import octoai
 from .openai import openai
 from .qwen import qwen
+
+Model = Literal[
+    "gpt-3.5-turbo-0301",
+    "gpt-3.5-turbo-0613",
+    "gpt-3.5-turbo-1106",
+    "gpt-3.5-turbo-0125",
+    "gpt-4o-2024-05-13",
+    "gpt-4-1106-preview",
+    "gpt-4-0125-preview",
+    "gpt-4-turbo-2024-04-09",
+    "chatglm_turbo",
+    "claude-instant-1.2",
+    "claude-2.1",
+    "claude-3-opus-20240229",
+    "claude-3-sonnet-20240229",
+    "claude-3-haiku-20240307",
+    "gemma-7b-it",
+    "llama3-8b-8192",
+    "llama3-70b-8192",
+    "llama2-70b-4096",
+    "mixtral-8x7b-32768",
+    "nous-hermes-2-mixtral-8x7b-dpo",
+    "qwen-turbo",
+    "qwen-plus",
+    "qwen-max",
+    "abab5.5s-chat",
+    "abab5.5-chat",
+    "abab6-chat",
+]
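
Usage note (not applied by this patch): a minimal sketch of how the relocated `Model` alias and
the newly listed model name might be exercised. It assumes the repository root is on sys.path so
that `src.utils.llm` is importable, and that `find_llm` accepts a model name, as its import
alongside `Model` in src/routes/run.py suggests; the actual dispatch signature lives in
src/utils/llm/dispatch.py and may differ.

    # Illustrative sketch only; not part of the diff above.
    from typing import get_args

    from src.utils.llm import Model, find_llm  # assumes `src` is importable from the repo root

    supported: tuple[str, ...] = get_args(Model)  # all literal model names in the Model alias
    assert "gpt-4o-2024-05-13" in supported       # the model added by this commit

    llm = find_llm("gpt-4o-2024-05-13")  # dispatch to a provider client (signature assumed)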