From ad07c50429c72d635fbdc52c55b472a7c8f6601c Mon Sep 17 00:00:00 2001
From: Muspi Merol
Date: Sun, 8 Dec 2024 15:38:49 +0800
Subject: [PATCH] chore: update groq models

---
 src/utils/llm/__init__.py | 6 ++++--
 src/utils/llm/groq.py     | 1 +
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/utils/llm/__init__.py b/src/utils/llm/__init__.py
index fea4478..52c54f9 100644
--- a/src/utils/llm/__init__.py
+++ b/src/utils/llm/__init__.py
@@ -69,10 +69,12 @@
     "llama-3.1-70b-versatile",
     "llama-3.1-70b-specdec",
     "llama-3.1-405b-reasoning",
-    "llama-3.2-1b-text-preview",
-    "llama-3.2-3b-text-preview",
+    "llama-3.2-1b-preview",
+    "llama-3.2-3b-preview",
     "llama-3.2-11b-vision-preview",
     "llama-3.2-90b-vision-preview",
+    "llama-3.3-70b-versatile",
+    "llama-3.3-70b-specdec",
     "llama3.1-8b",
     "llama3.1-70b",
     "mixtral-8x7b-32768",
diff --git a/src/utils/llm/groq.py b/src/utils/llm/groq.py
index c6509ae..41a956e 100644
--- a/src/utils/llm/groq.py
+++ b/src/utils/llm/groq.py
@@ -16,6 +16,7 @@
 @link_llm("llama3-")
 @link_llm("llama-3.1")
 @link_llm("llama-3.2")
+@link_llm("llama-3.3")
 @link_llm("mixtral")
 class Groq(AsyncChatOpenAI):
     async def complete(self, prompt: str | list[Message], /, **config):