From 6c5d0880b616b50a1af580f0b5a03abc468b36d3 Mon Sep 17 00:00:00 2001
From: Victor Miti
Date: Tue, 30 Jul 2024 15:06:34 +0100
Subject: [PATCH] chore: undo Llama 3.1 model introduced in e04fe6c

---
 app/core/podcast/content.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/app/core/podcast/content.py b/app/core/podcast/content.py
index 86b7ab4..1716def 100644
--- a/app/core/podcast/content.py
+++ b/app/core/podcast/content.py
@@ -125,8 +125,8 @@ def create_transcript(news: list[dict[str, str]], dest: str, summarizer: Callabl
     # model = "lmsys/vicuna-13b-v1.5-16k"
     # model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     # model = "mistralai/Mixtral-8x22B-Instruct-v0.1"
-    # model = "meta-llama/Llama-3-70b-chat-hf"
-    model = "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo"
+    model = "meta-llama/Llama-3-70b-chat-hf"
+    # model = "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo"
     # model = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT"
     # model = "Qwen/Qwen1.5-14B-Chat"
     temperature = 0.8
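
For context, a minimal sketch of how a model ID and temperature like the ones toggled above are typically consumed. The patch does not show the call site inside create_transcript, so the client setup here is an assumption: the listed model IDs are Together AI identifiers, so this example uses an OpenAI-compatible client pointed at Together's endpoint, with a hypothetical TOGETHER_API_KEY environment variable and placeholder prompts.

    # Sketch only: the actual invocation in app/core/podcast/content.py is not
    # shown in this patch. Assumes an OpenAI-compatible client against the
    # Together AI endpoint, since the model IDs above are Together AI names.
    import os

    from openai import OpenAI

    client = OpenAI(
        base_url="https://api.together.xyz/v1",        # assumed provider endpoint
        api_key=os.environ["TOGETHER_API_KEY"],        # assumed env var name
    )

    model = "meta-llama/Llama-3-70b-chat-hf"           # value restored by this patch
    temperature = 0.8

    response = client.chat.completions.create(
        model=model,
        temperature=temperature,
        messages=[
            # Placeholder prompts; the real transcript prompt is not in the patch.
            {"role": "system", "content": "You are a podcast transcript writer."},
            {"role": "user", "content": "Summarize today's news items into a transcript."},
        ],
    )
    print(response.choices[0].message.content)

Because only the model = ... assignment changes, swapping between Llama 3 and Llama 3.1 (or any other commented-out option) requires no change to the surrounding call.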