Skip to content

Commit

Permalink
refactor: use client.chat.completions everywhere
Browse files Browse the repository at this point in the history
Somehow, the `client.completions` (legacy completions) API seems very fragile; switch all call sites to `client.chat.completions` instead.
  • Loading branch information
engineervix committed Sep 12, 2024
1 parent 967b61b commit f9803c3
Show file tree
Hide file tree
Showing 3 changed files with 92 additions and 39 deletions.
19 changes: 10 additions & 9 deletions app/core/podcast/eleventify.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,25 +49,26 @@ def create_episode_summary(content: str, episode: str) -> str:

prompt = f"Given the transcript of today's episode below, write a very brief summary to use as a description for the media file. Your summary should be a single paragraph, not exceeding 2 sentences.\n\n```\n{content}\n```"

# model = "lmsys/vicuna-13b-v1.5-16k"
# model = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO"
# model = "openchat/openchat-3.5-1210"
# model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
model = "mistralai/Mixtral-8x7B-v0.1"
model = "meta-llama/Meta-Llama-3-70B-Instruct-Turbo"
temperature = 0.75
max_tokens = 512

response = client.completions.create(
prompt=prompt,
completion = client.chat.completions.create(
model=model,
messages=[
{
"role": "user",
"content": prompt,
},
],
temperature=temperature,
max_tokens=max_tokens,
)
logger.info(response)
logger.info(completion)

fallback = f"This is episode {episode} of the Zed News Podcast."

if result := response.choices[0].text.strip():
if result := completion.choices[0].message.content.strip():
result = result.replace("```", "") # Remove triple backticks
first_line = result.splitlines()[0].lower()
unwanted = ["summary:", "here's", "here is", "sure"]
Expand Down
53 changes: 36 additions & 17 deletions app/core/social/post.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,17 @@
import pathlib
import random
import sys
import numpy as np
from http import HTTPStatus

import facebook
import numpy as np
import PIL
import requests
from dotenv import load_dotenv
from moviepy.editor import CompositeVideoClip, ImageClip, TextClip, VideoFileClip
from together import Together
from moviepy.video.compositing.concatenate import concatenate_videoclips
from moviepy.video.tools.segmenting import findObjects
from together import Together

from app.core.utilities import (
ASSETS_DIR, # noqa: F401
Expand Down Expand Up @@ -258,24 +258,32 @@ def create_facebook_post(content: str, url: str) -> str:
https://docs.together.ai/reference/complete
"""

prompt = f"You are a social media marketing guru. Your task is to immediately produce a short Facebook teaser post of today's podcast episode whose details are below. Use bullet points, emojis and hashtags as appropriate. Don't cover every news item, just the most interesting ones. Your post will accompany a video, whose URL is {url}. Please note that this video is just basically the audio with some looping animated background, therefore, your post shouldn't ask people to 'watch' the video per se, but rather to, 'check it out' or 'tune in to'. Do not use markdown.\n\n```\n{content}\n```"
system_prompt = f"You are a social media marketing expert. Your task is to produce a short Facebook teaser post for today's podcast episode based on the provided podcast episode details. The post should highlight only the most interesting news items, using bullet points, emojis, and hashtags where appropriate. Keep in mind that your post will accompany a video, whose URL is {url}, and which is primarily an audio recording with a looping animated background. Therefore, avoid asking people to 'watch' the video. Instead, encourage them to 'check it out' or 'tune in.'"

user_prompt = f"Produce a Facebook teaser post based on the following podcast episode details.\n\n\n{content}\n"

# model = "lmsys/vicuna-13b-v1.5-16k"
# model = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO"
# model = "openchat/openchat-3.5-1210"
model = "mistralai/Mixtral-8x7B-v0.1"
model = "meta-llama/Meta-Llama-3-70B-Instruct-Turbo"
temperature = 0.75
max_tokens = 1024

response = client.completions.create(
prompt=prompt,
completion = client.chat.completions.create(
model=model,
messages=[
{
"role": "system",
"content": system_prompt,
},
{
"role": "user",
"content": user_prompt,
},
],
temperature=temperature,
max_tokens=max_tokens,
)
logger.info(response)
logger.info(completion)

if result := response.choices[0].text.strip():
if result := completion.choices[0].message.content.strip():
result = result.replace("```", "") # Remove triple backticks
first_line = result.splitlines()[0].lower()
unwanted = ["facebook post:", "post:", "here's your", "here is"]
Expand Down Expand Up @@ -305,21 +313,32 @@ def create_episode_summary(content: str) -> str:
host = f"Host: {podcast_host}"
separator = "------------------------------------"

prompt = f"You are a social media marketing guru. Your task is to write a very brief summary to use as a description for a facebook video post, given the transcript of today's episode below. Use bullet points, emojis and hashtags as appropriate. Do not use markdown. Please note that this video is just basically the audio with some looping animated background, therefore, your post shouldn't ask people to 'watch' the video per se, but rather to, 'check it out' or 'tune in to'. At the end, mention that more details can be obtained from {podcast_url}.\n\n```{separator}\n\n{title}\n{date}\n{host}\n\n{separator}\n\n{content}\n```"
system_prompt = f"You are a social media marketing guru. Your task is to write a very brief summary to use as a description for a Facebook video post, given the transcript of today's episode below. Use bullet points, emojis, and hashtags as appropriate. Do not use markdown. Please note that this video is just basically the audio with some looping animated background, so your post shouldn't ask people to 'watch' the video, but rather to 'check it out' or 'tune in to'. At the end, mention that more details can be obtained from {podcast_url}."

user_prompt = f"{separator}\n\n{title}\n{date}\n{host}\n\n{separator}\n\n{content}\n"

model = "mistralai/Mixtral-8x7B-v0.1"
model = "meta-llama/Meta-Llama-3-70B-Instruct-Turbo"
temperature = 0.75
max_tokens = 1024

response = client.completions.create(
prompt=prompt,
completion = client.chat.completions.create(
model=model,
messages=[
{
"role": "system",
"content": system_prompt,
},
{
"role": "user",
"content": user_prompt,
},
],
temperature=temperature,
max_tokens=max_tokens,
)
logger.info(response)
logger.info(completion)

if result := response.choices[0].text.strip():
if result := completion.choices[0].message.content.strip():
result = result.replace("```", "") # Remove triple backticks
first_line = result.splitlines()[0].lower()
unwanted = ["summary:", "here's", "here is", "sure"]
Expand Down
59 changes: 46 additions & 13 deletions app/core/summarization/backends/together.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logging
import time
import sys
import time

from together import Together, error

Expand All @@ -16,8 +16,11 @@ def summarize(content: str, title: str) -> str:
https://docs.together.ai/reference/complete
"""

prompt = f"You are a distinguished news editor and content publisher, your task is to summarize the following news entry. The summary should accurately reflect the main message and arguments presented in the original news entry, while also being concise and easy to understand. Your summary should not exceed two sentences.\n\n ```{content}```:"
model = "mistralai/Mixtral-8x7B-v0.1"
system_prompt = "You are a distinguished news editor and content publisher. Your task is to summarize the provided news entry. The summary should accurately reflect the main message and arguments presented in the original news entry, while also being concise and easy to understand."

user_prompt = f"Summarize the following news entry in not more than two sentences.\n\n ```{content}```"

model = "meta-llama/Meta-Llama-3-70B-Instruct-Turbo"
temperature = 0.7
max_tokens = 192

Expand All @@ -26,16 +29,25 @@ def summarize(content: str, title: str) -> str:

while retries < max_retries:
try:
response = client.completions.create(
prompt=prompt,
completion = client.chat.completions.create(
model=model,
messages=[
{
"role": "system",
"content": system_prompt,
},
{
"role": "user",
"content": user_prompt,
},
],
temperature=temperature,
max_tokens=max_tokens,
)
time.sleep(1.5)
logging.info(response)
logging.info(completion)

if result := response.choices[0].text.strip():
if result := completion.choices[0].message.content.strip():
result = result.replace("```", "") # Remove triple backticks
first_line = result.splitlines()[0].lower()
unwanted = ["summary:", "here's", "here is", "sure"]
Expand All @@ -62,8 +74,11 @@ def brief_summary(content: str, title: str) -> str:
https://docs.together.ai/reference/complete
"""

prompt = f"You are a distinguished news editor and content publisher, your task is to summarize the following news entry in one sentence.\n\n ```{content}```:"
model = "mistralai/Mixtral-8x7B-v0.1"
system_prompt = "You are a distinguished news editor and content publisher. Your task is to summarize the provided news entry. The summary should accurately reflect the main message and arguments presented in the original news entry, while also being concise and easy to understand."

user_prompt = f"Summarize the following news entry in one sentence.\n\n ```{content}```"

model = "meta-llama/Meta-Llama-3-70B-Instruct-Turbo"
temperature = 0.7
max_tokens = 96

Expand All @@ -72,16 +87,34 @@ def brief_summary(content: str, title: str) -> str:

while retries < max_retries:
try:
response = client.completions.create(
prompt=prompt,
completion = client.chat.completions.create(
model=model,
messages=[
{
"role": "system",
"content": system_prompt,
},
{
"role": "user",
"content": user_prompt,
},
],
temperature=temperature,
max_tokens=max_tokens,
)
time.sleep(1.5)
logging.info(response)
logging.info(completion)

return response.choices[0].text
if result := completion.choices[0].message.content.strip():
result = result.replace("```", "") # Remove triple backticks
first_line = result.splitlines()[0].lower()
unwanted = ["summary:", "here's", "here is", "sure"]

if any(string in first_line for string in unwanted):
# Remove the first line from result
result = "\n".join(result.split("\n")[1:])

return result.replace("\n", "") # Remove newlines
except error.ServiceUnavailableError:
retries += 1
logging.error(f"Service unavailable. Retrying {retries}/{max_retries} in 10 seconds...")
Expand Down

0 comments on commit f9803c3

Please sign in to comment.