From 6d20a91d73116729ede3ce1f4ab9301ff135b5a2 Mon Sep 17 00:00:00 2001 From: beingkk Date: Wed, 6 Dec 2023 10:42:21 +0000 Subject: [PATCH] signals bot v0.1 --- .gitignore | 1 + signals_app.py | 401 ++++++++++++++++++ .../sandbox/signals/data/00_system.jsonl | 1 + src/genai/sandbox/signals/data/01_intro.jsonl | 3 + .../signals/data/02_signal_impact.jsonl | 2 + .../signals/data/03_signal_choice.jsonl | 2 + .../sandbox/signals/data/04_follow_up.jsonl | 2 + .../sandbox/signals/data/func_intent.json | 15 + .../sandbox/signals/data/func_top_signal.json | 15 + .../signals/data/func_top_three_signals.json | 18 + .../sandbox/signals/data/intent_actions.json | 14 + .../sandbox/signals/data/prompt_intent.jsonl | 1 + .../signals/data/prompt_top_signal.jsonl | 1 + .../data/prompt_top_three_signals.jsonl | 1 + .../sandbox/signals/data/signals_2023.json | 37 ++ src/genai/sandbox/signals/signals_app.py | 161 +++---- src/genai/sandbox/signals/signals_test.ipynb | 18 +- 17 files changed, 610 insertions(+), 83 deletions(-) create mode 100644 signals_app.py create mode 100644 src/genai/sandbox/signals/data/00_system.jsonl create mode 100644 src/genai/sandbox/signals/data/01_intro.jsonl create mode 100644 src/genai/sandbox/signals/data/02_signal_impact.jsonl create mode 100644 src/genai/sandbox/signals/data/03_signal_choice.jsonl create mode 100644 src/genai/sandbox/signals/data/04_follow_up.jsonl create mode 100644 src/genai/sandbox/signals/data/func_intent.json create mode 100644 src/genai/sandbox/signals/data/func_top_signal.json create mode 100644 src/genai/sandbox/signals/data/func_top_three_signals.json create mode 100644 src/genai/sandbox/signals/data/intent_actions.json create mode 100644 src/genai/sandbox/signals/data/prompt_intent.jsonl create mode 100644 src/genai/sandbox/signals/data/prompt_top_signal.jsonl create mode 100644 src/genai/sandbox/signals/data/prompt_top_three_signals.jsonl create mode 100644 src/genai/sandbox/signals/data/signals_2023.json diff --git a/.gitignore b/.gitignore index a4156f2..83e67f7 100644 --- a/.gitignore +++ b/.gitignore @@ -169,3 +169,4 @@ src/scraping/data/* src/evals/parenting_chatbot/* src/genai/parenting_chatbot/prodigy_eval/_scrap/* !src/genai/parenting_chatbot/prodigy_eval/data/ +!src/genai/sandbox/signals/data/ \ No newline at end of file diff --git a/signals_app.py b/signals_app.py new file mode 100644 index 0000000..a0c9e7b --- /dev/null +++ b/signals_app.py @@ -0,0 +1,401 @@ +import streamlit as st + +from genai import MessageTemplate, FunctionTemplate +from genai.eyfs import TextGenerator +from genai.streamlit_pages.utils import reset_state +import json +import os +import openai +from dotenv import load_dotenv +load_dotenv() + +selected_model = "gpt-4-1106-preview" +temperature = 0.6 + +# Paths to prompts +PROMPT_PATH = "src/genai/sandbox/signals/data/" +PATH_SIGNALS_DATA = PROMPT_PATH + "signals_2023.json" +PATH_SYSTEM = PROMPT_PATH + "00_system.jsonl" +PATH_INTRO = PROMPT_PATH + "01_intro.jsonl" +PATH_ACTIONS = PROMPT_PATH + "intent_actions.json" + +# Top signal function +path_func_top_signal = PROMPT_PATH + "func_top_signal.json" +path_prompt_top_signal = PROMPT_PATH + "prompt_top_signal.jsonl" +# Top three signals function +path_func_top_three_signals = PROMPT_PATH + "func_top_three_signals.json" +path_prompt_top_three_signals = PROMPT_PATH + "prompt_top_three_signals.jsonl" +# Intent detection function +path_func_intent = PROMPT_PATH + "func_intent.json" +path_prompt_intent = PROMPT_PATH + "prompt_intent.jsonl" +# Prompt: Impact on the user 
+path_prompt_impact = PROMPT_PATH + "02_signal_impact.jsonl" +# Prompt: Summary of different signals +path_prompt_choice = PROMPT_PATH + "03_signal_choice.jsonl" +# Prompt: Following up on user's question +path_prompt_following_up = PROMPT_PATH + "04_follow_up.jsonl" + +def auth_openai() -> None: + """Authenticate with OpenAI.""" + try: + openai.api_key = os.environ["OPENAI_API_KEY"] + except Exception: + openai.api_key = st.secrets["OPENAI_API_KEY"] + + +def read_jsonl(path: str) -> list: + """Read a JSONL file.""" + with open(path, "r") as f: + return [json.loads(line) for line in f.readlines()] + + +def generate_signals_texts(signals_data: dict, chosen_signals: list = None): + signals = [signal["short_name"] for signal in signals_data] + signals_titles = [signal["title"] for signal in signals_data] + signals_summaries = [signal["summary"] for signal in signals_data] + + if chosen_signals is None: + chosen_signals = signals + + # Combine titles and summaries into a single string + signals_description = "" + for short_name, title, summary in zip(signals, signals_titles, signals_summaries): + if short_name in chosen_signals: + signals_description += f"Signal '{short_name}': {title}\n{summary}\n\n" + + return signals_description + + +def generate_action_texts(action_data: dict): + actions = [a["name"] for a in action_data] + action_descriptions = [a["description"] for a in action_data] + action_text = "" + for name, description in zip(actions, action_descriptions): + action_text += f"Action '{name}': {description}\n\n" + return action_text + +# Prepare the data +signals_data = json.load(open(PATH_SIGNALS_DATA, "r")) +signals_dict = {s['short_name']: s for s in signals_data} +signals_descriptions = generate_signals_texts(signals_data) +signals = [s['short_name'] for s in signals_data] + +actions_data = json.load(open(PATH_ACTIONS, "r")) +actions_descriptions = generate_action_texts(actions_data) +actions = [a['name'] for a in actions_data] + + +def predict_intent(user_message: str, messages: list) -> str: + """Detect the intent of the user's message. + + Args: + user_message (str): The user's message. + messages (list): The history of messages. + + Returns: + str: The intent of the user's message. Possible outputs are: + - "explain": The user wants to know more about a signal. + - "more_signals": The user wants to know more about a signal. + - "follow_up": The user wants to know more about a signal. + - "next_steps": The user wants to know more about a signal. + - "none": The user's message does not match any intent. + """ + func_intent = json.loads(open(path_func_intent).read()) + message_history = [MessageTemplate.load(m) for m in st.session_state.messages] + message = MessageTemplate.load(path_prompt_intent) + all_messages = message_history + [message] + function = FunctionTemplate.load(func_intent) + response = TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=all_messages, + message_kwargs={"intents": actions_descriptions, "user_input": user_message}, + stream=False, + functions=[function.to_prompt()], + function_call={"name": "predict_intent"}, + ) + intent = json.loads(response['choices'][0]['message']['function_call']['arguments']) + return intent['prediction'] + + +def predict_top_signal(user_message: str, signals: list) -> str: + """Predict the top signal from the user's message. + + Args: + user_message (str): The user's message. + + Returns: + str: The top signal from the user's message. 
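+
+    Note:
+        Only short names passed in `signals` (i.e. signals not yet discussed in this
+        session) can be returned, because the function-call schema's `enum` is
+        restricted to that list before the model is called.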
+ """ + # Function call + func_top_signal = json.loads(open(path_func_top_signal).read()) + func_top_signal['parameters']['properties']['prediction']['enum'] = signals + + message = MessageTemplate.load(path_prompt_top_signal) + function = FunctionTemplate.load(func_top_signal) + + response = TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=[message], + message_kwargs={"signals": signals_descriptions, "user_input": user_message}, + stream=False, + functions=[function.to_prompt()], + function_call={"name": "predict_top_signal"}, + ) + top_signal = json.loads(response['choices'][0]['message']['function_call']['arguments']) + return top_signal['prediction'] + + +def predict_top_three_signals(user_message: str, allowed_signals: list) -> list: + """Predict the top signal from the user's message. + + Args: + user_message (str): The user's message. + + Returns: + str: The top signal from the user's message. + """ + # Function call + func_top_signals = json.loads(open(path_func_top_three_signals).read()) + func_top_signals['parameters']['properties']['prediction']['items']['enum'] = allowed_signals + print(func_top_signals) + message = MessageTemplate.load(path_prompt_top_three_signals) + function_top_three = FunctionTemplate.load(func_top_signals) + + signals_descriptions_ = generate_signals_texts(signals_data, allowed_signals) + + response = TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=[message], + message_kwargs={"signals": signals_descriptions_, "user_input": user_message}, + stream=False, + functions=[function_top_three.to_prompt()], + function_call={"name": "predict_top_signals"}, + ) + top_signals = json.loads(response['choices'][0]['message']['function_call']['arguments']) + print(message) + print(f"Prediction: {top_signals}") + print(response) + return top_signals['prediction'] + +def signals_bot(sidebar: bool = True) -> None: + """Explain me a concept like I'm 3.""" + + # Define your custom CSS + # custom_css = """ + # + # """ + + # # Apply the custom CSS + # st.markdown(custom_css, unsafe_allow_html=True) + + st.title("Signals chatbot") + st.write("Let's discuss the future!") + + # First time running the app + if "messages" not in st.session_state: + # Record of messages to display on the app + st.session_state.messages = [] + # Record of messages to send to the LLM + st.session_state.history = [] + # Keep track of which state we're in + st.session_state.state = "start" + # Fetch system and introduction messages + st.session_state.signals = [] + + # Add system message to the history + system_message = read_jsonl(PATH_SYSTEM)[0] + system_message = MessageTemplate.load(system_message) + system_message.format_message(**{"signals": signals_descriptions}) + st.session_state.history.append(system_message.to_prompt()) + print(system_message.to_prompt()) + # Add the intro messages + intro_messages = read_jsonl(PATH_INTRO) + print(intro_messages) + for m in intro_messages: + st.session_state.messages.append(m) + st.session_state.history.append(m) + + # Display chat messages on app rerun + for message in st.session_state.messages: + with st.chat_message(message["role"]): + st.markdown(message["content"]) + + # Get user message + user_message = st.chat_input("") + if user_message: + # Display user message + with st.chat_message("user"): + st.markdown(user_message) + st.session_state.messages.append({"role": "user", "content": user_message}) + st.session_state.history.append({"role": "user", "content": user_message}) + + 
if st.session_state.state == "start": + intent = "new_signal" + st.session_state.user_info = user_message + st.session_state.state = "chatting" + else: + intent = predict_intent(user_message, st.session_state.history) + print(intent) + # intent = "following_up" + + if intent == "new_signal": + # Predict the signal to explain + allowed_signals = [s for s in signals if s not in st.session_state.signals] + signal_to_explain = predict_top_signal(user_message, allowed_signals) + st.session_state.signals.append(signal_to_explain) + st.session_state.active_signal = signal_to_explain + print(signal_to_explain) + print(f"I have these signals in memory: {st.session_state.signals}") + # Explain the signal + instruction = MessageTemplate.load(path_prompt_impact) + message_history = [MessageTemplate.load(m) for m in st.session_state.history] + message_history += [instruction] + with st.chat_message("assistant"): + message_placeholder = st.empty() + full_response = "" + for response in TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=message_history, + message_kwargs={ + "signal": signals_dict[signal_to_explain]['full_text'], + "user_input": st.session_state.user_info + }, + stream=True, + ): + full_response += response.choices[0].delta.get("content", "") + message_placeholder.markdown(full_response + "▌") + message_placeholder.markdown(full_response) + st.session_state.messages.append({"role": "assistant", "content": full_response}) + st.session_state.history.append({"role": "assistant", "content": full_response}) + + elif intent == "more_signals": + # Select the top 5 most relevant signals for the user + # (remove the seen signals) + # Provide an overview of the impacts of signal on the reader + # Ask which one the bot should elaborate on + allowed_signals = [s for s in signals if s not in st.session_state.signals] + top_signals = predict_top_three_signals(st.session_state.user_info, allowed_signals) + print(allowed_signals) + print(top_signals) + print(top_signals[0:3]) + # Explain the signal + instruction = MessageTemplate.load(path_prompt_choice) + top_signals_text = generate_signals_texts(signals_data, top_signals) + message_history = [MessageTemplate.load(m) for m in st.session_state.history] + message_history += [instruction] + with st.chat_message("assistant"): + message_placeholder = st.empty() + full_response = "" + for response in TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=message_history, + message_kwargs={ + "signals": top_signals_text, + "user_input": st.session_state.user_info + }, + stream=True, + ): + full_response += response.choices[0].delta.get("content", "") + message_placeholder.markdown(full_response + "▌") + message_placeholder.markdown(full_response) + st.session_state.messages.append({"role": "assistant", "content": full_response}) + st.session_state.history.append({"role": "assistant", "content": full_response}) + + elif intent == "following_up": + print(st.session_state.active_signal) + #Follow up the user's message + instruction = MessageTemplate.load(path_prompt_following_up) + message_history = [MessageTemplate.load(m) for m in st.session_state.history] + message_history += [instruction] + with st.chat_message("assistant"): + message_placeholder = st.empty() + full_response = "" + for response in TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=message_history, + message_kwargs={ + "signal": signals_dict[st.session_state.active_signal]['full_text'], + 
"user_input": user_message + }, + stream=True, + ): + full_response += response.choices[0].delta.get("content", "") + message_placeholder.markdown(full_response + "▌") + message_placeholder.markdown(full_response) + + st.session_state.messages.append({"role": "assistant", "content": full_response}) + st.session_state.history.append({"role": "assistant", "content": full_response}) + + # # Add user message to history + # prompt = prompt2() + # st.session_state.messages.append({"role": "user", "content": prompt.to_prompt()}) + # print(user_message) + # # Generate AI response + # with st.chat_message("assistant"): + # message_placeholder = st.empty() + # full_response = "" + # for response in TextGenerator.generate( + # model=selected_model, + # temperature=temperature, + # messages=[{"role": m["role"], "content": m["content"]} for m in st.session_state.messages], + # message_kwargs= None, + # stream=True, + # ): + # full_response += response.choices[0].delta.get("content", "") + # message_placeholder.markdown(full_response + "▌") + # message_placeholder.markdown(full_response) + # # Add AI response to history + # st.session_state.messages.append({"role": "assistant", "content": full_response}) + + +def llm_call( + selected_model: str, temperature: float, message: MessageTemplate, messages_placeholders: dict) -> str: + """Call the LLM""" + message_placeholder = st.empty() + full_response = "" + for response in TextGenerator.generate( + model=selected_model, + temperature=temperature, + messages=[message], + message_kwargs=messages_placeholders, + stream=True, + ): + full_response += response.choices[0].delta.get("content", "") + message_placeholder.markdown(full_response + "▌") + + message_placeholder.markdown(full_response) + + return full_response + + +def prompt2(): + """ + Generate a prompt for an overview of the impact of signals on the user + """ + prompt = MessageTemplate.load(data_path + "prompt2.json") + return prompt + +def main() -> None: + """Run the app.""" + auth_openai() + + signals_bot(sidebar=False) + + +main() diff --git a/src/genai/sandbox/signals/data/00_system.jsonl b/src/genai/sandbox/signals/data/00_system.jsonl new file mode 100644 index 0000000..50bdd53 --- /dev/null +++ b/src/genai/sandbox/signals/data/00_system.jsonl @@ -0,0 +1 @@ +{"role": "user", "content": "###Instructions###\nYou are a helpful, kind, intelligent and polite futurist. You work for the UK innovation agency Nesta, and your task is to engage the user about the future signals and trends that Nesta has researched, by helping the user imagine and appreciate how the signals will impact their life. You will personalise the user experience by taking the information provided by the user and tailoring your explanation to the user background. Here are the future signals that you can talk about: {signals}. Do not discuss other future signals as this is not part of this year's Nesta's Signals edition."} diff --git a/src/genai/sandbox/signals/data/01_intro.jsonl b/src/genai/sandbox/signals/data/01_intro.jsonl new file mode 100644 index 0000000..977da88 --- /dev/null +++ b/src/genai/sandbox/signals/data/01_intro.jsonl @@ -0,0 +1,3 @@ +{"role": "assistant", "content": "Hi, I’m Scout, Discovery Hub’s experimental AI assistant which helps people explore and interpret signals about the future. 
✨"} +{"role": "assistant", "content": "This year we have collected signals about a variety of topics, from green energy to education, to health and even sleep."} +{"role": "assistant", "content": "Tell me one or two things about you and your interests, so that I can suggest which future signals might be the most relevant to you!"} diff --git a/src/genai/sandbox/signals/data/02_signal_impact.jsonl b/src/genai/sandbox/signals/data/02_signal_impact.jsonl new file mode 100644 index 0000000..f293084 --- /dev/null +++ b/src/genai/sandbox/signals/data/02_signal_impact.jsonl @@ -0,0 +1,2 @@ +{"role": "user", "content": "Start your answer by explaining in one clear sentence how the selected future signal might be relevant to the user, given the user information and conversation history. Then describe three ways how the selected future signal might impact them. Keep these descriptions short, two-three sentences at most. Finish your answer by encouraging the user to ask questions about this signal (note that you will try your best to answer them) or suggest to ask about the other future signals. Remember that you must be patient and never offend or be aggressive. \n\n###Future signal###{signal}\n\n###User information### Here is what the user told you about themselves: {user_input}.\n\n###Answer###" +} \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/03_signal_choice.jsonl b/src/genai/sandbox/signals/data/03_signal_choice.jsonl new file mode 100644 index 0000000..94999bd --- /dev/null +++ b/src/genai/sandbox/signals/data/03_signal_choice.jsonl @@ -0,0 +1,2 @@ +{"role": "user", "content": "Start your answer by explaining each of the signals in one clear sentence (use similar language to the signals descriptions). If possible, indicate how a signal might be relevant to the user, given the user information and conversation history. Finish your answer by asking the user to choose one of the signals to hear more about it. Remember that you must be patient and never offend or be aggressive. \n\n###Future signals###{signals}\n\n###User information### Here is what the user told you about themselves: {user_input}.\n\n###Answer###" +} \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/04_follow_up.jsonl b/src/genai/sandbox/signals/data/04_follow_up.jsonl new file mode 100644 index 0000000..979a49e --- /dev/null +++ b/src/genai/sandbox/signals/data/04_follow_up.jsonl @@ -0,0 +1,2 @@ +{"role": "user", "content": "Answer to the user's most recent message. Be as concise or detailed as necessary. Use the information from the future signal description when relevant. Keep your answers conversational and three to four sentences long at most. 
\n\n###Future signal###{signal}\n\n###User information### Here is what the user told you: {user_input}.\n\n###Answer###" +} \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/func_intent.json b/src/genai/sandbox/signals/data/func_intent.json new file mode 100644 index 0000000..446d281 --- /dev/null +++ b/src/genai/sandbox/signals/data/func_intent.json @@ -0,0 +1,15 @@ +{ + "name": "predict_intent", + "description": "Predict what is the user's intent", + "parameters": { + "type": "object", + "properties": { + "prediction": { + "type": "string", + "enum": ["new_signal", "more_signals", "following_up"], + "description": "The predicted intent" + } + }, + "required": ["prediction"] + } + } \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/func_top_signal.json b/src/genai/sandbox/signals/data/func_top_signal.json new file mode 100644 index 0000000..940184e --- /dev/null +++ b/src/genai/sandbox/signals/data/func_top_signal.json @@ -0,0 +1,15 @@ +{ + "name": "predict_top_signal", + "description": "Predict which one of the signal is the most relevant to user input", + "parameters": { + "type": "object", + "properties": { + "prediction": { + "type": "string", + "enum": [], + "description": "The predicted most relevant signal" + } + }, + "required": ["prediction"] + } + } \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/func_top_three_signals.json b/src/genai/sandbox/signals/data/func_top_three_signals.json new file mode 100644 index 0000000..2c6333c --- /dev/null +++ b/src/genai/sandbox/signals/data/func_top_three_signals.json @@ -0,0 +1,18 @@ +{ + "name": "predict_top_signals", + "description": "Predict which three signals are the most relevant to user input", + "parameters": { + "type": "object", + "properties": { + "prediction": { + "type": "array", + "items": { + "type": "string", + "enum": [] + }, + "description": "The predicted most relevant signals" + } + }, + "required": ["prediction"] + } + } \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/intent_actions.json b/src/genai/sandbox/signals/data/intent_actions.json new file mode 100644 index 0000000..4f7e34e --- /dev/null +++ b/src/genai/sandbox/signals/data/intent_actions.json @@ -0,0 +1,14 @@ +[ + { + "name": "new_signal", + "description": "User wishes to change the topic and talk about an new future signal. Alternatively, the user has been just presented with a set of future signal options by the assistant, and the user has now chosen which signal to talk about more." + }, + { + "name": "more_signals", + "description": "User has asked to hear more about other future signals" + }, + { + "name": "following_up", + "description": "User is following up with another question about the signal that's being discussed just now." + } +] \ No newline at end of file diff --git a/src/genai/sandbox/signals/data/prompt_intent.jsonl b/src/genai/sandbox/signals/data/prompt_intent.jsonl new file mode 100644 index 0000000..9f29fc9 --- /dev/null +++ b/src/genai/sandbox/signals/data/prompt_intent.jsonl @@ -0,0 +1 @@ +{"role": "user", "content": "You are a helpful chatbot talking with the user about the articles of future signals that we wrote. ###Instructions### Predict the intended action of the user, what the user wishes you to carry out based on the conversation history. 
Pay attention to the most recent messages\n\n###Possible intents###\n{intents}\n\n"} diff --git a/src/genai/sandbox/signals/data/prompt_top_signal.jsonl b/src/genai/sandbox/signals/data/prompt_top_signal.jsonl new file mode 100644 index 0000000..15f68df --- /dev/null +++ b/src/genai/sandbox/signals/data/prompt_top_signal.jsonl @@ -0,0 +1 @@ +{"role": "user", "content": "###Instructions### Predict which of the following future signals is the most relevant to user input. You have to choose one of these signals. \n\n###Future signal summaries###\n{signals}\n\n###User input:\n{user_input}"} diff --git a/src/genai/sandbox/signals/data/prompt_top_three_signals.jsonl b/src/genai/sandbox/signals/data/prompt_top_three_signals.jsonl new file mode 100644 index 0000000..eff8e9d --- /dev/null +++ b/src/genai/sandbox/signals/data/prompt_top_three_signals.jsonl @@ -0,0 +1 @@ +{"role": "user", "content": "###Instructions### Predict which three of the following future signals are the most relevant to user input. You have to choose three of these signals. \n\n###Future signal summaries###\n{signals}\n\n###User input:\n{user_input}"} diff --git a/src/genai/sandbox/signals/data/signals_2023.json b/src/genai/sandbox/signals/data/signals_2023.json new file mode 100644 index 0000000..80593e0 --- /dev/null +++ b/src/genai/sandbox/signals/data/signals_2023.json @@ -0,0 +1,37 @@ +[ + { + "short_name": "Robochefs", + "title":"Robochefs: a bumpy road from lab to kitchen?", + "summary": "The signal discusses the impact of automation in commercial kitchens, suggesting that redirecting research and development towards healthier processes, like air frying, could benefit our diets. Companies like Sweetgreen and YPC Technologies are innovating in this space, with initiatives like “cooking as a service” using fresh ingredients. However, the sector faces challenges due to a downturn in venture capital investment, leading to shutdowns of several kitchen robotics start-ups, indicating a need for further refinement in their value propositions.", + "full_text": "Robochefs: a bumpy road from lab to kitchen? Experiments trialling automation in commercial kitchens could spell mixed news for our diets. What would you like for lunch? How about a katsu curry? Take a seat – it’ll be prepared by a robot, customised to your preferences and ready in three minutes. The scenario may sound like it was lifted from a sci-fi novel, but it became a reality for employees at Ocado’s headquarters in 2022. They hosted the first real-world trial of the food service robot Semblr, the creation of UK-based start-up Karakuri. But before you get too excited, the path from lab to commercial (or domestic) kitchens looks far from assured, and an injection of automation might not necessarily spell good news for our diets. Kitchen robotics have been swept forward as part of a wider wave of innovation in the foodtech space, which includes the use of automation in supermarkets and food deliveries. In kitchen automation specifically, our analysis found that the start-up Aitme raised £7.7 million in 2021, while Hyphen raised £17.7 million in early 2022. This level of investment has been underpinned by a boom in venture capital (VC) investment over the previous decade fuelled by low interest rates. But now that VC investors are pulling back across the board and overall investment in the foodtech sector is down 65% year on year, how many ‘robochefs’ will still be left standing when the dust settles? 
Back in 2015, one of the first kitchen robots to achieve a degree of fame was unveiled by Moley Robotics. It boasted robotic arms capable of whipping up a crab bisque, modelled on the movements of MasterChef champion Tim Anderson. These highly sophisticated inventions are unlikely to make an appearance in our homes any time soon (in 2021 a ‘Moley Robotics kitchen’ was reported to cost a minimum of £150,000). However, this technology could be suitable for use in commercial settings. In particular, dark kitchens – catering premises without a restaurant front that exist solely to produce takeaways – are being targeted as especially promising sites for robotics and automation. These sites (also known as ghost kitchens) can typically be found tucked away on industrial estates and areas with low footfall. The low cost of renting this kind of property, when combined with the labour savings from kitchen robots, opens up new opportunities for companies to lower the price of meals. Tel-Aviv based Kitchen Robotics is specifically focused on the dark kitchen sector with its ‘Beastro robot’. A wave of kitchen automation could spell upheaval for workers in the food industry – estimates suggest that one particular robot could save employers in the US up to 75% in wages. But there are also nutritional implications. Karakuri recently debuted an automated fryer that can produce 60kg of french fries per hour (that’s more than 500 portions of McDonald’s medium fries). Earlier last year, American fast food chain White Castle announced plans to install 100 Miso Robotics fryer robots in its restaurants. Chipotle has teamed up with the same company to test out tortilla chip making robots. If innovation ends up being primarily channelled towards the mass production of fast food, one consequence could be a relative reduction in price of these products when compared to healthier options. How might this impact consumer behaviour? Over time though, kitchen automation need not be bad for our waistlines if some of this research and development (R&D) is redirected towards healthier processes. For example, fryer robots could be developed to use the lower-fat technique of air frying instead of deep frying. Chains specialising in healthier options might also start to get in on the act – last year, the US salad chain Sweetgreen acquired kitchen robotics start-up Spyce, to “create healthy fast food at scale”. The Canadian kitchen robotics company YPC Technologies claims its robots can make thousands of recipes using fresh ingredients and have created a “cooking as a service” business called Jasper (situating robotic kitchens near residential high-rises and charging a subscription fee for freshly prepared meals). But before any of these innovations in kitchen robotics can scale, they will need to survive the sharp market downturn in VC investment. Already, there are reports of shutdowns, including at least three pizza robotics start-ups in 2022. A salad-making start-up, Chowbotics, was also shut down a year after being acquired by the food delivery company DoorDash. The recession will likely lead to further closures and dwindling resources for R&D. Kitchen robotics start-ups may still have work to do when it comes to refining their value proposition, but there are clearly potential efficiency gains up for grabs when it comes to automating time-consuming aspects of food preparation and distribution. 
However, we should also be keeping a very close watch on whether the push for faster, cheaper food comes at the expense of the quality of our diets.", + "url": "www.nesta.org.uk/feature/future-signals-2023/robochefs-a-bumpy-road-from-lab-to-kitchen/" + }, + { + "short_name": "Abundant energy", + "title": "Too cheap to meter: could low-cost renewables create an abundance of energy?", + "summary": "The article highlights the significant decrease in costs for solar and wind power, offering hope for a new era of abundant, clean energy despite the current energy crisis. It discusses the challenges of energy intermittency with renewables and suggests overproducing energy to compensate, enabling opportunities for storage and flexible use. Additionally, it explores the potential for using excess renewable energy in innovative ways, like green desalination and direct air capture for carbon removal, while cautioning against potential environmental impacts and overconsumption.", + "full_text": "Too cheap to meter: could low-cost renewables create an abundance of energy? A trend towards cheaper, cleaner power could pave the way for creative uses of excess energy. News of record-breaking temperatures filled the headlines in 2022. But amid the dire warnings about the effects of climate change was a signal of hope about our ability to reduce carbon emissions. A report from the International Renewable Energy Authority showed that, between 2010 and 2021, the global average cost of new solar power had declined by a staggering 88% while onshore wind had gone down by 68%. Defying persistently pessimistic forecasts, this data was further evidence of a wider trend towards a precipitous drop in the cost of renewables. Given the acute energy crisis we face today, it sounds counterintuitive to say that we could be standing on the cusp of a new era of energy abundance. But renewable energy is cheap to produce, especially compared to current gas prices and, crucially, there is almost no limit on how much wind or solar energy we can use; the only real constraint is how quickly we can build the infrastructure and integrate supply. There is now, if we want to pursue it, the prospect of abundant, clean, nearly inexhaustible energy. But there is a catch: this renewable energy will only be abundant some of the time. Currently, energy demand has peaks and troughs. But while it's relatively easy to ramp up production in a gas power station in anticipation of the nation boiling the kettle at half time during a World Cup match, the sun and wind don’t come with an on-off switch. One answer to this challenge is to build extra renewable capacity so that we have plenty of power, even during the periods when natural supplies dip. But when renewables are going at full tilt, what could we do with this large amount of excess energy? An instinctive response is to suggest that we should avoid over-producing energy only to waste some of it, focusing instead on storage and flexible energy use. While both of those will be a critical part of our net-zero energy system, they are easier to do with a larger electricity supply: demand for electricity will increase dramatically anyway as we move towards net zero. Having more electricity than we need opens up more options for storage, as storage methods could afford to be less efficient and there would be smaller troughs in supply. But there is also a risk here of thinking in a limited way. If energy was clean, cheap and plentiful, what other useful things could we do with it? 
Abundant energy can be transformative, often in ways we don’t expect. So what opportunities might there be for using excess energy from cheap and widespread renewables? Perhaps first we should look to nations that already have abundant energy. A number of Middle Eastern countries use their generous endowments of fossil fuels to desalinate water in areas where freshwater is scarce. In Saudi Arabia, desalination accounts for nearly 20% of energy consumption. If done in a more sustainable way, with abundant renewables, new opportunities for agriculture could open up in many places. This model is also a potentially valuable way of adapting to climate change: where water becomes scarce, green desalination could provide an answer. We could also use excess energy to fight carbon emissions directly. In practice, the landmark international Paris Agreement on climate change will require us to remove greenhouse gases from the atmosphere, not just stop emitting. One way of doing this is direct air capture powered by clean electricity. Already, experimental plants are being developed in Iceland with access to large quantities of cheap geothermal power. A problem at the moment is how much energy is used per unit of carbon removed. One study found that while direct air capture might help us achieve our climate goals, it might need to use perhaps a quarter of global energy demand by 2100. But if green energy is abundant everywhere, it creates scope to use this technology at scale and worldwide. Of course, tackling climate change and energy supply through renewables might create new kinds of environmental havoc. Endless energy might enable people to more easily encroach on previously untouched habitats such as the poles or the deep sea, as energy is one of the resources needed to keep people alive in these environments. Yet opening these new frontiers could harm native species. We could also simply become ever more wasteful and fail to use excess energy for anything socially useful at all. A future with abundant renewable energy is also far from assured. In 1954 the then-chairman of the United States Atomic Energy Commission, Lewis Strauss, pointed to a nuclear-powered future with electricity 'too cheap to meter'. But the technology never replaced fossil fuels in the way some anticipated. Similar predictions have been made about nuclear fusion since the 1950s and although recent strides mean that it might be somewhat closer than previously anticipated, we're still some way from widespread deployment. Yet there are grounds for cautious optimism. New, improved forecasting techniques, which are more closely aligned with recent data, are beginning to point to a rosier future where renewable energy might be much, much cheaper, even in the face of growing demand, which is estimated to increase by nearly 90% by 2050. This represents a huge potential step change in productivity as part of the reduction in cost is down to improvements in efficiency – we can generate more energy with less hardware. 
And renewable energy’s greatest weakness – intermittency – might turn out to be a strength as we create excess supply that presents opportunities that were previously practically impossible.", + "url": "https://www.nesta.org.uk/feature/future-signals-2023/too-cheap-to-meter-could-low-cost-renewables-create-an-abundance-of-energy/" + }, + { + "short_name": "Pronatalism vs pro-family", + "title": "Baby boom: pronatalism vs pro-family", + "summary": "The article discusses the rising political interest in pronatal policies, which aim to incentivize higher birth rates in response to declining fertility rates and the economic consequences of an aging population. While some countries have implemented measures like baby bonuses and tax cuts with varied success, these policies are often critiqued for their modest impact on long-term birth rates and potential implications for women's roles and anti-immigration sentiments. The article suggests that a more effective approach would be comprehensive pro-family policies focusing on childcare, parental leave, and overall family wellbeing, as seen in countries like Sweden, where such policies have been associated with relatively higher fertility rates.", + "full_text": "Baby boom: pronatalism vs pro-family As fertility rates decline, calls for policies that promise incentives for families to have more children are on the rise. Spooked by demographic trends, politicians are taking a closer interest in how many babies their country’s citizens are choosing to have. In fact, the UN suggests that 28% of countries worldwide have policies intended to incentivise people to have children, including baby bonuses, tax cuts, childcare provision and more generous parental leave. These ideas aren’t new, but in 2022 we started to hear of senior politicians in the UK entertaining the merits of these so-called ‘pronatal’ policies, with reports hitting the press of a cabinet minister backing tax cuts for women who have more babies. But should the UK be quite so quick to follow other countries down this path? The surge in political interest in fertility rates has been sparked by the projected economic consequences of the current demographic trajectory. While the total world population recently reached eight billion and is still growing, that growth is being driven by people living longer rather than being born in larger numbers. According to the UN, about two-thirds of the world’s population live in countries where too few babies are being born for the population to replace itself. In England and Wales, the ONS reports that the fertility rate has declined since 1964 and has been below replacement levels since 1973. Fewer babies being born means fewer people joining the population, leading to a risk of economic and social stagnation. Combined with people living longer, fewer births also means an increasing imbalance between the size of the retired population and the working population available to support them. At first glance, pronatal policies might appear to be a win-win for the state and for families. In Lestijärvi in Finland, families have received €10,000 per birth since 2013. France has a longstanding range of pronatal policies: public spending on families is generous, families receive grants, benefits and subsidised childcare to help with the costs of having children and France has had a consistently high fertility rate. 
However, taking a closer look at the recent wave of pronatalism, there are good reasons to be sceptical of policy interventions designed primarily to incentivise more births. Some studies have pointed to a ‘modest’ impact on birth rates but overall the evidence base on the impact is generally mixed. For example, these policies may create a short-term boost in birth rates, but this may reflect an influence on birth timing without changing the total number of children born in the long term. Hungary charges families with three or more children virtually no taxes, but so far it does not look as if this policy has led to many third or fourth babies. The political climate in Hungary underscores other reasons to be sceptical about the pronatal push: these policies are often linked to anti-immigration and nationalist sentiments, or to restrictive policies around access to contraception and abortion. A focus on birth rates alone implies a narrowly defined role and status for women, and raises questions about whether the state has any place ascribing a social or economic value to procreation. Further, the costs of having a child are not limited to the time of birth, meaning longer-term pro-family policies that promote parent and child wellbeing and support parental labour market participation may make more sense from a wider social and economic point of view. For example, these policies could include improved provision of childcare, more flexible working for parents, high-quality education and health services, a child-friendly built environment and a culture that is welcoming to children and supportive of parents. Swedish families receive a range of social benefits combined with long periods of protected parental leave and excellent childcare provision. As a package, these pro-family policies seem to have accompanied a climbing fertility rate between 2000 and 2010, and while it has since fallen, it remains higher than many of its European neighbours. The UK, and England in particular, does not perform well on measures of family friendliness at present. UNICEF ranked the UK 28th out of 31 countries across paid parental leave, support for breastfeeding and affordable, high-quality childcare and preschool education. We have the second-highest childcare costs in Europe. A couple with an average income and two children under three spend 29% of their annual income on childcare, compared to just 12% in France. A staggering 62% of parents surveyed by the UK charity Pregnant then Screwed said that high childcare costs prevent them from having more children. Meanwhile, UK child poverty rates are unacceptably high: an average of eight children in a classroom of 30 were living in poverty in 2020-21. Larger families are particularly exposed to deep poverty and the current benefit cap for families with more than two children is a driver of this poverty. The backlash that followed the call for tax cuts for women having more children in this country illustrates the pitfalls of sticking-plaster solutions to long-term demographic shifts. One-off cash incentives, tax cuts or appeals to patriotism are unlikely to change the equation for would-be parents making significant life decisions. Yet the rise of pronatalism should be taken seriously as a signal that something has gone awry when it comes to state support for family life. Rather than a narrow focus on the number of births, support should address families’ needs in the round. 
The UK urgently needs to improve availability and affordability of childcare and uptake of early education. To improve the conditions in which children grow and develop, we need to reduce the pressures families face, including their financial conditions. It is possible to make a long-lasting difference to the quality of family life in ways that will promote the health of our population in the long term, but we should look beyond the baby bump.", + "url": "https://www.nesta.org.uk/feature/future-signals-2023/baby-boom-pronatalism-vs-pro-family/" + }, + { + "short_name": "Hidden Figures", + "title": "Hidden figures: are we keeping pace with changes to family life?", + "summary": "The article discusses the evolving nature of family life in the UK, noting a significant shift in family structures since 2000, with 44% of children not living with both biological parents throughout childhood. It highlights the increasing diversity in family forms, including kinship care, use of reproductive technologies, and changes in legal definitions of parenthood. The focus is on the importance of investing in the quality of relationships in any family structure for children’s development, and the need for more inclusive and innovative approaches in family services and research to support the various adults involved in a child’s life.", + "full_text": "Hidden figures: are we keeping pace with changes to family life? Data on parenting in the UK points to the need to invest in relationships, not roles. In the year 2000, 19,000 children and their families joined a landmark research project – the Millennium Cohort Study (MCS). This longitudinal study has played a vital role in helping us track trends for children growing up in the early 21st century. This year, the Institute for Fiscal Studies published an analysis of the MCS cohort indicating that 44% of children born in 2000-2001 will not have lived with both their biological parents throughout their childhood. This compares to a figure of 21% for those participating in the 1970 British Cohort Study. As a signal of change, this should spark a deeper conversation about who “does” parenting in our society – along with the implications for those whose work involves children and families. It points to an important shift in children’s lived experiences, one which has been underway for some time. Yet, when it comes to building up a more detailed picture, the information we have about family structure is still relatively limited. The Nuffield Foundation has commented that “our principal sources of data no longer reflect the reality of modern family life in the UK”. The picture may be incomplete, but there are some sources of insight into the range of adults who are involved in children’s lives, beyond biological parents. In 2011, 180,000 children under 18 were estimated to be living with a relative or family friend in kinship care, while about a quarter of pre-school children in England received childcare from grandparents in 2021. In recent decades, widening access to reproductive technologies such as IVF, surrogacy and egg donation have given rise to new family forms and models of kinship. Changes in legal definitions of parenthood have followed. For example, as of 2009, the ‘non birth mother’ in a lesbian couple could be named as the other parent on a birth certificate if the couple conceived together through donor insemination or IVF. 
A wide cross-section of adults are likely to find themselves in a parenting-type role at some point, whether through a new partner, as an adoptive or foster parent or in a less formalised role. These are not the family structures for which many of our policies and services were designed, which leads to the risk that caregivers who would benefit from support are being overlooked or inadvertently excluded. Historically, fathers have often been absent from early years research, services and data, an omission that is beginning to be better addressed through initiatives such as DadPad. Should we now start asking if there are additional hidden figures in children’s lives? And whether we are missing opportunities to support other people playing a key role in children’s development? One thing is clear: in any family structure it’s the quality of relationships that matters for children’s development, particularly with key carers. Secure attachment with a caregiver and warm and responsive adult-child relationships make a difference to children’s social, emotional and cognitive development and later outcomes. Relationships matter in other ways too: parents’ mental health affects children and access to social networks matter for everyone in the family. Many family services are increasingly focusing on relationships, both between parents and children and between the parents themselves. Other ways in which we could respond to this signal could include specialist interventions that work with different adults taking on parenting roles, alongside approaches to intelligence and data-gathering that focus on understanding who matters in children’s lives. More inclusive and participatory approaches in research and service design would help to ensure that we are really listening to and understanding families, leading to more responsive support. Some innovations are addressing the changing nature of families and households. Shared Lives Plus, for example, showed us that we can think creatively about how we support both relationships and the needs within different homes when they matched young people seeking accommodation with elderly people with room to spare. Nesta in Wales worked with Flintshire County Council to adapt the Mockingbird Family Model to create extended families around foster children and carers. And, if more of us are going to play a parenting role, whether as a biological parent or a temporary carer, can we be better prepared? What if we got ahead of the curve and made sure that we’re all schooled in key aspects of child development, such as attachment theory, or even how to get the most out of play or sharing a story book? The fact that families are changing shouldn’t be a surprise. The last few hundred years provide us with plenty of examples of how attitudes and practices can change – from wet nurses and governesses and “seen and not heard” to an increasing emphasis on children’s rights and voices. So perhaps this signal is really a reminder of what we have always known – “parenting” is a process and a relationship, rather than a fixed role. 
We need to think about the whole ecosystem of adults who have the potential to positively influence children’s lives, investing in the quality of relationships first and foremost and creating a culture that genuinely supports everyone involved in parenting.", + "url": "https://www.nesta.org.uk/feature/future-signals-2023/hidden-figures-are-we-keeping-pace-with-changes-to-family-life/" + }, + { + "short_name": "Green neighbours", + "title": "Everybody needs green neighbours", + "summary": "The article discusses the growing trend of community-led initiatives in the UK to reduce energy bills and decarbonize homes. It highlights the case of Stithians, a village in Cornwall, where a project is replacing traditional heating systems with efficient ground source heat pumps, benefiting around 250 homes. The article also mentions other models of community energy efforts, such as peer-to-peer trading, group purchasing schemes, and community energy generation, and emphasizes the potential of these initiatives in addressing energy challenges, despite some difficulties in securing investment and government support.", + "full_text": "Everybody needs green neighbours Communities are increasingly coming together to reduce energy bills and decarbonise homes. In September 2022, a small village in Cornwall called Stithians echoed to the sounds of loud drilling. But, unusually, the noise could well have been a welcome sound to those living nearby. It heralded the creation of boreholes designed to replace existing heating systems with efficient ground source heat pumps. Around 250 homes, many of which rely on expensive oil heating or electric storage heaters, are set to benefit from the scheme run by Kensa Utilities and funded by the European Regional Development Fund. The need for cheaper, cleaner energy is more urgent than ever. Of the 630,000 excess winter deaths in England and Wales in 2020-21, cold homes are thought to have contributed to around 13,500 and fuel poverty around 6,300. With living costs and energy bills at a record high, the number is likely to be much higher this winter. Most householders are likely to respond by turning down their thermostat and boiler flow temperature, improving insulation or switching off energy-guzzling appliances. But, as in Stithians, there are a growing number of local initiatives where neighbours are pooling resources, skills or knowledge with their wider community. In many cases, these initiatives are building on ideas that have been tested elsewhere. There are more than 300 community energy organisations in the UK that provide community-led renewable energy or run local energy projects, and collectively they saved consumers £3.3 million off their energy bills in 2021. It is an approach growing in popularity as households search for cheaper, more resilient and sustainable options for heat and power this winter. Last year saw a 38% increase in the delivery of community-led energy efficiency and energy-saving services. This wave of community energy and heating initiatives spans a diverse spectrum. It includes peer-to-peer trading models (where energy or heat is traded between individual households or communities), group purchasing schemes (that reduce the cost to the individual through leveraging economies of scale) and community energy generation (where a community owns or controls the means to produce renewable energy locally). 
These approaches vary in size and ambition, from providing heat networks for a whole community (as in Stithians) to disrupter companies building energy production on a national scale, but they are all based on the principle of communities sharing resources and knowledge. What do these models look like in action? Nonprofits such as Energy Local are enabling peer-to-peer trading where households with energy generation capabilities can club together and exchange local, clean energy for an agreed price. Solar Together is an example of group purchasing in London, responsible for installing solar panels in more than 2,100 homes in London and securing bulk-buying discounts for residents. Other communities have taken energy generation into their own hands. Bristol Energy Cooperative generates solar energy for its local community and, thanks to solar roofs, has facilitated more than £350,000 of community benefit payments since being established in 2011. Meanwhile, Ripple Energy runs a large-scale scheme where people anywhere in the country can buy shares in cooperatively-owned wind farms in order to reduce their energy bills. It’s been so successful that it’s now oversubscribed and seeking to increase the number of wind farms it owns. Forging closer community links has some other, less obvious, positive outcomes. Social feedback can have a powerful effect on people’s choices about energy consumption. There is evidence that giving households information about how much energy they spend compared to their neighbours can lead to small but enduring energy savings. Neighbours are also well-positioned to help those who are harder to reach by supporting each other to increase the energy efficiency of their homes and providing advice and referrals for vulnerable residents at risk of fuel poverty. Despite the benefits local energy and heating initiatives bring to residents, it has generally been difficult for some of these initiatives (such as community energy projects) to make a case for investment, particularly given the lack of tax relief. There is also minimal government support for community and home energy generation, illustrated by the removal of energy generation incentives such as the feed-in tariff. However, some of these emerging projects have the potential to overcome the challenges around scale and sustainability that have prevented community energy projects from scaling to date. For example, the district heating project in Stithians acts as a proof of concept when it comes to bringing together the private sector and community actors. The scale of the energy and climate challenges we face demands a multi-pronged approach. We anticipate that in coming years this will give rise to a variety of new business models facilitating collective action on heating and energy. 
These could serve as important tools in the effort to decarbonise homes at scale and reduce energy bills.",
    "url": "https://www.nesta.org.uk/feature/future-signals-2023/everybody-needs-green-neighbours/"
  }
]
\ No newline at end of file
diff --git a/src/genai/sandbox/signals/signals_app.py b/src/genai/sandbox/signals/signals_app.py
index 9e62705..832bf64 100644
--- a/src/genai/sandbox/signals/signals_app.py
+++ b/src/genai/sandbox/signals/signals_app.py
@@ -23,8 +23,8 @@
 path_func_top_signal = PROMPT_PATH + "func_top_signal.json"
 path_prompt_top_signal = PROMPT_PATH + "prompt_top_signal.jsonl"
 # Top three signals function
-path_func_top_signals = PROMPT_PATH + "func_top_signals.json"
-path_prompt_top_signals = PROMPT_PATH + "prompt_top_signals.jsonl"
+path_func_top_three_signals = PROMPT_PATH + "func_top_three_signals.json"
+path_prompt_top_three_signals = PROMPT_PATH + "prompt_top_three_signals.jsonl"
 # Intent detection function
 path_func_intent = PROMPT_PATH + "func_intent.json"
 path_prompt_intent = PROMPT_PATH + "prompt_intent.jsonl"
@@ -147,7 +147,7 @@ def predict_top_signal(user_message: str, signals: list) -> str:
     return top_signal['prediction']


-def predict_top_three_signals(user_message: str, signals: list) -> str:
+def predict_top_three_signals(user_message: str, signals: list) -> list:
     """Predict the top three signals from the user's message.

     Args:
@@ -157,11 +157,11 @@ def predict_top_three_signals(user_message: str, signals: list) -> str:
         list: The top three signals from the user's message.
     """
     # Function call
-    func_top_signals = json.loads(open(path_func_top_signals).read())
-    func_top_signals['parameters']['properties']['prediction']['enum'] = signals
-
-    message = MessageTemplate.load(path_prompt_top_signals)
-    function = FunctionTemplate.load(func_top_signals)
+    func_top_signals = json.loads(open(path_func_top_three_signals).read())
+    func_top_signals['parameters']['properties']['prediction']['items']['enum'] = signals
+    print(func_top_signals)
+    message = MessageTemplate.load(path_prompt_top_three_signals)
+    function_top_three = FunctionTemplate.load(func_top_signals)

     response = TextGenerator.generate(
         model=selected_model,
@@ -169,10 +169,13 @@
         messages=[message],
         message_kwargs={"signals": signals_descriptions, "user_input": user_message},
         stream=False,
-        functions=[function.to_prompt()],
+        functions=[function_top_three.to_prompt()],
         function_call={"name": "predict_top_signals"},
     )
     top_signals = json.loads(response['choices'][0]['message']['function_call']['arguments'])
+    print(message)
+    print(f"Prediction: {top_signals}")
+    print(response)
     return top_signals['prediction']

 def signals_bot(sidebar: bool = True) -> None:
@@ -199,10 +202,7 @@
     st.title("Signals chatbot")
     st.write("Let's discuss the future!")

-    # Keep track of discussed signals
-    st.session_state.signals = []
-
-    # First time runnig the app
+    # First time running the app
     if "messages" not in st.session_state:
         # Record of messages to display on the app
         st.session_state.messages = []
@@ -211,16 +211,20 @@
         # Keep track of which state we're in
         st.session_state.state = "start"
         # Fetch system and introduction messages
-        with st.chat_message("assistant"):
-            # Add system message to the history
-            system_message = read_jsonl(PATH_SYSTEM)
-            st.session_state.history.append(system_message)
-            # Add the intro messages
-            intro_messages = read_jsonl(PATH_INTRO)
-            print(intro_messages)
-            for m in intro_messages:
-                st.session_state.messages.append(m)
-                st.session_state.history.append(m)
+        st.session_state.signals = []
+
+        # Add system message to the history
+        system_message = read_jsonl(PATH_SYSTEM)[0]
+        system_message = MessageTemplate.load(system_message)
+        system_message.format_message(**{"signals": signals_descriptions})
+        st.session_state.history.append(system_message.to_prompt())
+        print(system_message.to_prompt())
+        # Add the intro messages
+        intro_messages = read_jsonl(PATH_INTRO)
+        print(intro_messages)
+        for m in intro_messages:
+            st.session_state.messages.append(m)
+            st.session_state.history.append(m)

     # Display chat messages on app rerun
     for message in st.session_state.messages:
@@ -241,34 +245,38 @@
             st.session_state.user_info = user_message
             st.session_state.state = "chatting"
         else:
-            intent = predict_intent(user_message, st.session_state.messages)
+            intent = predict_intent(user_message, st.session_state.history)
+            print(intent)
             # intent = "following_up"
             if intent == "new_signal":
                 # Predict the signal to explain
                 allowed_signals = [s for s in signals if s not in st.session_state.signals]
                 signal_to_explain = predict_top_signal(user_message, allowed_signals)
-                st.session_state.signals += signal_to_explain
+                st.session_state.signals.append(signal_to_explain)
                 st.session_state.active_signal = signal_to_explain
+                print(signal_to_explain)
+                print(f"I have these signals in memory: {st.session_state.signals}")
                 # Explain the signal
                 instruction = MessageTemplate.load(path_prompt_impact)
-                message_history = [MessageTemplate.load(m) for m in st.session_state.messages]
+                message_history = [MessageTemplate.load(m) for m in st.session_state.history]
                 message_history += [instruction]
-                message_placeholder = st.empty()
-                full_response = ""
-                for response in TextGenerator.generate(
-                    model=selected_model,
-                    temperature=temperature,
-                    messages=message_history,
-                    message_kwargs={
-                        "signal": signals_dict[signal_to_explain]['full_text'],
-                        "user_input": st.session_state.user_info
-                    },
-                    stream=True,
-                ):
-                    full_response += response.choices[0].delta.get("content", "")
-                    message_placeholder.markdown(full_response + "▌")
-                message_placeholder.markdown(full_response)
+                with st.chat_message("assistant"):
+                    message_placeholder = st.empty()
+                    full_response = ""
+                    for response in TextGenerator.generate(
+                        model=selected_model,
+                        temperature=temperature,
+                        messages=message_history,
+                        message_kwargs={
+                            "signal": signals_dict[signal_to_explain]['full_text'],
+                            "user_input": st.session_state.user_info
+                        },
+                        stream=True,
+                    ):
+                        full_response += response.choices[0].delta.get("content", "")
+                        message_placeholder.markdown(full_response + "▌")
+                    message_placeholder.markdown(full_response)

                 st.session_state.messages.append({"role": "assistant", "content": full_response})
                 st.session_state.history.append({"role": "assistant", "content": full_response})
@@ -278,50 +286,55 @@
                 # Provide an overview of the impacts of signal on the reader
                 # Ask which one the bot should elaborate on
                 allowed_signals = [s for s in signals if s not in st.session_state.signals]
-                top_signals = predict_top_three_signals(user_message, allowed_signals)[0:3]
+                top_signals = predict_top_three_signals(st.session_state.user_info, allowed_signals)
+                print(allowed_signals)
+                print(top_signals)
+                print(top_signals[0:3])
                 # Explain the signal
                 instruction = MessageTemplate.load(path_prompt_choice)
                 top_signals_text = generate_signals_texts(signals_data, top_signals)
-                message_history = [MessageTemplate.load(m) for m in st.session_state.messages]
+                message_history = [MessageTemplate.load(m) for m in st.session_state.history]
                 message_history += [instruction]
-                message_placeholder = st.empty()
-                full_response = ""
-                for response in TextGenerator.generate(
-                    model=selected_model,
-                    temperature=temperature,
-                    messages=message_history,
-                    message_kwargs={
-                        "signals": top_signals_text,
-                        "user_input": st.session_state.user_info
-                    },
-                    stream=True,
-                ):
-                    full_response += response.choices[0].delta.get("content", "")
-                    message_placeholder.markdown(full_response + "▌")
-                message_placeholder.markdown(full_response)
+                with st.chat_message("assistant"):
+                    message_placeholder = st.empty()
+                    full_response = ""
+                    for response in TextGenerator.generate(
+                        model=selected_model,
+                        temperature=temperature,
+                        messages=message_history,
+                        message_kwargs={
+                            "signals": top_signals_text,
+                            "user_input": st.session_state.user_info
+                        },
+                        stream=True,
+                    ):
+                        full_response += response.choices[0].delta.get("content", "")
+                        message_placeholder.markdown(full_response + "▌")
+                    message_placeholder.markdown(full_response)

                 st.session_state.messages.append({"role": "assistant", "content": full_response})
                 st.session_state.history.append({"role": "assistant", "content": full_response})
             elif intent == "following_up":
                 #Follow up the user's message
                 instruction = MessageTemplate.load(path_prompt_following_up)
-                message_history = [MessageTemplate.load(m) for m in st.session_state.messages]
+                message_history = [MessageTemplate.load(m) for m in st.session_state.history]
                 message_history += [instruction]
-                message_placeholder = st.empty()
-                full_response = ""
-                for response in TextGenerator.generate(
-                    model=selected_model,
-                    temperature=temperature,
-                    messages=message_history,
-                    message_kwargs={
-                        "signal": signals_dict[st.session_state.active_signal]['full_text'],
-                        "user_input": user_message
-                    },
-                    stream=True,
-                ):
-                    full_response += response.choices[0].delta.get("content", "")
-                    message_placeholder.markdown(full_response + "▌")
-                message_placeholder.markdown(full_response)
+                with st.chat_message("assistant"):
+                    message_placeholder = st.empty()
+                    full_response = ""
+                    for response in TextGenerator.generate(
+                        model=selected_model,
+                        temperature=temperature,
+                        messages=message_history,
+                        message_kwargs={
+                            "signal": signals_dict[st.session_state.active_signal]['full_text'],
+                            "user_input": user_message
+                        },
+                        stream=True,
+                    ):
+                        full_response += response.choices[0].delta.get("content", "")
+                        message_placeholder.markdown(full_response + "▌")
+                    message_placeholder.markdown(full_response)

                 st.session_state.messages.append({"role": "assistant", "content": full_response})
                 st.session_state.history.append({"role": "assistant", "content": full_response})
diff --git a/src/genai/sandbox/signals/signals_test.ipynb b/src/genai/sandbox/signals/signals_test.ipynb
index 48cc66f..6c64180 100644
--- a/src/genai/sandbox/signals/signals_test.ipynb
+++ b/src/genai/sandbox/signals/signals_test.ipynb
@@ -17,7 +17,7 @@
 },
 {
 "cell_type": "code",
- "execution_count": 228,
+ "execution_count": 233,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -618,7 +618,7 @@
 },
 {
 "cell_type": "code",
- "execution_count": 227,
+ "execution_count": 236,
 "metadata": {},
 "outputs": [
 {
@@ -629,7 +629,7 @@
 " MessageTemplate(initial_template={'role': 'user', 'content': 'Start your answer by explaining each of the signals in one clear sentence (use similar language to the signals descriptions). If possible, indicate how a signal might be relevant to the user, given the user information and conversation history. Finish your answer by asking the user to choose one of the signals to hear more about it. Remember that you must be patient and never offend or be aggressive. \\n\\n###Future signals###{signals}\\n\\n###User information### Here is what the user told you about themselves: {user_input}.\\n\\n###Answer###', 'name': None}, role='user', content=\"Start your answer by explaining each of the signals in one clear sentence (use similar language to the signals descriptions). If possible, indicate how a signal might be relevant to the user, given the user information and conversation history. Finish your answer by asking the user to choose one of the signals to hear more about it. Remember that you must be patient and never offend or be aggressive. \\n\\n###Future signals###Signal 'robochefs': Robochefs: a bumpy road from lab to kitchen?\\nThe signal discusses the impact of automation in commercial kitchens, suggesting that redirecting research and development towards healthier processes, like air frying, could benefit our diets. Companies like Sweetgreen and YPC Technologies are innovating in this space, with initiatives like “cooking as a service” using fresh ingredients. However, the sector faces challenges due to a downturn in venture capital investment, leading to shutdowns of several kitchen robotics start-ups, indicating a need for further refinement in their value propositions.\\n\\nSignal 'abundant_energy': Too cheap to meter: could low-cost renewables create an abundance of energy?\\nThe article highlights the significant decrease in costs for solar and wind power, offering hope for a new era of abundant, clean energy despite the current energy crisis. It discusses the challenges of energy intermittency with renewables and suggests overproducing energy to compensate, enabling opportunities for storage and flexible use. Additionally, it explores the potential for using excess renewable energy in innovative ways, like green desalination and direct air capture for carbon removal, while cautioning against potential environmental impacts and overconsumption.\\n\\nSignal 'baby_boom': Baby boom: pronatalism vs pro-family\\nThe article discusses the rising political interest in pronatal policies, which aim to incentivize higher birth rates in response to declining fertility rates and the economic consequences of an aging population. While some countries have implemented measures like baby bonuses and tax cuts with varied success, these policies are often critiqued for their modest impact on long-term birth rates and potential implications for women's roles and anti-immigration sentiments. The article suggests that a more effective approach would be comprehensive pro-family policies focusing on childcare, parental leave, and overall family wellbeing, as seen in countries like Sweden, where such policies have been associated with relatively higher fertility rates.\\n\\n\\n\\n###User information### Here is what the user told you about themselves: I like food.\\n\\n###Answer###\", name=None)]"
 ]
 },
- "execution_count": 227,
+ "execution_count": 236,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -641,7 +641,7 @@
 },
 {
 "cell_type": "code",
- "execution_count": 231,
+ "execution_count": 237,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -659,20 +659,20 @@
 },
 {
 "cell_type": "code",
- "execution_count": 232,
+ "execution_count": 238,
 "metadata": {},
 "outputs": [
 {
 "name": "stdout",
 "output_type": "stream",
 "text": [
- "Given your interest in food and your role as a parent, you may find the 'robochefs' signal interesting as it discusses the potential for healthier cooking methods and fresh ingredients in automated kitchens. This could have implications for the way we prepare food at home in the future, potentially making it easier to provide healthy meals for your family.\n",
+ "Given your interest in food, you might find the signal 'robochefs' particularly relevant as it discusses the intersection of technology and culinary practices. Robochefs are a new development where automation is being integrated into commercial kitchens, potentially influencing the way food is prepared and served. Companies are exploring healthier cooking methods and fresh ingredients, but the industry is facing economic challenges with a need for clearer benefits to attract investment.\n",
 "\n",
- "The 'abundant_energy' signal might be relevant if you're interested in renewable energy sources, especially as it relates to the cost and usage of energy in your home, which can include running appliances like your heat pump.\n",
+ "The signal 'abundant_energy' could be interesting to you as a parent and homeowner with a heat pump, because it talks about the future of energy sources, like solar and wind power, which could affect how you power your home and devices, including heating and cooling systems. The article touches on the possibility of energy becoming more affordable and abundant, which can lead to innovative uses and impact environmental sustainability.\n",
 "\n",
- "The 'baby_boom' signal could be of interest as it discusses policies related to family wellbeing, including childcare and parental leave, which may directly impact you as a parent.\n",
+ "Lastly, the signal 'baby_boom' touches on societal trends and policies that may affect you as a parent. It discusses how governments are responding to changing birth rates with policies that could influence family life, such as childcare support and parental leave. This could be of interest to you in understanding the broader context of family wellbeing and the support you might expect from societal structures.\n",
 "\n",
- "Would you like to know more about any of these topics?\n"
+ "Would you like to hear more about how robochefs could change the future of cooking and food preparation, the potential for an energy-rich future with renewables that could impact your heat pump usage, or the societal and policy implications of pronatalism and pro-family approaches? Please let me know which topic you'd like to explore further.\n"
 ]
 }
 ],