Skip to content

Commit

Permalink
Merge pull request #476 from GPS-Solutions/streamlit_embedded_ui
Browse files Browse the repository at this point in the history
Fixes for I need help flow
  • Loading branch information
jonchenn authored Feb 8, 2024
2 parents acc50ec + e5d8994 commit 8eee21c
Show file tree
Hide file tree
Showing 4 changed files with 147 additions and 114 deletions.
44 changes: 40 additions & 4 deletions components/frontend_streamlit/src/components/help_modal.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,46 @@
"""

import streamlit as st
import time

def handle_click(messages_cont, spinner_cont, help_cont, input_cont):
  """Submit the "I need help" assistance form.

  Simulates a ticket submission: clears the original help prompt, shows an
  animated "Loading..." chat message for roughly 4 seconds, clears the
  interim output, and finally posts a ticket confirmation into the main
  chat transcript.

  Args:
    messages_cont: Streamlit container holding the chat transcript.
    spinner_cont: Streamlit container used for the transient loading text.
    help_cont: Streamlit container holding the original help prompt to clear.
    input_cont: Streamlit container holding the user's initial help message.
  """
  # Clear original message
  with help_cont:
    st.write("")

  start_time = time.time()
  count = 0
  time_elapsed = 0

  # Fake a ~4-second "submitting" animation, cycling
  # "Loading." / "Loading.." / "Loading...".
  while time_elapsed < 4:
    count += 1
    with spinner_cont:
      with st.chat_message("ai"):
        # Fix: dropped the dead `is_user`/`key` kwargs (leftovers from
        # streamlit_chat.message — st.write does not use them) and the
        # redundant int() around an integer modulo.
        st.write("Loading." + "." * (count % 3))

    time.sleep(1)
    time_elapsed = time.time() - start_time

  # Clear initial human output message
  with input_cont:
    st.write("")
  # Hide spinner
  with spinner_cont:
    st.write("")

  # Post the (hard-coded demo) ticket confirmation to the transcript.
  with messages_cont:
    with st.chat_message("ai"):
      st.markdown(
          "Your ticket number is: **5010**<br>"\
          "You will receive an **email notification** "\
          "within 48 hours.<br>"\
          "You may continue to utilize the chat assistant, "\
          "or can close or navigate away from this window.",
          unsafe_allow_html=True
      )

def help_form(messages_cont, spinner_cont, help_cont, input_cont):
name_col, pref_col = st.columns(2)

with name_col:
Expand All @@ -42,5 +77,6 @@ def help_form():
with issue_col:
st.text_input("Detail Your Issue")

st.button("Send", on_click=handle_click)
st.button("Send", on_click=handle_click,
args=[messages_cont, spinner_cont, help_cont, input_cont])

197 changes: 97 additions & 100 deletions components/frontend_streamlit/src/pages/6_Custom_Chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,64 +101,99 @@ def on_submit(user_input):
st.session_state.messages.append({"HumanInput": user_input})
message_index = len(st.session_state.messages)

with st.chat_message("user"):
st.write(user_input, is_user=True, key=f"human_{message_index}")

# Send API to llm-service
default_route = st.session_state.get("default_route", None)
routing_agents = get_all_routing_agents()
routing_agent_names = list(routing_agents.keys())
chat_llm_type = st.session_state.get("chat_llm_type")
Logger.info(f"llm_type in session {chat_llm_type}")

if default_route is None:
# pick the first routing agent as default
if routing_agent_names:
routing_agent = routing_agent_names[0]
else:
routing_agent = "default"
response = run_dispatch(user_input,
routing_agent,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type,
run_as_batch_job=True)
st.session_state.default_route = response.get("route", None)

elif default_route in routing_agent_names:
response = run_dispatch(user_input,
default_route,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type,
run_as_batch_job=True)
st.session_state.default_route = response.get("route", None)

elif default_route == "Chat":
response = run_chat(user_input,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type)
elif default_route == "Plan":
response = run_agent_plan("Plan", user_input,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type)
# Handle user who needs help
if user_input.lower() == "i need help":
input_cont = st.empty()
help_cont = st.empty()
start_time = time.time()

with input_cont:
with st.chat_message("user"):
st.write(user_input, is_user=True, key=f"human_{message_index}")

count = 0
time_elapsed = 0
while time_elapsed < 4:
count += 1
with spinner_container:
with st.chat_message("ai"):
st.write("Loading." + "." * int(count % 3),
is_user=True, key="help_loading")

time.sleep(1)
time_elapsed = time.time() - start_time
hide_loading()

with help_cont:
with st.chat_message("ai"):
st.write(
"If it's an emergency, please dial 911."\
" Otherwise, complete the form below",
key=f"ai_{message_index}"
)
with st.expander("Get Further Assistance", expanded=True):
help_form(messages_container, spinner_container, help_cont, input_cont)

# User doesn't need help, handle normally
else:
st.error(f"Unsupported route {default_route}")
response = None
with st.chat_message("user"):
st.write(user_input, is_user=True, key=f"human_{message_index}")

# Send API to llm-service
default_route = st.session_state.get("default_route", None)
routing_agents = get_all_routing_agents()
routing_agent_names = list(routing_agents.keys())
chat_llm_type = st.session_state.get("chat_llm_type")
Logger.info(f"llm_type in session {chat_llm_type}")

if default_route is None:
# pick the first routing agent as default
if routing_agent_names:
routing_agent = routing_agent_names[0]
else:
routing_agent = "default"
response = run_dispatch(user_input,
routing_agent,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type,
run_as_batch_job=True)
st.session_state.default_route = response.get("route", None)

elif default_route in routing_agent_names:
response = run_dispatch(user_input,
default_route,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type,
run_as_batch_job=True)
st.session_state.default_route = response.get("route", None)

elif default_route == "Chat":
response = run_chat(user_input,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type)
elif default_route == "Plan":
response = run_agent_plan("Plan", user_input,
chat_id=st.session_state.get("chat_id"),
llm_type=chat_llm_type)
else:
st.error(f"Unsupported route {default_route}")
response = None

if response:
st.session_state.chat_id = response["chat"]["id"]
if response:
st.session_state.chat_id = response["chat"]["id"]

# TODO: Currently the AIOutput vs content are inconsistent across
# API response and in a UserChat history.
if "content" in response:
response["AIOutput"] = response["content"]
del response["chat"]
# TODO: Currently the AIOutput vs content are inconsistent across
# API response and in a UserChat history.
if "content" in response:
response["AIOutput"] = response["content"]
del response["chat"]

# Append new message from the API response and display it.
append_and_display_message(response)
# Append new message from the API response and display it.
append_and_display_message(response)

# If the response has a batch async job, keep pulling the job result.
if "batch_job" in response:
update_async_job(response["batch_job"]["id"])
# If the response has a batch async job, keep pulling the job result.
if "batch_job" in response:
update_async_job(response["batch_job"]["id"])

def hide_loading():
global spinner_container
Expand Down Expand Up @@ -282,63 +317,25 @@ def append_and_display_message(item):


def display_message(item, item_index):
needs_help = False

if "HumanInput" in item:
if item["HumanInput"].lower() == "i need help":
needs_help = True
with st.chat_message("user"):
st.write(item["HumanInput"], is_user=True, key=f"human_{item_index}")

#if "route_name" in item and "AIOutput" not in item:
# route_name = item["route_name"]
# with st.chat_message("ai"):
# st.write(
# f"Using route **`{route_name}`** to respond.",
# key=f"ai_{item_index}",
# )

route_logs = item.get("route_logs", None)
if route_logs and route_logs.strip() != "":
with st.expander("Expand to see Agent's thought process"):
st.write(format_ai_output(route_logs))

if "AIOutput" in item:
if needs_help:
if "help_state" not in st.session_state:
st.session_state.help_state = False

if st.session_state.help_state is False:
with st.chat_message("ai"):
st.write(
"If it's an emergency, please dial 911."\
" Otherwise, complete the form below",
key=f"ai_{item_index}"
)
with st.expander("Get Further Assistance", expanded=True):
help_form()
else:
with st.chat_message("ai"):
st.markdown(
"Your ticket number is: **5010**<br>"\
"You will receive an **email notification** "\
"within 48 hours.<br>"\
"You may continue to utilize the chat assistant, "\
"or can close or navigate away from this window.",
unsafe_allow_html=True
)

needs_help = False
else:
with st.chat_message("ai"):
ai_output = item["AIOutput"]
ai_output = format_ai_output(ai_output)
st.write(
ai_output,
key=f"ai_{item_index}",
unsafe_allow_html=False,
is_table=False, # TODO: Detect whether an output content type.
)
with st.chat_message("ai"):
ai_output = item["AIOutput"]
ai_output = format_ai_output(ai_output)
st.write(
ai_output,
key=f"ai_{item_index}",
unsafe_allow_html=False,
is_table=False, # TODO: Detect whether an output content type.
)

# Append all query references.
if item.get("db_result", None):
Expand Down
18 changes: 9 additions & 9 deletions components/frontend_streamlit/src/styles/custom_shared_styles.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def main_styles():
margin-bottom: 12px;
margin-left: 12px;
}}
.main [data-testid="block-container"] {{
.main .block-container {{
padding-top: 0;
padding-bottom: 0;
}}
Expand Down Expand Up @@ -97,8 +97,8 @@ def main_styles():
}}
/* Smartphones and small devices */
@media screen and (max-width: 1024px) {{
.main [data-testid="block-container"] {{
@media screen and (max-width: 1020px) {{
.main .block-container {{
max-width: 40rem;
}}
.main {{
Expand All @@ -108,8 +108,8 @@ def main_styles():
}}
/* Laptops and small displays */
@media screen and (min-width: 1024px) and (max-width: 1366px) {{
.main [data-testid="block-container"] {{
@media screen and (min-width: 1020px) and (max-width: 1366px) {{
.main .block-container {{
max-width: 52rem;
}}
[data-baseweb=select] {{
Expand All @@ -122,8 +122,8 @@ def main_styles():
/* Large monitors */
@media screen and (min-width: 1366px) and (max-width: 1600px) {{
.main [data-testid="block-container"] {{
max-width: 58rem;
.main .block-container {{
max-width: 60rem;
}}
.main {{
margin-right: 350px;
Expand All @@ -132,8 +132,8 @@ def main_styles():
/* Very large monitors */
@media screen and (min-width: 1600px) {{
.main [data-testid="block-container"] {{
max-width: 60rem;
.main .block-container {{
max-width: 65rem;
}}
.main {{
margin-right: 450px;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def custom_chat_theme():
}}
/* Main container scroll positioning */
.main [data-testid="block-container"] {{
.main .block-container {{
overflow: auto;
}}
Expand Down

0 comments on commit 8eee21c

Please sign in to comment.