Skip to content

Commit

Permalink
more robust run_agent function. Adds CosmosDB memory to bot.py to avoid memory sharing
Browse files Browse the repository at this point in the history
  • Loading branch information
pablomarin committed Oct 1, 2023
1 parent 1411d11 commit ea38a1f
Show file tree
Hide file tree
Showing 2 changed files with 60 additions and 34 deletions.
67 changes: 45 additions & 22 deletions apps/backend/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,12 @@
import os
import re
import asyncio
import random
from concurrent.futures import ThreadPoolExecutor
from langchain.chat_models import AzureChatOpenAI
from langchain.utilities import BingSearchAPIWrapper
from langchain.memory import ConversationBufferWindowMemory
from langchain.memory import CosmosDBChatMessageHistory
from langchain.agents import ConversationalChatAgent, AgentExecutor, Tool
from typing import Any, Dict, List, Optional, Union
from langchain.callbacks.base import BaseCallbackHandler
Expand Down Expand Up @@ -38,8 +40,7 @@ def on_llm_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
asyncio.run(self.tc.send_activity(f"LLM Error: {error}\n"))

def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> Any:
asyncio.run(self.tc.send_activity(f"Tool: {serialized['name']}\n"))
asyncio.run(self.tc.send_activity(Activity(type=ActivityTypes.typing)))
asyncio.run(self.tc.send_activity(f"Tool: {serialized['name']}"))

def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
if "Action Input" in action.log:
Expand All @@ -48,20 +49,36 @@ def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
asyncio.run(self.tc.send_activity(Activity(type=ActivityTypes.typing)))

class MyBot(ActivityHandler):
# See https://aka.ms/about-bot-activity-message to learn more about the message and other activity types.

MODEL_DEPLOYMENT_NAME = os.environ.get("AZURE_OPENAI_MODEL_NAME")
memory = ConversationBufferWindowMemory(memory_key="chat_history", return_messages=True, k=10)

def __init__(self, conversation_state: ConversationState, user_state: UserState):
self.model_name = os.environ.get("AZURE_OPENAI_MODEL_NAME")

async def on_members_added_activity(self, members_added: ChannelAccount, turn_context: TurnContext):
for member_added in members_added:
if member_added.id != turn_context.activity.recipient.id:
await turn_context.send_activity(WELCOME_MESSAGE)


# See https://aka.ms/about-bot-activity-message to learn more about the message and other activity types.
async def on_message_activity(self, turn_context: TurnContext):

typing_activity = Activity(type=ActivityTypes.typing)
await turn_context.send_activity(typing_activity)


# Extract info from TurnContext
session_id = turn_context.activity.conversation.id
user_id = turn_context.activity.from_property.id + "-" + turn_context.activity.channel_id
input_text_metadata = dict()
input_text_metadata["local_timestamp"] = turn_context.activity.local_timestamp.strftime("%I:%M:%S %p, %A, %B %d of %Y")
input_text_metadata["local_timezone"] = turn_context.activity.local_timezone
input_text_metadata["locale"] = turn_context.activity.locale

# Setting the query to send to OpenAI
input_text = turn_context.activity.text + "\n\n metadata:\n" + str(input_text_metadata)

# Set Callback Handler
cb_handler = BotServiceCallbackHandler(turn_context)
cb_manager = CallbackManager(handlers=[cb_handler])

llm = AzureChatOpenAI(deployment_name=self.MODEL_DEPLOYMENT_NAME, temperature=0.5, max_tokens=1000, callback_manager=cb_manager)
# Set LLM
llm = AzureChatOpenAI(deployment_name=self.model_name, temperature=0.5, max_tokens=1000, callback_manager=cb_manager)

# Initialize our Tools/Experts
text_indexes = ["cogsrch-index-files", "cogsrch-index-csv"]
Expand All @@ -82,20 +99,26 @@ async def on_message_activity(self, turn_context: TurnContext):

tools = [www_search, sql_search, doc_search, chatgpt_search, book_search]

# Set main Agent
llm_a = AzureChatOpenAI(deployment_name=self.MODEL_DEPLOYMENT_NAME, temperature=0.5, max_tokens=500)
agent = ConversationalChatAgent.from_llm_and_tools(llm=llm_a, tools=tools, system_message=CUSTOM_CHATBOT_PREFIX, human_message=CUSTOM_CHATBOT_SUFFIX)
agent_chain = AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, memory=self.memory)

# Set brain Agent
cosmos = CosmosDBChatMessageHistory(
cosmos_endpoint=os.environ['AZURE_COSMOSDB_ENDPOINT'],
cosmos_database=os.environ['AZURE_COSMOSDB_NAME'],
cosmos_container=os.environ['AZURE_COSMOSDB_CONTAINER_NAME'],
connection_string=os.environ['AZURE_COMOSDB_CONNECTION_STRING'],
session_id=session_id,
user_id=user_id
)
cosmos.prepare_cosmos()
memory = ConversationBufferWindowMemory(memory_key="chat_history", return_messages=True, k=30, chat_memory=cosmos)
agent = ConversationalChatAgent.from_llm_and_tools(llm=llm, tools=tools,system_message=CUSTOM_CHATBOT_PREFIX,human_message=CUSTOM_CHATBOT_SUFFIX)
agent_chain = AgentExecutor.from_agent_and_tools(agent=agent, tools=tools, memory=memory)

await turn_context.send_activity(Activity(type=ActivityTypes.typing))
# Please note below that running a non-async function like run_agent in a separate thread won't make it truly asynchronous. It allows the function to be called without blocking the event loop, but it may still have synchronous behavior internally.

loop = asyncio.get_event_loop()
answer = await loop.run_in_executor(ThreadPoolExecutor(), run_agent, turn_context.activity.text, agent_chain)
answer = await loop.run_in_executor(ThreadPoolExecutor(), run_agent, input_text, agent_chain)

await turn_context.send_activity(answer)


async def on_members_added_activity(self, members_added: ChannelAccount, turn_context: TurnContext):
for member_added in members_added:
if member_added.id != turn_context.activity.recipient.id:
await turn_context.send_activity(WELCOME_MESSAGE)

27 changes: 15 additions & 12 deletions common/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,19 +443,22 @@ def get_answer(llm: AzureChatOpenAI,
def run_agent(question: str, agent_chain: AgentExecutor) -> str:
    """Run the brain agent and recover gracefully from LLM parsing errors.

    Calls ``agent_chain.run`` with the user's question, retrying up to
    ``MAX_ATTEMPTS`` times. When the agent raises ``OutputParserException``
    (the LLM emitted output the agent could not parse), the raw error text
    is cleaned up by an auxiliary LLM chain so the user still receives a
    readable Markdown answer, and the agent is retried; the most recent
    response (successful or cleaned-up) is returned.

    Args:
        question: The user's input text to send to the agent.
        agent_chain: The configured LangChain agent executor to run.

    Returns:
        The agent's answer, or a cleaned-up version of the parsing-error
        text if every attempt failed to parse.

    Raises:
        Any exception other than ``OutputParserException`` raised by the
        agent propagates to the caller unchanged.
    """
    MAX_ATTEMPTS = 5
    # Built lazily on the first parsing error, and only once across all
    # retries (the original rebuilt the chain on every failed attempt).
    cleanup_chain = None
    response = ""  # explicit default so the return is always bound
    for _ in range(MAX_ATTEMPTS):
        try:
            response = agent_chain.run(input=question)
            break
        except OutputParserException as e:
            if cleanup_chain is None:
                # NOTE(review): prompt text kept verbatim (typos included)
                # to preserve the model-facing behavior exactly.
                cleanup_chain = LLMChain(
                    llm=agent_chain.agent.llm_chain.llm,
                    prompt=PromptTemplate(
                        input_variables=["error"],
                        template='Remove any json formating from the below text, also remove any portion that says someting similar this "Could not parse LLM output: ". Reformat your response in beautiful Markdown. Just give me the reformated text, nothing else.\n Text: {error}',
                    ),
                    verbose=False,
                )
            # Keep the cleaned-up error as a fallback answer, then retry.
            response = cleanup_chain.run(str(e))

    return response


######## TOOL CLASSES #####################################
Expand Down

0 comments on commit ea38a1f

Please sign in to comment.