agents.py
83 lines (69 loc) · 3.6 KB
from langchain_openai import ChatOpenAI
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain_core.runnables.history import RunnableWithMessageHistory
from helper_tools import insert_score, get_scores, plot_something, format_page_links
from langchain_community.document_loaders import PyPDFLoader
from langchain_core.vectorstores import InMemoryVectorStore
from langchain_openai import OpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain.tools.retriever import create_retriever_tool
from pydantic import BaseModel, Field
from langchain_core.tools import tool
@tool
class Joke(BaseModel):
    """Any time the user asks for a joke, this tool is called and a joke is returned in JSON format."""
    setup: str = Field(..., description="The setup of the joke")
    punchline: str = Field(..., description="The punchline of the joke")
    why_funny: str = Field(..., description="The reason why the joke is funny")
class TutorAgent:
    def __init__(self, session_id="test-session"):
        print("initializing")
        # Initialize model, memory, and tools
        self.model = ChatOpenAI(model="gpt-4o-mini")
        self.memory = InMemoryChatMessageHistory(session_id=session_id)

        # Load the lecture PDFs and build an in-memory retriever over them
        all_docs = []
        file_paths = ["pdfs/reuse_ase.pdf", "pdfs/safety.pdf"]
        for file_path in file_paths:
            loader = PyPDFLoader(file_path)
            docs = loader.load()
            all_docs += docs
        text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
        splits = text_splitter.split_documents(all_docs)
        vectorstore = InMemoryVectorStore.from_documents(
            documents=splits, embedding=OpenAIEmbeddings()
        )
        retriever = vectorstore.as_retriever()
        retriever_tool = create_retriever_tool(
            retriever,
            name="retrieve_document",
            description="Retrieves the pdf documents from the relevant lecture",
        )
        # Define the prompt template with a system message placeholder
        self.prompt = ChatPromptTemplate.from_messages(
            [
                ("system", "{system_message}"),
                ("placeholder", "{chat_history}"),  # History of interactions
                ("human", "{input}"),
                ("placeholder", "{agent_scratchpad}"),  # Internal scratchpad for steps created through tool calling
            ]
        )

        # Define the tools available to the agent
        self.tools = [get_scores, insert_score, plot_something, retriever_tool, format_page_links, Joke]

        # Create the agent and executor
        self.agent = create_tool_calling_agent(self.model, self.tools, self.prompt)
        self.agent_executor = AgentExecutor(agent=self.agent, tools=self.tools)

        # Wrap with chat history management
        self.agent_with_chat_history = RunnableWithMessageHistory(
            self.agent_executor,
            lambda session_id: self.memory,
            input_messages_key="input",
            history_messages_key="chat_history",
        )

        # Default configuration for the agent
        self.config = {"configurable": {"session_id": session_id}}
    def call_agent(self, prompt, system_message) -> str:
        """Calls the agent with a prompt and returns the response output.

        Optionally takes a system_message to update the agent's behavior dynamically."""
        response = self.agent_with_chat_history.invoke(
            {"input": prompt, "system_message": system_message}, self.config
        )
        return response["output"]
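

# A minimal usage sketch (not part of the original file), assuming the PDFs under
# pdfs/ exist, OPENAI_API_KEY is set, and helper_tools provides the imported tools;
# the prompt and system message below are hypothetical examples.
if __name__ == "__main__":
    agent = TutorAgent(session_id="demo-session")
    answer = agent.call_agent(
        prompt="Summarize the key points of the safety lecture.",
        system_message="You are a patient tutor. Cite the lecture PDFs when relevant.",
    )
    print(answer)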