Skip to content

Commit

Permalink
Enable defining new functions after agent creation (microsoft#285)
Browse files Browse the repository at this point in the history
* Enable defining new functions after agent creation

* Add notebook for function inception example

* format

* 1. fix bug 2. support remove function

* 1. fix bug 2. support remove function

* 1. add example doc 2. change test file 3. change ipynb title

* Update website/docs/Examples.md

---------

Co-authored-by: Li Jiang <[email protected]>
Co-authored-by: skzhang1 <[email protected]>
Co-authored-by: Shaokun Zhang <[email protected]>
Co-authored-by: Qingyun Wu <[email protected]>
  • Loading branch information
5 people authored Dec 2, 2023
1 parent 5547e3b commit 17c51fd
Show file tree
Hide file tree
Showing 4 changed files with 419 additions and 0 deletions.
35 changes: 35 additions & 0 deletions autogen/agentchat/conversable_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -1207,6 +1207,41 @@ def register_function(self, function_map: Dict[str, Callable]):
"""
self._function_map.update(function_map)

def update_function_signature(self, func_sig: Union[str, Dict], is_remove: bool):
    """Update a function signature in the LLM configuration for function_call.

    Args:
        func_sig (str or dict): when adding/updating, the full function
            description dict for the model (see
            https://platform.openai.com/docs/api-reference/chat/create#chat/create-functions);
            when removing (``is_remove`` truthy), the *name* (str) of the
            function to drop from the configuration.
        is_remove (bool): whether to remove the function named ``func_sig``
            from llm_config instead of adding/updating it.

    Raises:
        AssertionError: if the agent has no llm_config, or if asked to
            remove a function when none are configured.
    """

    if not self.llm_config:
        error_msg = "To update a function signature, agent must have an llm_config"
        logger.error(error_msg)
        raise AssertionError(error_msg)

    if is_remove:
        if "functions" not in self.llm_config:
            error_msg = "The agent config doesn't have function {name}.".format(name=func_sig)
            logger.error(error_msg)
            raise AssertionError(error_msg)
        # Drop every configured function whose name matches func_sig.
        self.llm_config["functions"] = [
            func for func in self.llm_config["functions"] if func["name"] != func_sig
        ]
    else:
        if "functions" in self.llm_config:
            # Replace any existing entry with the same name, then append the new signature.
            self.llm_config["functions"] = [
                func for func in self.llm_config["functions"] if func.get("name") != func_sig["name"]
            ] + [func_sig]
        else:
            self.llm_config["functions"] = [func_sig]

    # Keep the config clean: an empty functions list is removed entirely,
    # since the API rejects an empty "functions" parameter.
    if len(self.llm_config["functions"]) == 0:
        del self.llm_config["functions"]

    # Rebuild the client so the updated signatures take effect immediately.
    self.client = OpenAIWrapper(**self.llm_config)

def can_execute_function(self, name: str) -> bool:
    """Return True when this agent has an executor registered under `name`."""
    registered = self._function_map
    return any(fn_name == name for fn_name in registered)
Expand Down
325 changes: 325 additions & 0 deletions notebook/agentchat_inception_function.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,325 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Auto Generated Agent Chat: Function Inception\n",
"\n",
"AutoGen offers conversable agents powered by LLM, tool or human, which can be used to perform tasks collectively via automated chat. This framework allows tool use and human participation through multi-agent conversation. Please find documentation about this feature [here](https://microsoft.github.io/autogen/docs/Use-Cases/agent_chat).\n",
"\n",
"In this notebook, we demonstrate how to use `AssistantAgent` and `UserProxyAgent` to give them the ability to auto-extend the list of functions the model may call. Functions need to be registered to `UserProxyAgent`, which will be responsible for executing any function calls made by `AssistantAgent`. The assistant also needs to know the signature of functions that may be called. A special `define_function` function is registered, which registers a new function in `UserProxyAgent` and updates the configuration of the assistant.\n",
"\n",
"In the example scenario, the user first asks to define a function that gets a URL and prints the response body. Then the user asks to print the response body, and the assistant suggests to the user to call the new function.\n",
"\n",
"## Requirements\n",
"\n",
"AutoGen requires `Python>=3.8`."
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[33muser_proxy\u001b[0m (to chatbot):\n",
"\n",
"Define a function that gets a URL, then prints the response body.\n",
"Reply TERMINATE when the function is defined.\n",
"\n",
"--------------------------------------------------------------------------------\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[33mchatbot\u001b[0m (to user_proxy):\n",
"\n",
"\u001b[32m***** Suggested function Call: define_function *****\u001b[0m\n",
"Arguments: \n",
"{\n",
" \"name\": \"get_url_response_body\",\n",
" \"description\": \"Gets a URL, then prints the response body\",\n",
" \"arguments\": \"{\\\"url\\\": {\\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The URL\\\"}}\",\n",
" \"packages\": \"requests\",\n",
" \"code\": \"import requests\\n\\n\\ndef get_url_response_body(url):\\n response = requests.get(url)\\n print(response.text)\\n\"\n",
"}\n",
"\u001b[32m****************************************************\u001b[0m\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[35m\n",
">>>>>>>> EXECUTING FUNCTION define_function...\u001b[0m\n",
"\u001b[33muser_proxy\u001b[0m (to chatbot):\n",
"\n",
"\u001b[32m***** Response from calling function \"define_function\" *****\u001b[0m\n",
"A function has been added to the context of this conversation.\n",
"Description: Gets a URL, then prints the response body\n",
"\u001b[32m************************************************************\u001b[0m\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33mchatbot\u001b[0m (to user_proxy):\n",
"\n",
"TERMINATE\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33muser_proxy\u001b[0m (to chatbot):\n",
"\n",
"Print the response body of https://echo.free.beeceptor.com/\n",
"Use the functions you know about.\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33mchatbot\u001b[0m (to user_proxy):\n",
"\n",
"\u001b[32m***** Suggested function Call: get_url_response_body *****\u001b[0m\n",
"Arguments: \n",
"{\n",
" \"url\": \"https://echo.free.beeceptor.com/\"\n",
"}\n",
"\u001b[32m**********************************************************\u001b[0m\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[35m\n",
">>>>>>>> EXECUTING FUNCTION get_url_response_body...\u001b[0m\n",
"execute_code:\n",
"\n",
"import subprocess\n",
"print(\"Installing package: requests\")\n",
"subprocess.run([\"pip\", \"-qq\", \"install\", \"requests\"])\n",
"print(\"Result of get_url_response_body function execution:\")\n",
"import requests\n",
"\n",
"\n",
"def get_url_response_body(url):\n",
" response = requests.get(url)\n",
" print(response.text)\n",
"\n",
"args={'url': 'https://echo.free.beeceptor.com/'}\n",
"result=get_url_response_body(**args)\n",
"if result is not None: print(result)\n",
"\n",
"Result: Installing package: requests\n",
"Result of get_url_response_body function execution:\n",
"{\n",
" \"method\": \"GET\",\n",
" \"path\": \"/\",\n",
" \"ip\": \"104.28.208.116\",\n",
" \"headers\": {\n",
" \"host\": \"echo.free.beeceptor.com\",\n",
" \"user-agent\": \"python-requests/2.31.0\",\n",
" \"accept\": \"*/*\",\n",
" \"accept-encoding\": \"gzip, deflate\",\n",
" \"x-forwarded-for\": \"104.28.208.116\",\n",
" \"x-forwarded-host\": \"echo.free.beeceptor.com\",\n",
" \"x-forwarded-proto\": \"https\",\n",
" \"x-real-ip\": \"104.28.208.116\"\n",
" },\n",
" \"parsedQueryParams\": {}\n",
"}\n",
"\n",
"\u001b[33muser_proxy\u001b[0m (to chatbot):\n",
"\n",
"\u001b[32m***** Response from calling function \"get_url_response_body\" *****\u001b[0m\n",
"Installing package: requests\n",
"Result of get_url_response_body function execution:\n",
"{\n",
" \"method\": \"GET\",\n",
" \"path\": \"/\",\n",
" \"ip\": \"104.28.208.116\",\n",
" \"headers\": {\n",
" \"host\": \"echo.free.beeceptor.com\",\n",
" \"user-agent\": \"python-requests/2.31.0\",\n",
" \"accept\": \"*/*\",\n",
" \"accept-encoding\": \"gzip, deflate\",\n",
" \"x-forwarded-for\": \"104.28.208.116\",\n",
" \"x-forwarded-host\": \"echo.free.beeceptor.com\",\n",
" \"x-forwarded-proto\": \"https\",\n",
" \"x-real-ip\": \"104.28.208.116\"\n",
" },\n",
" \"parsedQueryParams\": {}\n",
"}\n",
"\n",
"\u001b[32m******************************************************************\u001b[0m\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33mchatbot\u001b[0m (to user_proxy):\n",
"\n",
"The response body of https://echo.free.beeceptor.com/ is:\n",
"\n",
"{\n",
" \"method\": \"GET\",\n",
" \"path\": \"/\",\n",
" \"ip\": \"104.28.208.116\",\n",
" \"headers\": {\n",
" \"host\": \"echo.free.beeceptor.com\",\n",
" \"user-agent\": \"python-requests/2.31.0\",\n",
" \"accept\": \"*/*\",\n",
" \"accept-encoding\": \"gzip, deflate\",\n",
" \"x-forwarded-for\": \"104.28.208.116\",\n",
" \"x-forwarded-host\": \"echo.free.beeceptor.com\",\n",
" \"x-forwarded-proto\": \"https\",\n",
" \"x-real-ip\": \"104.28.208.116\"\n",
" },\n",
" \"parsedQueryParams\": {}\n",
"}\n",
"\n",
"TERMINATE\n",
"\n",
"--------------------------------------------------------------------------------\n"
]
}
],
"source": [
"from autogen import AssistantAgent, UserProxyAgent, config_list_from_json\n",
"from autogen.code_utils import execute_code\n",
"import json\n",
"\n",
"config_list = config_list_from_json(\n",
" \"OAI_CONFIG_LIST\",\n",
" filter_dict={\n",
" # Function calling with GPT 3.5\n",
" \"model\": [\"gpt-3.5-turbo-16k-0613\"],\n",
" },\n",
")\n",
"llm_config = {\n",
" \"functions\": [\n",
" {\n",
" \"name\": \"define_function\",\n",
" \"description\": \"Define a function to add to the context of the conversation. Necessary Python packages must be declared. Once defined, the assistant may decide to use this function, respond with a normal message.\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"name\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"The name of the function to define.\",\n",
" },\n",
" \"description\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"A short description of the function.\",\n",
" },\n",
" \"arguments\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"JSON schema of arguments encoded as a string. For example: { \\\"url\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The URL\\\", }}\",\n",
" },\n",
" \"packages\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"A list of package names imported by the function, and that need to be installed with pip prior to invoking the function. This solves ModuleNotFoundError.\",\n",
" },\n",
" \"code\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"The implementation in Python. Do not include the function declaration.\",\n",
" },\n",
" },\n",
" \"required\": [\"name\", \"description\", \"arguments\", \"packages\", \"code\"],\n",
" },\n",
" },\n",
" ],\n",
" \"config_list\": config_list,\n",
" \"request_timeout\": 120,\n",
"}\n",
"\n",
"def define_function(name, description, arguments, packages, code):\n",
" json_args = json.loads(arguments)\n",
" function_config = {\n",
" \"name\": name,\n",
" \"description\": description,\n",
" \"parameters\": { \"type\": \"object\", \"properties\": json_args },\n",
" # TODO Make all arguments required\n",
" \"required\": [\"url\"],\n",
" }\n",
" llm_config[\"functions\"] = llm_config[\"functions\"] + [function_config]\n",
" user_proxy.register_function(\n",
" function_map={\n",
" name: lambda **args: execute_func(name, packages, code, **args)\n",
" }\n",
" )\n",
" assistant.update_function_signature(function_config, is_remove= False)\n",
" return f\"A function has been added to the context of this conversation.\\nDescription: {description}\"\n",
"\n",
"def execute_func(name, packages, code, **args):\n",
" pip_install = f\"\"\"print(\"Installing package: {packages}\")\\nsubprocess.run([\"pip\", \"-qq\", \"install\", \"{packages}\"])\"\"\" if packages else ''\n",
" str = f\"\"\"\n",
"import subprocess\n",
"{pip_install}\n",
"print(\"Result of {name} function execution:\")\n",
"{code}\n",
"args={args}\n",
"result={name}(**args)\n",
"if result is not None: print(result)\n",
"\"\"\"\n",
" print(f\"execute_code:\\n{str}\")\n",
" result = execute_code(str)[1]\n",
" print(f\"Result: {result}\")\n",
" return result\n",
"\n",
"def _is_termination_msg(message):\n",
" \"\"\"Check if a message is a termination message.\"\"\"\n",
" if isinstance(message, dict):\n",
" message = message.get(\"content\")\n",
" if message is None:\n",
" return False\n",
" return message.rstrip().endswith(\"TERMINATE\")\n",
"\n",
"assistant = AssistantAgent(\n",
" name=\"chatbot\",\n",
" system_message=\"\"\"You are an assistant.\n",
" The user will ask a question.\n",
" You may use the provided functions before providing a final answer.\n",
" Only use the functions you were provided.\n",
" When the answer has been provided, reply TERMINATE.\"\"\",\n",
" llm_config=llm_config,\n",
")\n",
"\n",
"user_proxy = UserProxyAgent(\n",
" \"user_proxy\",\n",
" code_execution_config=False,\n",
" is_termination_msg=_is_termination_msg,\n",
" default_auto_reply=\"Reply TERMINATE when the initial request has been fulfilled.\",\n",
" human_input_mode=\"NEVER\")\n",
"\n",
"user_proxy.register_function(\n",
" function_map={\n",
" \"define_function\": define_function\n",
" }\n",
")\n",
"\n",
"# user_proxy.initiate_chat(\n",
"# assistant, message=\"What functions do you know about?\")\n",
"\n",
"user_proxy.initiate_chat(\n",
" assistant, message=\"Define a function that gets a URL, then prints the response body.\\nReply TERMINATE when the function is defined.\")\n",
"\n",
"# user_proxy.initiate_chat(\n",
"# assistant, message=\"List functions do you know about.\")\n",
"\n",
"user_proxy.initiate_chat(\n",
" assistant, message=\"Print the response body of https://echo.free.beeceptor.com/\\nUse the functions you know about.\")\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
Loading

0 comments on commit 17c51fd

Please sign in to comment.