
Commit

Update config
LeoLjl committed Nov 20, 2024
1 parent 5b97c47 commit b116150
Showing 3 changed files with 259 additions and 410 deletions.
8 changes: 4 additions & 4 deletions autogen/agentchat/contrib/agent_builder.py
@@ -267,7 +267,7 @@ def _create_agent(
description = agent_config["description"]

# Path to the customized **ConversableAgent** class.
model_path = agent_config.get("model_path", None)
agent_path = agent_config.get("agent_path", None)
filter_dict = {}
if len(model_name_or_hf_repo) > 0:
filter_dict.update({"model": model_name_or_hf_repo})
@@ -302,8 +302,8 @@ def _create_agent(
)

model_class = autogen.AssistantAgent
if model_path:
module_path, model_class_name = model_path.replace("/", ".").rsplit(".", 1)
if agent_path:
module_path, model_class_name = agent_path.replace("/", ".").rsplit(".", 1)
module = importlib.import_module(module_path)
model_class = getattr(module, model_class_name)
if not issubclass(model_class, autogen.ConversableAgent):
@@ -313,7 +313,7 @@ def _create_agent(
additional_config = {
k: v
for k, v in agent_config.items()
if k not in ["model", "name", "system_message", "description", "model_path", "tags"]
if k not in ["model", "name", "system_message", "description", "agent_path", "tags"]
}
agent = model_class(
name=agent_name, llm_config=current_config.copy(), description=description, **additional_config
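For orientation, the renamed `agent_path` key points at the location of a custom `ConversableAgent` subclass, written dot- or slash-separated with the class name as the last segment. Below is a minimal sketch of the resolution logic mirroring the hunk above; the config values are illustrative and `autogen/AssistantAgent` simply stands in for a user-supplied path.

import importlib

import autogen

agent_config = {
    "name": "Coding_Expert",                 # illustrative values, not taken from the commit
    "model": "gpt-4o-mini",
    "agent_path": "autogen/AssistantAgent",  # slashes are normalized to dots below
}

agent_path = agent_config.get("agent_path", None)
model_class = autogen.AssistantAgent  # default when no agent_path is given
if agent_path:
    module_path, model_class_name = agent_path.replace("/", ".").rsplit(".", 1)
    model_class = getattr(importlib.import_module(module_path), model_class_name)
    assert issubclass(model_class, autogen.ConversableAgent)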
64 changes: 49 additions & 15 deletions autogen/agentchat/contrib/captainagent.py
@@ -16,6 +16,27 @@ class CaptainAgent(ConversableAgent):
(In preview) Captain agent, designed to solve a task with an agent or a group of agents.
"""

DEFAULT_NESTED_CONFIG = {
"autobuild_init_config": {
"config_file_or_env": "OAI_CONFIG_LIST",
"builder_model": "gpt-4o-mini",
"agent_model": "gpt-4o-mini",
},
"autobuild_build_config": {
"default_llm_config": {"temperature": 1, "top_p": 0.95, "max_tokens": 2048},
"code_execution_config": {
"timeout": 300,
"work_dir": "groupchat",
"last_n_messages": 1,
"use_docker": False,
},
"coding": True,
},
"group_chat_config": {"max_round": 10},
"group_chat_llm_config": None,
"max_turns": 5,
}

AUTOBUILD_TOOL = {
"type": "function",
"function": {
@@ -104,6 +125,9 @@ class CaptainAgent(ConversableAgent):

DEFAULT_DESCRIPTION = "A helpful AI assistant that can build a group of agents at a proper time to solve a task."

# This is used to prompt the LLM to summarize the conversation history produced during CaptainAgent's tool execution
SUMMARY_PROMPT = "Read the following conversation history between an expert and a group of agent experts, summarize the conversation history. You should include the initial task, the experts' plan and the attempt, finally the results of the conversation."

def __init__(
self,
name: str,
@@ -146,6 +170,9 @@ def __init__(

if system_message is None:
system_message = self.AUTOBUILD_SYSTEM_MESSAGE
nested_config = self._update_config(self.DEFAULT_NESTED_CONFIG, nested_config)
if nested_config["group_chat_llm_config"] is None:
nested_config["group_chat_llm_config"] = llm_config.copy()

self.assistant = ConversableAgent(
name="CaptainAgent",
@@ -166,27 +193,36 @@ def __init__(
{
"sender": self.executor,
"recipient": self.assistant,
"max_turns": 6,
# "message": message,
"max_turns": nested_config["max_turns"],
"summary_method": "reflection_with_llm",
"summary_args": {
"summary_prompt": "Please summarize the conversation history derived from an experts' group chat.",
},
}
],
trigger=UserProxyAgent,
# reply_func_from_nested_chats=None,
position=0,
)

@staticmethod
def _update_config(default_dict: Dict, update_dict: Optional[Dict]) -> Dict:
"""
Recursively updates the default_dict with values from update_dict.
"""
if update_dict is None:
return default_dict

for key, value in update_dict.items():
default_value = default_dict.get(key)
if isinstance(default_value, dict) and isinstance(value, dict):
# Recursively update nested dictionaries
default_dict[key] = CaptainAgent._update_config(default_value, value)
else:
# Update the value or add new key
default_dict[key] = value

def check_nested_config(nested_config: Dict):
if "autobuild_init_config" in nested_config.keys():
assert (
"autobuild_build_config" in nested_config.keys()
), "autobuild_build_config is required when using autobuild as nested mode."
assert (
"group_chat_llm_config" in nested_config.keys()
), "group_chat_llm_config is required when using autobuild as nested mode."
else:
raise ValueError("nested_config should contain either autobuild_init_config or meta_prompting_llm_config.")
return default_dict
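
A small illustration of the merge semantics with toy dicts (not the real `DEFAULT_NESTED_CONFIG`): nested dictionaries are merged key by key, other values are overwritten, and the first argument is updated in place and returned.

from autogen.agentchat.contrib.captainagent import CaptainAgent

defaults = {"group_chat_config": {"max_round": 10}, "max_turns": 5}
override = {"group_chat_config": {"max_round": 20}, "coding": True}

merged = CaptainAgent._update_config(defaults, override)
# merged is the defaults dict itself, now:
# {"group_chat_config": {"max_round": 20}, "max_turns": 5, "coding": True}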


class CaptainUserProxyAgent(ConversableAgent):
@@ -312,7 +348,6 @@ def __init__(
"seek_experts_help": lambda **args: self._run_autobuild(**args),
}
)
check_nested_config(nested_config)
self._agent_config_save_path = agent_config_save_path
self._nested_config = nested_config.copy()
self._code_execution_config = code_execution_config
@@ -323,8 +358,7 @@ def _run_autobuild(self, group_name: str, execution_task: str, building_task: str = "") -> str:
def _run_autobuild(self, group_name: str, execution_task: str, building_task: str = "") -> str:
"""
Build a group of agents by AutoBuild to solve the task.
This function requires the nested_config to contain the autobuild_init_config,
autobuild_llm_config, group_chat_llm_config.
This function requires the nested_config to contain the autobuild_init_config, autobuild_llm_config, group_chat_llm_config.
"""
print("==> Running AutoBuild...", flush=True)
print("\n==> Building task: ", building_task, flush=True)
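As a usage sketch of the new defaults (the keyword names `llm_config` and `nested_config` follow the `__init__` hunks above, but treat the exact signature as an assumption): keys omitted from `nested_config` fall back to `DEFAULT_NESTED_CONFIG`, and `group_chat_llm_config`, when left at `None`, is copied from `llm_config`.

from autogen import config_list_from_json
from autogen.agentchat.contrib.captainagent import CaptainAgent

config_list = config_list_from_json("OAI_CONFIG_LIST")  # same file/env name as the defaults

captain = CaptainAgent(
    name="captain_agent",
    llm_config={"config_list": config_list},
    # Partial override: only these keys change; everything else comes from DEFAULT_NESTED_CONFIG.
    nested_config={
        "autobuild_build_config": {"coding": False},
        "max_turns": 3,
    },
)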
