diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 82427b9f33..943be7af12 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -69,14 +69,14 @@ jobs: - name: Install packages and dependencies run: | python -m pip install --upgrade pip wheel - pip install -e .[cosmosdb] + pip install -e .[test,cosmosdb,interop] python -c "import autogen" pip install pytest-cov>=5 mock - name: Install optional dependencies for code executors # code executors and udfs auto skip without deps, so only run for python 3.11 if: matrix.python-version == '3.11' run: | - pip install -e ".[jupyter-executor,test]" + pip install -e ".[jupyter-executor]" python -m ipykernel install --user --name python3 - name: Set AUTOGEN_USE_DOCKER based on OS shell: bash @@ -97,11 +97,10 @@ jobs: - name: Coverage with Redis if: matrix.python-version == '3.10' run: | - pip install -e .[test,redis,websockets] + pip install -e .[redis,websockets] pytest test --ignore=test/agentchat/contrib --skip-openai --durations=10 --durations-min=1.0 - name: Test with Cosmos DB run: | - pip install -e .[test,cosmosdb] pytest test/cache/test_cosmos_db_cache.py --skip-openai --durations=10 --durations-min=1.0 - name: Upload coverage to Codecov if: matrix.python-version == '3.10' diff --git a/.github/workflows/openai.yml b/.github/workflows/openai.yml index 7dcf3175ba..d7dbbba492 100644 --- a/.github/workflows/openai.yml +++ b/.github/workflows/openai.yml @@ -55,7 +55,7 @@ jobs: if: matrix.python-version == '3.9' run: | pip install docker - pip install -e .[redis] + pip install -e .[redis,interop] - name: Coverage if: matrix.python-version == '3.9' env: diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py index ffd6923721..b2f22ce9c5 100644 --- a/autogen/agentchat/conversable_agent.py +++ b/autogen/agentchat/conversable_agent.py @@ -2600,7 +2600,7 @@ def update_function_signature(self, func_sig: Union[str, Dict], is_remove: None) self.client = OpenAIWrapper(**self.llm_config) - def update_tool_signature(self, tool_sig: Union[str, Dict], is_remove: None): + def update_tool_signature(self, tool_sig: Union[str, Dict], is_remove: bool): """update a tool_signature in the LLM configuration for tool_call. Args: diff --git a/autogen/coding/jupyter/embedded_ipython_code_executor.py b/autogen/coding/jupyter/embedded_ipython_code_executor.py index 09e4a06043..231dca0ffd 100644 --- a/autogen/coding/jupyter/embedded_ipython_code_executor.py +++ b/autogen/coding/jupyter/embedded_ipython_code_executor.py @@ -11,8 +11,10 @@ import uuid from pathlib import Path from queue import Empty -from typing import Any, ClassVar, List +from typing import Any, List +# this is needed for CI to work. 
The import of this file should fail if jupyter-kernel-gateway is not installed +import jupyter_kernel_gateway from jupyter_client import KernelManager # type: ignore[attr-defined] from jupyter_client.kernelspec import KernelSpecManager from pydantic import BaseModel, Field, field_validator diff --git a/autogen/interop/__init__.py b/autogen/interop/__init__.py new file mode 100644 index 0000000000..8f070c8f24 --- /dev/null +++ b/autogen/interop/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from .crewai import CrewAIInteroperability +from .interoperability import Interoperability +from .interoperable import Interoperable +from .langchain import LangChainInteroperability +from .pydantic_ai import PydanticAIInteroperability +from .registry import register_interoperable_class + +__all__ = ["Interoperability", "Interoperable", "register_interoperable_class"] diff --git a/autogen/interop/crewai/__init__.py b/autogen/interop/crewai/__init__.py new file mode 100644 index 0000000000..50abbf3913 --- /dev/null +++ b/autogen/interop/crewai/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from .crewai import CrewAIInteroperability + +__all__ = ["CrewAIInteroperability"] diff --git a/autogen/interop/crewai/crewai.py b/autogen/interop/crewai/crewai.py new file mode 100644 index 0000000000..cfaa9f5ada --- /dev/null +++ b/autogen/interop/crewai/crewai.py @@ -0,0 +1,83 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import re +import sys +from typing import Any, Optional + +from ...tools import Tool +from ..registry import register_interoperable_class + +__all__ = ["CrewAIInteroperability"] + + +def _sanitize_name(s: str) -> str: + return re.sub(r"\W|^(?=\d)", "_", s) + + +@register_interoperable_class("crewai") +class CrewAIInteroperability: + """ + A class implementing the `Interoperable` protocol for converting CrewAI tools + to a general `Tool` format. + + This class takes a `CrewAITool` and converts it into a standard `Tool` object. + """ + + @classmethod + def convert_tool(cls, tool: Any, **kwargs: Any) -> Tool: + """ + Converts a given CrewAI tool into a general `Tool` format. + + This method ensures that the provided tool is a valid `CrewAITool`, sanitizes + the tool's name, processes its description, and prepares a function to interact + with the tool's arguments. It then returns a standardized `Tool` object. + + Args: + tool (Any): The tool to convert, expected to be an instance of `CrewAITool`. + **kwargs (Any): Additional arguments, which are not supported by this method. + + Returns: + Tool: A standardized `Tool` object converted from the CrewAI tool. + + Raises: + ValueError: If the provided tool is not an instance of `CrewAITool`, or if + any additional arguments are passed. 
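+
+        Example:
+            A minimal usage sketch, mirroring the accompanying tests; it assumes the
+            optional `crewai-tools` package (which provides `FileReadTool`) is
+            installed, and the file path is illustrative:
+
+            ```python
+            from crewai_tools import FileReadTool
+
+            crewai_tool = FileReadTool()
+            ag2_tool = CrewAIInteroperability.convert_tool(crewai_tool)
+
+            # the wrapped function takes a single pydantic model via the `args` keyword
+            args = crewai_tool.args_schema(file_path="some_file.txt")
+            print(ag2_tool.func(args=args))
+            ```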
+ """ + from crewai.tools import BaseTool as CrewAITool + + if not isinstance(tool, CrewAITool): + raise ValueError(f"Expected an instance of `crewai.tools.BaseTool`, got {type(tool)}") + if kwargs: + raise ValueError(f"The CrewAIInteroperability does not support any additional arguments, got {kwargs}") + + # needed for type checking + crewai_tool: CrewAITool = tool # type: ignore[no-any-unimported] + + name = _sanitize_name(crewai_tool.name) + description = ( + crewai_tool.description.split("Tool Description: ")[-1] + + " (IMPORTANT: When using arguments, put them all in an `args` dictionary)" + ) + + def func(args: crewai_tool.args_schema) -> Any: # type: ignore[no-any-unimported] + return crewai_tool.run(**args.model_dump()) + + return Tool( + name=name, + description=description, + func=func, + ) + + @classmethod + def get_unsupported_reason(cls) -> Optional[str]: + if sys.version_info < (3, 10) or sys.version_info >= (3, 13): + return "This submodule is only supported for Python versions 3.10, 3.11, and 3.12" + + try: + import crewai.tools + except ImportError: + return "Please install `interop-crewai` extra to use this module:\n\n\tpip install ag2[interop-crewai]" + + return None diff --git a/autogen/interop/interoperability.py b/autogen/interop/interoperability.py new file mode 100644 index 0000000000..b86285d6a6 --- /dev/null +++ b/autogen/interop/interoperability.py @@ -0,0 +1,73 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 +from typing import Any, Dict, List, Type + +from ..tools import Tool +from .interoperable import Interoperable +from .registry import InteroperableRegistry + +__all__ = ["Interoperable"] + + +class Interoperability: + """ + A class to handle interoperability between different tool types. + + This class allows the conversion of tools to various interoperability classes and provides functionality + for retrieving and registering interoperability classes. + """ + + registry = InteroperableRegistry.get_instance() + + @classmethod + def convert_tool(cls, *, tool: Any, type: str, **kwargs: Any) -> Tool: + """ + Converts a given tool to an instance of a specified interoperability type. + + Args: + tool (Any): The tool object to be converted. + type (str): The type of interoperability to convert the tool to. + **kwargs (Any): Additional arguments to be passed during conversion. + + Returns: + Tool: The converted tool. + + Raises: + ValueError: If the interoperability class for the provided type is not found. + """ + interop = cls.get_interoperability_class(type) + return interop.convert_tool(tool, **kwargs) + + @classmethod + def get_interoperability_class(cls, type: str) -> Type[Interoperable]: + """ + Retrieves the interoperability class corresponding to the specified type. + + Args: + type (str): The type of the interoperability class to retrieve. + + Returns: + Type[Interoperable]: The interoperability class type. + + Raises: + ValueError: If no interoperability class is found for the provided type. + """ + supported_types = cls.registry.get_supported_types() + if type not in supported_types: + supported_types_formated = ", ".join(["'t'" for t in supported_types]) + raise ValueError( + f"Interoperability class {type} is not supported, supported types: {supported_types_formated}" + ) + + return cls.registry.get_class(type) + + @classmethod + def get_supported_types(cls) -> List[str]: + """ + Returns a sorted list of all supported interoperability types. 
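+
+        Example:
+            Illustrative only; the exact result depends on which optional `interop`
+            extras are installed and on the running Python version:
+
+            ```python
+            Interoperability.get_supported_types()
+            # e.g. ['crewai', 'langchain', 'pydanticai']
+            ```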
+ + Returns: + List[str]: A sorted list of strings representing the supported interoperability types. + """ + return sorted(cls.registry.get_supported_types()) diff --git a/autogen/interop/interoperable.py b/autogen/interop/interoperable.py new file mode 100644 index 0000000000..185e36089d --- /dev/null +++ b/autogen/interop/interoperable.py @@ -0,0 +1,46 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Optional, Protocol, runtime_checkable + +from ..tools import Tool + +__all__ = ["Interoperable"] + + +@runtime_checkable +class Interoperable(Protocol): + """ + A Protocol defining the interoperability interface for tool conversion. + + This protocol ensures that any class implementing it provides the method + `convert_tool` to convert a given tool into a desired format or type. + """ + + @classmethod + def convert_tool(cls, tool: Any, **kwargs: Any) -> Tool: + """ + Converts a given tool to a desired format or type. + + This method should be implemented by any class adhering to the `Interoperable` protocol. + + Args: + tool (Any): The tool object to be converted. + **kwargs (Any): Additional parameters to pass during the conversion process. + + Returns: + Tool: The converted tool in the desired format or type. + """ + ... + + @classmethod + def get_unsupported_reason(cls) -> Optional[str]: + """Returns the reason for the tool being unsupported. + + This method should be implemented by any class adhering to the `Interoperable` protocol. + + Returns: + str: The reason for the interoperability class being unsupported. + """ + ... diff --git a/autogen/interop/langchain/__init__.py b/autogen/interop/langchain/__init__.py new file mode 100644 index 0000000000..1aa1f7892c --- /dev/null +++ b/autogen/interop/langchain/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from .langchain import LangChainInteroperability + +__all__ = ["LangChainInteroperability"] diff --git a/autogen/interop/langchain/langchain.py b/autogen/interop/langchain/langchain.py new file mode 100644 index 0000000000..a6689d4dad --- /dev/null +++ b/autogen/interop/langchain/langchain.py @@ -0,0 +1,76 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import sys +from typing import Any, Optional + +from ...tools import Tool +from ..registry import register_interoperable_class + +__all__ = ["LangChainInteroperability"] + + +@register_interoperable_class("langchain") +class LangChainInteroperability: + """ + A class implementing the `Interoperable` protocol for converting Langchain tools + into a general `Tool` format. + + This class takes a `LangchainTool` and converts it into a standard `Tool` object, + ensuring compatibility between Langchain tools and other systems that expect + the `Tool` format. + """ + + @classmethod + def convert_tool(cls, tool: Any, **kwargs: Any) -> Tool: + """ + Converts a given Langchain tool into a general `Tool` format. + + This method verifies that the provided tool is a valid `LangchainTool`, + processes the tool's input and description, and returns a standardized + `Tool` object. + + Args: + tool (Any): The tool to convert, expected to be an instance of `LangchainTool`. + **kwargs (Any): Additional arguments, which are not supported by this method. + + Returns: + Tool: A standardized `Tool` object converted from the Langchain tool. 
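+
+        Example:
+            A minimal sketch, mirroring the accompanying tests; it assumes the
+            `langchain` package is installed:
+
+            ```python
+            from langchain.tools import tool as langchain_tool
+
+            @langchain_tool
+            def search(query: str, max_length: int) -> str:
+                '''Look up things online.'''
+                return f"LangChain Integration, max_length: {max_length}"
+
+            ag2_tool = LangChainInteroperability.convert_tool(search)
+
+            # the wrapped function takes the generated pydantic model via `tool_input`
+            tool_input = search.args_schema(query="LangChain", max_length=100)
+            print(ag2_tool.func(tool_input=tool_input))
+            ```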
+ + Raises: + ValueError: If the provided tool is not an instance of `LangchainTool`, or if + any additional arguments are passed. + """ + from langchain_core.tools import BaseTool as LangchainTool + + if not isinstance(tool, LangchainTool): + raise ValueError(f"Expected an instance of `langchain_core.tools.BaseTool`, got {type(tool)}") + if kwargs: + raise ValueError(f"The LangchainInteroperability does not support any additional arguments, got {kwargs}") + + # needed for type checking + langchain_tool: LangchainTool = tool # type: ignore + + def func(tool_input: langchain_tool.args_schema) -> Any: # type: ignore + return langchain_tool.run(tool_input.model_dump()) + + return Tool( + name=langchain_tool.name, + description=langchain_tool.description, + func=func, + ) + + @classmethod + def get_unsupported_reason(cls) -> Optional[str]: + if sys.version_info < (3, 9): + return "This submodule is only supported for Python versions 3.9 and above" + + try: + import langchain_core.tools + except ImportError: + return ( + "Please install `interop-langchain` extra to use this module:\n\n\tpip install ag2[interop-langchain]" + ) + + return None diff --git a/autogen/interop/pydantic_ai/__init__.py b/autogen/interop/pydantic_ai/__init__.py new file mode 100644 index 0000000000..c022ebc414 --- /dev/null +++ b/autogen/interop/pydantic_ai/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from .pydantic_ai import PydanticAIInteroperability + +__all__ = ["PydanticAIInteroperability"] diff --git a/autogen/interop/pydantic_ai/pydantic_ai.py b/autogen/interop/pydantic_ai/pydantic_ai.py new file mode 100644 index 0000000000..78c48dc0cd --- /dev/null +++ b/autogen/interop/pydantic_ai/pydantic_ai.py @@ -0,0 +1,162 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + + +import sys +import warnings +from functools import wraps +from inspect import signature +from typing import Any, Callable, Optional + +from ..registry import register_interoperable_class +from .pydantic_ai_tool import PydanticAITool as AG2PydanticAITool + +__all__ = ["PydanticAIInteroperability"] + + +@register_interoperable_class("pydanticai") +class PydanticAIInteroperability: + """ + A class implementing the `Interoperable` protocol for converting Pydantic AI tools + into a general `Tool` format. + + This class takes a `PydanticAITool` and converts it into a standard `Tool` object, + ensuring compatibility between Pydantic AI tools and other systems that expect + the `Tool` format. It also provides a mechanism for injecting context parameters + into the tool's function. + """ + + @staticmethod + def inject_params( + ctx: Any, + tool: Any, + ) -> Callable[..., Any]: + """ + Wraps the tool's function to inject context parameters and handle retries. + + This method ensures that context parameters are properly passed to the tool + when invoked and that retries are managed according to the tool's settings. + + Args: + ctx (Optional[RunContext[Any]]): The run context, which may include dependencies and retry information. + tool (PydanticAITool): The Pydantic AI tool whose function is to be wrapped. + + Returns: + Callable[..., Any]: A wrapped function that includes context injection and retry handling. + + Raises: + ValueError: If the tool fails after the maximum number of retries. 
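+
+        Example:
+            A sketch based on the accompanying tests:
+
+            ```python
+            from pydantic_ai import RunContext
+            from pydantic_ai.tools import Tool as PydanticAITool
+
+            def greet(ctx: RunContext[int], city: str) -> str:
+                return f"{city} {ctx.deps}"
+
+            ctx = RunContext(deps=123, retry=0, messages=[], tool_name="greet")
+            pydantic_ai_tool = PydanticAITool(greet, takes_ctx=True)
+            wrapped = PydanticAIInteroperability.inject_params(ctx=ctx, tool=pydantic_ai_tool)
+
+            # `ctx` is removed from the wrapped signature and injected on every call
+            wrapped(city="Zagreb")  # -> "Zagreb 123"
+            ```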
+ """ + from pydantic_ai import RunContext + from pydantic_ai.tools import Tool as PydanticAITool + + ctx_typed: Optional[RunContext[Any]] = ctx # type: ignore + tool_typed: PydanticAITool[Any] = tool # type: ignore + + max_retries = tool_typed.max_retries if tool_typed.max_retries is not None else 1 + f = tool_typed.function + + @wraps(f) + def wrapper(*args: Any, **kwargs: Any) -> Any: + if tool_typed.current_retry >= max_retries: + raise ValueError(f"{tool_typed.name} failed after {max_retries} retries") + + try: + if ctx_typed is not None: + kwargs.pop("ctx", None) + ctx_typed.retry = tool_typed.current_retry + result = f(**kwargs, ctx=ctx_typed) # type: ignore[call-arg] + else: + result = f(**kwargs) # type: ignore[call-arg] + tool_typed.current_retry = 0 + except Exception as e: + tool_typed.current_retry += 1 + raise e + + return result + + sig = signature(f) + if ctx_typed is not None: + new_params = [param for name, param in sig.parameters.items() if name != "ctx"] + else: + new_params = list(sig.parameters.values()) + + wrapper.__signature__ = sig.replace(parameters=new_params) # type: ignore[attr-defined] + + return wrapper + + @classmethod + def convert_tool(cls, tool: Any, deps: Any = None, **kwargs: Any) -> AG2PydanticAITool: + """ + Converts a given Pydantic AI tool into a general `Tool` format. + + This method verifies that the provided tool is a valid `PydanticAITool`, + handles context dependencies if necessary, and returns a standardized `Tool` object. + + Args: + tool (Any): The tool to convert, expected to be an instance of `PydanticAITool`. + deps (Any, optional): The dependencies to inject into the context, required if + the tool takes a context. Defaults to None. + **kwargs (Any): Additional arguments that are not used in this method. + + Returns: + AG2PydanticAITool: A standardized `Tool` object converted from the Pydantic AI tool. + + Raises: + ValueError: If the provided tool is not an instance of `PydanticAITool`, or if + dependencies are missing for tools that require a context. + UserWarning: If the `deps` argument is provided for a tool that does not take a context. + """ + from pydantic_ai import RunContext + from pydantic_ai.tools import Tool as PydanticAITool + + if not isinstance(tool, PydanticAITool): + raise ValueError(f"Expected an instance of `pydantic_ai.tools.Tool`, got {type(tool)}") + + # needed for type checking + pydantic_ai_tool: PydanticAITool[Any] = tool # type: ignore[no-any-unimported] + + if tool.takes_ctx and deps is None: + raise ValueError("If the tool takes a context, the `deps` argument must be provided") + if not tool.takes_ctx and deps is not None: + warnings.warn( + "The `deps` argument is provided but will be ignored because the tool does not take a context.", + UserWarning, + ) + + if tool.takes_ctx: + ctx = RunContext( + deps=deps, + retry=0, + # All messages send to or returned by a model. + # This is mostly used on pydantic_ai Agent level. 
+ messages=[], # TODO: check in the future if this is needed on Tool level + tool_name=pydantic_ai_tool.name, + ) + else: + ctx = None + + func = PydanticAIInteroperability.inject_params( + ctx=ctx, + tool=pydantic_ai_tool, + ) + + return AG2PydanticAITool( + name=pydantic_ai_tool.name, + description=pydantic_ai_tool.description, + func=func, + parameters_json_schema=pydantic_ai_tool._parameters_json_schema, + ) + + @classmethod + def get_unsupported_reason(cls) -> Optional[str]: + if sys.version_info < (3, 9): + return "This submodule is only supported for Python versions 3.9 and above" + + try: + import pydantic_ai.tools + except ImportError: + return "Please install `interop-pydantic-ai` extra to use this module:\n\n\tpip install ag2[interop-pydantic-ai]" + + return None diff --git a/autogen/interop/pydantic_ai/pydantic_ai_tool.py b/autogen/interop/pydantic_ai/pydantic_ai_tool.py new file mode 100644 index 0000000000..629f65e7ad --- /dev/null +++ b/autogen/interop/pydantic_ai/pydantic_ai_tool.py @@ -0,0 +1,61 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Callable, Dict + +from ...agentchat.conversable_agent import ConversableAgent +from ...tools import Tool + +__all__ = ["PydanticAITool"] + + +class PydanticAITool(Tool): + """ + A class representing a Pydantic AI Tool that extends the general Tool functionality + with additional functionality specific to Pydantic AI tools. + + This class inherits from the Tool class and adds functionality for registering + tools with a ConversableAgent, along with providing additional schema information + specific to Pydantic AI tools, such as parameters and function signatures. + + Attributes: + parameters_json_schema (Dict[str, Any]): A schema describing the parameters + that the tool's function expects. + """ + + def __init__( + self, name: str, description: str, func: Callable[..., Any], parameters_json_schema: Dict[str, Any] + ) -> None: + """ + Initializes a PydanticAITool object with the provided name, description, + function, and parameter schema. + + Args: + name (str): The name of the tool. + description (str): A description of what the tool does. + func (Callable[..., Any]): The function that is executed when the tool is called. + parameters_json_schema (Dict[str, Any]): A schema describing the parameters + that the function accepts. + """ + super().__init__(name, description, func) + self._func_schema = { + "type": "function", + "function": { + "name": name, + "description": description, + "parameters": parameters_json_schema, + }, + } + + def register_for_llm(self, agent: ConversableAgent) -> None: + """ + Registers the tool with the ConversableAgent for use with a language model (LLM). + + This method updates the agent's tool signature to include the function schema, + allowing the agent to invoke the tool correctly during interactions with the LLM. + + Args: + agent (ConversableAgent): The agent with which the tool will be registered. 
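+
+        Example:
+            Illustrative only; it assumes `config_list` holds a valid LLM configuration
+            and `ag2_tool` was produced by `PydanticAIInteroperability.convert_tool`:
+
+            ```python
+            from autogen import AssistantAgent
+
+            chatbot = AssistantAgent(name="chatbot", llm_config={"config_list": config_list})
+            ag2_tool.register_for_llm(chatbot)
+            ```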
+ """ + agent.update_tool_signature(self._func_schema, is_remove=False) diff --git a/autogen/interop/registry.py b/autogen/interop/registry.py new file mode 100644 index 0000000000..443dcb5beb --- /dev/null +++ b/autogen/interop/registry.py @@ -0,0 +1,67 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Callable, Dict, Generic, List, Type, TypeVar + +from .interoperable import Interoperable + +__all__ = ["register_interoperable_class", "InteroperableRegistry"] + +InteroperableClass = TypeVar("InteroperableClass", bound=Type[Interoperable]) + + +class InteroperableRegistry: + def __init__(self) -> None: + self._registry: Dict[str, Type[Interoperable]] = {} + + def register(self, short_name: str, cls: InteroperableClass) -> InteroperableClass: + if short_name in self._registry: + raise ValueError(f"Duplicate registration for {short_name}") + + self._registry[short_name] = cls + + return cls + + def get_short_names(self) -> List[str]: + return sorted(self._registry.keys()) + + def get_supported_types(self) -> List[str]: + short_names = self.get_short_names() + supported_types = [name for name in short_names if self._registry[name].get_unsupported_reason() is None] + return supported_types + + def get_class(self, short_name: str) -> Type[Interoperable]: + return self._registry[short_name] + + @classmethod + def get_instance(cls) -> "InteroperableRegistry": + return _register + + +# global registry +_register = InteroperableRegistry() + + +# register decorator +def register_interoperable_class(short_name: str) -> Callable[[InteroperableClass], InteroperableClass]: + """Register an Interoperable class in the global registry. + + Returns: + Callable[[InteroperableClass], InteroperableClass]: Decorator function + + Example: + ```python + @register_interoperable_class("myinterop") + class MyInteroperability(Interoperable): + def convert_tool(self, tool: Any) -> Tool: + # implementation + ... + ``` + """ + + def inner(cls: InteroperableClass) -> InteroperableClass: + global _register + return _register.register(short_name, cls) + + return inner diff --git a/autogen/tools/__init__.py b/autogen/tools/__init__.py new file mode 100644 index 0000000000..5902681ce0 --- /dev/null +++ b/autogen/tools/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from .tool import Tool + +__all__ = ["Tool"] diff --git a/autogen/tools/tool.py b/autogen/tools/tool.py new file mode 100644 index 0000000000..43914aa59c --- /dev/null +++ b/autogen/tools/tool.py @@ -0,0 +1,71 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Callable + +from ..agentchat.conversable_agent import ConversableAgent + +__all__ = ["Tool"] + + +class Tool: + """ + A class representing a Tool that can be used by an agent for various tasks. + + This class encapsulates a tool with a name, description, and an executable function. + The tool can be registered with a ConversableAgent for use either with an LLM or for direct execution. + + Attributes: + name (str): The name of the tool. + description (str): A brief description of the tool's purpose or function. + func (Callable[..., Any]): The function to be executed when the tool is called. + """ + + def __init__(self, name: str, description: str, func: Callable[..., Any]) -> None: + """Create a new Tool object. 
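+
+        Example:
+            An illustrative sketch; `chatbot` and `user_proxy` stand in for already
+            configured agents:
+
+            ```python
+            def greet(name: str) -> str:
+                return f"Hello, {name}!"
+
+            tool = Tool(name="greet", description="Greets a person by name.", func=greet)
+            tool.register_for_llm(chatbot)  # expose the tool to the agent's LLM
+            tool.register_for_execution(user_proxy)  # let this agent execute tool calls
+            ```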
+ + Args: + name (str): The name of the tool. + description (str): The description of the tool. + func (Callable[..., Any]): The function that will be executed when the tool is called. + """ + self._name = name + self._description = description + self._func = func + + @property + def name(self) -> str: + return self._name + + @property + def description(self) -> str: + return self._description + + @property + def func(self) -> Callable[..., Any]: + return self._func + + def register_for_llm(self, agent: ConversableAgent) -> None: + """ + Registers the tool for use with a ConversableAgent's language model (LLM). + + This method registers the tool so that it can be invoked by the agent during + interactions with the language model. + + Args: + agent (ConversableAgent): The agent to which the tool will be registered. + """ + agent.register_for_llm(name=self._name, description=self._description)(self._func) + + def register_for_execution(self, agent: ConversableAgent) -> None: + """ + Registers the tool for direct execution by a ConversableAgent. + + This method registers the tool so that it can be executed by the agent, + typically outside of the context of an LLM interaction. + + Args: + agent (ConversableAgent): The agent to which the tool will be registered. + """ + agent.register_for_execution(name=self._name)(self._func) diff --git a/notebook/tools_interoperability.ipynb b/notebook/tools_interoperability.ipynb new file mode 100644 index 0000000000..00469be0d4 --- /dev/null +++ b/notebook/tools_interoperability.ipynb @@ -0,0 +1,418 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cross-Framework LLM Tool Integration with AG2\n", + "\n", + "In this tutorial, we demonstrate how to integrate LLM tools from various frameworks—including [LangChain Tools](https://python.langchain.com/v0.1/docs/modules/tools), [CrewAI Tools](https://github.com/crewAIInc/crewAI-tools/tree/main), and [PydanticAI Tools](https://ai.pydantic.dev/tools/) into the AG2 framework. This process enables smooth interoperability between these systems, allowing developers to leverage the unique capabilities of each toolset within AG2's flexible agent-based architecture. By the end of this guide, you will understand how to configure agents, adapt these tools for use in AG2, and validate the integration through practical examples." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## LangChain Tools Integration\n", + "\n", + "LangChain is a popular framework that offers a wide range of tools to work with LLMs. LangChain has already implemented a variety of tools that can be easily integrated into AG2. You can explore the available tools in the [LangChain Community Tools](https://github.com/langchain-ai/langchain/tree/master/libs/community/langchain_community/tools) folder. These tools, such as those for querying APIs, web scraping, and text generation, can be quickly incorporated into AG2, providing powerful functionality for your agents.\n", + "\n", + "### Installation\n", + "To integrate LangChain tools into the AG2 framework, install the required dependencies:\n", + "\n", + "```bash\n", + "pip install ag2[interop-langchain]\n", + "```\n", + "\n", + "Additionally, this notebook uses LangChain's [Wikipedia Tool](https://api.python.langchain.com/en/latest/tools/langchain_community.tools.wikipedia.tool.WikipediaQueryRun.html), which requires the `wikipedia` package. 
Install it with:\n", + "\n", + "```bash\n", + "pip install wikipedia\n", + "```\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Imports\n", + "\n", + "Import necessary modules and tools.\n", + "- `WikipediaQueryRun` and `WikipediaAPIWrapper`: Tools for querying Wikipedia.\n", + "- `AssistantAgent` and `UserProxyAgent`: Agents that facilitate communication in the AG2 framework.\n", + "- `Interoperability`: This module acts as a bridge, making it easier to integrate LangChain tools with AG2’s architecture." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "from langchain_community.tools import WikipediaQueryRun\n", + "from langchain_community.utilities import WikipediaAPIWrapper\n", + "\n", + "from autogen import AssistantAgent, UserProxyAgent\n", + "from autogen.interop import Interoperability" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Agent Configuration\n", + "\n", + "Configure the agents for the interaction.\n", + "- `config_list` defines the LLM configurations, including the model and API key.\n", + "- `UserProxyAgent` simulates user inputs without requiring actual human interaction (set to `NEVER`).\n", + "- `AssistantAgent` represents the AI agent, configured with the LLM settings." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "config_list = [{\"model\": \"gpt-4o\", \"api_key\": os.environ[\"OPENAI_API_KEY\"]}]\n", + "user_proxy = UserProxyAgent(\n", + " name=\"User\",\n", + " human_input_mode=\"NEVER\",\n", + ")\n", + "\n", + "chatbot = AssistantAgent(\n", + " name=\"chatbot\",\n", + " llm_config={\"config_list\": config_list},\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tool Integration\n", + "\n", + "- Initialize and register the LangChain tool with AG2.\n", + "- `WikipediaAPIWrapper`: Configured to fetch the top 1 result from Wikipedia with a maximum of 1000 characters per document.\n", + "- `WikipediaQueryRun`: A LangChain tool that executes Wikipedia queries.\n", + "- `LangchainInteroperability`: Converts the LangChain tool into a format compatible with the AG2 framework.\n", + "- `ag2_tool.register_for_execution(user_proxy)`: Registers the tool for use by the user_proxy agent.\n", + "- `ag2_tool.register_for_llm(chatbot)`: Registers the tool for integration with the chatbot agent.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "api_wrapper = WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=1000)\n", + "langchain_tool = WikipediaQueryRun(api_wrapper=api_wrapper)\n", + "\n", + "interop = Interoperability()\n", + "ag2_tool = interop.convert_tool(tool=langchain_tool, type=\"langchain\")\n", + "\n", + "ag2_tool.register_for_execution(user_proxy)\n", + "ag2_tool.register_for_llm(chatbot)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "message = \"Tell me about the history of the United States\"\n", + "user_proxy.initiate_chat(recipient=chatbot, message=message, max_turns=2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## CrewAI Tools Integration\n", + "\n", + "CrewAI provides a variety of powerful tools designed for tasks such as web scraping, search, code interpretation, and more. 
These tools are easy to integrate into the AG2 framework, allowing you to enhance your agents with advanced capabilities. You can explore the full list of available tools in the [CrewAI Tools](https://github.com/crewAIInc/crewAI-tools/tree/main) repository.\n", + "\n", + "### Installation\n", + "Install the required packages for integrating CrewAI tools into the AG2 framework.\n", + "This ensures all dependencies for both frameworks are installed.\n", + "\n", + "```bash\n", + "pip install ag2[interop-crewai]\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Imports\n", + "\n", + "Import necessary modules and tools.\n", + "- `ScrapeWebsiteTool` are the CrewAI tools for web scraping\n", + "- `AssistantAgent` and `UserProxyAgent` are core AG2 classes.\n", + "- `Interoperability`: This module acts as a bridge, making it easier to integrate CrewAI tools with AG2’s architecture." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "from crewai_tools import ScrapeWebsiteTool\n", + "\n", + "from autogen import AssistantAgent, UserProxyAgent\n", + "from autogen.interop import Interoperability" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Agent Configuration\n", + "\n", + "Configure the agents for the interaction.\n", + "- `config_list` defines the LLM configurations, including the model and API key.\n", + "- `UserProxyAgent` simulates user inputs without requiring actual human interaction (set to `NEVER`).\n", + "- `AssistantAgent` represents the AI agent, configured with the LLM settings." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "config_list = [{\"model\": \"gpt-4o\", \"api_key\": os.environ[\"OPENAI_API_KEY\"]}]\n", + "user_proxy = UserProxyAgent(\n", + " name=\"User\",\n", + " human_input_mode=\"NEVER\",\n", + ")\n", + "\n", + "chatbot = AssistantAgent(\n", + " name=\"chatbot\",\n", + " llm_config={\"config_list\": config_list},\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tool Integration\n", + "\n", + "Initialize and register the CrewAI tool with AG2.\n", + "- `crewai_tool` is an instance of the `ScrapeWebsiteTool` from CrewAI.\n", + "- `Interoperability` converts the CrewAI tool to make it usable in AG2.\n", + "- `register_for_execution` and `register_for_llm` allow the tool to work with the UserProxyAgent and AssistantAgent." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "interop = Interoperability()\n", + "crewai_tool = ScrapeWebsiteTool()\n", + "ag2_tool = interop.convert_tool(tool=crewai_tool, type=\"crewai\")\n", + "\n", + "ag2_tool.register_for_execution(user_proxy)\n", + "ag2_tool.register_for_llm(chatbot)\n", + "\n", + "message = \"Scrape the website https://ag2.ai/\"\n", + "\n", + "chat_result = user_proxy.initiate_chat(recipient=chatbot, message=message, max_turns=2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "print(chat_result.summary)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## PydanticAI Tools Integration\n", + "\n", + "[PydanticAI](https://ai.pydantic.dev/) is a newer framework that offers powerful capabilities for working with LLMs. 
Although it currently does not have a repository with pre-built tools, it provides features like **dependency injection**, allowing you to inject a \"Context\" into a tool for better execution without relying on LLMs. This context can be used for passing parameters or managing state during the execution of a tool. While the framework is still growing, you can integrate its tools into AG2 to enhance agent capabilities, especially for tasks that involve structured data and context-driven logic.\n", + "\n", + "### Installation\n", + "To integrate PydanticAI tools into the AG2 framework, install the required dependencies:\n", + "\n", + "```bash\n", + "pip install ag2[interop-pydantic-ai]\n", + "```\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Imports\n", + "\n", + "Import necessary modules and tools.\n", + "- `BaseModel`: Used to define data structures for tool inputs and outputs.\n", + "- `RunContext`: Provides context during the execution of tools.\n", + "- `PydanticAITool`: Represents a tool in the PydanticAI framework.\n", + "- `AssistantAgent` and `UserProxyAgent`: Agents that facilitate communication in the AG2 framework.\n", + "- `Interoperability`: This module acts as a bridge, making it easier to integrate PydanticAI tools with AG2’s architecture." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from typing import Optional\n", + "\n", + "from pydantic import BaseModel\n", + "from pydantic_ai import RunContext\n", + "from pydantic_ai.tools import Tool as PydanticAITool\n", + "\n", + "from autogen import AssistantAgent, UserProxyAgent\n", + "from autogen.interop import Interoperability" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Agent Configuration\n", + "\n", + "Configure the agents for the interaction.\n", + "- `config_list` defines the LLM configurations, including the model and API key.\n", + "- `UserProxyAgent` simulates user inputs without requiring actual human interaction (set to `NEVER`).\n", + "- `AssistantAgent` represents the AI agent, configured with the LLM settings." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "config_list = [{\"model\": \"gpt-4o\", \"api_key\": os.environ[\"OPENAI_API_KEY\"]}]\n", + "user_proxy = UserProxyAgent(\n", + " name=\"User\",\n", + " human_input_mode=\"NEVER\",\n", + ")\n", + "\n", + "chatbot = AssistantAgent(\n", + " name=\"chatbot\",\n", + " llm_config={\"config_list\": config_list},\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Tool Integration\n", + "\n", + "Integrate the PydanticAI tool with AG2.\n", + "\n", + "- Define a `Player` model using `BaseModel` to structure the input data.\n", + "- Use `RunContext` to securely inject dependencies (like the `Player` instance) into the tool function without exposing them to the LLM.\n", + "- Implement `get_player` to define the tool's functionality, accessing `ctx.deps` for injected data.\n", + "- Convert the tool to an AG2-compatible format with `Interoperability` and register it for execution and LLM communication.\n", + "- Convert the PydanticAI tool into an AG2-compatible format using `convert_tool`.\n", + "- Register the tool for both execution and communication with the LLM by associating it with the `user_proxy` and `chatbot`."
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "class Player(BaseModel):\n", + " name: str\n", + " age: int\n", + "\n", + "\n", + "def get_player(ctx: RunContext[Player], additional_info: Optional[str] = None) -> str: # type: ignore[valid-type]\n", + " \"\"\"Get the player's name.\n", + "\n", + " Args:\n", + " additional_info: Additional information which can be used.\n", + " \"\"\"\n", + " return f\"Name: {ctx.deps.name}, Age: {ctx.deps.age}, Additional info: {additional_info}\" # type: ignore[attr-defined]\n", + "\n", + "\n", + "interop = Interoperability()\n", + "pydantic_ai_tool = PydanticAITool(get_player, takes_ctx=True)\n", + "\n", + "# player will be injected as a dependency\n", + "player = Player(name=\"Luka\", age=25)\n", + "ag2_tool = interop.convert_tool(tool=pydantic_ai_tool, type=\"pydanticai\", deps=player)\n", + "\n", + "ag2_tool.register_for_execution(user_proxy)\n", + "ag2_tool.register_for_llm(chatbot)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Initiate a conversation between the `UserProxyAgent` and the `AssistantAgent`.\n", + "\n", + "- Use the `initiate_chat` method to send a message from the `user_proxy` to the `chatbot`.\n", + "- In this example, the user requests the chatbot to retrieve player information, providing \"goal keeper\" as additional context.\n", + "- The `Player` instance is securely injected into the tool using `RunContext`, ensuring the chatbot can retrieve and use this data during the interaction." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "user_proxy.initiate_chat(\n", + " recipient=chatbot, message=\"Get player, for additional information use 'goal keeper'\", max_turns=3\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/pyproject.toml b/pyproject.toml index c7d7f08b75..8f1db523ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,9 +60,13 @@ files = [ "autogen/_pydantic.py", "autogen/function_utils.py", "autogen/io", + "autogen/tools", + "autogen/interop", "test/test_pydantic.py", "test/test_function_utils.py", "test/io", + "test/tools", + "test/interop", ] exclude = [ "autogen/math_utils\\.py", @@ -87,7 +91,8 @@ no_implicit_optional = true check_untyped_defs = true warn_return_any = true show_error_codes = true -warn_unused_ignores = true + +warn_unused_ignores = false disallow_incomplete_defs = true disallow_untyped_decorators = true diff --git a/setup.py b/setup.py index 7492699d17..e8eda4d66f 100644 --- a/setup.py +++ b/setup.py @@ -61,8 +61,6 @@ "ipykernel>=6.29.0", ] -types = ["mypy==1.9.0"] + test + jupyter_executor - retrieve_chat = [ "protobuf==4.25.3", "chromadb==0.5.3", @@ -86,6 +84,12 @@ twilio = ["fastapi>=0.115.0,<1", "uvicorn>=0.30.6,<1", "twilio>=9.3.2"] +interop_crewai = ["crewai[tools]>=0.86,<1; python_version>='3.10' and python_version<'3.13'"] +interop_langchain = ["langchain-community>=0.3.12,<1"] +interop_pydantic_ai = ["pydantic-ai==0.0.13"] +interop = 
interop_crewai + interop_langchain + interop_pydantic_ai + +types = ["mypy==1.9.0"] + test + jupyter_executor + interop if current_os in ["Windows", "Darwin"]: retrieve_chat_pgvector.extend(["psycopg[binary]>=3.1.18"]) @@ -132,6 +136,10 @@ "ollama": ["ollama>=0.3.3", "fix_busted_json>=0.0.18"], "bedrock": ["boto3>=1.34.149"], "twilio": twilio, + "interop-crewai": interop_crewai, + "interop-langchain": interop_langchain, + "interop-pydantic-ai": interop_pydantic_ai, + "interop": interop, "neo4j": neo4j, } diff --git a/setup_ag2.py b/setup_ag2.py index f0ad6cf8f7..006b0f0558 100644 --- a/setup_ag2.py +++ b/setup_ag2.py @@ -55,6 +55,10 @@ "ollama": ["pyautogen[ollama]==" + __version__], "bedrock": ["pyautogen[bedrock]==" + __version__], "twilio": ["pyautogen[twilio]==" + __version__], + "interop-crewai": ["pyautogen[interop-crewai]==" + __version__], + "interop-langchain": ["pyautogen[interop-langchain]==" + __version__], + "interop-pydantic-ai": ["pyautogen[interop-pydantic-ai]==" + __version__], + "interop": ["pyautogen[interop]==" + __version__], "neo4j": ["pyautogen[neo4j]==" + __version__], }, url="https://github.com/ag2ai/ag2", diff --git a/setup_autogen.py b/setup_autogen.py index 38d6aad151..7181799d42 100644 --- a/setup_autogen.py +++ b/setup_autogen.py @@ -55,6 +55,10 @@ "ollama": ["pyautogen[ollama]==" + __version__], "bedrock": ["pyautogen[bedrock]==" + __version__], "twilio": ["pyautogen[twilio]==" + __version__], + "interop-crewai": ["pyautogen[interop-crewai]==" + __version__], + "interop-langchain": ["pyautogen[interop-langchain]==" + __version__], + "interop-pydantic-ai": ["pyautogen[interop-pydantic-ai]==" + __version__], + "interop": ["pyautogen[interop]==" + __version__], "neo4j": ["pyautogen[neo4j]==" + __version__], }, url="https://github.com/ag2ai/ag2", diff --git a/test/interop/__init__.py b/test/interop/__init__.py new file mode 100644 index 0000000000..bcd5401d54 --- /dev/null +++ b/test/interop/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/test/interop/crewai/__init__.py b/test/interop/crewai/__init__.py new file mode 100644 index 0000000000..bcd5401d54 --- /dev/null +++ b/test/interop/crewai/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/test/interop/crewai/test_crewai.py b/test/interop/crewai/test_crewai.py new file mode 100644 index 0000000000..0d39926638 --- /dev/null +++ b/test/interop/crewai/test_crewai.py @@ -0,0 +1,106 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import os +import sys +from tempfile import TemporaryDirectory +from unittest.mock import MagicMock + +import pytest +from conftest import reason, skip_openai + +if sys.version_info >= (3, 10) and sys.version_info < (3, 13): + from crewai_tools import FileReadTool +else: + FileReadTool = MagicMock() + +from autogen import AssistantAgent, UserProxyAgent +from autogen.interop import Interoperable + +if sys.version_info >= (3, 10) and sys.version_info < (3, 13): + from autogen.interop.crewai import CrewAIInteroperability +else: + CrewAIInteroperability = MagicMock() + + +# skip if python version is not in [3.10, 3.11, 3.12] +@pytest.mark.skipif( + sys.version_info < (3, 10) or sys.version_info >= (3, 13), reason="Only Python 3.10, 3.11, 3.12 are supported" +) +class TestCrewAIInteroperability: + @pytest.fixture(autouse=True) + def 
setup(self) -> None: + + crewai_tool = FileReadTool() + self.model_type = crewai_tool.args_schema + self.tool = CrewAIInteroperability.convert_tool(crewai_tool) + + def test_type_checks(self) -> None: + # mypy should fail if the type checks are not correct + interop: Interoperable = CrewAIInteroperability() + + # runtime check + assert isinstance(interop, Interoperable) + + def test_convert_tool(self) -> None: + with TemporaryDirectory() as tmp_dir: + file_path = f"{tmp_dir}/test.txt" + with open(file_path, "w") as file: + file.write("Hello, World!") + + assert self.tool.name == "Read_a_file_s_content" + assert ( + self.tool.description + == "A tool that can be used to read a file's content. (IMPORTANT: When using arguments, put them all in an `args` dictionary)" + ) + + args = self.model_type(file_path=file_path) + + assert self.tool.func(args=args) == "Hello, World!" + + @pytest.mark.skipif(skip_openai, reason=reason) + def test_with_llm(self) -> None: + config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}] + user_proxy = UserProxyAgent( + name="User", + human_input_mode="NEVER", + ) + + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + + self.tool.register_for_execution(user_proxy) + self.tool.register_for_llm(chatbot) + + with TemporaryDirectory() as tmp_dir: + file_path = f"{tmp_dir}/test.txt" + with open(file_path, "w") as file: + file.write("Hello, World!") + + user_proxy.initiate_chat( + recipient=chatbot, message=f"Read the content of the file at {file_path}", max_turns=2 + ) + + for message in user_proxy.chat_messages[chatbot]: + if "tool_responses" in message: + assert message["tool_responses"][0]["content"] == "Hello, World!" + return + + assert False, "Tool response not found in chat messages" + + def test_get_unsupported_reason(self) -> None: + assert CrewAIInteroperability.get_unsupported_reason() is None + + +@pytest.mark.skipif( + sys.version_info >= (3, 10) or sys.version_info < (3, 13), reason="Crew AI Interoperability is supported" +) +class TestCrewAIInteroperabilityIfNotSupported: + def test_get_unsupported_reason(self) -> None: + assert ( + CrewAIInteroperability.get_unsupported_reason() + == "This submodule is only supported for Python versions 3.10, 3.11, and 3.12" + ) diff --git a/test/interop/langchain/__init__.py b/test/interop/langchain/__init__.py new file mode 100644 index 0000000000..bcd5401d54 --- /dev/null +++ b/test/interop/langchain/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/test/interop/langchain/test_langchain.py b/test/interop/langchain/test_langchain.py new file mode 100644 index 0000000000..be0a2f6bfc --- /dev/null +++ b/test/interop/langchain/test_langchain.py @@ -0,0 +1,139 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import os +import sys +import unittest + +import pytest +from conftest import reason, skip_openai +from pydantic import BaseModel, Field + +from autogen import AssistantAgent, UserProxyAgent +from autogen.interop import Interoperable + +if sys.version_info >= (3, 9): + from langchain.tools import tool as langchain_tool +else: + langchain_tool = unittest.mock.MagicMock() + +from autogen.interop.langchain import LangChainInteroperability + + +# skip if python version is not >= 3.9 +@pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for 
LangchainInteroperability" +) +class TestLangChainInteroperability: + @pytest.fixture(autouse=True) + def setup(self) -> None: + class SearchInput(BaseModel): + query: str = Field(description="should be a search query") + + @langchain_tool("search-tool", args_schema=SearchInput, return_direct=True) # type: ignore[misc] + def search(query: SearchInput) -> str: + """Look up things online.""" + return "LangChain Integration" + + self.model_type = search.args_schema + self.tool = LangChainInteroperability.convert_tool(search) + + def test_type_checks(self) -> None: + # mypy should fail if the type checks are not correct + interop: Interoperable = LangChainInteroperability() + + # runtime check + assert isinstance(interop, Interoperable) + + def test_convert_tool(self) -> None: + assert self.tool.name == "search-tool" + assert self.tool.description == "Look up things online." + + tool_input = self.model_type(query="LangChain") # type: ignore[misc] + assert self.tool.func(tool_input=tool_input) == "LangChain Integration" + + @pytest.mark.skipif(skip_openai, reason=reason) + def test_with_llm(self) -> None: + config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}] + user_proxy = UserProxyAgent( + name="User", + human_input_mode="NEVER", + ) + + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + + self.tool.register_for_execution(user_proxy) + self.tool.register_for_llm(chatbot) + + user_proxy.initiate_chat(recipient=chatbot, message="search for LangChain", max_turns=2) + + for message in user_proxy.chat_messages[chatbot]: + if "tool_responses" in message: + assert message["tool_responses"][0]["content"] == "LangChain Integration" + return + + assert False, "No tool response found in chat messages" + + def test_get_unsupported_reason(self) -> None: + assert LangChainInteroperability.get_unsupported_reason() is None + + +# skip if python version is not >= 3.9 +@pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for LangchainInteroperability" +) +class TestLangChainInteroperabilityWithoutPydanticInput: + @pytest.fixture(autouse=True) + def setup(self) -> None: + @langchain_tool + def search(query: str, max_length: int) -> str: + """Look up things online.""" + return f"LangChain Integration, max_length: {max_length}" + + self.tool = LangChainInteroperability.convert_tool(search) + self.model_type = search.args_schema + + def test_convert_tool(self) -> None: + assert self.tool.name == "search" + assert self.tool.description == "Look up things online." 
+ + tool_input = self.model_type(query="LangChain", max_length=100) # type: ignore[misc] + assert self.tool.func(tool_input=tool_input) == "LangChain Integration, max_length: 100" + + @pytest.mark.skipif(skip_openai, reason=reason) + def test_with_llm(self) -> None: + config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}] + user_proxy = UserProxyAgent( + name="User", + human_input_mode="NEVER", + ) + + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + + self.tool.register_for_execution(user_proxy) + self.tool.register_for_llm(chatbot) + + user_proxy.initiate_chat(recipient=chatbot, message="search for LangChain, Use max 100 characters", max_turns=2) + + for message in user_proxy.chat_messages[chatbot]: + if "tool_responses" in message: + assert message["tool_responses"][0]["content"] == "LangChain Integration, max_length: 100" + return + + assert False, "No tool response found in chat messages" + + +@pytest.mark.skipif(sys.version_info >= (3, 9), reason="LangChain Interoperability is supported") +class TestLangChainInteroperabilityIfNotSupported: + def test_get_unsupported_reason(self) -> None: + assert ( + LangChainInteroperability.get_unsupported_reason() + == "This submodule is only supported for Python versions 3.9 and above" + ) diff --git a/test/interop/pydantic_ai/__init__.py b/test/interop/pydantic_ai/__init__.py new file mode 100644 index 0000000000..bcd5401d54 --- /dev/null +++ b/test/interop/pydantic_ai/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/test/interop/pydantic_ai/test_pydantic_ai.py b/test/interop/pydantic_ai/test_pydantic_ai.py new file mode 100644 index 0000000000..2840cdbc9a --- /dev/null +++ b/test/interop/pydantic_ai/test_pydantic_ai.py @@ -0,0 +1,235 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import os +import random +import sys +import unittest +from inspect import signature +from typing import Any, Dict, Optional + +import pytest +from conftest import reason, skip_openai +from pydantic import BaseModel + +from autogen import AssistantAgent, UserProxyAgent +from autogen.interop import Interoperable + +if sys.version_info >= (3, 9): + from pydantic_ai import RunContext + from pydantic_ai.tools import Tool as PydanticAITool + +else: + RunContext = unittest.mock.MagicMock() + PydanticAITool = unittest.mock.MagicMock() + +from autogen.interop.pydantic_ai import PydanticAIInteroperability + + +# skip if python version is not >= 3.9 +@pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for LangchainInteroperability" +) +class TestPydanticAIInteroperabilityWithotContext: + @pytest.fixture(autouse=True) + def setup(self) -> None: + def roll_dice() -> str: + """Roll a six-sided dice and return the result.""" + return str(random.randint(1, 6)) + + pydantic_ai_tool = PydanticAITool(roll_dice, max_retries=3) # type: ignore[var-annotated] + self.tool = PydanticAIInteroperability.convert_tool(pydantic_ai_tool) + + def test_type_checks(self) -> None: + # mypy should fail if the type checks are not correct + interop: Interoperable = PydanticAIInteroperability() + # runtime check + assert isinstance(interop, Interoperable) + + def test_convert_tool(self) -> None: + assert self.tool.name == "roll_dice" + assert self.tool.description == "Roll a six-sided dice and return the result." 
+ assert self.tool.func() in ["1", "2", "3", "4", "5", "6"] + + @pytest.mark.skipif(skip_openai, reason=reason) + def test_with_llm(self) -> None: + config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}] + user_proxy = UserProxyAgent( + name="User", + human_input_mode="NEVER", + ) + + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + + self.tool.register_for_execution(user_proxy) + self.tool.register_for_llm(chatbot) + + user_proxy.initiate_chat(recipient=chatbot, message="roll a dice", max_turns=2) + + for message in user_proxy.chat_messages[chatbot]: + if "tool_responses" in message: + assert message["tool_responses"][0]["content"] in ["1", "2", "3", "4", "5", "6"] + return + + assert False, "No tool response found in chat messages" + + +@pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for LangchainInteroperability" +) +class TestPydanticAIInteroperabilityDependencyInjection: + + def test_dependency_injection(self) -> None: + def f( + ctx: RunContext[int], # type: ignore[valid-type] + city: str, + date: str, + ) -> str: + """Random function for testing.""" + return f"{city} {date} {ctx.deps}" # type: ignore[attr-defined] + + ctx = RunContext( + deps=123, + retry=0, + messages=None, # type: ignore[arg-type] + tool_name=f.__name__, + ) + pydantic_ai_tool = PydanticAITool(f, takes_ctx=True) # type: ignore[var-annotated] + g = PydanticAIInteroperability.inject_params( + ctx=ctx, + tool=pydantic_ai_tool, + ) + assert list(signature(g).parameters.keys()) == ["city", "date"] + kwargs: Dict[str, Any] = {"city": "Zagreb", "date": "2021-01-01"} + assert g(**kwargs) == "Zagreb 2021-01-01 123" + + def test_dependency_injection_with_retry(self) -> None: + def f( + ctx: RunContext[int], # type: ignore[valid-type] + city: str, + date: str, + ) -> str: + """Random function for testing.""" + raise ValueError("Retry") + + ctx = RunContext( + deps=123, + retry=0, + messages=None, # type: ignore[arg-type] + tool_name=f.__name__, + ) + + pydantic_ai_tool = PydanticAITool(f, takes_ctx=True, max_retries=3) # type: ignore[var-annotated] + g = PydanticAIInteroperability.inject_params( + ctx=ctx, + tool=pydantic_ai_tool, + ) + + for i in range(3): + with pytest.raises(ValueError, match="Retry"): + g(city="Zagreb", date="2021-01-01") + assert pydantic_ai_tool.current_retry == i + 1 + assert ctx.retry == i + + with pytest.raises(ValueError, match="f failed after 3 retries"): + g(city="Zagreb", date="2021-01-01") + assert pydantic_ai_tool.current_retry == 3 + + +@pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for LangchainInteroperability" +) +class TestPydanticAIInteroperabilityWithContext: + @pytest.fixture(autouse=True) + def setup(self) -> None: + class Player(BaseModel): + name: str + age: int + + def get_player(ctx: RunContext[Player], additional_info: Optional[str] = None) -> str: # type: ignore[valid-type] + """Get the player's name. + + Args: + additional_info: Additional information which can be used. 
+ """ + return f"Name: {ctx.deps.name}, Age: {ctx.deps.age}, Additional info: {additional_info}" # type: ignore[attr-defined] + + self.pydantic_ai_tool = PydanticAITool(get_player, takes_ctx=True) # type: ignore[var-annotated] + player = Player(name="Luka", age=25) + self.tool = PydanticAIInteroperability.convert_tool(tool=self.pydantic_ai_tool, deps=player) + + def test_convert_tool_raises_error_if_take_ctx_is_true_and_deps_is_none(self) -> None: + with pytest.raises(ValueError, match="If the tool takes a context, the `deps` argument must be provided"): + PydanticAIInteroperability.convert_tool(tool=self.pydantic_ai_tool, deps=None) + + def test_expected_tools(self) -> None: + config_list = [{"model": "gpt-4o", "api_key": "abc"}] + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + self.tool.register_for_llm(chatbot) + + expected_tools = [ + { + "type": "function", + "function": { + "name": "get_player", + "description": "Get the player's name.", + "parameters": { + "properties": { + "additional_info": { + "anyOf": [{"type": "string"}, {"type": "null"}], + "description": "Additional information which can be used.", + "title": "Additional Info", + } + }, + "required": ["additional_info"], + "type": "object", + "additionalProperties": False, + }, + }, + } + ] + + assert chatbot.llm_config["tools"] == expected_tools # type: ignore[index] + + @pytest.mark.skipif(skip_openai, reason=reason) + def test_with_llm(self) -> None: + config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}] + user_proxy = UserProxyAgent( + name="User", + human_input_mode="NEVER", + ) + + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + + self.tool.register_for_execution(user_proxy) + self.tool.register_for_llm(chatbot) + + user_proxy.initiate_chat( + recipient=chatbot, message="Get player, for additional information use 'goal keeper'", max_turns=3 + ) + + for message in user_proxy.chat_messages[chatbot]: + if "tool_responses" in message: + assert message["tool_responses"][0]["content"] == "Name: Luka, Age: 25, Additional info: goal keeper" + return + + assert False, "No tool response found in chat messages" + + +@pytest.mark.skipif(sys.version_info >= (3, 9), reason="LangChain Interoperability is supported") +class TestPydanticAIInteroperabilityIfNotSupported: + def test_get_unsupported_reason(self) -> None: + assert ( + PydanticAIInteroperability.get_unsupported_reason() + == "This submodule is only supported for Python versions 3.9 and above" + ) diff --git a/test/interop/pydantic_ai/test_pydantic_ai_tool.py b/test/interop/pydantic_ai/test_pydantic_ai_tool.py new file mode 100644 index 0000000000..f1ae38389e --- /dev/null +++ b/test/interop/pydantic_ai/test_pydantic_ai_tool.py @@ -0,0 +1,74 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import sys +import unittest + +import pytest + +from autogen import AssistantAgent + +if sys.version_info >= (3, 9): + from pydantic_ai.tools import Tool as PydanticAITool + +else: + PydanticAITool = unittest.mock.MagicMock() + +from autogen.interop.pydantic_ai.pydantic_ai_tool import PydanticAITool as AG2PydanticAITool + + +# skip if python version is not >= 3.9 +@pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for LangchainInteroperability" +) +class TestPydanticAITool: + def test_register_for_llm(self) -> None: + def foobar(a: int, b: str, c: dict[str, 
list[float]]) -> str: # type: ignore[misc] + """Get me foobar. + + Args: + a: apple pie + b: banana cake + c: carrot smoothie + """ + return f"{a} {b} {c}" + + tool = PydanticAITool(foobar) # type: ignore[var-annotated] + ag2_tool = AG2PydanticAITool( + name=tool.name, + description=tool.description, + func=tool.function, + parameters_json_schema=tool._parameters_json_schema, + ) + config_list = [{"model": "gpt-4o", "api_key": "abc"}] + chatbot = AssistantAgent( + name="chatbot", + llm_config={"config_list": config_list}, + ) + ag2_tool.register_for_llm(chatbot) + expected_tools = [ + { + "type": "function", + "function": { + "name": "foobar", + "description": "Get me foobar.", + "parameters": { + "properties": { + "a": {"description": "apple pie", "title": "A", "type": "integer"}, + "b": {"description": "banana cake", "title": "B", "type": "string"}, + "c": { + "additionalProperties": {"items": {"type": "number"}, "type": "array"}, + "description": "carrot smoothie", + "title": "C", + "type": "object", + }, + }, + "required": ["a", "b", "c"], + "type": "object", + "additionalProperties": False, + }, + }, + } + ] + assert chatbot.llm_config["tools"] == expected_tools # type: ignore[index] diff --git a/test/interop/test_interoperability.py b/test/interop/test_interoperability.py new file mode 100644 index 0000000000..3925f056f1 --- /dev/null +++ b/test/interop/test_interoperability.py @@ -0,0 +1,62 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import sys +from tempfile import TemporaryDirectory +from typing import Any + +import pytest + +from autogen.interop import Interoperability + + +class TestInteroperability: + def test_supported_types(self) -> None: + actual = Interoperability.get_supported_types() + + if sys.version_info < (3, 9): + assert actual == [] + + if sys.version_info >= (3, 9) and sys.version_info < (3, 10): + assert actual == ["langchain", "pydanticai"] + + if sys.version_info >= (3, 10) and sys.version_info < (3, 13): + assert actual == ["crewai", "langchain", "pydanticai"] + + if sys.version_info >= (3, 13): + assert actual == ["langchain", "pydanticai"] + + @pytest.mark.skipif( + sys.version_info < (3, 10) or sys.version_info >= (3, 13), reason="Only Python 3.10, 3.11, 3.12 are supported" + ) + def test_crewai(self) -> None: + from crewai_tools import FileReadTool + + crewai_tool = FileReadTool() + + tool = Interoperability.convert_tool(type="crewai", tool=crewai_tool) + + with TemporaryDirectory() as tmp_dir: + file_path = f"{tmp_dir}/test.txt" + with open(file_path, "w") as file: + file.write("Hello, World!") + + assert tool.name == "Read_a_file_s_content" + assert ( + tool.description + == "A tool that can be used to read a file's content. (IMPORTANT: When using arguments, put them all in an `args` dictionary)" + ) + + model_type = crewai_tool.args_schema + + args = model_type(file_path=file_path) + + assert tool.func(args=args) == "Hello, World!" 
+ + @pytest.mark.skipif( + sys.version_info < (3, 9), reason="Only Python 3.9 and above are supported for LangchainInteroperability" + ) + @pytest.mark.skip(reason="This test is not yet implemented") + def test_langchain(self) -> None: + raise NotImplementedError("This test is not yet implemented") diff --git a/test/interop/test_interoperable.py b/test/interop/test_interoperable.py new file mode 100644 index 0000000000..164854f6aa --- /dev/null +++ b/test/interop/test_interoperable.py @@ -0,0 +1,9 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +from autogen.interop import Interoperable + + +def test_interoperable() -> None: + assert Interoperable is not None diff --git a/test/tools/__init__.py b/test/tools/__init__.py new file mode 100644 index 0000000000..bcd5401d54 --- /dev/null +++ b/test/tools/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 diff --git a/test/tools/test_tool.py b/test/tools/test_tool.py new file mode 100644 index 0000000000..b21edebd45 --- /dev/null +++ b/test/tools/test_tool.py @@ -0,0 +1,59 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + +import os + +import pytest + +from autogen import AssistantAgent, UserProxyAgent +from autogen.tools import Tool + + +class TestTool: + @pytest.fixture(autouse=True) + def setup(self) -> None: + def f(x: str) -> str: + return x + "!" + + self.tool = Tool(name="test_tool", description="A test tool", func=f) + + def test_init(self) -> None: + assert self.tool.name == "test_tool" + assert self.tool.description == "A test tool" + + def test_register_for_llm(self) -> None: + config_list = [{"model": "gpt-4", "api_key": os.environ["OPENAI_API_KEY"]}] + + agent = AssistantAgent( + name="agent", + llm_config={"config_list": config_list}, + ) + + self.tool.register_for_llm(agent=agent) + + expected_tools = [ + { + "type": "function", + "function": { + "description": "A test tool", + "name": "test_tool", + "parameters": { + "type": "object", + "properties": {"x": {"type": "string", "description": "x"}}, + "required": ["x"], + }, + }, + } + ] + + assert agent.llm_config["tools"] == expected_tools # type: ignore[index] + + def test_register_for_execution(self) -> None: + user_proxy = UserProxyAgent( + name="user", + ) + + self.tool.register_for_execution(user_proxy) + assert user_proxy.can_execute_function("test_tool") + assert user_proxy.function_map["test_tool"]("Hello") == "Hello!" diff --git a/website/blog/2024-12-18-Tools-interoperability/index.mdx b/website/blog/2024-12-18-Tools-interoperability/index.mdx new file mode 100644 index 0000000000..3f6ae0962d --- /dev/null +++ b/website/blog/2024-12-18-Tools-interoperability/index.mdx @@ -0,0 +1,425 @@ +--- +title: Cross-Framework LLM Tool Integration with AG2 +authors: + - rjambrecic +tags: [LLM, tools, langchain, crewai, pydanticai] +--- + +**TL;DR** +AG2 lets you bring in tools from different frameworks like **LangChain**, **CrewAI**, and **PydanticAI**. + +- **LangChain**: Useful for tasks like API querying and web scraping. +- **CrewAI**: Offers a variety of tools for web scraping, search, and more. +- **PydanticAI**: Adds context-driven tools and structured data processing. + +With AG2, you can combine these tools and enhance your agents' capabilities. 
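+
+Under the hood, every supported framework goes through the same two steps: convert the tool, then register it with your agents. Here is a minimal sketch of that pattern, assuming one of the `interop` extras is installed; `framework_tool`, `user_proxy`, and `chatbot` are placeholders for the objects created in the sections below.
+
+```python
+from autogen.interop import Interoperability
+
+# Convert a tool from another framework into a regular AG2 tool.
+# Supported values for `type` include "langchain", "crewai", and "pydanticai".
+ag2_tool = Interoperability().convert_tool(tool=framework_tool, type="langchain")
+
+ag2_tool.register_for_execution(user_proxy)  # the agent that actually runs the tool
+ag2_tool.register_for_llm(chatbot)           # the agent whose LLM can suggest calling it
+```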
+
+In this post, we’ll walk through how to integrate tools from various frameworks, like [LangChain Tools](https://python.langchain.com/v0.1/docs/modules/tools),
+[CrewAI Tools](https://github.com/crewAIInc/crewAI-tools/tree/main), and [PydanticAI Tools](https://ai.pydantic.dev/tools/), into AG2.
+This allows you to use tools from different frameworks within AG2, giving your agents more power and flexibility. By the end of this post, you will understand how to configure agents, adapt these tools for use in AG2, and validate the integration through practical examples.
+
+## LangChain Tools Integration
+
+LangChain is a popular framework that offers a wide range of tools to work with LLMs. LangChain has already implemented a variety of tools that can be easily integrated into AG2. You can explore the available tools in the [LangChain Community Tools](https://github.com/langchain-ai/langchain/tree/master/libs/community/langchain_community/tools) folder. These tools, such as those for querying APIs, web scraping, and text generation, can be quickly incorporated into AG2, providing powerful functionality for your agents.
+
+### Installation
+To integrate LangChain tools into the AG2 framework, install the required dependencies:
+
+```bash
+pip install ag2[interop-langchain]
+```
+
+Additionally, this example uses LangChain's [Wikipedia Tool](https://api.python.langchain.com/en/latest/tools/langchain_community.tools.wikipedia.tool.WikipediaQueryRun.html), which requires the `wikipedia` package. Install it with:
+
+```bash
+pip install wikipedia
+```
+
+### Imports
+
+Import the necessary modules and tools.
+- `WikipediaQueryRun` and `WikipediaAPIWrapper`: Tools for querying Wikipedia.
+- `AssistantAgent` and `UserProxyAgent`: Agents that facilitate communication in the AG2 framework.
+- `Interoperability`: This module acts as a bridge, making it easier to integrate LangChain tools with AG2’s architecture.
+
+```python
+import os
+
+from langchain_community.tools import WikipediaQueryRun
+from langchain_community.utilities import WikipediaAPIWrapper
+
+from autogen import AssistantAgent, UserProxyAgent
+from autogen.interop import Interoperability
+```
+
+### Agent Configuration
+
+Configure the agents for the interaction.
+- `config_list` defines the LLM configurations, including the model and API key.
+- `UserProxyAgent` simulates user inputs without requiring actual human interaction (set to `NEVER`).
+- `AssistantAgent` represents the AI agent, configured with the LLM settings.
+
+```python
+config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}]
+user_proxy = UserProxyAgent(
+    name="User",
+    human_input_mode="NEVER",
+)
+
+chatbot = AssistantAgent(
+    name="chatbot",
+    llm_config={"config_list": config_list},
+)
+```
+
+### Tool Integration
+
+Initialize and register the LangChain tool with AG2.
+- `WikipediaAPIWrapper`: Configured to fetch the top result from Wikipedia with a maximum of 1000 characters per document.
+- `WikipediaQueryRun`: A LangChain tool that executes Wikipedia queries.
+- `interop.convert_tool(...)`: Converts the LangChain tool into a format compatible with the AG2 framework.
+- `ag2_tool.register_for_execution(user_proxy)`: Registers the tool for use by the user_proxy agent.
+- `ag2_tool.register_for_llm(chatbot)`: Registers the tool for integration with the chatbot agent.
+ +```python +api_wrapper = WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=1000) +langchain_tool = WikipediaQueryRun(api_wrapper=api_wrapper) + +interop = Interoperability() +ag2_tool = interop.convert_tool(tool=langchain_tool, type="langchain") + +ag2_tool.register_for_execution(user_proxy) +ag2_tool.register_for_llm(chatbot) +``` + +### Initiate Chat +```python +message = "Tell me about the history of the United States" +user_proxy.initiate_chat(recipient=chatbot, message=message, max_turns=2) +``` + +Output: + +```console +User (to chatbot): + +Tell me about the history of the United States + +-------------------------------------------------------------------------------- +chatbot (to User): + +***** Suggested tool call (call_hhy2G43ymytUFmJlDsK9J0tk): wikipedia ***** +Arguments: +{"tool_input":{"query":"history of the United States"}} +************************************************************************** + +-------------------------------------------------------------------------------- + +>>>>>>>> EXECUTING FUNCTION wikipedia... +User (to chatbot): + +***** Response from calling tool (call_hhy2G43ymytUFmJlDsK9J0tk) ***** +Page: History of the United States +Summary: The history of the lands that became the United States began with the arrival of the first people in the Americas around 15,000 BC. After European colonization of North America began in the late 15th century, wars and epidemics decimated Indigenous societies. By the 1760s, the thirteen British colonies were established. The Southern Colonies built an agricultural system on slave labor, enslaving millions from Africa. After defeating France, the British Parliament imposed a series of taxes; resistance to these taxes, especially the Boston Tea Party in 1773, led to Parliament issuing the Intolerable Acts designed to end self-government. +In 1776, the United States declared its independence. Led by General George Washington, it won the Revolutionary War in 1783. The Constitution was adopted in 1789, and a Bill of Rights was added in 1791 to guarantee inalienable rights. Washington, the first president, and his adviser Alexander Hamilton created a +********************************************************************** + +-------------------------------------------------------------------------------- +chatbot (to User): + +The history of the United States begins with the arrival of the first peoples in the Americas around 15,000 BC. This pre-Columbian era was followed by European colonization, beginning in the late 15th century, which dramatically altered the indigenous societies through wars and epidemics. + +By the 1760s, thirteen British colonies were established along the Atlantic seaboard. In the Southern Colonies, an agricultural economy heavily reliant on enslaved labor from Africa was developed. The British victory over France in the Seven Years' War led Parliament to impose various taxes on the colonies. Resistance to these taxes, exemplified by the Boston Tea Party in 1773, prompted the Parliament to enact the Intolerable Acts, seeking to curtail colonial self-governance. + +The United States declared independence in 1776. Under the leadership of General George Washington, the American Revolutionary War concluded successfully in 1783. Subsequently, the U.S. Constitution was adopted in 1789, with the Bill of Rights added in 1791 to ensure inalienable rights. 
During this early period, President George Washington and his advisor Alexander Hamilton played significant roles in forming the young nation's governmental and economic foundations.
+
+This overview covers the early formation and foundational moments of what became the United States, setting the stage for the country's subsequent expansion and development. TERMINATE
+
+--------------------------------------------------------------------------------
+```
+
+## CrewAI Tools Integration
+
+CrewAI provides a variety of powerful tools designed for tasks such as web scraping, search, code interpretation, and more. These tools are easy to integrate into the AG2 framework, allowing you to enhance your agents with advanced capabilities. You can explore the full list of available tools in the [CrewAI Tools](https://github.com/crewAIInc/crewAI-tools/tree/main) repository.
+
+### Installation
+Install the required packages for integrating CrewAI tools into the AG2 framework.
+This ensures all dependencies for both frameworks are installed.
+
+```bash
+pip install ag2[interop-crewai]
+```
+
+### Imports
+
+Import the necessary modules and tools.
+- `ScrapeWebsiteTool`: The CrewAI tool for web scraping.
+- `AssistantAgent` and `UserProxyAgent`: Core AG2 agent classes.
+- `Interoperability`: This module acts as a bridge, making it easier to integrate CrewAI tools with AG2’s architecture.
+
+```python
+import os
+
+from crewai_tools import ScrapeWebsiteTool
+
+from autogen import AssistantAgent, UserProxyAgent
+from autogen.interop import Interoperability
+```
+
+### Agent Configuration
+
+Configure the agents for the interaction.
+- `config_list` defines the LLM configurations, including the model and API key.
+- `UserProxyAgent` simulates user inputs without requiring actual human interaction (set to `NEVER`).
+- `AssistantAgent` represents the AI agent, configured with the LLM settings.
+
+```python
+config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}]
+user_proxy = UserProxyAgent(
+    name="User",
+    human_input_mode="NEVER",
+)
+
+chatbot = AssistantAgent(
+    name="chatbot",
+    llm_config={"config_list": config_list},
+)
+```
+
+### Tool Integration
+
+Initialize and register the CrewAI tool with AG2.
+- `crewai_tool` is an instance of the `ScrapeWebsiteTool` from CrewAI.
+- `Interoperability` converts the CrewAI tool to make it usable in AG2.
+- `register_for_execution` and `register_for_llm` allow the tool to work with the UserProxyAgent and AssistantAgent.
+
+```python
+interop = Interoperability()
+crewai_tool = ScrapeWebsiteTool()
+ag2_tool = interop.convert_tool(tool=crewai_tool, type="crewai")
+
+ag2_tool.register_for_execution(user_proxy)
+ag2_tool.register_for_llm(chatbot)
+
+message = "Scrape the website https://ag2.ai/"
+
+chat_result = user_proxy.initiate_chat(recipient=chatbot, message=message, max_turns=2)
+```
+
+Output:
+
+```console
+User (to chatbot):
+
+Scrape the website https://ag2.ai/
+
+--------------------------------------------------------------------------------
+chatbot (to User):
+
+***** Suggested tool call (call_ZStuwmexfN7j56uJKOi6BCid): Read_website_content *****
+Arguments:
+{"args":{"website_url":"https://ag2.ai/"}}
+*************************************************************************************
+
+--------------------------------------------------------------------------------
+
+>>>>>>>> EXECUTING FUNCTION Read_website_content...
+Using Tool: Read website content +User (to chatbot): + +***** Response from calling tool (call_ZStuwmexfN7j56uJKOi6BCid) ***** + +AgentOS +Join our growing community of over 20,000 agent builders Join our growing community of over 20,000 agent builders The Open-Source AgentOS Build production-ready multi-agent systems in minutes, not months. Github Discord The End-to-End Platform for Multi-Agent Automation The End-to-End Platform for Multi-Agent Automation Flexible Agent Construction and Orchestration Create specialized agents that work together seamlessly. AG2 makes it easy to define roles, configure behaviors, and orchestrate collaboration - all through simple, intuitive code. → Assistant agents for problem-solving → Executor agents for taking action → Critic agents for validation → Group chat managers for coordination Built-in Conversation Patterns Built-in Conversation Patterns Stop wrestling with agent coordination. AG2 handles message routing, state management, and conversation flow automatically. → Two-agent conversations → Group chats with dynamic speaker selection → Sequential chats with context carryover → Nested conversations for modularity Seamless Human-AI collaboration Seamless Human-AI collaboration Seamlessly integrate human oversight and input into your agent workflows. → Configurable human input modes → Flexible intervention points → Optional human approval workflows → Interactive conversation interfaces → Context-aware human handoff Roadmap AG2 STUDIO → Visual agent system design → Real-time testing and debugging → One-click deployment to production → Perfect for prototyping and MVPs AG2 STUDIO → Visual agent system design → Real-time testing and debugging → One-click deployment to production → Perfect for prototyping and MVPs AG2 STUDIO → Visual agent system design → Real-time testing and debugging → One-click deployment to production → Perfect for prototyping and MVPs AG2 MARKETPLACE → Share and monetize your agents → Discover pre-built solution templates → Quick-start your agent development → Connect with other builders AG2 MARKETPLACE → Share and monetize your agents → Discover pre-built solution templates → Quick-start your agent development → Connect with other builders AG2 MARKETPLACE → Share and monetize your agents → Discover pre-built solution templates → Quick-start your agent development → Connect with other builders SCALING TOOLS → Zero to production deployment guides → Usage analytics and cost optimization → Team collaboration features → Enterprise-ready security controls SCALING TOOLS → Zero to production deployment guides → Usage analytics and cost optimization → Team collaboration features → Enterprise-ready security controls SCALING TOOLS → Zero to production deployment guides → Usage analytics and cost optimization → Team collaboration features → Enterprise-ready security controls AG2 STUDIO → Visual agent system design → Real-time testing and debugging → One-click deployment to production → Perfect for prototyping and MVPs AG2 STUDIO → Visual agent system design → Real-time testing and debugging → One-click deployment to production → Perfect for prototyping and MVPs AG2 MARKETPLACE → Share and monetize your agents → Discover pre-built solution templates → Quick-start your agent development → Connect with other builders AG2 MARKETPLACE → Share and monetize your agents → Discover pre-built solution templates → Quick-start your agent development → Connect with other builders SCALING TOOLS → Zero to production deployment guides → Usage analytics and cost 
optimization → Team collaboration features → Enterprise-ready security controls SCALING TOOLS → Zero to production deployment guides → Usage analytics and cost optimization → Team collaboration features → Enterprise-ready security controls Whether you're a solo founder prototyping the next big AI product, or an enterprise team deploying at scale we're building AG2 for you. This is AgentOS - making multi-agent development accessible to everyone. Github Join Our Growing Community Join Our Growing Community → 20,000+ active agent builders → Daily technical discussions → Weekly community calls → Open RFC process → Regular contributor events (Coming soon) Discord Problem Features Roadmap Community Documentation Problem Features Roadmap Community Documentation Problem Features Roadmap Community Documentation + +********************************************************************** + +-------------------------------------------------------------------------------- +chatbot (to User): + +The website "https://ag2.ai/" promotes a platform named AgentOS, which is designed for building multi-agent systems efficiently. Key highlights from the website are: + +- **Community**: They have a growing community of over 20,000 agent builders. + +- **End-to-End Platform**: AG2 is described as an end-to-end platform for multi-agent automation. It supports flexible agent construction and orchestration, helping to define roles, configure behaviors, and orchestrate collaboration. + +- **Agent Types**: It includes assistant agents for problem-solving, executor agents for taking action, critic agents for validation, and group chat managers for coordination. + +- **Built-in Conversation Patterns**: AG2 offers capabilities for message routing, state management, and conversation flow management, supporting various conversation types like two-agent conversations, group chats, and nested conversations. + +- **Human-AI Collaboration**: The platform facilitates seamless integration of human oversight and input, with options for human intervention and approval workflows. + +- **AG2 Studio**: This feature provides visual agent system design, real-time testing, debugging, and one-click deployment, suited for prototyping and MVPs. + +- **AG2 Marketplace**: Provides a place to share, monetize agents, discover pre-built solution templates, and connect with other builders. + +- **Scaling Tools**: Includes guides for deployment, analytics, cost optimization, team collaboration features, and enterprise-ready security controls. + +- **Community and Documentation**: They encourage connecting through GitHub and Discord and have regular community calls and events planned. + +This comprehensive platform seems to aim at both individual developers and enterprise teams looking to deploy multi-agent systems effectively and collaboratively. TERMINATE + +-------------------------------------------------------------------------------- +``` + +```python +print(chat_result.summary) +``` + +Output: + +```console +The website "https://ag2.ai/" promotes a platform named AgentOS, which is designed for building multi-agent systems efficiently. Key highlights from the website are: + +- **Community**: They have a growing community of over 20,000 agent builders. + +- **End-to-End Platform**: AG2 is described as an end-to-end platform for multi-agent automation. It supports flexible agent construction and orchestration, helping to define roles, configure behaviors, and orchestrate collaboration. 
+
+- **Agent Types**: It includes assistant agents for problem-solving, executor agents for taking action, critic agents for validation, and group chat managers for coordination.
+
+- **Built-in Conversation Patterns**: AG2 offers capabilities for message routing, state management, and conversation flow management, supporting various conversation types like two-agent conversations, group chats, and nested conversations.
+
+- **Human-AI Collaboration**: The platform facilitates seamless integration of human oversight and input, with options for human intervention and approval workflows.
+
+- **AG2 Studio**: This feature provides visual agent system design, real-time testing, debugging, and one-click deployment, suited for prototyping and MVPs.
+
+- **AG2 Marketplace**: Provides a place to share, monetize agents, discover pre-built solution templates, and connect with other builders.
+
+- **Scaling Tools**: Includes guides for deployment, analytics, cost optimization, team collaboration features, and enterprise-ready security controls.
+
+- **Community and Documentation**: They encourage connecting through GitHub and Discord and have regular community calls and events planned.
+
+This comprehensive platform seems to aim at both individual developers and enterprise teams looking to deploy multi-agent systems effectively and collaboratively.
+```
+
+## PydanticAI Tools Integration
+
+[PydanticAI](https://ai.pydantic.dev/) is a newer framework that offers powerful capabilities for working with LLMs. Although it currently does not have a repository with pre-built tools, it provides features like **dependency injection**, allowing you to inject a "Context" into a tool so that parameters or state can be passed to it at execution time without going through the LLM. While the framework is still growing, you can integrate its tools into AG2 to enhance agent capabilities, especially for tasks that involve structured data and context-driven logic.
+
+### Installation
+To integrate PydanticAI tools into the AG2 framework, install the required dependencies:
+
+```bash
+pip install ag2[interop-pydantic-ai]
+```
+
+### Imports
+
+Import the necessary modules and tools.
+- `BaseModel`: Used to define data structures for tool inputs and outputs.
+- `RunContext`: Provides context during the execution of tools.
+- `PydanticAITool`: Represents a tool in the PydanticAI framework.
+- `AssistantAgent` and `UserProxyAgent`: Agents that facilitate communication in the AG2 framework.
+- `Interoperability`: This module acts as a bridge, making it easier to integrate PydanticAI tools with AG2’s architecture.
+
+```python
+import os
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic_ai import RunContext
+from pydantic_ai.tools import Tool as PydanticAITool
+
+from autogen import AssistantAgent, UserProxyAgent
+from autogen.interop import Interoperability
+```
+
+### Agent Configuration
+
+Configure the agents for the interaction.
+- `config_list` defines the LLM configurations, including the model and API key.
+- `UserProxyAgent` simulates user inputs without requiring actual human interaction (set to `NEVER`).
+- `AssistantAgent` represents the AI agent, configured with the LLM settings.
+
+```python
+config_list = [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}]
+user_proxy = UserProxyAgent(
+    name="User",
+    human_input_mode="NEVER",
+)
+
+chatbot = AssistantAgent(
+    name="chatbot",
+    llm_config={"config_list": config_list},
+)
+```
+
+### Tool Integration
+
+Integrate the PydanticAI tool with AG2.
+
+- Define a `Player` model using `BaseModel` to structure the input data.
+- Use `RunContext` to securely inject dependencies (like the `Player` instance) into the tool function without exposing them to the LLM.
+- Implement `get_player` to define the tool's functionality, accessing `ctx.deps` for injected data.
+- Convert the PydanticAI tool into an AG2-compatible format using `Interoperability` and `convert_tool`.
+- Register the tool for both execution and communication with the LLM by associating it with the `user_proxy` and `chatbot`.
+
+```python
+class Player(BaseModel):
+    name: str
+    age: int
+
+
+def get_player(ctx: RunContext[Player], additional_info: Optional[str] = None) -> str:  # type: ignore[valid-type]
+    """Get the player's name.
+
+    Args:
+        additional_info: Additional information which can be used.
+    """
+    return f"Name: {ctx.deps.name}, Age: {ctx.deps.age}, Additional info: {additional_info}"  # type: ignore[attr-defined]
+
+
+interop = Interoperability()
+pydantic_ai_tool = PydanticAITool(get_player, takes_ctx=True)
+
+# player will be injected as a dependency
+player = Player(name="Luka", age=25)
+ag2_tool = interop.convert_tool(tool=pydantic_ai_tool, type="pydanticai", deps=player)
+
+ag2_tool.register_for_execution(user_proxy)
+ag2_tool.register_for_llm(chatbot)
+```
+
+Initiate a conversation between the `UserProxyAgent` and the `AssistantAgent`.
+
+- Use the `initiate_chat` method to send a message from the `user_proxy` to the `chatbot`.
+- In this example, the user requests the chatbot to retrieve player information, providing "goal keeper" as additional context.
+- The `Player` instance is securely injected into the tool using `RunContext`, ensuring the chatbot can retrieve and use this data during the interaction.
+
+```python
+user_proxy.initiate_chat(
+    recipient=chatbot, message="Get player, for additional information use 'goal keeper'", max_turns=3
+)
+```
+
+Output:
+
+```console
+User (to chatbot):
+
+Get player, for additional information use 'goal keeper'
+
+--------------------------------------------------------------------------------
+chatbot (to User):
+
+***** Suggested tool call (call_lPXIohFiJfnjmgwDnNFPQCzc): get_player *****
+Arguments:
+{"additional_info":"goal keeper"}
+***************************************************************************
+
+--------------------------------------------------------------------------------
+
+>>>>>>>> EXECUTING FUNCTION get_player...
+User (to chatbot):
+
+***** Response from calling tool (call_lPXIohFiJfnjmgwDnNFPQCzc) *****
+Name: Luka, Age: 25, Additional info: goal keeper
+**********************************************************************
+
+--------------------------------------------------------------------------------
+chatbot (to User):
+
+The player's name is Luka, who is a 25-year-old goalkeeper.
TERMINATE +``` diff --git a/website/blog/authors.yml b/website/blog/authors.yml index 17fa9ea8f2..3b724a18ca 100644 --- a/website/blog/authors.yml +++ b/website/blog/authors.yml @@ -194,3 +194,9 @@ davorinrusevljan: title: Developer url: https://github.com/davorinrusevljan image_url: https://github.com/davorinrusevljan.png + +rjambrecic: + name: Robert Jambrecic + title: Machine Learning Engineer at Airt + url: https://github.com/rjambrecic + image_url: https://github.com/rjambrecic.png