
Commit

Merge remote-tracking branch 'origin/main' into 391-blog-structured-messages-and-the-new-iostream-method-send
sternakt committed Jan 10, 2025
2 parents 5c621d0 + aeb8254 commit 969d601
Showing 7 changed files with 549 additions and 14 deletions.
511 changes: 511 additions & 0 deletions autogen/agentchat/contrib/captainagent/captainagent.py

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion autogen/messages/agent_messages.py
@@ -189,7 +189,7 @@ def print(self, f: Optional[Callable[..., Any]] = None) -> None:

@wrap_message
class TextMessage(BasePrintReceivedMessage):
content: Optional[Union[str, int, float, bool]] = None # type: ignore [assignment]
content: Optional[Union[str, int, float, bool, list[dict[str, str]]]] = None # type: ignore [assignment]

def print(self, f: Optional[Callable[..., Any]] = None) -> None:
f = f or print
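The widened annotation lets TextMessage.content carry a list of content parts in addition to plain scalars. Below is a minimal sketch of the two message shapes this admits; the part keys ("type", "text") are taken from the parametrized test added further down in test/messages/test_agent_messages.py, not from the full implementation.

# Illustration, not part of the commit: message dicts that satisfy the new
# annotation Optional[Union[str, int, float, bool, list[dict[str, str]]]].
plain_message = {"content": "hello {name}", "context": {"name": "there"}}

structured_message = {
    "content": [
        {
            "type": "text",
            "text": "Please extract table from the following image and convert it to Markdown.",
        }
    ]
}

The test added below expects print() to emit the part's text the same way it emits a plain string content.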
7 changes: 3 additions & 4 deletions notebook/agentchat_websockets.ipynb
@@ -380,11 +380,10 @@
"metadata": {},
"outputs": [],
"source": [
"from http.server import HTTPServer, SimpleHTTPRequestHandler\n",
"from tempfile import TemporaryDirectory\n",
"from pathlib import Path\n",
"import json\n",
"from IPython.display import HTML, display, clear_output\n",
"from http.server import HTTPServer, SimpleHTTPRequestHandler\n",
"\n",
"from IPython.display import HTML, clear_output, display\n",
"\n",
"PORT = 8000\n",
"\n",
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -132,6 +132,8 @@ twilio = [
interop-crewai = [
"crewai[tools]>=0.86,<1; python_version>='3.10' and python_version<'3.13'",
"weaviate-client==4.10.2; python_version>='3.10' and python_version<'3.13'",
# crewai uses litellm; litellm 1.57.5 added uvloop as a dependency, and uvloop does not support win32
"litellm<1.57.5; sys_platform=='win32'",
]
interop-langchain = ["langchain-community>=0.3.12,<1"]
interop-pydantic-ai = ["pydantic-ai==0.0.13"]
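The new pin is gated by a PEP 508 environment marker, so it only constrains installs on Windows. As an illustration (not part of the commit), the marker can be evaluated with the third-party packaging library:

# Illustration: evaluating the marker that gates the "litellm<1.57.5" pin.
from packaging.markers import Marker

marker = Marker("sys_platform == 'win32'")

print(marker.evaluate())                           # True only on a Windows interpreter
print(marker.evaluate({"sys_platform": "linux"}))  # False, so the pin is ignored on Linux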
2 changes: 1 addition & 1 deletion test/agentchat/contrib/test_agent_builder.py
@@ -11,7 +11,7 @@

import pytest

from autogen.agentchat.contrib.agent_builder import AgentBuilder
from autogen.agentchat.contrib.captainagent.agent_builder import AgentBuilder

from ...conftest import KEY_LOC, OAI_CONFIG_LIST, reason, skip_openai # noqa: E402

2 changes: 1 addition & 1 deletion test/agentchat/contrib/test_captainagent.py
@@ -6,7 +6,7 @@
import pytest

from autogen import UserProxyAgent
from autogen.agentchat.contrib.captainagent import CaptainAgent
from autogen.agentchat.contrib.captainagent.captainagent import CaptainAgent

from ...conftest import KEY_LOC, OAI_CONFIG_LIST, Credentials, reason, skip_openai # noqa: E402

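Both test updates reflect the same change: AgentBuilder and CaptainAgent now live under autogen/agentchat/contrib/captainagent/. A sketch of the import update for downstream code (whether the old paths keep working via re-exports is not shown in this commit):

# Illustration, not part of the commit: old vs. new import paths.
# Before:
#   from autogen.agentchat.contrib.agent_builder import AgentBuilder
#   from autogen.agentchat.contrib.captainagent import CaptainAgent
# After:
from autogen.agentchat.contrib.captainagent.agent_builder import AgentBuilder
from autogen.agentchat.contrib.captainagent.captainagent import CaptainAgent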
37 changes: 30 additions & 7 deletions test/messages/test_agent_messages.py
@@ -305,17 +305,42 @@ def test_print


class TestTextMessage:
def test_print_context_message(self, uuid: UUID, sender: ConversableAgent, recipient: ConversableAgent) -> None:
message = {"content": "hello {name}", "context": {"name": "there"}}

@pytest.mark.parametrize(
"message, expected_content",
[
(
{"content": "hello {name}", "context": {"name": "there"}},
"hello {name}",
),
(
{
"content": [
{
"type": "text",
"text": "Please extract table from the following image and convert it to Markdown.",
}
]
},
"Please extract table from the following image and convert it to Markdown.",
),
],
)
def test_print_messages(
self,
uuid: UUID,
sender: ConversableAgent,
recipient: ConversableAgent,
message: dict[str, Any],
expected_content: str,
) -> None:
actual = create_received_message_model(uuid=uuid, message=message, sender=sender, recipient=recipient)

assert isinstance(actual, TextMessage)
expected_model_dump = {
"type": "text",
"content": {
"uuid": uuid,
"content": "hello {name}",
"content": message["content"],
"sender_name": "sender",
"recipient_name": "recipient",
},
@@ -325,11 +350,9 @@ def test_print_context_message(self, uuid: UUID, sender: ConversableAgent, recip
mock = MagicMock()
actual.print(f=mock)

# print(mock.call_args_list)

expected_call_args_list = [
call("\x1b[33msender\x1b[0m (to recipient):\n", flush=True),
call("hello {name}", flush=True),
call(expected_content, flush=True),
call(
"\n",
"--------------------------------------------------------------------------------",
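The assertion style in this test injects a MagicMock in place of print and compares the recorded calls to an expected list. A self-contained sketch of that pattern using only the standard library:

# Illustration, not part of the commit: capturing calls to a mocked print function.
from unittest.mock import MagicMock, call

mock = MagicMock()
mock("\x1b[33msender\x1b[0m (to recipient):\n", flush=True)
mock("hello {name}", flush=True)

assert mock.call_args_list == [
    call("\x1b[33msender\x1b[0m (to recipient):\n", flush=True),
    call("hello {name}", flush=True),
]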
