diff --git a/autogen/agentchat/conversable_agent.py b/autogen/agentchat/conversable_agent.py
index d23d8765d278..f457667cf8b8 100644
--- a/autogen/agentchat/conversable_agent.py
+++ b/autogen/agentchat/conversable_agent.py
@@ -1324,7 +1324,7 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[
         extracted_response = llm_client.extract_text_or_completion_object(response)[0]
 
         if extracted_response is None:
-            warnings.warn("Extracted_response from {response} is None.", UserWarning)
+            warnings.warn(f"Extracted_response from {response} is None.", UserWarning)
             return None
         # ensure function and tool calls will be accepted when sent back to the LLM
         if not isinstance(extracted_response, str) and hasattr(extracted_response, "model_dump"):