print the response in the warning (microsoft#2438)
olgavrou authored Apr 18, 2024
1 parent 4c7107c commit d5e30e0
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion autogen/agentchat/conversable_agent.py
@@ -1324,7 +1324,7 @@ def _generate_oai_reply_from_client(self, llm_client, messages, cache) -> Union[
         extracted_response = llm_client.extract_text_or_completion_object(response)[0]

         if extracted_response is None:
-            warnings.warn("Extracted_response from {response} is None.", UserWarning)
+            warnings.warn(f"Extracted_response from {response} is None.", UserWarning)
             return None
         # ensure function and tool calls will be accepted when sent back to the LLM
         if not isinstance(extracted_response, str) and hasattr(extracted_response, "model_dump"):
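For context, the entire fix is the "f" prefix on the warning string: without it, Python emits the literal text "{response}" in the warning instead of the value of the response. A minimal sketch of the difference follows; the response value here is a made-up placeholder, not the actual object returned by the LLM client:

import warnings

# Hypothetical stand-in for an LLM response; the real object comes from llm_client.
response = {"id": "chatcmpl-123", "choices": []}

# Before the fix: a plain string, so the warning contains the literal text "{response}".
warnings.warn("Extracted_response from {response} is None.", UserWarning)

# After the fix: an f-string, so the actual response object is interpolated into the warning.
warnings.warn(f"Extracted_response from {response} is None.", UserWarning)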
