diff --git a/api/core/model_runtime/model_providers/anthropic/llm/llm.py b/api/core/model_runtime/model_providers/anthropic/llm/llm.py
index b24324708b0067..3faf5abbe87f58 100644
--- a/api/core/model_runtime/model_providers/anthropic/llm/llm.py
+++ b/api/core/model_runtime/model_providers/anthropic/llm/llm.py
@@ -483,6 +483,10 @@ def _convert_prompt_messages(self, prompt_messages: Sequence[PromptMessage]) ->
             if isinstance(message, UserPromptMessage):
                 message = cast(UserPromptMessage, message)
                 if isinstance(message.content, str):
+                    # Handle empty user prompts (see #10013, #10520):
+                    # skip user prompts that contain only whitespace, the Claude API can't handle them.
+                    if not message.content.strip():
+                        continue
                     message_dict = {"role": "user", "content": message.content}
                     prompt_message_dicts.append(message_dict)
                 else:
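
For context, a minimal standalone sketch of the behavior this hunk introduces: whitespace-only user prompts are dropped before the request payload is built. The `build_user_messages` helper and the plain string/dict types below are illustrative stand-ins, not part of the Dify codebase.

```python
from collections.abc import Sequence


def build_user_messages(contents: Sequence[str]) -> list[dict[str, str]]:
    """Illustrative stand-in for the patched conversion loop.

    Whitespace-only entries are skipped so no empty "user" message
    ever reaches the Claude API.
    """
    message_dicts: list[dict[str, str]] = []
    for content in contents:
        # Mirrors `if not message.content.strip(): continue` in the patch.
        if not content.strip():
            continue
        message_dicts.append({"role": "user", "content": content})
    return message_dicts


if __name__ == "__main__":
    # The empty and whitespace-only prompts are filtered out.
    print(build_user_messages(["Hello", "", "   \n", "How are you?"]))
    # -> [{'role': 'user', 'content': 'Hello'}, {'role': 'user', 'content': 'How are you?'}]
```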