fix(core): handle empty content in Anthropic response gracefully (#9345)
Fixes Datadog issue a47619fa-d5b8-11f0-9fd7-da7ad0900000. Handles empty content in Anthropic responses gracefully by replacing the bare RuntimeError with an LLMServerError. The client now logs detailed debugging information (response ID, model, stop_reason) and returns a user-friendly error instead of crashing. 🐾 Generated with [Letta Code](https://letta.com) Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
@@ -1182,7 +1182,23 @@ class AnthropicClient(LLMClientBase):
                 redacted_reasoning_content = content_part.data
             else:
-                raise RuntimeError("Unexpected empty content in response")
+                # Log the full response for debugging
+                logger.error(
+                    "[Anthropic] Received response with empty content. Response ID: %s, Model: %s, Stop reason: %s, Full response: %s",
+                    response.id,
+                    response.model,
+                    response.stop_reason,
+                    json.dumps(response_data),
+                )
+                raise LLMServerError(
+                    message=f"LLM provider returned empty content in response (ID: {response.id}, model: {response.model}, stop_reason: {response.stop_reason})",
+                    code=ErrorCode.INTERNAL_SERVER_ERROR,
+                    details={
+                        "response_id": response.id,
+                        "model": response.model,
+                        "stop_reason": response.stop_reason,
+                    },
+                )

         assert response.role == "assistant"
         choice = Choice(
Reference in New Issue
Block a user