fix: handle httpx.RemoteProtocolError during LLM streaming (#8206)

This commit is contained in:
github-actions[bot]
2026-01-02 12:20:12 -05:00
committed by Caren Thomas
parent abb325f32d
commit 76008c61f4
4 changed files with 52 additions and 1 deletion

View File

@@ -5,6 +5,7 @@ import re
from typing import Dict, List, Optional, Union
import anthropic
import httpx
from anthropic import AsyncStream
from anthropic.types.beta import BetaMessage as AnthropicMessage, BetaRawMessageStreamEvent
from anthropic.types.beta.message_create_params import MessageCreateParamsNonStreaming
@@ -749,6 +750,17 @@ class AnthropicClient(LLMClientBase):
details={"cause": str(e.__cause__) if e.__cause__ else None},
)
# Handle httpx.RemoteProtocolError which can occur during streaming
# when the remote server closes the connection unexpectedly
# (e.g., "peer closed connection without sending complete message body")
if isinstance(e, httpx.RemoteProtocolError):
logger.warning(f"[Anthropic] Remote protocol error during streaming: {e}")
return LLMConnectionError(
message=f"Connection error during Anthropic streaming: {str(e)}",
code=ErrorCode.INTERNAL_SERVER_ERROR,
details={"cause": str(e.__cause__) if e.__cause__ else None},
)
if isinstance(e, anthropic.RateLimitError):
logger.warning("[Anthropic] Rate limited (429). Consider backoff.")
return LLMRateLimitError(