From 30dab0abb9ceb1bfdce36e98a995938f316d8bde Mon Sep 17 00:00:00 2001
From: Ari Webb
Date: Mon, 24 Nov 2025 14:28:46 -0800
Subject: [PATCH] fix: handle llm error during streaming [LET-6280] (#6341)

handle llm error during streaming

Co-authored-by: Ari Webb
---
 letta/adapters/simple_llm_stream_adapter.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/letta/adapters/simple_llm_stream_adapter.py b/letta/adapters/simple_llm_stream_adapter.py
index bb8b98f9..1b575147 100644
--- a/letta/adapters/simple_llm_stream_adapter.py
+++ b/letta/adapters/simple_llm_stream_adapter.py
@@ -117,9 +117,13 @@ class SimpleLLMStreamAdapter(LettaLLMStreamAdapter):
             raise self.llm_client.handle_llm_error(e)
 
         # Process the stream and yield chunks immediately for TTFT
-        async for chunk in self.interface.process(stream):  # TODO: add ttft span
-            # Yield each chunk immediately as it arrives
-            yield chunk
+        try:
+            async for chunk in self.interface.process(stream):  # TODO: add ttft span
+                # Yield each chunk immediately as it arrives
+                yield chunk
+        except Exception as e:
+            # Map provider-specific errors during streaming to common LLMError types
+            raise self.llm_client.handle_llm_error(e)
 
         # After streaming completes, extract the accumulated data
         self.llm_request_finish_timestamp_ns = get_utc_timestamp_ns()