fix: re-raise LLMError before wrapping with handle_llm_error (#9482)
LLMError exceptions are already properly formatted errors and should propagate to the caller unchanged. Without this check, handle_llm_error wraps them a second time, obscuring the original error type and message.
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from letta.adapters.letta_llm_request_adapter import LettaLLMRequestAdapter
|
||||
from letta.errors import LLMError
|
||||
from letta.helpers.datetime_helpers import get_utc_timestamp_ns
|
||||
from letta.schemas.enums import LLMCallType
|
||||
from letta.schemas.letta_message import LettaMessage
|
||||
@@ -54,6 +55,8 @@ class SimpleLLMRequestAdapter(LettaLLMRequestAdapter):
|
||||
try:
|
||||
self.response_data = await self.llm_client.request_async_with_telemetry(request_data, self.llm_config)
|
||||
except Exception as e:
|
||||
if isinstance(e, LLMError):
|
||||
raise
|
||||
raise self.llm_client.handle_llm_error(e, llm_config=self.llm_config)
|
||||
|
||||
self.llm_request_finish_timestamp_ns = get_utc_timestamp_ns()
|
||||
|
||||
@@ -2,6 +2,7 @@ import json
|
||||
from typing import AsyncGenerator, List
|
||||
|
||||
from letta.adapters.letta_llm_stream_adapter import LettaLLMStreamAdapter
|
||||
from letta.errors import LLMError
|
||||
from letta.log import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -151,6 +152,8 @@ class SimpleLLMStreamAdapter(LettaLLMStreamAdapter):
|
||||
error_msg=str(e),
|
||||
error_type=type(e).__name__,
|
||||
)
|
||||
if isinstance(e, LLMError):
|
||||
raise
|
||||
raise self.llm_client.handle_llm_error(e, llm_config=self.llm_config)
|
||||
|
||||
# Process the stream and yield chunks immediately for TTFT
|
||||
@@ -169,6 +172,8 @@ class SimpleLLMStreamAdapter(LettaLLMStreamAdapter):
|
||||
error_msg=str(e),
|
||||
error_type=type(e).__name__,
|
||||
)
|
||||
if isinstance(e, LLMError):
|
||||
raise
|
||||
raise self.llm_client.handle_llm_error(e, llm_config=self.llm_config)
|
||||
|
||||
# After streaming completes, extract the accumulated data
|
||||
|
||||
@@ -20,7 +20,7 @@ from letta.agents.helpers import (
|
||||
generate_step_id,
|
||||
)
|
||||
from letta.constants import DEFAULT_MAX_STEPS, NON_USER_MSG_PREFIX, REQUEST_HEARTBEAT_PARAM
|
||||
from letta.errors import ContextWindowExceededError
|
||||
from letta.errors import ContextWindowExceededError, LLMError
|
||||
from letta.helpers import ToolRulesSolver
|
||||
from letta.helpers.datetime_helpers import AsyncTimer, get_utc_time, get_utc_timestamp_ns, ns_to_ms
|
||||
from letta.helpers.reasoning_helper import scrub_inner_thoughts_from_messages
|
||||
@@ -1546,6 +1546,8 @@ class LettaAgent(BaseAgent):
|
||||
run_id=run_id,
|
||||
step_id=step_id,
|
||||
)
|
||||
elif isinstance(e, LLMError):
|
||||
raise
|
||||
else:
|
||||
raise llm_client.handle_llm_error(e, llm_config=llm_config)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user