fix(core): return 400 for Google GenAI ClientError bad requests (#9357)
Google genai.errors.ClientError with code 400 was being caught and wrapped as LLMBadRequestError but returned to clients as 502 because no dedicated FastAPI exception handler existed for LLMBadRequestError.

- Add LLMBadRequestError exception handler in app.py returning HTTP 400
- Fix ErrorCode on Google 400 bad requests from INTERNAL_SERVER_ERROR to INVALID_ARGUMENT
- Route Google API errors through handle_llm_error in stream_async path

Datadog: https://us5.datadoghq.com/error-tracking/issue/4eb3ff3c-d937-11f0-8177-da7ad0900000

🤖 Generated with [Letta Code](https://letta.com)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
@@ -200,6 +200,8 @@ class GoogleVertexClient(LLMClientBase):
|
||||
f"Please check your tool definitions. Error: {str(e)}",
|
||||
code=ErrorCode.INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
except errors.APIError as e:
|
||||
raise self.handle_llm_error(e)
|
||||
except Exception as e:
|
||||
logger.error(f"Error streaming {self._provider_name()} request: {e} with request data: {json.dumps(request_data)}")
|
||||
raise e
|
||||
@@ -878,7 +880,7 @@ class GoogleVertexClient(LLMClientBase):
|
||||
else:
|
||||
return LLMBadRequestError(
|
||||
message=f"Bad request to {self._provider_name()}: {str(e)}",
|
||||
code=ErrorCode.INTERNAL_SERVER_ERROR,
|
||||
code=ErrorCode.INVALID_ARGUMENT,
|
||||
details={"is_byok": is_byok},
|
||||
)
|
||||
elif e.code == 401:
|
||||
|
||||
@@ -54,6 +54,7 @@ from letta.errors import (
|
||||
LettaUnsupportedFileUploadError,
|
||||
LettaUserNotFoundError,
|
||||
LLMAuthenticationError,
|
||||
LLMBadRequestError,
|
||||
LLMError,
|
||||
LLMProviderOverloaded,
|
||||
LLMRateLimitError,
|
||||
@@ -729,6 +730,19 @@ def create_application() -> "FastAPI":
|
||||
},
|
||||
)
|
||||
|
||||
@app.exception_handler(LLMBadRequestError)
async def llm_bad_request_error_handler(request: Request, exc: LLMBadRequestError):
    """Translate an LLMBadRequestError into an HTTP 400 response.

    Without this handler the error fell through to the generic LLMError
    handler and surfaced to clients as a 5xx, even though the root cause
    was an invalid request to the upstream model provider.
    """
    # Fixed envelope shape shared by the other LLM error handlers in this file;
    # the original exception text is carried verbatim in "detail".
    error_body = {
        "type": "llm_bad_request",
        "message": "The request to the LLM model provider was invalid.",
        "detail": str(exc),
    }
    return JSONResponse(status_code=400, content={"error": error_body})
|
||||
|
||||
@app.exception_handler(LLMError)
|
||||
async def llm_error_handler(request: Request, exc: LLMError):
|
||||
return JSONResponse(
|
||||
|
||||
Reference in New Issue
Block a user