feat: refactor logs to parse as a single log line each and filter out 404s from sentry (#5242)
* add multiline log auto-detect
* implement logger.exception()
* filter out 404s
* remove potentially problematic changes
This commit is contained in:
@@ -445,9 +445,7 @@ class LettaAgentV3(LettaAgentV2):
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
self.logger.error(f"Error during step processing: {e}")
|
||||
self.logger.error(f"Error traceback: {traceback.format_exc()}")
|
||||
# self.logger.error(f"Error during step processing: {e}")
|
||||
self.logger.exception(f"Error during step processing: {e}")
|
||||
self.job_update_metadata = {"error": str(e)}
|
||||
|
||||
# This indicates we failed after we decided to stop stepping, which indicates a bug with our flow.
|
||||
|
||||
@@ -235,7 +235,7 @@ class AnthropicStreamingInterface:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error("Error processing stream: %s\n%s", e, traceback.format_exc())
|
||||
logger.exception("Error processing stream: %s", e)
|
||||
if ttft_span:
|
||||
ttft_span.add_event(
|
||||
name="stop_reason",
|
||||
@@ -734,7 +734,7 @@ class SimpleAnthropicStreamingInterface:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error("Error processing stream: %s\n%s", e, traceback.format_exc())
|
||||
logger.exception("Error processing stream: %s", e)
|
||||
if ttft_span:
|
||||
ttft_span.add_event(
|
||||
name="stop_reason",
|
||||
|
||||
@@ -138,7 +138,7 @@ class SimpleGeminiStreamingInterface:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error("Error processing stream: %s\n%s", e, traceback.format_exc())
|
||||
logger.exception("Error processing stream: %s", e)
|
||||
if ttft_span:
|
||||
ttft_span.add_event(
|
||||
name="stop_reason",
|
||||
|
||||
@@ -203,7 +203,7 @@ class OpenAIStreamingInterface:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error("Error processing stream: %s\n%s", e, traceback.format_exc())
|
||||
logger.exception("Error processing stream: %s", e)
|
||||
if ttft_span:
|
||||
ttft_span.add_event(
|
||||
name="stop_reason",
|
||||
@@ -632,7 +632,7 @@ class SimpleOpenAIStreamingInterface:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error("Error processing stream: %s\n%s", e, traceback.format_exc())
|
||||
logger.exception("Error processing stream: %s", e)
|
||||
if ttft_span:
|
||||
ttft_span.add_event(
|
||||
name="stop_reason",
|
||||
@@ -873,7 +873,7 @@ class SimpleOpenAIResponsesStreamingInterface:
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
logger.error("Error processing stream: %s\n%s", e, traceback.format_exc())
|
||||
logger.exception("Error processing stream: %s", e)
|
||||
if ttft_span:
|
||||
ttft_span.add_event(
|
||||
name="stop_reason",
|
||||
|
||||
@@ -189,15 +189,32 @@ def create_application() -> "FastAPI":
|
||||
print(f"\n[[ Letta server // v{letta_version} ]]")
|
||||
|
||||
if SENTRY_ENABLED:
|
||||
|
||||
def before_send_filter(event, hint):
|
||||
"""Filter out 404 errors and other noise from Sentry"""
|
||||
# Skip 404 errors to avoid noise from user navigation issues
|
||||
if "exc_info" in hint and hint["exc_info"]:
|
||||
exc_type, exc_value, exc_tb = hint["exc_info"]
|
||||
# Check if this is a 404-related exception
|
||||
if hasattr(exc_value, "status_code") and exc_value.status_code == 404:
|
||||
return None
|
||||
|
||||
# Skip events that look like 404s based on tags or context
|
||||
if event.get("tags", {}).get("status_code") == 404:
|
||||
return None
|
||||
|
||||
return event
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=os.getenv("SENTRY_DSN"),
|
||||
environment=os.getenv("LETTA_ENVIRONMENT", "undefined"),
|
||||
traces_sample_rate=1.0,
|
||||
before_send=before_send_filter,
|
||||
_experiments={
|
||||
"continuous_profiling_auto_start": True,
|
||||
},
|
||||
)
|
||||
logger.info("Sentry enabled.")
|
||||
logger.info("Sentry enabled with 404 filtering.")
|
||||
|
||||
debug_mode = "--debug" in sys.argv
|
||||
app = FastAPI(
|
||||
@@ -230,7 +247,8 @@ def create_application() -> "FastAPI":
|
||||
|
||||
async def error_handler_with_code(request: Request, exc: Exception, code: int, detail: str | None = None):
|
||||
logger.error(f"{type(exc).__name__}", exc_info=exc)
|
||||
if SENTRY_ENABLED:
|
||||
# Skip Sentry for 404 errors to avoid noise from user navigation issues
|
||||
if SENTRY_ENABLED and code != 404:
|
||||
sentry_sdk.capture_exception(exc)
|
||||
|
||||
if not detail:
|
||||
|
||||
@@ -6,10 +6,13 @@ import traceback
|
||||
import websockets
|
||||
|
||||
import letta.server.ws_api.protocol as protocol
|
||||
from letta.log import get_logger
|
||||
from letta.server.constants import WS_DEFAULT_PORT
|
||||
from letta.server.server import SyncServer
|
||||
from letta.server.ws_api.interface import SyncWebSocketInterface
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class WebSocketServer:
|
||||
def __init__(self, host="localhost", port=WS_DEFAULT_PORT):
|
||||
@@ -68,8 +71,7 @@ class WebSocketServer:
|
||||
await websocket.send(protocol.server_command_response("OK: Agent initialized"))
|
||||
except Exception as e:
|
||||
self.agent = None
|
||||
print(f"[server] self.create_new_agent failed with:\n{e}")
|
||||
print(f"{traceback.format_exc()}")
|
||||
logger.exception(f"[server] self.create_new_agent failed with: {e}")
|
||||
await websocket.send(protocol.server_command_response(f"Error: Failed to init agent - {str(e)}"))
|
||||
|
||||
else:
|
||||
@@ -88,8 +90,7 @@ class WebSocketServer:
|
||||
# self.run_step(user_message)
|
||||
self.server.user_message(user_id="NULL", agent_id=data["agent_id"], message=user_message)
|
||||
except Exception as e:
|
||||
print(f"[server] self.server.user_message failed with:\n{e}")
|
||||
print(f"{traceback.format_exc()}")
|
||||
logger.exception(f"[server] self.server.user_message failed with: {e}")
|
||||
await websocket.send(protocol.server_agent_response_error(f"server.user_message failed with: {e}"))
|
||||
await asyncio.sleep(1) # pause before sending the terminating message, w/o this messages may be missed
|
||||
await websocket.send(protocol.server_agent_response_end())
|
||||
|
||||
@@ -106,7 +106,7 @@ class Summarizer:
|
||||
try:
|
||||
t.result() # This re-raises exceptions from the task
|
||||
except Exception:
|
||||
logger.error("Background task failed: %s", traceback.format_exc())
|
||||
logger.exception("Background task failed")
|
||||
|
||||
task.add_done_callback(callback)
|
||||
return task
|
||||
|
||||
@@ -235,9 +235,7 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
|
||||
if isinstance(e, TimeoutError):
|
||||
raise e
|
||||
|
||||
logger.error(f"Subprocess execution for tool {self.tool_name} encountered an error: {e}")
|
||||
logger.error(e.__class__.__name__)
|
||||
logger.error(e.__traceback__)
|
||||
logger.exception(f"Subprocess execution for tool {self.tool_name} encountered an error: {e}")
|
||||
func_return = get_friendly_error_msg(
|
||||
function_name=self.tool_name,
|
||||
exception_name=type(e).__name__,
|
||||
|
||||
@@ -1367,7 +1367,7 @@ def fire_and_forget(coro, task_name: Optional[str] = None, error_callback: Optio
|
||||
t.result() # this re-raises exceptions from the task
|
||||
except Exception as e:
|
||||
task_desc = f"Background task {task_name}" if task_name else "Background task"
|
||||
logger.error(f"{task_desc} failed: {str(e)}\n{traceback.format_exc()}")
|
||||
logger.exception(f"{task_desc} failed: {str(e)}")
|
||||
|
||||
if error_callback:
|
||||
try:
|
||||
|
||||
Reference in New Issue
Block a user