chore: proper logging for sentry (#3195)

This commit is contained in:
Andy Li
2025-07-07 12:37:39 -07:00
committed by GitHub
parent 8f1640b2ef
commit fa23566f24
3 changed files with 3 additions and 16 deletions

View File

@@ -160,18 +160,6 @@ async def lifespan(app_: FastAPI):
logger.info(f"[Worker {worker_id}] Lifespan shutdown completed")
# TODO: Make this more robust
def filter_out_sentry_errors(event, hint):
    """Sentry ``before_send`` hook that drops known-noisy, expected errors.

    Args:
        event: The Sentry event dict about to be sent.
        hint: Extra context from the SDK (unused here).

    Returns:
        ``None`` to suppress the event when its exception text contains any
        known-noisy message fragment; otherwise the event unchanged.
    """
    # Hoist the str() conversion — the original recomputed it for every check.
    exception_text = str(event.get("exception"))
    # Fragments of expected/handled errors we do not want reported to Sentry.
    noisy_fragments = (
        "File processing failed",
        "Default chunking also failed for",
        "Failed to embed batch of size 32",
        "`inputs` must have less than 512 tokens",
    )
    if any(fragment in exception_text for fragment in noisy_fragments):
        return None
    return event
def create_application() -> "FastAPI":
"""the application start routine"""
# global server
@@ -187,7 +175,6 @@ def create_application() -> "FastAPI":
_experiments={
"continuous_profiling_auto_start": True,
},
before_send=filter_out_sentry_errors,
)
debug_mode = "--debug" in sys.argv

View File

@@ -91,7 +91,7 @@ class OpenAIEmbedder(BaseEmbedder):
try:
return await self._embed_batch(batch, indices)
except Exception as e:
logger.error(f"Failed to embed batch of size {len(batch)}: {str(e)}")
logger.error("Failed to embed batch of size %s: %s", len(batch), e)
log_event("embedder.batch_failed", {"batch_size": len(batch), "error": str(e), "error_type": type(e).__name__})
raise

View File

@@ -90,7 +90,7 @@ class FileProcessor:
return all_passages
except Exception as fallback_error:
logger.error(f"Default chunking also failed for {filename}: {str(fallback_error)}")
logger.error("Default chunking also failed for %s: %s", filename, fallback_error)
log_event(
"file_processor.default_chunking_also_failed",
{"filename": filename, "fallback_error": str(fallback_error), "fallback_error_type": type(fallback_error).__name__},
@@ -200,7 +200,7 @@ class FileProcessor:
return all_passages
except Exception as e:
logger.error(f"File processing failed for {filename}: {str(e)}")
logger.error("File processing failed for %s: %s", filename, e)
log_event(
"file_processor.processing_failed",
{