chore: sentry log grouping and file upload stats
This commit is contained in:
@@ -58,7 +58,12 @@ class MetricRegistry:
|
||||
def tool_execution_counter(self) -> Counter:
    """Counter instrument for the number of tools executed.

    Returns:
        The OpenTelemetry ``Counter`` registered under ``count_tool_execution``.
        Creation is deferred to ``_get_or_create_metric`` so repeated property
        access reuses a single instrument instead of re-registering it.
    """
    # NOTE(review): the diff hunk contained both the single-line and the
    # reformatted multi-line `partial(...)` — only the post-image is kept here.
    return self._get_or_create_metric(
        "count_tool_execution",
        partial(
            self._meter.create_counter,
            name="count_tool_execution",
            description="Counts the number of tools executed.",
            unit="1",
        ),
    )
|
||||
|
||||
# project_id + model
|
||||
@@ -66,7 +71,12 @@ class MetricRegistry:
|
||||
def ttft_ms_histogram(self) -> Histogram:
    """Histogram instrument for Time to First Token, in milliseconds.

    Returns:
        The OpenTelemetry ``Histogram`` registered under ``hist_ttft_ms``.
        Creation is deferred to ``_get_or_create_metric`` so repeated property
        access reuses a single instrument instead of re-registering it.
    """
    # NOTE(review): the diff hunk contained both the single-line and the
    # reformatted multi-line `partial(...)` — only the post-image is kept here.
    return self._get_or_create_metric(
        "hist_ttft_ms",
        partial(
            self._meter.create_histogram,
            name="hist_ttft_ms",
            description="Histogram for the Time to First Token (ms)",
            unit="ms",
        ),
    )
|
||||
|
||||
# (includes model name)
|
||||
@@ -158,3 +168,15 @@ class MetricRegistry:
|
||||
unit="1",
|
||||
),
|
||||
)
|
||||
|
||||
@property
def file_process_bytes_histogram(self) -> Histogram:
    """Histogram instrument for processed file sizes, in bytes (``By``).

    Returns:
        The OpenTelemetry ``Histogram`` registered under
        ``hist_file_process_bytes``; ``_get_or_create_metric`` memoizes the
        instrument so repeated property access does not re-register it.
    """
    factory = partial(
        self._meter.create_histogram,
        name="hist_file_process_bytes",
        description="Histogram for file process in bytes",
        unit="By",
    )
    return self._get_or_create_metric("hist_file_process_bytes", factory)
|
||||
|
||||
@@ -77,9 +77,8 @@ class Tool(BaseTool):
|
||||
|
||||
if self.tool_type is ToolType.CUSTOM:
|
||||
if not self.source_code:
|
||||
error_msg = f"Custom tool with id={self.id} is missing source_code field."
|
||||
logger.error(error_msg)
|
||||
raise ValueError(error_msg)
|
||||
logger.error("Custom tool with id=%s is missing source_code field", self.id)
|
||||
raise ValueError(f"Custom tool with id={self.id} is missing source_code field.")
|
||||
|
||||
# Always derive json_schema for freshest possible json_schema
|
||||
if self.args_json_schema is not None:
|
||||
@@ -96,8 +95,7 @@ class Tool(BaseTool):
|
||||
try:
|
||||
self.json_schema = derive_openai_json_schema(source_code=self.source_code)
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to derive json schema for tool with id={self.id} name={self.name}. Error: {str(e)}"
|
||||
logger.error(error_msg)
|
||||
logger.error("Failed to derive json schema for tool with id=%s name=%s: %s", self.id, self.name, e)
|
||||
elif self.tool_type in {ToolType.LETTA_CORE, ToolType.LETTA_MEMORY_CORE, ToolType.LETTA_SLEEPTIME_CORE}:
|
||||
# If it's letta core tool, we generate the json_schema on the fly here
|
||||
self.json_schema = get_json_schema_from_module(module_name=LETTA_CORE_TOOL_MODULE_NAME, function_name=self.name)
|
||||
@@ -119,9 +117,8 @@ class Tool(BaseTool):
|
||||
|
||||
# At this point, we need to validate that at least json_schema is populated
|
||||
if not self.json_schema:
|
||||
error_msg = f"Tool with id={self.id} name={self.name} tool_type={self.tool_type} is missing a json_schema."
|
||||
logger.error(error_msg)
|
||||
raise ValueError(error_msg)
|
||||
logger.error("Tool with id=%s name=%s tool_type=%s is missing a json_schema", self.id, self.name, self.tool_type)
|
||||
raise ValueError(f"Tool with id={self.id} name={self.name} tool_type={self.tool_type} is missing a json_schema.")
|
||||
|
||||
# Derive name from the JSON schema if not provided
|
||||
if not self.name:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from typing import List
|
||||
|
||||
from letta.log import get_logger
|
||||
from letta.otel.context import get_ctx_attributes
|
||||
from letta.otel.tracing import log_event, trace_method
|
||||
from letta.schemas.agent import AgentState
|
||||
from letta.schemas.enums import FileProcessingStatus
|
||||
@@ -122,6 +123,10 @@ class FileProcessor:
|
||||
if isinstance(content, str):
|
||||
content = content.encode("utf-8")
|
||||
|
||||
from letta.otel.metric_registry import MetricRegistry
|
||||
|
||||
MetricRegistry().file_process_bytes_histogram.record(len(content), attributes=get_ctx_attributes())
|
||||
|
||||
if len(content) > self.max_file_size:
|
||||
log_event(
|
||||
"file_processor.size_limit_exceeded",
|
||||
|
||||
Reference in New Issue
Block a user