chore: enable F821, F401, W293 (#9503)

* auto fixes

* auto fix pt2 and transitive deps and undefined var checking locals()

* manual fixes (suppressed with `# noqa` or fixed via letta-code)

* fix circular import
This commit is contained in:
Kian Jones
2026-02-17 10:07:40 -08:00
committed by Caren Thomas
parent fa70e09963
commit 25d54dd896
211 changed files with 534 additions and 2243 deletions

View File

@@ -146,7 +146,7 @@ class SimpleAnthropicStreamingInterface:
return tool_calls[0]
return None
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -232,7 +232,7 @@ class SimpleAnthropicStreamingInterface:
async def process(
self,
stream: AsyncStream[BetaRawMessageStreamEvent],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
prev_message_type = None
message_index = 0
@@ -287,7 +287,7 @@ class SimpleAnthropicStreamingInterface:
async def _process_event(
self,
event: BetaRawMessageStreamEvent,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:

View File

@@ -128,7 +128,7 @@ class AnthropicStreamingInterface:
arguments = str(json.dumps(tool_input, indent=2))
return ToolCall(id=self.tool_call_id, function=FunctionCall(arguments=arguments, name=self.tool_call_name))
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -222,7 +222,7 @@ class AnthropicStreamingInterface:
async def process(
self,
stream: AsyncStream[BetaRawMessageStreamEvent],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
prev_message_type = None
message_index = 0
@@ -276,7 +276,7 @@ class AnthropicStreamingInterface:
async def _process_event(
self,
event: BetaRawMessageStreamEvent,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
@@ -662,7 +662,7 @@ class SimpleAnthropicStreamingInterface:
arguments = str(json.dumps(tool_input, indent=2))
return ToolCall(id=self.tool_call_id, function=FunctionCall(arguments=arguments, name=self.tool_call_name))
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -754,7 +754,7 @@ class SimpleAnthropicStreamingInterface:
async def process(
self,
stream: AsyncStream[BetaRawMessageStreamEvent],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
prev_message_type = None
message_index = 0
@@ -803,7 +803,7 @@ class SimpleAnthropicStreamingInterface:
async def _process_event(
self,
event: BetaRawMessageStreamEvent,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:

View File

@@ -124,7 +124,7 @@ class SimpleGeminiStreamingInterface:
"""Return all finalized tool calls collected during this message (parallel supported)."""
return list(self.collected_tool_calls)
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -148,7 +148,7 @@ class SimpleGeminiStreamingInterface:
async def process(
self,
stream: AsyncIterator[GenerateContentResponse],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
"""
Iterates over the Gemini stream, yielding SSE events.
@@ -202,7 +202,7 @@ class SimpleGeminiStreamingInterface:
async def _process_event(
self,
event: GenerateContentResponse,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:

View File

@@ -194,7 +194,7 @@ class OpenAIStreamingInterface:
function=FunctionCall(arguments=self._get_current_function_arguments(), name=function_name),
)
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -219,7 +219,7 @@ class OpenAIStreamingInterface:
async def process(
self,
stream: AsyncStream[ChatCompletionChunk],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
"""
Iterates over the OpenAI stream, yielding SSE events.
@@ -307,7 +307,7 @@ class OpenAIStreamingInterface:
async def _process_chunk(
self,
chunk: ChatCompletionChunk,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
@@ -694,7 +694,7 @@ class SimpleOpenAIStreamingInterface:
raise ValueError("No tool calls available")
return calls[0]
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -719,7 +719,7 @@ class SimpleOpenAIStreamingInterface:
async def process(
self,
stream: AsyncStream[ChatCompletionChunk],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
"""
Iterates over the OpenAI stream, yielding SSE events.
@@ -833,7 +833,7 @@ class SimpleOpenAIStreamingInterface:
async def _process_chunk(
self,
chunk: ChatCompletionChunk,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
@@ -1120,7 +1120,7 @@ class SimpleOpenAIResponsesStreamingInterface:
raise ValueError("No tool calls available")
return calls[0]
def get_usage_statistics(self) -> "LettaUsageStatistics":
def get_usage_statistics(self) -> "LettaUsageStatistics": # noqa: F821
"""Extract usage statistics from accumulated streaming data.
Returns:
@@ -1141,7 +1141,7 @@ class SimpleOpenAIResponsesStreamingInterface:
async def process(
self,
stream: AsyncStream[ResponseStreamEvent],
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]:
"""
Iterates over the OpenAI stream, yielding SSE events.
@@ -1227,7 +1227,7 @@ class SimpleOpenAIResponsesStreamingInterface:
async def _process_event(
self,
event: ResponseStreamEvent,
ttft_span: Optional["Span"] = None,
ttft_span: Optional["Span"] = None, # noqa: F821
prev_message_type: Optional[str] = None,
message_index: int = 0,
) -> AsyncGenerator[LettaMessage | LettaStopReason, None]: