chore: add ty + pre-commit hook and repeal even more ruff rules (#9504)
* auto fixes
* auto fix pt2 and transitive deps and undefined var checking locals()
* manual fixes (ignored or letta-code fixed)
* fix circular import
* remove all ignores, add FastAPI rules and Ruff rules
* add ty and precommit
* ruff stuff
* ty check fixes
* ty check fixes pt 2
* error on invalid
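One of the newly enabled rule groups is Ruff's FastAPI family (FAST). As a rough illustration of what that surface catches, here is a sketch of the non-Annotated dependency pattern that FAST002 flags, next to the preferred spelling; the endpoint and dependency names are hypothetical, not taken from this diff:

from typing import Annotated

from fastapi import Depends, FastAPI

app = FastAPI()

def get_token() -> str:
    return "secret-token"

# Flagged by FAST002: dependency passed as a bare default value
@app.get("/old")
async def old_style(token: str = Depends(get_token)) -> str:
    return token

# Preferred: the dependency lives inside the annotation
@app.get("/new")
async def new_style(token: Annotated[str, Depends(get_token)]) -> str:
    return token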

@@ -1,4 +1,7 @@
-from typing import Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+
+if TYPE_CHECKING:
+    from letta.services.summarizer.summarizer_config import CompactionSettings
 
 import numpy as np
 from anthropic.types.beta.messages import BetaMessageBatch, BetaMessageBatchIndividualResponse
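The hunk above (and the matching one in the retry helpers further down) resolves a circular import by moving the import under TYPE_CHECKING: the name exists only for the type checker, and runtime code keeps referencing it through quoted annotations. This is also why the `# noqa: F821` suppressions in the later hunks become removable, since the quoted names are no longer undefined. A minimal sketch of the pattern, with a hypothetical module path:

from typing import TYPE_CHECKING, Dict, Optional

if TYPE_CHECKING:
    # Imported only during type checking, so no import cycle at runtime.
    from mypkg.compaction import CompactionSettings  # hypothetical path

def serialize(config: Optional["CompactionSettings"]) -> Optional[Dict]:
    # The quoted annotation is resolved lazily by the checker.
    if config is None:
        return None
    # Runtime import deferred into the function, mirroring the
    # "Import here to avoid circular dependency" comments in the diff.
    from mypkg.compaction import CompactionSettings  # hypothetical path

    assert isinstance(config, CompactionSettings)
    return config.model_dump()  # assumes a pydantic-style model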

@@ -113,7 +116,7 @@ def deserialize_embedding_config(data: Optional[Dict]) -> Optional[EmbeddingConf
 # --------------------------
 
 
-def serialize_compaction_settings(config: Union[Optional["CompactionSettings"], Dict]) -> Optional[Dict]:  # noqa: F821
+def serialize_compaction_settings(config: Union[Optional["CompactionSettings"], Dict]) -> Optional[Dict]:
     """Convert a CompactionSettings object into a JSON-serializable dictionary."""
     if config:
         # Import here to avoid circular dependency

@@ -124,7 +127,7 @@ def serialize_compaction_settings(config: Union[Optional["CompactionSettings"],
     return config
 
 
-def deserialize_compaction_settings(data: Optional[Dict]) -> Optional["CompactionSettings"]:  # noqa: F821
+def deserialize_compaction_settings(data: Optional[Dict]) -> Optional["CompactionSettings"]:
     """Convert a dictionary back into a CompactionSettings object."""
     if data:
         # Import here to avoid circular dependency

@@ -306,7 +306,9 @@ async def search_pinecone_index(query: str, limit: int, filter: Dict[str, Any],
 
 
 @pinecone_retry()
 @trace_method
-async def list_pinecone_index_for_files(file_id: str, actor: User, limit: int = None, pagination_token: str = None) -> List[str]:
+async def list_pinecone_index_for_files(
+    file_id: str, actor: User, limit: int | None = None, pagination_token: str | None = None
+) -> List[str]:
     if not PINECONE_AVAILABLE:
         raise ImportError("Pinecone is not available. Please install pinecone to use this feature.")
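The `limit: int = None` style above is an implicit Optional, which PEP 484 disallows and which strict checkers (ty, Ruff's RUF013) reject; the fix spells out `int | None`. The same change recurs in the Turbopuffer signatures below. A small, self-contained illustration of the difference:

# Implicit Optional: the annotation says int, the default says None.
# Strict type checkers reject this mismatch.
def fetch(limit: int = None):
    ...

# Explicit: the annotation admits None, so callers and the body must handle it.
def fetch_fixed(limit: int | None = None) -> list[str]:
    if limit is None:
        limit = 10
    return ["row"] * limit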

@@ -201,7 +201,7 @@ def add_pre_execution_message(tool_schema: Dict[str, Any], description: Optional
 
     # Ensure pre-execution message is the first required field
     if PRE_EXECUTION_MESSAGE_ARG not in required:
-        required = [PRE_EXECUTION_MESSAGE_ARG] + required
+        required = [PRE_EXECUTION_MESSAGE_ARG, *required]
 
     # Update the schema with ordered properties and required list
     schema["parameters"] = {
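The change above swaps list concatenation for iterable unpacking, which is what Ruff's RUF005 (collection literal concatenation) asks for: one list literal instead of building and merging a temporary. Equivalent behavior, shown on throwaway values:

required = ["limit", "offset"]

old = ["pre_exec_msg"] + required   # concatenation: allocates an intermediate list
new = ["pre_exec_msg", *required]   # unpacking: a single literal (RUF005's preference)

assert old == new == ["pre_exec_msg", "limit", "offset"]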

@@ -6,7 +6,11 @@ import logging
 import random
 from datetime import datetime, timezone
 from functools import wraps
-from typing import Any, Callable, List, Optional, Tuple, TypeVar
+from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, TypeVar
+
+if TYPE_CHECKING:
+    from letta.schemas.tool import Tool as PydanticTool
+    from letta.schemas.user import User as PydanticUser
 
 import httpx

@@ -95,7 +99,6 @@ def async_retry_with_backoff(
     async def wrapper(*args, **kwargs) -> Any:
         num_retries = 0
         delay = initial_delay
-        last_error: Optional[Exception] = None
 
         while True:
             try:

@@ -106,7 +109,6 @@ def async_retry_with_backoff(
                     # Not a transient error, re-raise immediately
                     raise
 
-                last_error = e
                 num_retries += 1
 
                 # Log the retry attempt
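The two hunks above delete a dead store: `last_error` was initialized and reassigned on every transient failure but never read afterwards, so the bindings did nothing. This is the shape that unused-variable checks (e.g. Pyflakes/Ruff F841) target; a compact illustration, independent of the Letta helper:

def call_with_retries(fn, attempts: int = 3):
    last_error = None  # dead store: written below, never read
    for _ in range(attempts):
        try:
            return fn()
        except ValueError as e:
            last_error = e  # overwritten each pass, never consumed
    raise RuntimeError(f"failed after {attempts} attempts")  # does not use last_error

Either the final raise should chain from the captured error, or the variable should go; this diff chooses deletion.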

@@ -161,11 +163,11 @@ def _run_turbopuffer_write_in_thread(
     api_key: str,
     region: str,
     namespace_name: str,
-    upsert_columns: dict = None,
-    deletes: list = None,
-    delete_by_filter: tuple = None,
+    upsert_columns: dict | None = None,
+    deletes: list | None = None,
+    delete_by_filter: tuple | None = None,
     distance_metric: str = "cosine_distance",
-    schema: dict = None,
+    schema: dict | None = None,
 ):
     """
     Sync wrapper to run turbopuffer write in isolated event loop.

@@ -229,7 +231,7 @@ class TurbopufferClient:
         embedding_chunk_size=DEFAULT_EMBEDDING_CHUNK_SIZE,
     )
 
-    def __init__(self, api_key: str = None, region: str = None):
+    def __init__(self, api_key: str | None = None, region: str | None = None):
         """Initialize Turbopuffer client."""
         self.api_key = api_key or settings.tpuf_api_key
         self.region = region or settings.tpuf_region

@@ -244,7 +246,7 @@ class TurbopufferClient:
             raise ValueError("Turbopuffer API key not provided")
 
     @trace_method
-    async def _generate_embeddings(self, texts: List[str], actor: "PydanticUser") -> List[List[float]]:  # noqa: F821
+    async def _generate_embeddings(self, texts: List[str], actor: "PydanticUser") -> List[List[float]]:
         """Generate embeddings using the default embedding configuration.
 
         Args:

@@ -311,7 +313,7 @@ class TurbopufferClient:
 
         return namespace_name
 
-    def _extract_tool_text(self, tool: "PydanticTool") -> str:  # noqa: F821
+    def _extract_tool_text(self, tool: "PydanticTool") -> str:
         """Extract searchable text from a tool for embedding.
 
         Combines name, description, and JSON schema into a structured format

@@ -361,9 +363,9 @@ class TurbopufferClient:
     @async_retry_with_backoff()
     async def insert_tools(
         self,
-        tools: List["PydanticTool"],  # noqa: F821
+        tools: List["PydanticTool"],
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
     ) -> bool:
         """Insert tools into Turbopuffer.

@@ -456,7 +458,7 @@ class TurbopufferClient:
         text_chunks: List[str],
         passage_ids: List[str],
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         tags: Optional[List[str]] = None,
         created_at: Optional[datetime] = None,
         embeddings: Optional[List[List[float]]] = None,

@@ -607,7 +609,7 @@ class TurbopufferClient:
         message_texts: List[str],
         message_ids: List[str],
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         roles: List[MessageRole],
         created_ats: List[datetime],
         project_id: Optional[str] = None,

@@ -867,7 +869,7 @@ class TurbopufferClient:
     async def query_passages(
         self,
         archive_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         query_text: Optional[str] = None,
         search_mode: str = "vector",  # "vector", "fts", "hybrid"
         top_k: int = 10,

@@ -1012,7 +1014,7 @@ class TurbopufferClient:
         self,
         agent_id: str,
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         query_text: Optional[str] = None,
         search_mode: str = "vector",  # "vector", "fts", "hybrid", "timestamp"
         top_k: int = 10,

@@ -1188,7 +1190,7 @@ class TurbopufferClient:
     async def query_messages_by_org_id(
         self,
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         query_text: Optional[str] = None,
         search_mode: str = "hybrid",  # "vector", "fts", "hybrid"
         top_k: int = 10,

@@ -1654,7 +1656,7 @@ class TurbopufferClient:
         file_id: str,
         text_chunks: List[str],
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         created_at: Optional[datetime] = None,
     ) -> List[PydanticPassage]:
         """Insert file passages into Turbopuffer using org-scoped namespace.

@@ -1767,7 +1769,7 @@ class TurbopufferClient:
         self,
         source_ids: List[str],
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         query_text: Optional[str] = None,
         search_mode: str = "vector",  # "vector", "fts", "hybrid"
         top_k: int = 10,

@@ -1991,7 +1993,7 @@ class TurbopufferClient:
     async def query_tools(
         self,
         organization_id: str,
-        actor: "PydanticUser",  # noqa: F821
+        actor: "PydanticUser",
         query_text: Optional[str] = None,
         search_mode: str = "hybrid",  # "vector", "fts", "hybrid", "timestamp"
         top_k: int = 50,