feat(desktop): allow specifying a remote letta server in the letta desktop (#3709)

This commit is contained in:
Charles Packer
2025-08-05 22:21:07 -07:00
committed by GitHub
parent 642f03a7e6
commit bd56635ba9
5 changed files with 18 additions and 8 deletions

View File

@@ -152,7 +152,7 @@ def register_functions(dbapi_connection, connection_record):
if is_aiosqlite_connection:
# For aiosqlite connections, we cannot use async operations in sync event handlers
# The extension will need to be loaded per-connection when actually used
-logger.info("Detected aiosqlite connection - sqlite-vec will be loaded per-query")
+logger.debug("Detected aiosqlite connection - sqlite-vec will be loaded per-query")
else:
# For sync connections
# dbapi_connection.enable_load_extension(True)
@@ -173,7 +173,7 @@ def register_functions(dbapi_connection, connection_record):
raw_conn = getattr(actual_connection, "_connection", actual_connection)
if hasattr(raw_conn, "create_function"):
raw_conn.create_function("cosine_distance", 2, cosine_distance)
-logger.info("Successfully registered cosine_distance for aiosqlite")
+logger.debug("Successfully registered cosine_distance for aiosqlite")
else:
dbapi_connection.create_function("cosine_distance", 2, cosine_distance)
logger.info("Successfully registered cosine_distance for sync connection")

View File

@@ -226,7 +226,12 @@ class DatabaseRegistry:
def _build_sqlalchemy_engine_args(self, *, is_async: bool) -> dict:
"""Prepare keyword arguments for create_engine / create_async_engine."""
-use_null_pool = settings.disable_sqlalchemy_pooling
+# For async SQLite, always use NullPool to avoid cleanup issues during cancellation
+if is_async and settings.database_engine is DatabaseChoice.SQLITE:
+    use_null_pool = True
+    logger.info("Forcing NullPool for async SQLite to avoid cancellation cleanup issues")
+else:
+    use_null_pool = settings.disable_sqlalchemy_pooling
if use_null_pool:
logger.info("Disabling pooling on SqlAlchemy")
@@ -262,7 +267,8 @@ class DatabaseRegistry:
}
)
-elif is_async:
+elif is_async and settings.database_engine is DatabaseChoice.POSTGRES:
+    # Invalid for SQLite, results in [0] TypeError: 'prepared_statement_name_func' is an invalid keyword argument for Connection()
# For asyncpg, statement_cache_size should be in connect_args
base_args.update(
{

View File

@@ -120,6 +120,10 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
else:
# If not using venv, use whatever Python we are running on
python_executable = sys.executable
+# For embedded/desktop environments, preserve Python paths
+# This ensures the subprocess can find bundled modules
+if "PYTHONPATH" in os.environ:
+    exec_env["PYTHONPATH"] = os.environ["PYTHONPATH"]
# handle unwanted terminal behavior
exec_env.update(
@@ -202,7 +206,7 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
except asyncio.TimeoutError:
process.kill()
-raise TimeoutError(f"Executing tool {self.tool_name} timed out after 60 seconds.")
+raise TimeoutError(f"Executing tool {self.tool_name} timed out after {tool_settings.tool_sandbox_timeout} seconds.")
stderr = stderr_bytes.decode("utf-8") if stderr_bytes else ""
log_event(name="finish subprocess")

4
poetry.lock generated
View File

@@ -8719,7 +8719,7 @@ cffi = ["cffi (>=1.11)"]
all = ["autoflake", "black", "docker", "fastapi", "google-cloud-profiler", "granian", "isort", "langchain", "langchain-community", "locust", "pexpect", "pg8000", "pgvector", "pinecone", "pre-commit", "psycopg2", "psycopg2-binary", "pyright", "pytest-asyncio", "pytest-order", "redis", "turbopuffer", "uvicorn", "uvloop", "wikipedia"]
bedrock = ["aioboto3", "boto3"]
cloud-tool-sandbox = ["e2b-code-interpreter", "modal"]
-desktop = ["docker", "fastapi", "langchain", "langchain-community", "locust", "pyright", "sqlite-vec", "uvicorn", "wikipedia"]
+desktop = ["docker", "fastapi", "langchain", "langchain-community", "locust", "pgvector", "pyright", "sqlite-vec", "uvicorn", "wikipedia"]
dev = ["autoflake", "black", "isort", "locust", "pexpect", "pre-commit", "pyright", "pytest-asyncio", "pytest-order"]
experimental = ["google-cloud-profiler", "granian", "uvloop"]
external-tools = ["docker", "firecrawl-py", "langchain", "langchain-community", "wikipedia"]
@@ -8733,4 +8733,4 @@ tests = ["wikipedia"]
[metadata]
lock-version = "2.0"
python-versions = "<3.14,>=3.11"
-content-hash = "349c668d93463b0f4e4526311fd32aad465e139148dd597d1411c17ffcd43c89"
+content-hash = "157ecc4bb05492f7b8c7758e2af195d635e42ba7e8cb17e6be276844b1dbd4b4"

View File

@@ -120,7 +120,7 @@ external-tools = ["docker", "langchain", "wikipedia", "langchain-community", "fi
tests = ["wikipedia"]
bedrock = ["boto3", "aioboto3"]
google = ["google-genai"]
-desktop = ["pyright", "websockets", "fastapi", "uvicorn", "docker", "langchain", "wikipedia", "langchain-community", "locust", "sqlite-vec"]
+desktop = ["pyright", "websockets", "fastapi", "uvicorn", "docker", "langchain", "wikipedia", "langchain-community", "locust", "sqlite-vec", "pgvector"]
all = ["pgvector", "turbopuffer", "pg8000", "psycopg2-binary", "psycopg2", "pytest", "pytest-asyncio", "pexpect", "black", "pre-commit", "pyright", "pytest-order", "autoflake", "isort", "websockets", "fastapi", "uvicorn", "docker", "langchain", "wikipedia", "langchain-community", "locust", "uvloop", "granian", "redis", "pinecone", "google-cloud-profiler"]
[tool.poetry.group.dev.dependencies]