merge this (#4759)
* wait I forgot to comit locally * cp the entire core directory and then rm the .git subdir
This commit is contained in:
9
.dockerignore
Normal file
9
.dockerignore
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
**/__pycache__
|
||||||
|
**/.pytest_cache
|
||||||
|
**/*.pyc
|
||||||
|
**/*.pyo
|
||||||
|
**/*.pyd
|
||||||
|
.git
|
||||||
|
.gitignore
|
||||||
|
.env
|
||||||
|
*.log
|
||||||
20
.gitattributes
vendored
Normal file
20
.gitattributes
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Set the default behavior, in case people don't have core.autocrlf set.
|
||||||
|
* text=auto
|
||||||
|
|
||||||
|
# Explicitly declare text files you want to always be normalized and converted
|
||||||
|
# to LF on checkout.
|
||||||
|
*.py text eol=lf
|
||||||
|
*.txt text eol=lf
|
||||||
|
*.md text eol=lf
|
||||||
|
*.json text eol=lf
|
||||||
|
*.yml text eol=lf
|
||||||
|
*.yaml text eol=lf
|
||||||
|
|
||||||
|
# Declare files that will always have CRLF line endings on checkout.
|
||||||
|
# (Only if you have specific Windows-only files)
|
||||||
|
*.bat text eol=crlf
|
||||||
|
|
||||||
|
# Denote all files that are truly binary and should not be modified.
|
||||||
|
*.png binary
|
||||||
|
*.jpg binary
|
||||||
|
*.gif binary
|
||||||
44
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
44
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
title: ''
|
||||||
|
labels: ''
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Describe the bug**
|
||||||
|
A clear and concise description of what the bug is.
|
||||||
|
|
||||||
|
**Please describe your setup**
|
||||||
|
- [ ] How are you running Letta?
|
||||||
|
- Docker
|
||||||
|
- pip (legacy)
|
||||||
|
- From source
|
||||||
|
- Desktop
|
||||||
|
- [ ] Describe your setup
|
||||||
|
- What's your OS (Windows/MacOS/Linux)?
|
||||||
|
- What is your `docker run ...` command (if applicable)
|
||||||
|
|
||||||
|
**Screenshots**
|
||||||
|
If applicable, add screenshots to help explain your problem.
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context about the problem here.
|
||||||
|
- What model you are using
|
||||||
|
|
||||||
|
**Agent File (optional)**
|
||||||
|
Please attach your `.af` file, as this helps with reproducing issues.
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
If you're not using OpenAI, please provide additional information on your local LLM setup:
|
||||||
|
|
||||||
|
**Local LLM details**
|
||||||
|
|
||||||
|
If you are trying to run Letta with local LLMs, please provide the following information:
|
||||||
|
|
||||||
|
- [ ] The exact model you're trying to use (e.g. `dolphin-2.1-mistral-7b.Q6_K.gguf`)
|
||||||
|
- [ ] The local LLM backend you are using (web UI? LM Studio?)
|
||||||
|
- [ ] Your hardware for the local LLM backend (local computer? operating system? remote RunPod?)
|
||||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
title: ''
|
||||||
|
labels: ''
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||||
|
|
||||||
|
**Describe the solution you'd like**
|
||||||
|
A clear and concise description of what you want to happen.
|
||||||
|
|
||||||
|
**Describe alternatives you've considered**
|
||||||
|
A clear and concise description of any alternative solutions or features you've considered.
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context or screenshots about the feature request here.
|
||||||
17
.github/pull_request_template.md
vendored
Normal file
17
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
**Please describe the purpose of this pull request.**
|
||||||
|
Is it to add a new feature? Is it to fix a bug?
|
||||||
|
|
||||||
|
**How to test**
|
||||||
|
How can we test your PR during review? What commands should we run? What outcomes should we expect?
|
||||||
|
|
||||||
|
**Have you tested this PR?**
|
||||||
|
Have you tested the latest commit on the PR? If so please provide outputs from your tests.
|
||||||
|
|
||||||
|
**Related issues or PRs**
|
||||||
|
Please link any related GitHub [issues](https://github.com/letta-ai/letta/issues) or [PRs](https://github.com/letta-ai/letta/pulls).
|
||||||
|
|
||||||
|
**Is your PR over 500 lines of code?**
|
||||||
|
If so, please break up your PR into multiple smaller PRs so that we can review them quickly, or provide justification for its length.
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context or screenshots about the PR here.
|
||||||
286
.github/scripts/model-sweep/conftest.py
vendored
Normal file
286
.github/scripts/model-sweep/conftest.py
vendored
Normal file
@@ -0,0 +1,286 @@
|
|||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Generator
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import requests
|
||||||
|
from anthropic.types.beta.messages import BetaMessageBatch, BetaMessageBatchRequestCounts
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from letta_client import AsyncLetta, Letta
|
||||||
|
|
||||||
|
from letta.schemas.agent import AgentState
|
||||||
|
from letta.schemas.llm_config import LLMConfig
|
||||||
|
from letta.services.organization_manager import OrganizationManager
|
||||||
|
from letta.services.user_manager import UserManager
|
||||||
|
from letta.settings import tool_settings
|
||||||
|
|
||||||
|
|
||||||
|
def pytest_configure(config):
|
||||||
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def disable_e2b_api_key() -> Generator[None, None, None]:
|
||||||
|
"""
|
||||||
|
Temporarily disables the E2B API key by setting `tool_settings.e2b_api_key` to None
|
||||||
|
for the duration of the test. Restores the original value afterward.
|
||||||
|
"""
|
||||||
|
from letta.settings import tool_settings
|
||||||
|
|
||||||
|
original_api_key = tool_settings.e2b_api_key
|
||||||
|
tool_settings.e2b_api_key = None
|
||||||
|
yield
|
||||||
|
tool_settings.e2b_api_key = original_api_key
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def check_e2b_key_is_set():
|
||||||
|
from letta.settings import tool_settings
|
||||||
|
|
||||||
|
original_api_key = tool_settings.e2b_api_key
|
||||||
|
assert original_api_key is not None, "Missing e2b key! Cannot execute these tests."
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def default_organization():
|
||||||
|
"""Fixture to create and return the default organization."""
|
||||||
|
manager = OrganizationManager()
|
||||||
|
org = manager.create_default_organization()
|
||||||
|
yield org
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def default_user(default_organization):
|
||||||
|
"""Fixture to create and return the default user within the default organization."""
|
||||||
|
manager = UserManager()
|
||||||
|
user = manager.create_default_user(org_id=default_organization.id)
|
||||||
|
yield user
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def check_composio_key_set():
|
||||||
|
original_api_key = tool_settings.composio_api_key
|
||||||
|
assert original_api_key is not None, "Missing composio key! Cannot execute this test."
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
# --- Tool Fixtures ---
|
||||||
|
@pytest.fixture
|
||||||
|
def weather_tool_func():
|
||||||
|
def get_weather(location: str) -> str:
|
||||||
|
"""
|
||||||
|
Fetches the current weather for a given location.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
location (str): The location to get the weather for.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: A formatted string describing the weather in the given location.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
RuntimeError: If the request to fetch weather data fails.
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
|
||||||
|
url = f"https://wttr.in/{location}?format=%C+%t"
|
||||||
|
|
||||||
|
response = requests.get(url)
|
||||||
|
if response.status_code == 200:
|
||||||
|
weather_data = response.text
|
||||||
|
return f"The weather in {location} is {weather_data}."
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f"Failed to get weather data, status code: {response.status_code}")
|
||||||
|
|
||||||
|
yield get_weather
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def print_tool_func():
|
||||||
|
"""Fixture to create a tool with default settings and clean up after the test."""
|
||||||
|
|
||||||
|
def print_tool(message: str):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
message (str): The message to print.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The message that was printed.
|
||||||
|
"""
|
||||||
|
print(message)
|
||||||
|
return message
|
||||||
|
|
||||||
|
yield print_tool
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def roll_dice_tool_func():
|
||||||
|
def roll_dice():
|
||||||
|
"""
|
||||||
|
Rolls a 6 sided die.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The roll result.
|
||||||
|
"""
|
||||||
|
import time
|
||||||
|
|
||||||
|
time.sleep(1)
|
||||||
|
return "Rolled a 10!"
|
||||||
|
|
||||||
|
yield roll_dice
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def dummy_beta_message_batch() -> BetaMessageBatch:
|
||||||
|
return BetaMessageBatch(
|
||||||
|
id="msgbatch_013Zva2CMHLNnXjNJJKqJ2EF",
|
||||||
|
archived_at=datetime(2024, 8, 20, 18, 37, 24, 100435, tzinfo=timezone.utc),
|
||||||
|
cancel_initiated_at=datetime(2024, 8, 20, 18, 37, 24, 100435, tzinfo=timezone.utc),
|
||||||
|
created_at=datetime(2024, 8, 20, 18, 37, 24, 100435, tzinfo=timezone.utc),
|
||||||
|
ended_at=datetime(2024, 8, 20, 18, 37, 24, 100435, tzinfo=timezone.utc),
|
||||||
|
expires_at=datetime(2024, 8, 20, 18, 37, 24, 100435, tzinfo=timezone.utc),
|
||||||
|
processing_status="in_progress",
|
||||||
|
request_counts=BetaMessageBatchRequestCounts(
|
||||||
|
canceled=10,
|
||||||
|
errored=30,
|
||||||
|
expired=10,
|
||||||
|
processing=100,
|
||||||
|
succeeded=50,
|
||||||
|
),
|
||||||
|
results_url="https://api.anthropic.com/v1/messages/batches/msgbatch_013Zva2CMHLNnXjNJJKqJ2EF/results",
|
||||||
|
type="message_batch",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# --- Model Sweep ---
|
||||||
|
# Global flag to track server state
|
||||||
|
_server_started = False
|
||||||
|
_server_url = None
|
||||||
|
|
||||||
|
|
||||||
|
def _start_server_once() -> str:
|
||||||
|
"""Start server exactly once, return URL"""
|
||||||
|
global _server_started, _server_url
|
||||||
|
|
||||||
|
if _server_started and _server_url:
|
||||||
|
return _server_url
|
||||||
|
|
||||||
|
url = os.getenv("LETTA_SERVER_URL", "http://localhost:8283")
|
||||||
|
|
||||||
|
# Check if already running
|
||||||
|
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
||||||
|
if s.connect_ex(("localhost", 8283)) == 0:
|
||||||
|
_server_started = True
|
||||||
|
_server_url = url
|
||||||
|
return url
|
||||||
|
|
||||||
|
# Start server (your existing logic)
|
||||||
|
if not os.getenv("LETTA_SERVER_URL"):
|
||||||
|
|
||||||
|
def _run_server():
|
||||||
|
load_dotenv()
|
||||||
|
from letta.server.rest_api.app import start_server
|
||||||
|
|
||||||
|
start_server(debug=True)
|
||||||
|
|
||||||
|
thread = threading.Thread(target=_run_server, daemon=True)
|
||||||
|
thread.start()
|
||||||
|
|
||||||
|
# Poll until up
|
||||||
|
timeout_seconds = 30
|
||||||
|
deadline = time.time() + timeout_seconds
|
||||||
|
while time.time() < deadline:
|
||||||
|
try:
|
||||||
|
resp = requests.get(url + "/v1/health")
|
||||||
|
if resp.status_code < 500:
|
||||||
|
break
|
||||||
|
except requests.exceptions.RequestException:
|
||||||
|
pass
|
||||||
|
time.sleep(0.1)
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f"Could not reach {url} within {timeout_seconds}s")
|
||||||
|
|
||||||
|
_server_started = True
|
||||||
|
_server_url = url
|
||||||
|
return url
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------
|
||||||
|
# Fixtures
|
||||||
|
# ------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def server_url() -> str:
|
||||||
|
"""Return URL of already-started server"""
|
||||||
|
return _start_server_once()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def client(server_url: str) -> Letta:
|
||||||
|
"""
|
||||||
|
Creates and returns a synchronous Letta REST client for testing.
|
||||||
|
"""
|
||||||
|
client_instance = Letta(base_url=server_url)
|
||||||
|
yield client_instance
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="function")
|
||||||
|
def async_client(server_url: str) -> AsyncLetta:
|
||||||
|
"""
|
||||||
|
Creates and returns an asynchronous Letta REST client for testing.
|
||||||
|
"""
|
||||||
|
async_client_instance = AsyncLetta(base_url=server_url)
|
||||||
|
yield async_client_instance
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def agent_state(client: Letta) -> AgentState:
|
||||||
|
"""
|
||||||
|
Creates and returns an agent state for testing with a pre-configured agent.
|
||||||
|
The agent is named 'supervisor' and is configured with base tools and the roll_dice tool.
|
||||||
|
"""
|
||||||
|
client.tools.upsert_base_tools()
|
||||||
|
|
||||||
|
send_message_tool = client.tools.list(name="send_message")[0]
|
||||||
|
agent_state_instance = client.agents.create(
|
||||||
|
name="supervisor",
|
||||||
|
include_base_tools=False,
|
||||||
|
tool_ids=[send_message_tool.id],
|
||||||
|
model="openai/gpt-4o",
|
||||||
|
embedding="letta/letta-free",
|
||||||
|
tags=["supervisor"],
|
||||||
|
)
|
||||||
|
yield agent_state_instance
|
||||||
|
|
||||||
|
client.agents.delete(agent_state_instance.id)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def all_available_llm_configs(client: Letta) -> [LLMConfig]:
|
||||||
|
"""
|
||||||
|
Returns a list of all available LLM configs.
|
||||||
|
"""
|
||||||
|
llm_configs = client.models.list()
|
||||||
|
return llm_configs
|
||||||
|
|
||||||
|
|
||||||
|
# create a client to the started server started at
|
||||||
|
def get_available_llm_configs() -> [LLMConfig]:
|
||||||
|
"""Get configs, starting server if needed"""
|
||||||
|
server_url = _start_server_once()
|
||||||
|
temp_client = Letta(base_url=server_url)
|
||||||
|
return temp_client.models.list()
|
||||||
|
|
||||||
|
|
||||||
|
# dynamically insert llm_config paramter at collection time
|
||||||
|
def pytest_generate_tests(metafunc):
|
||||||
|
"""Dynamically parametrize tests that need llm_config."""
|
||||||
|
if "llm_config" in metafunc.fixturenames:
|
||||||
|
configs = get_available_llm_configs()
|
||||||
|
if configs:
|
||||||
|
metafunc.parametrize("llm_config", configs, ids=[c.model for c in configs])
|
||||||
21
.github/scripts/model-sweep/feature_mappings.json
vendored
Normal file
21
.github/scripts/model-sweep/feature_mappings.json
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"Basic": [
|
||||||
|
"test_greeting_with_assistant_message",
|
||||||
|
"test_greeting_without_assistant_message",
|
||||||
|
"test_async_greeting_with_assistant_message",
|
||||||
|
"test_agent_loop_error",
|
||||||
|
"test_step_stream_agent_loop_error",
|
||||||
|
"test_step_streaming_greeting_with_assistant_message",
|
||||||
|
"test_step_streaming_greeting_without_assistant_message",
|
||||||
|
"test_step_streaming_tool_call",
|
||||||
|
"test_tool_call",
|
||||||
|
"test_auto_summarize"
|
||||||
|
],
|
||||||
|
"Token Streaming": [
|
||||||
|
"test_token_streaming_greeting_with_assistant_message",
|
||||||
|
"test_token_streaming_greeting_without_assistant_message",
|
||||||
|
"test_token_streaming_agent_loop_error",
|
||||||
|
"test_token_streaming_tool_call"
|
||||||
|
],
|
||||||
|
"Multimodal": ["test_base64_image_input", "test_url_image_input"]
|
||||||
|
}
|
||||||
495
.github/scripts/model-sweep/generate_model_sweep_markdown.py
vendored
Normal file
495
.github/scripts/model-sweep/generate_model_sweep_markdown.py
vendored
Normal file
@@ -0,0 +1,495 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from collections import defaultdict
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
|
def load_feature_mappings(config_file=None):
|
||||||
|
"""Load feature mappings from config file."""
|
||||||
|
if config_file is None:
|
||||||
|
# Default to feature_mappings.json in the same directory as this script
|
||||||
|
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
config_file = os.path.join(script_dir, "feature_mappings.json")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(config_file, "r") as f:
|
||||||
|
return json.load(f)
|
||||||
|
except FileNotFoundError:
|
||||||
|
print(f"Error: Could not find feature mappings config file '{config_file}'")
|
||||||
|
sys.exit(1)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
print(f"Error: Invalid JSON in feature mappings config file '{config_file}'")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def get_support_status(passed_tests, feature_tests):
|
||||||
|
"""Determine support status for a feature category."""
|
||||||
|
if not feature_tests:
|
||||||
|
return "❓" # Unknown - no tests for this feature
|
||||||
|
|
||||||
|
# Filter out error tests when checking for support
|
||||||
|
non_error_tests = [test for test in feature_tests if not test.endswith("_error")]
|
||||||
|
error_tests = [test for test in feature_tests if test.endswith("_error")]
|
||||||
|
|
||||||
|
# Check which non-error tests passed
|
||||||
|
passed_non_error_tests = [test for test in non_error_tests if test in passed_tests]
|
||||||
|
|
||||||
|
# If there are no non-error tests, only error tests, treat as unknown
|
||||||
|
if not non_error_tests:
|
||||||
|
return "❓" # Only error tests available
|
||||||
|
|
||||||
|
# Support is based only on non-error tests
|
||||||
|
if len(passed_non_error_tests) == len(non_error_tests):
|
||||||
|
return "✅" # Full support
|
||||||
|
elif len(passed_non_error_tests) == 0:
|
||||||
|
return "❌" # No support
|
||||||
|
else:
|
||||||
|
return "⚠️" # Partial support
|
||||||
|
|
||||||
|
|
||||||
|
def categorize_tests(all_test_names, feature_mapping):
|
||||||
|
"""Categorize test names into feature buckets."""
|
||||||
|
categorized = {feature: [] for feature in feature_mapping.keys()}
|
||||||
|
|
||||||
|
for test_name in all_test_names:
|
||||||
|
for feature, test_patterns in feature_mapping.items():
|
||||||
|
if test_name in test_patterns:
|
||||||
|
categorized[feature].append(test_name)
|
||||||
|
break
|
||||||
|
|
||||||
|
return categorized
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_support_score(feature_support, feature_order):
|
||||||
|
"""Calculate a numeric support score for ranking models.
|
||||||
|
|
||||||
|
For partial support, the score is weighted by the position of the feature
|
||||||
|
in the feature_order list (earlier features get higher weight).
|
||||||
|
"""
|
||||||
|
score = 0
|
||||||
|
max_features = len(feature_order)
|
||||||
|
|
||||||
|
for feature, status in feature_support.items():
|
||||||
|
# Get position weight (earlier features get higher weight)
|
||||||
|
if feature in feature_order:
|
||||||
|
position_weight = (max_features - feature_order.index(feature)) / max_features
|
||||||
|
else:
|
||||||
|
position_weight = 0.5 # Default weight for unmapped features
|
||||||
|
|
||||||
|
if status == "✅": # Full support
|
||||||
|
score += 10 * position_weight
|
||||||
|
elif status == "⚠️": # Partial support - weighted by column position
|
||||||
|
score += 5 * position_weight
|
||||||
|
elif status == "❌": # No support
|
||||||
|
score += 1 * position_weight
|
||||||
|
# Unknown (❓) gets 0 points
|
||||||
|
return score
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_provider_support_score(models_data, feature_order):
|
||||||
|
"""Calculate a provider-level support score based on all models' support scores."""
|
||||||
|
if not models_data:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# Calculate the average support score across all models in the provider
|
||||||
|
total_score = sum(model["support_score"] for model in models_data)
|
||||||
|
return total_score / len(models_data)
|
||||||
|
|
||||||
|
|
||||||
|
def get_test_function_line_numbers(test_file_path):
|
||||||
|
"""Extract line numbers for test functions from the test file."""
|
||||||
|
test_line_numbers = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(test_file_path, "r") as f:
|
||||||
|
lines = f.readlines()
|
||||||
|
|
||||||
|
for i, line in enumerate(lines, 1):
|
||||||
|
if "def test_" in line and line.strip().startswith("def test_"):
|
||||||
|
# Extract function name
|
||||||
|
func_name = line.strip().split("def ")[1].split("(")[0]
|
||||||
|
test_line_numbers[func_name] = i
|
||||||
|
except FileNotFoundError:
|
||||||
|
print(f"Warning: Could not find test file at {test_file_path}")
|
||||||
|
|
||||||
|
return test_line_numbers
|
||||||
|
|
||||||
|
|
||||||
|
def get_github_repo_info():
|
||||||
|
"""Get GitHub repository information from git remote."""
|
||||||
|
try:
|
||||||
|
# Try to get the GitHub repo URL from git remote
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
result = subprocess.run(["git", "remote", "get-url", "origin"], capture_output=True, text=True, cwd=os.path.dirname(__file__))
|
||||||
|
if result.returncode == 0:
|
||||||
|
remote_url = result.stdout.strip()
|
||||||
|
# Parse GitHub URL
|
||||||
|
if "github.com" in remote_url:
|
||||||
|
if remote_url.startswith("https://"):
|
||||||
|
# https://github.com/user/repo.git -> user/repo
|
||||||
|
repo_path = remote_url.replace("https://github.com/", "").replace(".git", "")
|
||||||
|
elif remote_url.startswith("git@"):
|
||||||
|
# git@github.com:user/repo.git -> user/repo
|
||||||
|
repo_path = remote_url.split(":")[1].replace(".git", "")
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
return repo_path
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Default fallback
|
||||||
|
return "letta-ai/letta"
|
||||||
|
|
||||||
|
|
||||||
|
def generate_test_details(model_info, feature_mapping):
|
||||||
|
"""Generate detailed test results for a model."""
|
||||||
|
details = []
|
||||||
|
|
||||||
|
# Get test function line numbers
|
||||||
|
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
test_file_path = os.path.join(script_dir, "model_sweep.py")
|
||||||
|
test_line_numbers = get_test_function_line_numbers(test_file_path)
|
||||||
|
|
||||||
|
# Use the main branch GitHub URL
|
||||||
|
base_github_url = "https://github.com/letta-ai/letta/blob/main/.github/scripts/model-sweep/model_sweep.py"
|
||||||
|
|
||||||
|
for feature, tests in model_info["categorized_tests"].items():
|
||||||
|
if not tests:
|
||||||
|
continue
|
||||||
|
|
||||||
|
details.append(f"### {feature}")
|
||||||
|
details.append("")
|
||||||
|
|
||||||
|
for test in sorted(tests):
|
||||||
|
if test in model_info["passed_tests"]:
|
||||||
|
status = "✅"
|
||||||
|
elif test in model_info["failed_tests"]:
|
||||||
|
status = "❌"
|
||||||
|
else:
|
||||||
|
status = "❓"
|
||||||
|
|
||||||
|
# Create GitHub link if we have line number info
|
||||||
|
if test in test_line_numbers:
|
||||||
|
line_num = test_line_numbers[test]
|
||||||
|
github_link = f"{base_github_url}#L{line_num}"
|
||||||
|
details.append(f"- {status} [`{test}`]({github_link})")
|
||||||
|
else:
|
||||||
|
details.append(f"- {status} `{test}`")
|
||||||
|
details.append("")
|
||||||
|
|
||||||
|
return details
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_column_widths(all_provider_data, feature_mapping):
|
||||||
|
"""Calculate the maximum width needed for each column across all providers."""
|
||||||
|
widths = {"model": len("Model"), "context_window": len("Context Window"), "last_scanned": len("Last Scanned")}
|
||||||
|
|
||||||
|
# Feature column widths
|
||||||
|
for feature in feature_mapping.keys():
|
||||||
|
widths[feature] = len(feature)
|
||||||
|
|
||||||
|
# Check all model data for maximum widths
|
||||||
|
for provider_data in all_provider_data.values():
|
||||||
|
for model_info in provider_data:
|
||||||
|
# Model name width (including backticks)
|
||||||
|
model_width = len(f"`{model_info['name']}`")
|
||||||
|
widths["model"] = max(widths["model"], model_width)
|
||||||
|
|
||||||
|
# Context window width (with commas)
|
||||||
|
context_width = len(f"{model_info['context_window']:,}")
|
||||||
|
widths["context_window"] = max(widths["context_window"], context_width)
|
||||||
|
|
||||||
|
# Last scanned width
|
||||||
|
widths["last_scanned"] = max(widths["last_scanned"], len(str(model_info["last_scanned"])))
|
||||||
|
|
||||||
|
# Feature support symbols are always 2 chars, so no need to check
|
||||||
|
|
||||||
|
return widths
|
||||||
|
|
||||||
|
|
||||||
|
def process_model_sweep_report(input_file, output_file, config_file=None, debug=False):
|
||||||
|
"""Convert model sweep JSON data to MDX report."""
|
||||||
|
|
||||||
|
# Load feature mappings from config file
|
||||||
|
feature_mapping = load_feature_mappings(config_file)
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print("DEBUG: Feature mappings loaded:")
|
||||||
|
# for feature, tests in feature_mapping.items():
|
||||||
|
# print(f" {feature}: {tests}")
|
||||||
|
# print()
|
||||||
|
|
||||||
|
# Read the JSON data
|
||||||
|
with open(input_file, "r") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
|
||||||
|
tests = data.get("tests", [])
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print("DEBUG: Tests loaded:")
|
||||||
|
# print([test['outcome'] for test in tests if 'haiku' in test['nodeid']])
|
||||||
|
|
||||||
|
# Calculate summary statistics
|
||||||
|
providers = set(test["metadata"]["llm_config"]["provider_name"] for test in tests)
|
||||||
|
models = set(test["metadata"]["llm_config"]["model"] for test in tests)
|
||||||
|
total_tests = len(tests)
|
||||||
|
|
||||||
|
# Start building the MDX
|
||||||
|
mdx_lines = [
|
||||||
|
"---",
|
||||||
|
"title: Support Models",
|
||||||
|
f"generated: {datetime.now().isoformat()}",
|
||||||
|
"---",
|
||||||
|
"",
|
||||||
|
"# Supported Models",
|
||||||
|
"",
|
||||||
|
"## Overview",
|
||||||
|
"",
|
||||||
|
"Letta routinely runs automated scans against available providers and models. These are the results of the latest scan.",
|
||||||
|
"",
|
||||||
|
f"Ran {total_tests} tests against {len(models)} models across {len(providers)} providers on {datetime.now().strftime('%B %dth, %Y')}",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Group tests by provider
|
||||||
|
provider_groups = defaultdict(list)
|
||||||
|
for test in tests:
|
||||||
|
provider_name = test["metadata"]["llm_config"]["provider_name"]
|
||||||
|
provider_groups[provider_name].append(test)
|
||||||
|
|
||||||
|
# Process all providers first to collect model data
|
||||||
|
all_provider_data = {}
|
||||||
|
provider_support_scores = {}
|
||||||
|
|
||||||
|
for provider_name in provider_groups.keys():
|
||||||
|
provider_tests = provider_groups[provider_name]
|
||||||
|
|
||||||
|
# Group tests by model within this provider
|
||||||
|
model_groups = defaultdict(list)
|
||||||
|
for test in provider_tests:
|
||||||
|
model_name = test["metadata"]["llm_config"]["model"]
|
||||||
|
model_groups[model_name].append(test)
|
||||||
|
|
||||||
|
# Process all models to calculate support scores for ranking
|
||||||
|
model_data = []
|
||||||
|
for model_name in model_groups.keys():
|
||||||
|
model_tests = model_groups[model_name]
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print(f"DEBUG: Processing model '{model_name}' in provider '{provider_name}'")
|
||||||
|
|
||||||
|
# Extract unique test names for passed and failed tests
|
||||||
|
passed_tests = set()
|
||||||
|
failed_tests = set()
|
||||||
|
all_test_names = set()
|
||||||
|
|
||||||
|
for test in model_tests:
|
||||||
|
# Extract test name from nodeid (split on :: and [)
|
||||||
|
test_name = test["nodeid"].split("::")[1].split("[")[0]
|
||||||
|
all_test_names.add(test_name)
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print(f" Test name: {test_name}")
|
||||||
|
# print(f" Outcome: {test}")
|
||||||
|
if test["outcome"] == "passed":
|
||||||
|
passed_tests.add(test_name)
|
||||||
|
elif test["outcome"] == "failed":
|
||||||
|
failed_tests.add(test_name)
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print(f" All test names found: {sorted(all_test_names)}")
|
||||||
|
# print(f" Passed tests: {sorted(passed_tests)}")
|
||||||
|
# print(f" Failed tests: {sorted(failed_tests)}")
|
||||||
|
|
||||||
|
# Categorize tests into features
|
||||||
|
categorized_tests = categorize_tests(all_test_names, feature_mapping)
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print(f" Categorized tests:")
|
||||||
|
# for feature, tests in categorized_tests.items():
|
||||||
|
# print(f" {feature}: {tests}")
|
||||||
|
|
||||||
|
# Determine support status for each feature
|
||||||
|
feature_support = {}
|
||||||
|
for feature_name in feature_mapping.keys():
|
||||||
|
feature_support[feature_name] = get_support_status(passed_tests, categorized_tests[feature_name])
|
||||||
|
|
||||||
|
# if debug:
|
||||||
|
# print(f" Feature support:")
|
||||||
|
# for feature, status in feature_support.items():
|
||||||
|
# print(f" {feature}: {status}")
|
||||||
|
# print()
|
||||||
|
|
||||||
|
# Get context window and last scanned time
|
||||||
|
context_window = model_tests[0]["metadata"]["llm_config"]["context_window"]
|
||||||
|
|
||||||
|
# Try to get time_last_scanned from metadata, fallback to current time
|
||||||
|
try:
|
||||||
|
last_scanned = model_tests[0]["metadata"].get(
|
||||||
|
"time_last_scanned", model_tests[0]["metadata"].get("timestamp", datetime.now().isoformat())
|
||||||
|
)
|
||||||
|
# Format timestamp if it's a full ISO string
|
||||||
|
if "T" in str(last_scanned):
|
||||||
|
last_scanned = str(last_scanned).split("T")[0] # Just the date part
|
||||||
|
except:
|
||||||
|
last_scanned = "Unknown"
|
||||||
|
|
||||||
|
# Calculate support score for ranking
|
||||||
|
feature_order = list(feature_mapping.keys())
|
||||||
|
support_score = calculate_support_score(feature_support, feature_order)
|
||||||
|
|
||||||
|
# Store model data for sorting
|
||||||
|
model_data.append(
|
||||||
|
{
|
||||||
|
"name": model_name,
|
||||||
|
"feature_support": feature_support,
|
||||||
|
"context_window": context_window,
|
||||||
|
"last_scanned": last_scanned,
|
||||||
|
"support_score": support_score,
|
||||||
|
"failed_tests": failed_tests,
|
||||||
|
"passed_tests": passed_tests,
|
||||||
|
"categorized_tests": categorized_tests,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Sort models by support score (descending) then by name (ascending)
|
||||||
|
model_data.sort(key=lambda x: (-x["support_score"], x["name"]))
|
||||||
|
|
||||||
|
# Store provider data
|
||||||
|
all_provider_data[provider_name] = model_data
|
||||||
|
provider_support_scores[provider_name] = calculate_provider_support_score(model_data, list(feature_mapping.keys()))
|
||||||
|
|
||||||
|
# Calculate column widths for consistent formatting (add details column)
|
||||||
|
column_widths = calculate_column_widths(all_provider_data, feature_mapping)
|
||||||
|
column_widths["details"] = len("Details")
|
||||||
|
|
||||||
|
# Sort providers by support score (descending) then by name (ascending)
|
||||||
|
sorted_providers = sorted(provider_support_scores.keys(), key=lambda x: (-provider_support_scores[x], x))
|
||||||
|
|
||||||
|
# Generate tables for all providers first
|
||||||
|
for provider_name in sorted_providers:
|
||||||
|
model_data = all_provider_data[provider_name]
|
||||||
|
support_score = provider_support_scores[provider_name]
|
||||||
|
|
||||||
|
# Create dynamic headers with proper padding and centering
|
||||||
|
feature_names = list(feature_mapping.keys())
|
||||||
|
|
||||||
|
# Build header row with left-aligned first column, centered others
|
||||||
|
header_parts = [f"{'Model':<{column_widths['model']}}"]
|
||||||
|
for feature in feature_names:
|
||||||
|
header_parts.append(f"{feature:^{column_widths[feature]}}")
|
||||||
|
header_parts.extend(
|
||||||
|
[
|
||||||
|
f"{'Context Window':^{column_widths['context_window']}}",
|
||||||
|
f"{'Last Scanned':^{column_widths['last_scanned']}}",
|
||||||
|
f"{'Details':^{column_widths['details']}}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
header_row = "| " + " | ".join(header_parts) + " |"
|
||||||
|
|
||||||
|
# Build separator row with left-aligned first column, centered others
|
||||||
|
separator_parts = [f"{'-' * column_widths['model']}"]
|
||||||
|
for feature in feature_names:
|
||||||
|
separator_parts.append(f":{'-' * (column_widths[feature] - 2)}:")
|
||||||
|
separator_parts.extend(
|
||||||
|
[
|
||||||
|
f":{'-' * (column_widths['context_window'] - 2)}:",
|
||||||
|
f":{'-' * (column_widths['last_scanned'] - 2)}:",
|
||||||
|
f":{'-' * (column_widths['details'] - 2)}:",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
separator_row = "|" + "|".join(separator_parts) + "|"
|
||||||
|
|
||||||
|
# Add provider section without percentage
|
||||||
|
mdx_lines.extend([f"## {provider_name}", "", header_row, separator_row])
|
||||||
|
|
||||||
|
# Generate table rows for sorted models with proper padding
|
||||||
|
for model_info in model_data:
|
||||||
|
# Create anchor for model details
|
||||||
|
model_anchor = model_info["name"].replace("/", "_").replace(":", "_").replace("-", "_").lower()
|
||||||
|
details_anchor = f"{provider_name.lower().replace(' ', '_')}_{model_anchor}_details"
|
||||||
|
|
||||||
|
# Build row with left-aligned first column, centered others
|
||||||
|
row_parts = [f"`{model_info['name']}`".ljust(column_widths["model"])]
|
||||||
|
for feature in feature_names:
|
||||||
|
row_parts.append(f"{model_info['feature_support'][feature]:^{column_widths[feature]}}")
|
||||||
|
row_parts.extend(
|
||||||
|
[
|
||||||
|
f"{model_info['context_window']:,}".center(column_widths["context_window"]),
|
||||||
|
f"{model_info['last_scanned']}".center(column_widths["last_scanned"]),
|
||||||
|
f"[View](#{details_anchor})".center(column_widths["details"]),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
row = "| " + " | ".join(row_parts) + " |"
|
||||||
|
mdx_lines.append(row)
|
||||||
|
|
||||||
|
# Add spacing between provider tables
|
||||||
|
mdx_lines.extend(["", ""])
|
||||||
|
|
||||||
|
# Add detailed test results section after all tables
|
||||||
|
mdx_lines.extend(["---", "", "# Detailed Test Results", ""])
|
||||||
|
|
||||||
|
for provider_name in sorted_providers:
|
||||||
|
model_data = all_provider_data[provider_name]
|
||||||
|
mdx_lines.extend([f"## {provider_name}", ""])
|
||||||
|
|
||||||
|
for model_info in model_data:
|
||||||
|
model_anchor = model_info["name"].replace("/", "_").replace(":", "_").replace("-", "_").lower()
|
||||||
|
details_anchor = f"{provider_name.lower().replace(' ', '_')}_{model_anchor}_details"
|
||||||
|
mdx_lines.append(f'<a id="{details_anchor}"></a>')
|
||||||
|
mdx_lines.append(f"### {model_info['name']}")
|
||||||
|
mdx_lines.append("")
|
||||||
|
|
||||||
|
# Add test details
|
||||||
|
test_details = generate_test_details(model_info, feature_mapping)
|
||||||
|
mdx_lines.extend(test_details)
|
||||||
|
|
||||||
|
# Add spacing between providers in details section
|
||||||
|
mdx_lines.extend(["", ""])
|
||||||
|
|
||||||
|
# Write the MDX file
|
||||||
|
with open(output_file, "w") as f:
|
||||||
|
f.write("\n".join(mdx_lines))
|
||||||
|
|
||||||
|
print(f"Model sweep report saved to {output_file}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: convert a model-sweep JSON report into an MDX report.

    Usage: script.py [input.json] [output.mdx] [config.json] [--debug]

    Positional input/output paths are resolved relative to this script's own
    directory so the report can be generated from any working directory.
    Exits with status 1 on any processing error.
    """
    input_file = "model_sweep_report.json"
    output_file = "model_sweep_report.mdx"
    config_file = None
    debug = False

    # Hoisted: the script directory is the same for both positional paths.
    script_dir = os.path.dirname(os.path.abspath(__file__))
    if len(sys.argv) > 1:
        input_file = os.path.join(script_dir, sys.argv[1])
    if len(sys.argv) > 2:
        output_file = os.path.join(script_dir, sys.argv[2])
    if len(sys.argv) > 3:
        config_file = sys.argv[3]
    if len(sys.argv) > 4 and sys.argv[4] == "--debug":
        debug = True

    try:
        process_model_sweep_report(input_file, output_file, config_file, debug)
    except FileNotFoundError:
        print(f"Error: Could not find input file '{input_file}'")
        sys.exit(1)
    except json.JSONDecodeError:
        print(f"Error: Invalid JSON in file '{input_file}'")
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
|
||||||
787
.github/scripts/model-sweep/model_sweep.py
vendored
Normal file
787
.github/scripts/model-sweep/model_sweep.py
vendored
Normal file
@@ -0,0 +1,787 @@
|
|||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import pytest
|
||||||
|
import requests
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from letta_client import Letta, MessageCreate, Run
|
||||||
|
from letta_client.core.api_error import ApiError
|
||||||
|
from letta_client.types import (
|
||||||
|
AssistantMessage,
|
||||||
|
Base64Image,
|
||||||
|
ImageContent,
|
||||||
|
LettaUsageStatistics,
|
||||||
|
ReasoningMessage,
|
||||||
|
TextContent,
|
||||||
|
ToolCallMessage,
|
||||||
|
ToolReturnMessage,
|
||||||
|
UrlImage,
|
||||||
|
UserMessage,
|
||||||
|
)
|
||||||
|
|
||||||
|
from letta.schemas.agent import AgentState
|
||||||
|
from letta.schemas.llm_config import LLMConfig
|
||||||
|
|
||||||
|
# ------------------------------
|
||||||
|
# Helper Functions and Constants
|
||||||
|
# ------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def get_llm_config(filename: str, llm_config_dir: str = "tests/configs/llm_model_configs") -> LLMConfig:
    """Load an LLMConfig from a JSON file located under *llm_config_dir*."""
    config_path = os.path.join(llm_config_dir, filename)
    with open(config_path, "r") as config_file:
        raw_config = json.load(config_file)
    return LLMConfig(**raw_config)
|
||||||
|
|
||||||
|
|
||||||
|
def roll_dice(num_sides: int) -> int:
    """
    Returns a random number between 1 and num_sides.

    Args:
        num_sides (int): The number of sides on the die.

    Returns:
        int: A random integer between 1 and num_sides, representing the die roll.
    """
    # Docstring above is kept verbatim: it is parsed into the tool schema
    # when this function is registered via upsert_from_function.
    from random import randint

    return randint(1, num_sides)
|
||||||
|
|
||||||
|
|
||||||
|
# Shared OTID stamped on every scripted user message, so DB-read assertions
# can recognize the message this suite sent.
USER_MESSAGE_OTID = str(uuid.uuid4())
# Exact reply text the agent is instructed to send; assertions look for it.
USER_MESSAGE_RESPONSE: str = "Teamwork makes the dream work"
# Prompt that forces the agent to reply via the send_message tool.
USER_MESSAGE_FORCE_REPLY: List[MessageCreate] = [
    MessageCreate(
        role="user",
        content=f"This is an automated test message. Call the send_message tool with the message '{USER_MESSAGE_RESPONSE}'.",
        otid=USER_MESSAGE_OTID,
    )
]
# Prompt that forces a roll_dice tool call followed by a summary reply.
USER_MESSAGE_ROLL_DICE: List[MessageCreate] = [
    MessageCreate(
        role="user",
        content="This is an automated test message. Call the roll_dice tool with 16 sides and tell me the outcome.",
        otid=USER_MESSAGE_OTID,
    )
]
# Public test image used for both URL-based and base64-based image inputs.
URL_IMAGE = "https://upload.wikimedia.org/wikipedia/commons/a/a7/Camponotus_flavomarginatus_ant.jpg"
USER_MESSAGE_URL_IMAGE: List[MessageCreate] = [
    MessageCreate(
        role="user",
        content=[
            ImageContent(source=UrlImage(url=URL_IMAGE)),
            TextContent(text="What is in this image?"),
        ],
        otid=USER_MESSAGE_OTID,
    )
]
# NOTE(review): fetches the image over the network at import time — module
# import fails offline; TODO confirm this is acceptable for CI.
BASE64_IMAGE = base64.standard_b64encode(httpx.get(URL_IMAGE).content).decode("utf-8")
USER_MESSAGE_BASE64_IMAGE: List[MessageCreate] = [
    MessageCreate(
        role="user",
        content=[
            ImageContent(source=Base64Image(data=BASE64_IMAGE, media_type="image/jpeg")),
            TextContent(text="What is in this image?"),
        ],
        otid=USER_MESSAGE_OTID,
    )
]
# Config files exercised by the sweep (relative to the llm config dir).
all_configs = [
    "openai-gpt-4o-mini.json",
    # "azure-gpt-4o-mini.json", # TODO: Re-enable on new agent loop
    "claude-3-5-sonnet.json",
    "claude-4-sonnet-extended.json",
    "claude-3-7-sonnet-extended.json",
    "gemini-1.5-pro.json",
    "gemini-2.5-flash-vertex.json",
    "gemini-2.5-pro-vertex.json",
    "together-qwen-2.5-72b-instruct.json",
    "ollama.json",
]
# LLM_CONFIG_FILE (env var) narrows the sweep to a single config file.
requested = os.getenv("LLM_CONFIG_FILE")
filenames = [requested] if requested else all_configs
# Loaded eagerly at import time so pytest can parametrize over them.
TESTED_LLM_CONFIGS: List[LLMConfig] = [get_llm_config(fn) for fn in filenames]
|
||||||
|
|
||||||
|
|
||||||
|
def assert_greeting_with_assistant_message_response(
    messages: List[Any],
    streaming: bool = False,
    token_streaming: bool = False,
    from_db: bool = False,
) -> None:
    """
    Asserts that the messages list follows the expected sequence:
    ReasoningMessage -> AssistantMessage (prefixed by the UserMessage when
    read back from the DB, suffixed by usage statistics when streaming).
    """
    assert len(messages) == (3 if streaming or from_db else 2)

    cursor = iter(messages)
    if from_db:
        user_msg = next(cursor)
        assert isinstance(user_msg, UserMessage)
        assert user_msg.otid == USER_MESSAGE_OTID

    # Agent Step 1
    reasoning = next(cursor)
    assert isinstance(reasoning, ReasoningMessage)
    assert reasoning.otid and reasoning.otid.endswith("0")

    reply = next(cursor)
    assert isinstance(reply, AssistantMessage)
    if not token_streaming:
        assert USER_MESSAGE_RESPONSE in reply.content
    assert reply.otid and reply.otid.endswith("1")

    if streaming:
        usage = next(cursor)
        assert isinstance(usage, LettaUsageStatistics)
        assert usage.prompt_tokens > 0
        assert usage.completion_tokens > 0
        assert usage.total_tokens > 0
        assert usage.step_count > 0
|
||||||
|
|
||||||
|
|
||||||
|
def assert_greeting_without_assistant_message_response(
    messages: List[Any],
    streaming: bool = False,
    token_streaming: bool = False,
    from_db: bool = False,
) -> None:
    """
    Asserts that the messages list follows the expected sequence:
    ReasoningMessage -> ToolCallMessage -> ToolReturnMessage (prefixed by
    the UserMessage when read from the DB, suffixed by usage statistics
    when streaming).
    """
    assert len(messages) == (4 if streaming or from_db else 3)

    cursor = iter(messages)
    if from_db:
        user_msg = next(cursor)
        assert isinstance(user_msg, UserMessage)
        assert user_msg.otid == USER_MESSAGE_OTID

    # Agent Step 1
    reasoning = next(cursor)
    assert isinstance(reasoning, ReasoningMessage)
    assert reasoning.otid and reasoning.otid.endswith("0")

    call = next(cursor)
    assert isinstance(call, ToolCallMessage)
    assert call.tool_call.name == "send_message"
    if not token_streaming:
        assert USER_MESSAGE_RESPONSE in call.tool_call.arguments
    assert call.otid and call.otid.endswith("1")

    # Agent Step 2
    ret = next(cursor)
    assert isinstance(ret, ToolReturnMessage)
    assert ret.otid and ret.otid.endswith("0")

    if streaming:
        assert isinstance(next(cursor), LettaUsageStatistics)
|
||||||
|
|
||||||
|
|
||||||
|
def assert_tool_call_response(
    messages: List[Any],
    streaming: bool = False,
    from_db: bool = False,
) -> None:
    """
    Asserts that the messages list follows the expected sequence:
    ReasoningMessage -> ToolCallMessage -> ToolReturnMessage ->
    ReasoningMessage -> AssistantMessage (the DB view also contains the
    original UserMessage and the hidden heartbeat UserMessage).
    """
    assert len(messages) == (6 if streaming else 7 if from_db else 5)

    cursor = iter(messages)
    if from_db:
        user_msg = next(cursor)
        assert isinstance(user_msg, UserMessage)
        assert user_msg.otid == USER_MESSAGE_OTID

    # Agent Step 1
    reasoning = next(cursor)
    assert isinstance(reasoning, ReasoningMessage)
    assert reasoning.otid and reasoning.otid.endswith("0")

    call = next(cursor)
    assert isinstance(call, ToolCallMessage)
    assert call.otid and call.otid.endswith("1")

    # Agent Step 2
    ret = next(cursor)
    assert isinstance(ret, ToolReturnMessage)
    assert ret.otid and ret.otid.endswith("0")

    # Hidden heartbeat user message is only persisted, never streamed back.
    if from_db:
        heartbeat = next(cursor)
        assert isinstance(heartbeat, UserMessage)
        assert "request_heartbeat=true" in heartbeat.content

    # Agent Step 3
    final_reasoning = next(cursor)
    assert isinstance(final_reasoning, ReasoningMessage)
    assert final_reasoning.otid and final_reasoning.otid.endswith("0")

    reply = next(cursor)
    assert isinstance(reply, AssistantMessage)
    assert reply.otid and reply.otid.endswith("1")

    if streaming:
        assert isinstance(next(cursor), LettaUsageStatistics)
|
||||||
|
|
||||||
|
|
||||||
|
def assert_image_input_response(
    messages: List[Any],
    streaming: bool = False,
    token_streaming: bool = False,
    from_db: bool = False,
) -> None:
    """
    Asserts that the messages list follows the expected sequence for an
    image-input turn: ReasoningMessage -> AssistantMessage (prefixed by
    the UserMessage when read from the DB, suffixed by usage statistics
    when streaming).
    """
    assert len(messages) == (3 if streaming or from_db else 2)

    cursor = iter(messages)
    if from_db:
        user_msg = next(cursor)
        assert isinstance(user_msg, UserMessage)
        assert user_msg.otid == USER_MESSAGE_OTID

    # Agent Step 1
    reasoning = next(cursor)
    assert isinstance(reasoning, ReasoningMessage)
    assert reasoning.otid and reasoning.otid.endswith("0")

    reply = next(cursor)
    assert isinstance(reply, AssistantMessage)
    assert reply.otid and reply.otid.endswith("1")

    if streaming:
        usage = next(cursor)
        assert isinstance(usage, LettaUsageStatistics)
        assert usage.prompt_tokens > 0
        assert usage.completion_tokens > 0
        assert usage.total_tokens > 0
        assert usage.step_count > 0
|
||||||
|
|
||||||
|
|
||||||
|
def accumulate_chunks(chunks: List[Any]) -> List[Any]:
    """
    Collapses a stream of chunks into one message per run of equal
    message_type, keeping the first chunk of each run.
    """
    accumulated: List[Any] = []
    pending = None
    last_type = None
    for piece in chunks:
        if piece.message_type != last_type:
            # Type changed: flush the previous run and start a new one.
            if pending is not None:
                accumulated.append(pending)
            pending = piece
        # TODO: actually merge same-type chunks; only the count matters for now.
        last_type = piece.message_type
    if pending is not None:
        accumulated.append(pending)
    return accumulated
|
||||||
|
|
||||||
|
|
||||||
|
def wait_for_run_completion(client: Letta, run_id: str, timeout: float = 30.0, interval: float = 0.5) -> Run:
|
||||||
|
start = time.time()
|
||||||
|
while True:
|
||||||
|
run = client.runs.retrieve(run_id)
|
||||||
|
if run.status == "completed":
|
||||||
|
return run
|
||||||
|
if run.status == "failed":
|
||||||
|
raise RuntimeError(f"Run {run_id} did not complete: status = {run.status}")
|
||||||
|
if time.time() - start > timeout:
|
||||||
|
raise TimeoutError(f"Run {run_id} did not complete within {timeout} seconds (last status: {run.status})")
|
||||||
|
time.sleep(interval)
|
||||||
|
|
||||||
|
|
||||||
|
def assert_tool_response_dict_messages(messages: List[Dict[str, Any]]) -> None:
    """
    Asserts that a list of message dictionaries begins with the expected types.

    Expected order (only the first two entries are checked):
        1. reasoning_message
        2. assistant_message
    """
    # Docstring fixed: it previously described a 5-message tool-call sequence
    # that this function never checked.
    assert isinstance(messages, list)
    assert messages[0]["message_type"] == "reasoning_message"
    assert messages[1]["message_type"] == "assistant_message"
|
||||||
|
|
||||||
|
|
||||||
|
# ------------------------------
|
||||||
|
# Test Cases
|
||||||
|
# ------------------------------
|
||||||
|
|
||||||
|
# def test_that_ci_workflow_works(
|
||||||
|
# disable_e2b_api_key: Any,
|
||||||
|
# client: Letta,
|
||||||
|
# agent_state: AgentState,
|
||||||
|
# llm_config: LLMConfig,
|
||||||
|
# json_metadata: pytest.FixtureRequest,
|
||||||
|
# ) -> None:
|
||||||
|
# """
|
||||||
|
# Tests that the CI workflow works.
|
||||||
|
# """
|
||||||
|
# json_metadata["test_type"] = "debug"
|
||||||
|
|
||||||
|
|
||||||
|
def test_greeting_with_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    agent_state: AgentState,
    llm_config: LLMConfig,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a forced-reply message synchronously and verifies that both the
    response and the persisted history contain the expected
    ReasoningMessage -> AssistantMessage sequence.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    reply = client.agents.messages.create(agent_id=agent_state.id, messages=USER_MESSAGE_FORCE_REPLY)
    assert_greeting_with_assistant_message_response(reply.messages)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_greeting_with_assistant_message_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_greeting_without_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a forced-reply message with use_assistant_message disabled and
    verifies the raw ReasoningMessage -> ToolCallMessage -> ToolReturnMessage
    sequence in both the response and the persisted history.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    reply = client.agents.messages.create(
        agent_id=agent_state.id,
        messages=USER_MESSAGE_FORCE_REPLY,
        use_assistant_message=False,
    )
    assert_greeting_without_assistant_message_response(reply.messages)
    persisted = client.agents.messages.list(
        agent_id=agent_state.id, after=baseline[0].id, use_assistant_message=False
    )
    assert_greeting_without_assistant_message_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_tool_call(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Attaches the roll_dice tool, forces a tool-call turn, and verifies the
    full tool-call message sequence in the response and the DB.
    """
    json_metadata["llm_config"] = dict(llm_config)
    dice = client.tools.upsert_from_function(func=roll_dice)
    client.agents.tools.attach(agent_id=agent_state.id, tool_id=dice.id)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    reply = client.agents.messages.create(agent_id=agent_state.id, messages=USER_MESSAGE_ROLL_DICE)
    assert_tool_call_response(reply.messages)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_tool_call_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_url_image_input(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a URL-based image message synchronously and verifies the expected
    ReasoningMessage -> AssistantMessage sequence in the response and the DB.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    reply = client.agents.messages.create(agent_id=agent_state.id, messages=USER_MESSAGE_URL_IMAGE)
    assert_image_input_response(reply.messages)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_image_input_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_base64_image_input(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a base64-encoded image message synchronously and verifies the
    expected ReasoningMessage -> AssistantMessage sequence in the response
    and the DB.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    reply = client.agents.messages.create(agent_id=agent_state.id, messages=USER_MESSAGE_BASE64_IMAGE)
    assert_image_input_response(reply.messages)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_image_input_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_agent_loop_error(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Strips all tools so the agent loop errors, then verifies no new messages
    were persisted and restores the original tool set.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    original_tools = agent_state.tools
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config, tool_ids=[])
    with pytest.raises(ApiError):
        client.agents.messages.create(agent_id=agent_state.id, messages=USER_MESSAGE_FORCE_REPLY)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert len(persisted) == 0
    client.agents.modify(agent_id=agent_state.id, tool_ids=[tool.id for tool in original_tools])
|
||||||
|
|
||||||
|
|
||||||
|
def test_step_streaming_greeting_with_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a forced-reply message over the step-streaming API and verifies the
    accumulated chunks and the persisted history.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    stream = client.agents.messages.create_stream(agent_id=agent_state.id, messages=USER_MESSAGE_FORCE_REPLY)
    events = accumulate_chunks(list(stream))
    assert_greeting_with_assistant_message_response(events, streaming=True)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_greeting_with_assistant_message_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_step_streaming_greeting_without_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a forced-reply message over the step-streaming API with
    use_assistant_message disabled, then verifies the accumulated chunks and
    the persisted history.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    stream = client.agents.messages.create_stream(
        agent_id=agent_state.id,
        messages=USER_MESSAGE_FORCE_REPLY,
        use_assistant_message=False,
    )
    events = accumulate_chunks(list(stream))
    assert_greeting_without_assistant_message_response(events, streaming=True)
    persisted = client.agents.messages.list(
        agent_id=agent_state.id, after=baseline[0].id, use_assistant_message=False
    )
    assert_greeting_without_assistant_message_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_step_streaming_tool_call(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Attaches the roll_dice tool, forces a tool-call turn over the
    step-streaming API, and verifies the accumulated chunks and the DB.
    """
    json_metadata["llm_config"] = dict(llm_config)
    dice = client.tools.upsert_from_function(func=roll_dice)
    agent_state = client.agents.tools.attach(agent_id=agent_state.id, tool_id=dice.id)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    stream = client.agents.messages.create_stream(agent_id=agent_state.id, messages=USER_MESSAGE_ROLL_DICE)
    events = accumulate_chunks(list(stream))
    assert_tool_call_response(events, streaming=True)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_tool_call_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_step_stream_agent_loop_error(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Strips all tools so the streaming agent loop errors, then verifies no
    new messages were persisted and restores the original tool set.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    original_tools = agent_state.tools
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config, tool_ids=[])
    with pytest.raises(ApiError):
        # Draining the stream is what surfaces the error.
        list(client.agents.messages.create_stream(agent_id=agent_state.id, messages=USER_MESSAGE_FORCE_REPLY))

    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert len(persisted) == 0
    client.agents.modify(agent_id=agent_state.id, tool_ids=[tool.id for tool in original_tools])
|
||||||
|
|
||||||
|
|
||||||
|
def test_token_streaming_greeting_with_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Sends a forced-reply message over the token-streaming API and verifies
    the accumulated chunks and the persisted history.
    """
    json_metadata["llm_config"] = dict(llm_config)
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    stream = client.agents.messages.create_stream(
        agent_id=agent_state.id,
        messages=USER_MESSAGE_FORCE_REPLY,
        stream_tokens=True,
    )
    events = accumulate_chunks(list(stream))
    assert_greeting_with_assistant_message_response(events, streaming=True, token_streaming=True)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_greeting_with_assistant_message_response(persisted, from_db=True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_token_streaming_greeting_without_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Stream a greeting reply with assistant messages disabled (token streaming on).

    Validates the message types of every streamed chunk and of the messages
    subsequently read back from the DB.
    """
    json_metadata["llm_config"] = dict(llm_config)
    # Capture the most recent message so the DB read-back below is scoped to this test.
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    stream = client.agents.messages.create_stream(
        agent_id=agent_state.id,
        messages=USER_MESSAGE_FORCE_REPLY,
        use_assistant_message=False,
        stream_tokens=True,
    )
    accumulated = accumulate_chunks(list(stream))
    assert_greeting_without_assistant_message_response(accumulated, streaming=True, token_streaming=True)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id, use_assistant_message=False)
    assert_greeting_without_assistant_message_response(persisted, from_db=True)
||||||
|
def test_token_streaming_tool_call(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Stream a request that forces a tool call (token streaming enabled).

    Attaches a dice-rolling tool, asks the agent to roll, then validates both
    the streamed chunks and the messages persisted to the DB.
    """
    json_metadata["llm_config"] = dict(llm_config)
    dice_tool = client.tools.upsert_from_function(func=roll_dice)
    agent_state = client.agents.tools.attach(agent_id=agent_state.id, tool_id=dice_tool.id)
    # Snapshot the latest message so we can later list only messages created by this test.
    baseline = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)
    stream = client.agents.messages.create_stream(
        agent_id=agent_state.id,
        messages=USER_MESSAGE_ROLL_DICE,
        stream_tokens=True,
    )
    accumulated = accumulate_chunks(list(stream))
    assert_tool_call_response(accumulated, streaming=True)
    persisted = client.agents.messages.list(agent_id=agent_state.id, after=baseline[0].id)
    assert_tool_call_response(persisted, from_db=True)
||||||
|
def test_token_streaming_agent_loop_error(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Tests sending a message with a synchronous client.

    Verifies that no new messages are persisted on error.
    """
    json_metadata["llm_config"] = dict(llm_config)
    last_message = client.agents.messages.list(agent_id=agent_state.id, limit=1)
    tools = agent_state.tools
    # Detach every tool so the agent loop fails when it tries to act.
    agent_state = client.agents.modify(agent_id=agent_state.id, llm_config=llm_config, tool_ids=[])
    try:
        response = client.agents.messages.create_stream(
            agent_id=agent_state.id,
            messages=USER_MESSAGE_FORCE_REPLY,
            stream_tokens=True,
        )
        list(response)
    # Was a bare `except:`, which also swallows SystemExit/KeyboardInterrupt;
    # narrowed to Exception while keeping the deliberate best-effort behavior.
    except Exception:
        pass  # only some models throw an error TODO: make this consistent

    # Nothing should have been persisted after the pre-existing last message.
    messages_from_db = client.agents.messages.list(agent_id=agent_state.id, after=last_message[0].id)
    assert len(messages_from_db) == 0
    # Restore the agent's original tool set for subsequent tests.
    client.agents.modify(agent_id=agent_state.id, tool_ids=[t.id for t in tools])
||||||
|
def test_async_greeting_with_assistant_message(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    agent_state: AgentState,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """
    Send a message as an asynchronous job via the synchronous client.

    Waits for the job to finish, then asserts the result messages stored in the
    run metadata look as expected.
    """
    json_metadata["llm_config"] = dict(llm_config)
    client.agents.modify(agent_id=agent_state.id, llm_config=llm_config)

    run = client.agents.messages.create_async(
        agent_id=agent_state.id,
        messages=USER_MESSAGE_FORCE_REPLY,
    )
    # Poll until the background run completes and re-fetch its final state.
    run = wait_for_run_completion(client, run.id)

    payload = run.metadata.get("result")
    assert payload is not None, "Run metadata missing 'result' key"

    assert_tool_response_dict_messages(payload["messages"])
||||||
|
def test_auto_summarize(
    disable_e2b_api_key: Any,
    client: Letta,
    llm_config: LLMConfig,
    json_metadata: pytest.FixtureRequest,
) -> None:
    """Test that summarization is automatically triggered.

    Creates an agent with an artificially small context window, repeatedly
    sends a long message, and asserts that the in-context message count stops
    growing (i.e. summarization compacted the history) within MAX_ATTEMPTS.
    """
    json_metadata["llm_config"] = dict(llm_config)

    # pydantic prevents us from overriding the context window parameter in the
    # passed LLMConfig, so rebuild a config with a tiny window instead.
    new_llm_config = llm_config.model_dump()
    new_llm_config["context_window"] = 3000
    pinned_context_window_llm_config = LLMConfig(**new_llm_config)

    send_message_tool = client.tools.list(name="send_message")[0]
    temp_agent_state = client.agents.create(
        include_base_tools=False,
        tool_ids=[send_message_tool.id],
        llm_config=pinned_context_window_llm_config,
        embedding="letta/letta-free",
        tags=["supervisor"],
    )

    philosophical_question = """
You know, sometimes I wonder if the entire structure of our lives is built on a series of unexamined assumptions we just silently agreed to somewhere along the way—like how we all just decided that five days a week of work and two days of “rest” constitutes balance, or how 9-to-5 became the default rhythm of a meaningful life, or even how the idea of “success” got boiled down to job titles and property ownership and productivity metrics on a LinkedIn profile, when maybe none of that is actually what makes a life feel full, or grounded, or real. And then there’s the weird paradox of ambition, how we're taught to chase it like a finish line that keeps moving, constantly redefining itself right as you’re about to grasp it—because even when you get the job, or the degree, or the validation, there's always something next, something more, like a treadmill with invisible settings you didn’t realize were turned up all the way.

And have you noticed how we rarely stop to ask who set those definitions for us? Like was there ever a council that decided, yes, owning a home by thirty-five and retiring by sixty-five is the universal template for fulfillment? Or did it just accumulate like cultural sediment over generations, layered into us so deeply that questioning it feels uncomfortable, even dangerous? And isn’t it strange that we spend so much of our lives trying to optimize things—our workflows, our diets, our sleep, our morning routines—as though the point of life is to operate more efficiently rather than to experience it more richly? We build these intricate systems, these rulebooks for being a “high-functioning” human, but where in all of that is the space for feeling lost, for being soft, for wandering without a purpose just because it’s a sunny day and your heart is tugging you toward nowhere in particular?

Sometimes I lie awake at night and wonder if all the noise we wrap around ourselves—notifications, updates, performance reviews, even our internal monologues—might be crowding out the questions we were meant to live into slowly, like how to love better, or how to forgive ourselves, or what the hell we’re even doing here in the first place. And when you strip it all down—no goals, no KPIs, no curated identity—what’s actually left of us? Are we just a sum of the roles we perform, or is there something quieter underneath that we've forgotten how to hear?

And if there is something underneath all of it—something real, something worth listening to—then how do we begin to uncover it, gently, without rushing or reducing it to another task on our to-do list?
"""

    MAX_ATTEMPTS = 10
    prev_length = None

    for _ in range(MAX_ATTEMPTS):
        client.agents.messages.create(
            agent_id=temp_agent_state.id,
            messages=[MessageCreate(role="user", content=philosophical_question)],
        )

        temp_agent_state = client.agents.retrieve(agent_id=temp_agent_state.id)
        message_ids = temp_agent_state.message_ids
        current_length = len(message_ids)

        print("LENGTH OF IN_CONTEXT_MESSAGES:", current_length)

        # If the in-context message count did not grow, the history was compacted.
        if prev_length is not None and current_length <= prev_length:
            # TODO: Add more stringent checks here
            # (message was "is at least prev_length", which inverted the actual
            # `current_length <= prev_length` condition being detected)
            print(f"Summarization was triggered, detected current_length {current_length} did not exceed prev_length {prev_length}.")
            break

        prev_length = current_length
    else:
        raise AssertionError("Summarization was not triggered after 10 messages")
4551
.github/scripts/model-sweep/supported-models.mdx
vendored
Normal file
4551
.github/scripts/model-sweep/supported-models.mdx
vendored
Normal file
File diff suppressed because it is too large
Load Diff
123
.github/workflows/alembic-validation.yml
vendored
Normal file
123
.github/workflows/alembic-validation.yml
vendored
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
name: Alembic Migration Validation
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
pull_request_target:
|
||||||
|
branches: [ main ]
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changed-files:
|
||||||
|
# Run on pull_request OR on pull_request_target only when labeled "safe to test"
|
||||||
|
if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: changed-files
|
||||||
|
outputs:
|
||||||
|
all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||||
|
any_changed: ${{ steps.changed-files.outputs.any_changed }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Get changed files
|
||||||
|
id: changed-files
|
||||||
|
uses: tj-actions/changed-files@v44
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
alembic/**
|
||||||
|
.github/workflows/alembic-validation.yml
|
||||||
|
|
||||||
|
test-sqlite:
|
||||||
|
needs: [ changed-files ]
|
||||||
|
if: ${{ needs.changed-files.outputs.any_changed == 'true' }}
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
timeout-minutes: 15
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
working-directory: .
|
||||||
|
run: uv sync --no-install-project ${{ inputs.install-args || '--extra sqlite --extra external-tools --extra dev --extra cloud-tool-sandbox' }}
|
||||||
|
- name: Test alembic migration
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
uv run alembic upgrade head
|
||||||
|
# kinda janky but I think this might not matter for sqlite?
|
||||||
|
# uv run alembic check
|
||||||
|
|
||||||
|
- name: Cleanup persistent data
|
||||||
|
if: ${{ always() }}
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
echo "Cleaning up persistent data..."
|
||||||
|
sudo rm -rf ~/.letta || true
|
||||||
|
|
||||||
|
test-postgres:
|
||||||
|
needs: [ changed-files ]
|
||||||
|
if: ${{ needs.changed-files.outputs.any_changed == 'true' }}
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
timeout-minutes: 15
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector:pg17
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
POSTGRES_DB: postgres
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
working-directory: .
|
||||||
|
run: uv sync --no-install-project ${{ inputs.install-args || '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox' }}
|
||||||
|
- name: Test alembic migration
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
run: |
|
||||||
|
psql -h localhost -U postgres -d postgres -c 'CREATE EXTENSION IF NOT EXISTS vector;'
|
||||||
|
uv run alembic upgrade head
|
||||||
|
uv run alembic check
|
||||||
|
|
||||||
|
- name: Print docker logs if tests fail
|
||||||
|
if: ${{ failure() || cancelled() }}
|
||||||
|
run: |
|
||||||
|
echo "Printing Docker Logs..."
|
||||||
|
docker logs $(docker ps -aq --filter "ancestor=pgvector/pgvector:pg17") || true
|
||||||
|
|
||||||
|
- name: Cleanup containers and volumes
|
||||||
|
if: ${{ always() }}
|
||||||
|
run: |
|
||||||
|
echo "Cleaning up containers and volumes..."
|
||||||
|
docker stop $(docker ps -aq --filter "ancestor=pgvector/pgvector:pg17") || true
|
||||||
|
docker rm $(docker ps -aq --filter "ancestor=pgvector/pgvector:pg17") || true
|
||||||
|
docker volume prune -f || true
|
||||||
|
docker system prune -f || true
|
||||||
22
.github/workflows/close_stale_issues.yml
vendored
Normal file
22
.github/workflows/close_stale_issues.yml
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
name: Close inactive issues
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "30 1 * * *"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
close-issues:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/stale@v5
|
||||||
|
with:
|
||||||
|
days-before-issue-stale: 30
|
||||||
|
days-before-issue-close: 14
|
||||||
|
stale-issue-label: "stale"
|
||||||
|
stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
|
||||||
|
close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
|
||||||
|
days-before-pr-stale: -1
|
||||||
|
days-before-pr-close: -1
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
52
.github/workflows/core-integration-tests.yml
vendored
Normal file
52
.github/workflows/core-integration-tests.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
name: 🐍🧪 [Core] Integration Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request_target:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
integration-tests:
|
||||||
|
# Run on pull_request OR on pull_request_target only when labeled "safe to test"
|
||||||
|
if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
uses: ./.github/workflows/reusable-test-workflow.yml
|
||||||
|
with:
|
||||||
|
test-type: 'integration'
|
||||||
|
use-redis: true
|
||||||
|
is-external-pr: ${{ github.event_name == 'pull_request_target' && !contains(github.event.pull_request.labels.*.name, 'safe to test') }}
|
||||||
|
changed-files-pattern: |
|
||||||
|
**
|
||||||
|
.github/workflows/reusable-test-workflow.yml
|
||||||
|
.github/workflows/core-integration-tests.yml
|
||||||
|
install-args: '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox'
|
||||||
|
timeout-minutes: 15
|
||||||
|
ref: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||||
|
matrix-strategy: |
|
||||||
|
{
|
||||||
|
"fail-fast": false,
|
||||||
|
"matrix": {
|
||||||
|
"test_suite": [
|
||||||
|
"integration_test_summarizer.py",
|
||||||
|
"integration_test_async_tool_sandbox.py",
|
||||||
|
"integration_test_sleeptime_agent.py",
|
||||||
|
"integration_test_agent_tool_graph.py",
|
||||||
|
"integration_test_composio.py",
|
||||||
|
"integration_test_chat_completions.py",
|
||||||
|
"integration_test_multi_agent.py",
|
||||||
|
"integration_test_batch_api_cron_jobs.py",
|
||||||
|
"integration_test_batch_sdk.py",
|
||||||
|
"integration_test_builtin_tools.py",
|
||||||
|
"integration_test_turbopuffer.py",
|
||||||
|
"integration_test_human_in_the_loop.py"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
secrets: inherit
|
||||||
67
.github/workflows/core-lint.yml
vendored
Normal file
67
.github/workflows/core-lint.yml
vendored
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
name: 🐍🧹 [Core] Lint and Test
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changed-files:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: changed-files
|
||||||
|
outputs:
|
||||||
|
all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||||
|
any_changed: ${{ steps.changed-files.outputs.any_changed }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Get changed files
|
||||||
|
id: changed-files
|
||||||
|
uses: tj-actions/changed-files@v44
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
letta/**
|
||||||
|
tests/**
|
||||||
|
*.py
|
||||||
|
pyproject.toml
|
||||||
|
.github/workflows/core-lint.yml
|
||||||
|
main:
|
||||||
|
needs: [ changed-files ]
|
||||||
|
if: ${{ needs.changed-files.outputs.any_changed == 'true' }}
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: ["3.12"] # Adjust Python version matrix if needed
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
working-directory: .
|
||||||
|
run: uv sync --no-install-project ${{ inputs.install-args || '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox' }}
|
||||||
|
- name: Validate PR Title
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: amannn/action-semantic-pull-request@v5
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Run Pyright
|
||||||
|
uses: jakebailey/pyright-action@v2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
level: "error"
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Run Ruff Check
|
||||||
|
working-directory: .
|
||||||
|
run: uv run ruff check --config pyproject.toml --diff .
|
||||||
|
|
||||||
|
- name: Run Ruff Format
|
||||||
|
working-directory: .
|
||||||
|
run: uv run ruff format --config pyproject.toml --check --diff .
|
||||||
63
.github/workflows/core-unit-sqlite-test.yaml
vendored
Normal file
63
.github/workflows/core-unit-sqlite-test.yaml
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
name: 🐍👨🔬 [Core] Unit Tests (SQLite)
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request_target:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
unit-tests:
|
||||||
|
# Run on pull_request OR on pull_request_target only when labeled "safe to test"
|
||||||
|
if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
uses: ./.github/workflows/reusable-test-workflow.yml
|
||||||
|
with:
|
||||||
|
test-type: 'sqlite'
|
||||||
|
use-redis: true
|
||||||
|
changed-files-pattern: |
|
||||||
|
apps/core/**
|
||||||
|
.github/workflows/reusable-test-workflow.yml
|
||||||
|
.github/workflows/core-unit-sqlite-test.yaml
|
||||||
|
install-args: '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox --extra google --extra sqlite'
|
||||||
|
timeout-minutes: 15
|
||||||
|
ref: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||||
|
|
||||||
|
matrix-strategy: |
|
||||||
|
{
|
||||||
|
"fail-fast": false,
|
||||||
|
"matrix": {
|
||||||
|
"include": [
|
||||||
|
{"test_suite": "test_client.py"},
|
||||||
|
{"test_suite": "test_sdk_client.py"},
|
||||||
|
{"test_suite": "test_server.py"},
|
||||||
|
{"test_suite": "test_tool_schema_parsing.py"},
|
||||||
|
{"test_suite": "test_tool_rule_solver.py"},
|
||||||
|
{"test_suite": "test_memory.py"},
|
||||||
|
{"test_suite": "test_utils.py"},
|
||||||
|
{"test_suite": "test_stream_buffer_readers.py"},
|
||||||
|
{"test_suite": "test_agent_serialization.py"},
|
||||||
|
{"test_suite": "test_optimistic_json_parser.py"},
|
||||||
|
{"test_suite": "test_llm_clients.py"},
|
||||||
|
{"test_suite": "test_letta_agent_batch.py"},
|
||||||
|
{"test_suite": "test_providers.py"},
|
||||||
|
{"test_suite": "test_sources.py"},
|
||||||
|
{"test_suite": "test_managers.py"},
|
||||||
|
{"test_suite": "sdk/"},
|
||||||
|
{"test_suite": "mcp_tests/", "use_experimental": true},
|
||||||
|
{"test_suite": "test_timezone_formatting.py"},
|
||||||
|
{"test_suite": "test_plugins.py"},
|
||||||
|
{"test_suite": "test_embeddings.py"},
|
||||||
|
{"test_suite": "test_crypto_utils.py"},
|
||||||
|
{"test_suite": "test_mcp_encryption.py"},
|
||||||
|
{"test_suite": "test_secret.py"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
secrets: inherit
|
||||||
64
.github/workflows/core-unit-test.yml
vendored
Normal file
64
.github/workflows/core-unit-test.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
name: 🐍👨🔬 [Core] Unit Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request_target:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
unit-tests:
|
||||||
|
# Run on pull_request OR on pull_request_target only when labeled "safe to test"
|
||||||
|
if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
uses: ./.github/workflows/reusable-test-workflow.yml
|
||||||
|
with:
|
||||||
|
test-type: 'unit'
|
||||||
|
use-redis: true
|
||||||
|
is-external-pr: ${{ github.event_name == 'pull_request_target' && !contains(github.event.pull_request.labels.*.name, 'safe to test') }}
|
||||||
|
changed-files-pattern: |
|
||||||
|
**
|
||||||
|
.github/workflows/reusable-test-workflow.yml
|
||||||
|
.github/workflows/core-unit-test.yml
|
||||||
|
install-args: '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox --extra google'
|
||||||
|
timeout-minutes: 15
|
||||||
|
ref: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||||
|
matrix-strategy: |
|
||||||
|
{
|
||||||
|
"fail-fast": false,
|
||||||
|
"matrix": {
|
||||||
|
"include": [
|
||||||
|
{"test_suite": "test_client.py"},
|
||||||
|
{"test_suite": "test_sdk_client.py"},
|
||||||
|
{"test_suite": "test_server.py"},
|
||||||
|
{"test_suite": "test_managers.py"},
|
||||||
|
{"test_suite": "test_tool_schema_parsing.py"},
|
||||||
|
{"test_suite": "test_tool_rule_solver.py"},
|
||||||
|
{"test_suite": "test_memory.py"},
|
||||||
|
{"test_suite": "test_utils.py"},
|
||||||
|
{"test_suite": "test_stream_buffer_readers.py"},
|
||||||
|
{"test_suite": "test_agent_serialization.py"},
|
||||||
|
{"test_suite": "test_agent_serialization_v2.py"},
|
||||||
|
{"test_suite": "test_optimistic_json_parser.py"},
|
||||||
|
{"test_suite": "test_llm_clients.py"},
|
||||||
|
{"test_suite": "test_letta_agent_batch.py"},
|
||||||
|
{"test_suite": "test_providers.py"},
|
||||||
|
{"test_suite": "test_sources.py"},
|
||||||
|
{"test_suite": "sdk/"},
|
||||||
|
{"test_suite": "mcp_tests/", "use_experimental": true},
|
||||||
|
{"test_suite": "test_timezone_formatting.py"},
|
||||||
|
{"test_suite": "test_plugins.py"},
|
||||||
|
{"test_suite": "test_embeddings.py"},
|
||||||
|
{"test_suite": "test_crypto_utils.py"},
|
||||||
|
{"test_suite": "test_mcp_encryption.py"},
|
||||||
|
{"test_suite": "test_secret.py"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
secrets: inherit
|
||||||
40
.github/workflows/docker-image.yml
vendored
Normal file
40
.github/workflows/docker-image.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
name: Docker Image CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [published]
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Extract version number
|
||||||
|
id: extract_version
|
||||||
|
run: echo "CURRENT_VERSION=$(awk -F '\"' '/version =/ { print $2 }' pyproject.toml | head -n 1)" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: |
|
||||||
|
letta/letta:${{ env.CURRENT_VERSION }}
|
||||||
|
letta/letta:latest
|
||||||
|
memgpt/letta:${{ env.CURRENT_VERSION }}
|
||||||
|
memgpt/letta:latest
|
||||||
66
.github/workflows/docker-integration-tests.yaml
vendored
Normal file
66
.github/workflows/docker-integration-tests.yaml
vendored
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
name: Run Docker integration tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 15
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up python 3.11
|
||||||
|
id: setup-python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: 3.11
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v6
|
||||||
|
with:
|
||||||
|
enable-cache: true
|
||||||
|
|
||||||
|
- name: Set permissions for log directory
|
||||||
|
run: |
|
||||||
|
mkdir -p /home/runner/.letta/logs
|
||||||
|
sudo chown -R $USER:$USER /home/runner/.letta/logs
|
||||||
|
chmod -R 755 /home/runner/.letta/logs
|
||||||
|
|
||||||
|
- name: Build and run docker dev server
|
||||||
|
env:
|
||||||
|
LETTA_PG_DB: letta
|
||||||
|
LETTA_PG_USER: letta
|
||||||
|
LETTA_PG_PASSWORD: letta
|
||||||
|
LETTA_PG_PORT: 8888
|
||||||
|
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||||
|
|
||||||
|
run: |
|
||||||
|
docker compose -f dev-compose.yaml up --build -d
|
||||||
|
|
||||||
|
- name: Wait for service
|
||||||
|
run: bash scripts/wait_for_service.sh http://localhost:8283 -- echo "Service is ready"
|
||||||
|
|
||||||
|
- name: Run tests with pytest
|
||||||
|
env:
|
||||||
|
LETTA_PG_DB: letta
|
||||||
|
LETTA_PG_USER: letta
|
||||||
|
LETTA_PG_PASSWORD: letta
|
||||||
|
LETTA_PG_PORT: 8888
|
||||||
|
LETTA_SERVER_PASS: test_server_token
|
||||||
|
LETTA_SERVER_URL: http://localhost:8283
|
||||||
|
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||||
|
PYTHONPATH: ${{ github.workspace }}:${{ env.PYTHONPATH }}
|
||||||
|
run: |
|
||||||
|
uv sync --extra dev --extra postgres --extra sqlite
|
||||||
|
uv run pytest -s tests/test_client.py
|
||||||
|
|
||||||
|
- name: Print docker logs if tests fail
|
||||||
|
if: failure()
|
||||||
|
run: |
|
||||||
|
echo "Printing Docker Logs..."
|
||||||
|
docker compose -f dev-compose.yaml logs
|
||||||
20
.github/workflows/fern-check.yml
vendored
Normal file
20
.github/workflows/fern-check.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
name: 🌿 Fern Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
run:
|
||||||
|
runs-on: [self-hosted, small]
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check API is valid
|
||||||
|
working-directory: fern
|
||||||
|
run: fern check
|
||||||
45
.github/workflows/fern-docs-preview.yml
vendored
Normal file
45
.github/workflows/fern-docs-preview.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
name: Preview Docs
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request_target:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
run:
|
||||||
|
if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
runs-on: [self-hosted, small]
|
||||||
|
permissions: write-all
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Generate preview URL
|
||||||
|
id: generate-docs
|
||||||
|
if: github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'safe to test')
|
||||||
|
working-directory: fern
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
run: |
|
||||||
|
OUTPUT=$(fern generate --docs --preview 2>&1) || true
|
||||||
|
echo "$OUTPUT"
|
||||||
|
URL=$(echo "$OUTPUT" | grep -oP 'Published docs to \K.*(?= \()')
|
||||||
|
echo "Preview URL: $URL"
|
||||||
|
echo "🌿 Preview your docs: $URL" > preview_url.txt
|
||||||
|
|
||||||
|
- name: Comment URL in PR
|
||||||
|
uses: thollander/actions-comment-pull-request@v3
|
||||||
|
with:
|
||||||
|
file-path: fern/preview_url.txt
|
||||||
21
.github/workflows/fern-docs-publish.yml
vendored
Normal file
21
.github/workflows/fern-docs-publish.yml
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
name: 🌿 Publish Docs
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
run:
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
if: ${{ github.event_name == 'push' && contains(github.ref, 'refs/heads/main') && github.run_number > 1 }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Publish Docs
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
run: fern generate --docs --log-level debug
|
||||||
173
.github/workflows/fern-sdk-python-preview.yml
vendored
Normal file
173
.github/workflows/fern-sdk-python-preview.yml
vendored
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
name: 🌿 Preview Python SDK
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
pull_request_target:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
types: [labeled]
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
paths:
|
||||||
|
- 'fern/openapi.json'
|
||||||
|
- 'fern/openapi-overrides.yml'
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changed-files:
|
||||||
|
# Run on pull_request OR on pull_request_target only when labeled "safe to test"
|
||||||
|
if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
runs-on: [self-hosted, small]
|
||||||
|
name: changed-files
|
||||||
|
outputs:
|
||||||
|
all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||||
|
any_changed: ${{ steps.changed-files.outputs.any_changed }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
submodules: true
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Get changed files
|
||||||
|
id: changed-files
|
||||||
|
uses: tj-actions/changed-files@v44
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
fern/openapi.json
|
||||||
|
fern/openapi-overrides.yml
|
||||||
|
|
||||||
|
preview-python-sdk:
|
||||||
|
needs: [changed-files]
|
||||||
|
name: preview-python-sdk
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
outputs:
|
||||||
|
cache-key: ${{ steps.cache-key.outputs.key }}
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector:pg17
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
POSTGRES_DB: postgres
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Generate cache key
|
||||||
|
id: cache-key
|
||||||
|
run: |
|
||||||
|
echo "key=sdk-${{ github.ref_name }}-${{ hashFiles('fern/*', 'pyproject.toml') }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Try to restore SDK cache
|
||||||
|
id: restore-cache
|
||||||
|
uses: actions/cache/restore@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
fern/.preview/fern-python-sdk/
|
||||||
|
key: ${{ steps.cache-key.outputs.key }}
|
||||||
|
|
||||||
|
- name: Inject env vars into environment
|
||||||
|
if: github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'safe to test')
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
if [[ -n "$line" ]]; then
|
||||||
|
value=$(echo "$line" | cut -d= -f2-)
|
||||||
|
echo "::add-mask::$value"
|
||||||
|
echo "$line" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
done < <(letta_secrets_helper --env dev --service ci)
|
||||||
|
|
||||||
|
- name: Debug environment
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "=== Environment Debug ==="
|
||||||
|
echo "PATH: $PATH"
|
||||||
|
echo "USER: $(whoami)"
|
||||||
|
echo "HOME: $HOME"
|
||||||
|
echo "Shell: $SHELL"
|
||||||
|
echo "Working directory: $(pwd)"
|
||||||
|
echo ""
|
||||||
|
echo "=== UV Debug ==="
|
||||||
|
which uv || echo "uv not found in PATH"
|
||||||
|
ls -la /usr/local/bin/uv || echo "/usr/local/bin/uv not found"
|
||||||
|
ls -la /home/ci-runner/.local/bin/uv || echo "ci-runner uv not found"
|
||||||
|
echo ""
|
||||||
|
echo "=== Test uv command ==="
|
||||||
|
uv --version || echo "uv --version failed"
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
working-directory: .
|
||||||
|
run: uv sync --no-install-project ${{ inputs.install-args || '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox' }}
|
||||||
|
|
||||||
|
- name: Migrate database
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
run: |
|
||||||
|
psql -h localhost -U postgres -d postgres -c 'CREATE EXTENSION vector'
|
||||||
|
uv run alembic upgrade head
|
||||||
|
|
||||||
|
- name: Run letta server
|
||||||
|
if: github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'safe to test')
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
OPENAI_API_KEY: ${{ env.OPENAI_API_KEY }}
|
||||||
|
E2B_SANDBOX_TEMPLATE_ID: ${{ env.E2B_SANDBOX_TEMPLATE_ID }}
|
||||||
|
run: |
|
||||||
|
# Run server in background
|
||||||
|
uv run letta server &
|
||||||
|
# Wait for server to be ready
|
||||||
|
timeout 60 bash -c 'until curl -s http://localhost:8283/health; do sleep 1; done'
|
||||||
|
|
||||||
|
- name: Generate Python SDK Preview
|
||||||
|
if: (github.event_name != 'pull_request_target' || contains(github.event.pull_request.labels.*.name, 'safe to test')) && steps.restore-cache.outputs.cache-hit != 'true'
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
run: |
|
||||||
|
fern generate --group python-sdk --preview
|
||||||
|
cd fern/.preview/fern-python-sdk
|
||||||
|
poetry install
|
||||||
|
poetry build --format wheel
|
||||||
|
poetry run mypy .
|
||||||
|
poetry run pytest -rP tests/custom/test_client.py --env localhost
|
||||||
|
ls -lah
|
||||||
|
|
||||||
|
- name: Save SDK to cache
|
||||||
|
if: steps.restore-cache.outputs.cache-hit != 'true'
|
||||||
|
uses: actions/cache/save@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
fern/.preview/fern-python-sdk/
|
||||||
|
key: ${{ steps.cache-key.outputs.key }}
|
||||||
50
.github/workflows/fern-sdk-python-publish.yml
vendored
Normal file
50
.github/workflows/fern-sdk-python-publish.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
name: 🌿 Release Python SDK
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: "The version of the Python SDK that you would like to release"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
workflow_run:
|
||||||
|
workflows: ["🌿 Preview Python SDK"]
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
if: |
|
||||||
|
github.event_name == 'workflow_dispatch' ||
|
||||||
|
(github.event_name == 'workflow_run' &&
|
||||||
|
github.event.workflow_run.event == 'push' &&
|
||||||
|
github.event.workflow_run.conclusion == 'success')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Download Fern
|
||||||
|
run: npm install -g fern-api
|
||||||
|
|
||||||
|
- name: Generate Python SDK
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||||
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||||
|
fern generate --group python-sdk --version ${{ inputs.version }} --log-level debug
|
||||||
|
else
|
||||||
|
fern generate --group python-sdk --log-level debug
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Publish Docs
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
run: fern generate --docs
|
||||||
117
.github/workflows/fern-sdk-typescript-preview.yml
vendored
Normal file
117
.github/workflows/fern-sdk-typescript-preview.yml
vendored
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
name: 🌿 Preview TypeScript SDK
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
paths:
|
||||||
|
- 'fern/openapi.json'
|
||||||
|
- 'fern/openapi-overrides.yml'
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||||
|
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
changed-files:
|
||||||
|
runs-on: [self-hosted, small]
|
||||||
|
name: changed-files
|
||||||
|
outputs:
|
||||||
|
all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||||
|
any_changed: ${{ steps.changed-files.outputs.any_changed }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
fetch-depth: 0
|
||||||
|
- name: Get changed files
|
||||||
|
id: changed-files
|
||||||
|
uses: tj-actions/changed-files@v44
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
fern/openapi.json
|
||||||
|
fern/openapi-overrides.yml
|
||||||
|
preview-typescript-sdk:
|
||||||
|
if: ${{ needs.changed-files.outputs.any_changed == 'true' }}
|
||||||
|
needs: [changed-files]
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector:pg17
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
POSTGRES_DB: postgres
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
working-directory: .
|
||||||
|
run: uv sync --no-install-project ${{ inputs.install-args || '--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox' }}
|
||||||
|
|
||||||
|
- name: Inject env vars into environment
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
if [[ -n "$line" ]]; then
|
||||||
|
value=$(echo "$line" | cut -d= -f2-)
|
||||||
|
echo "::add-mask::$value"
|
||||||
|
echo "$line" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
done < <(letta_secrets_helper --env dev --service ci)
|
||||||
|
|
||||||
|
- name: Migrate database
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
run: |
|
||||||
|
psql -h localhost -U postgres -d postgres -c 'CREATE EXTENSION vector'
|
||||||
|
uv run alembic upgrade head
|
||||||
|
|
||||||
|
- name: Run letta server
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
OPENAI_API_KEY: ${{ env.OPENAI_API_KEY }}
|
||||||
|
E2B_SANDBOX_TEMPLATE_ID: ${{ env.E2B_SANDBOX_TEMPLATE_ID }}
|
||||||
|
run: |
|
||||||
|
# Run server in background
|
||||||
|
uv run letta server &
|
||||||
|
# Wait for server to be ready
|
||||||
|
timeout 60 bash -c 'until curl -s http://localhost:8283/health; do sleep 1; done'
|
||||||
|
|
||||||
|
- name: Generate TypeScript SDK Preview
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_ENV: localhost
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
run: |
|
||||||
|
fern generate --group ts-sdk --preview
|
||||||
|
cd fern/.preview/fern-typescript-node-sdk
|
||||||
|
yarn install
|
||||||
|
yarn build
|
||||||
|
yarn test tests/custom.test.ts
|
||||||
50
.github/workflows/fern-sdk-typescript-publish.yml
vendored
Normal file
50
.github/workflows/fern-sdk-typescript-publish.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
name: 🌿 Release TypeScript SDK
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: "The version of the TypeScript SDK that you would like to release"
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
workflow_run:
|
||||||
|
workflows: ["🌿 Preview TypeScript SDK"]
|
||||||
|
types:
|
||||||
|
- completed
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
if: |
|
||||||
|
github.event_name == 'workflow_dispatch' ||
|
||||||
|
(github.event_name == 'workflow_run' &&
|
||||||
|
github.event.workflow_run.event == 'push' &&
|
||||||
|
github.event.workflow_run.conclusion == 'success')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: true
|
||||||
|
|
||||||
|
- name: Download Fern
|
||||||
|
run: npm install -g fern-api
|
||||||
|
|
||||||
|
- name: Generate TypeScript SDK
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||||
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||||
|
fern generate --group ts-sdk --version ${{ inputs.version }} --log-level debug
|
||||||
|
else
|
||||||
|
fern generate --group ts-sdk --log-level debug
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Publish Docs
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
FERN_TOKEN: ${{ secrets.FERN_TOKEN }}
|
||||||
|
run: fern generate --docs
|
||||||
19
.github/workflows/letta-code-sync.yml
vendored
Normal file
19
.github/workflows/letta-code-sync.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
name: Sync Code
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
notify:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ !contains(github.event.head_commit.message, '[sync-skip]') }}
|
||||||
|
steps:
|
||||||
|
- name: Trigger repository_dispatch
|
||||||
|
run: |
|
||||||
|
curl -X POST \
|
||||||
|
-H "Authorization: token ${{ secrets.SYNC_PAT }}" \
|
||||||
|
-H "Accept: application/vnd.github.v3+json" \
|
||||||
|
https://api.github.com/repos/letta-ai/letta-cloud/dispatches \
|
||||||
|
-d '{"event_type":"oss-update"}'
|
||||||
161
.github/workflows/lint-command.yml
vendored
Normal file
161
.github/workflows/lint-command.yml
vendored
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
name: Lint Command
|
||||||
|
|
||||||
|
on:
|
||||||
|
issue_comment:
|
||||||
|
types: [created]
|
||||||
|
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
pr_number:
|
||||||
|
description: 'PR number to run lint on'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
issues: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint-command:
|
||||||
|
name: Handle /lint command
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: |
|
||||||
|
(github.event_name == 'workflow_dispatch' && github.event.inputs.pr_number) ||
|
||||||
|
(github.event_name == 'issue_comment' &&
|
||||||
|
github.event.issue.pull_request &&
|
||||||
|
contains(github.event.comment.body, '/lint') &&
|
||||||
|
startsWith(github.event.comment.body, '/lint'))
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Add acknowledgment reaction
|
||||||
|
if: github.event_name == 'issue_comment'
|
||||||
|
uses: peter-evans/create-or-update-comment@v4
|
||||||
|
with:
|
||||||
|
comment-id: ${{ github.event.comment.id }}
|
||||||
|
reactions: eyes
|
||||||
|
|
||||||
|
- name: Check permissions
|
||||||
|
if: github.event_name == 'issue_comment'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const { data: collaborator } = await github.rest.repos.getCollaboratorPermissionLevel({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
username: context.actor
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!['admin', 'write'].includes(collaborator.permission)) {
|
||||||
|
github.rest.issues.createComment({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
body: '❌ You need write permissions to run lint commands.'
|
||||||
|
});
|
||||||
|
core.setFailed('Insufficient permissions');
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Get PR information
|
||||||
|
id: pr
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const pr_number = context.eventName === 'issue_comment'
|
||||||
|
? context.issue.number
|
||||||
|
: ${{ github.event.inputs.pr_number || 'null' }};
|
||||||
|
|
||||||
|
const { data: pr } = await github.rest.pulls.get({
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
pull_number: pr_number
|
||||||
|
});
|
||||||
|
|
||||||
|
core.setOutput('branch', pr.head.ref);
|
||||||
|
core.setOutput('repo', pr.head.repo.full_name);
|
||||||
|
core.setOutput('sha', pr.head.sha);
|
||||||
|
core.setOutput('number', pr_number);
|
||||||
|
|
||||||
|
- name: Checkout PR branch
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
ref: ${{ steps.pr.outputs.branch }}
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Set up python 3.12
|
||||||
|
id: setup-python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: 3.12
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v6
|
||||||
|
with:
|
||||||
|
enable-cache: false
|
||||||
|
activate-environment: true
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: uv sync --extra dev --extra postgres --extra external-tools
|
||||||
|
working-directory: .
|
||||||
|
|
||||||
|
# - name: Run ruff check with fixes
|
||||||
|
# run: uv run ruff check --fix .
|
||||||
|
#
|
||||||
|
# - name: Run ruff format
|
||||||
|
# run: uv run ruff format .
|
||||||
|
|
||||||
|
- name: Run isort, black, autoflake
|
||||||
|
run: uv run isort . --profile black && uv run black . && uv run autoflake --remove-all-unused-imports --remove-unused-variables --in-place --recursive --ignore-init-module-imports .
|
||||||
|
working-directory: .
|
||||||
|
|
||||||
|
|
||||||
|
- name: Check for changes
|
||||||
|
id: changes
|
||||||
|
run: |
|
||||||
|
if [[ -n $(git status --porcelain) ]]; then
|
||||||
|
echo "changes=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "changes=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Commit and push changes
|
||||||
|
if: steps.changes.outputs.changes == 'true'
|
||||||
|
run: |
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
git add .
|
||||||
|
git commit -m "style: lint / fmt
|
||||||
|
|
||||||
|
Triggered by /lint command from @${{ github.actor }}"
|
||||||
|
git push
|
||||||
|
|
||||||
|
- name: Comment on success
|
||||||
|
if: steps.changes.outputs.changes == 'true'
|
||||||
|
uses: peter-evans/create-or-update-comment@v4
|
||||||
|
with:
|
||||||
|
issue-number: ${{ steps.pr.outputs.number }}
|
||||||
|
body: |
|
||||||
|
✅ **Lint fixes applied successfully!**
|
||||||
|
|
||||||
|
Ruff has automatically fixed linting issues and formatted the code.
|
||||||
|
Changes have been committed to the PR branch.
|
||||||
|
|
||||||
|
- name: Comment on no changes
|
||||||
|
if: steps.changes.outputs.changes == 'false'
|
||||||
|
uses: peter-evans/create-or-update-comment@v4
|
||||||
|
with:
|
||||||
|
issue-number: ${{ steps.pr.outputs.number }}
|
||||||
|
body: |
|
||||||
|
✅ **No lint issues found!**
|
||||||
|
|
||||||
|
The code is already properly formatted and passes all linting checks.
|
||||||
|
|
||||||
|
- name: Comment on failure
|
||||||
|
if: failure()
|
||||||
|
uses: peter-evans/create-or-update-comment@v4
|
||||||
|
with:
|
||||||
|
issue-number: ${{ steps.pr.outputs.number }}
|
||||||
|
body: |
|
||||||
|
❌ **Lint command failed!**
|
||||||
|
|
||||||
|
There was an error while running the lint fixes. Please check the [workflow run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details.
|
||||||
25
.github/workflows/manually_clear_old_issues.yml
vendored
Normal file
25
.github/workflows/manually_clear_old_issues.yml
vendored
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
name: Clear Old Issues
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
cleanup-old-issues:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/stale@v5
|
||||||
|
with:
|
||||||
|
days-before-issue-stale: 60
|
||||||
|
days-before-issue-close: 0
|
||||||
|
stale-issue-label: "auto-closed"
|
||||||
|
stale-issue-message: ""
|
||||||
|
close-issue-message: "This issue has been automatically closed due to 60 days of inactivity."
|
||||||
|
days-before-pr-stale: -1
|
||||||
|
days-before-pr-close: -1
|
||||||
|
exempt-issue-labels: ""
|
||||||
|
only-issue-labels: ""
|
||||||
|
remove-stale-when-updated: true
|
||||||
|
operations-per-run: 1000
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
54
.github/workflows/migration-test.yml
vendored
Normal file
54
.github/workflows/migration-test.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
name: Alembic Migration Tester
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- '**.py'
|
||||||
|
workflow_dispatch:
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 15
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector:pg17
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
POSTGRES_DB: postgres
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- run: psql -h localhost -U postgres -d postgres -c 'CREATE EXTENSION vector'
|
||||||
|
|
||||||
|
- name: Set up python 3.11
|
||||||
|
id: setup-python
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: 3.11
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v6
|
||||||
|
with:
|
||||||
|
enable-cache: true
|
||||||
|
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
uv sync --all-extras
|
||||||
|
|
||||||
|
- name: Test alembic migration
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
run: |
|
||||||
|
uv run alembic upgrade head
|
||||||
|
uv run alembic check
|
||||||
144
.github/workflows/model-sweep.yaml
vendored
Normal file
144
.github/workflows/model-sweep.yaml
vendored
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
name: Model Sweep
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
branch-name:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
model-sweep:
|
||||||
|
runs-on: [self-hosted, medium]
|
||||||
|
services:
|
||||||
|
qdrant:
|
||||||
|
image: qdrant/qdrant
|
||||||
|
ports:
|
||||||
|
- 6333:6333
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector:pg17
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
POSTGRES_DB: postgres
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check if gh is installed
|
||||||
|
run: |
|
||||||
|
if ! command -v gh >/dev/null 2>&1
|
||||||
|
then
|
||||||
|
echo "gh could not be found, installing now..."
|
||||||
|
# install gh cli
|
||||||
|
(type -p wget >/dev/null || (sudo apt update && sudo apt-get install wget -y)) \
|
||||||
|
&& sudo mkdir -p -m 755 /etc/apt/keyrings \
|
||||||
|
&& out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||||
|
&& cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
|
||||||
|
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
|
||||||
|
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
|
||||||
|
&& sudo apt update \
|
||||||
|
&& sudo apt install gh -y
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Inject env vars into environment
|
||||||
|
run: |
|
||||||
|
# Get secrets and mask them before adding to environment
|
||||||
|
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
if [[ -n "$line" ]]; then
|
||||||
|
value=$(echo "$line" | cut -d= -f2-)
|
||||||
|
echo "::add-mask::$value"
|
||||||
|
echo "$line" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
done < <(letta_secrets_helper --env dev --service ci)
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
shell: bash
|
||||||
|
run: uv sync --extra dev --extra postgres --extra external-tools --extra cloud-tool-sandbox --extra google
|
||||||
|
- name: Migrate database
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
run: |
|
||||||
|
psql -h localhost -U postgres -d postgres -c 'CREATE EXTENSION vector'
|
||||||
|
uv run alembic upgrade head
|
||||||
|
|
||||||
|
- name: Run integration tests
|
||||||
|
# if any of the 1000+ test cases fail, pytest reports exit code 1 and won't procces/upload the results
|
||||||
|
continue-on-error: true
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
LETTA_SERVER_PASS: test_server_token
|
||||||
|
OPENAI_API_KEY: ${{ env.OPENAI_API_KEY }}
|
||||||
|
ANTHROPIC_API_KEY: ${{ env.ANTHROPIC_API_KEY }}
|
||||||
|
AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
|
||||||
|
AZURE_BASE_URL: ${{ secrets.AZURE_BASE_URL }}
|
||||||
|
GEMINI_API_KEY: ${{ env.GEMINI_API_KEY }}
|
||||||
|
COMPOSIO_API_KEY: ${{ env.COMPOSIO_API_KEY }}
|
||||||
|
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT}}
|
||||||
|
GOOGLE_CLOUD_LOCATION: ${{ secrets.GOOGLE_CLOUD_LOCATION}}
|
||||||
|
DEEPSEEK_API_KEY: ${{ env.DEEPSEEK_API_KEY}}
|
||||||
|
LETTA_USE_EXPERIMENTAL: 1
|
||||||
|
run: |
|
||||||
|
uv run pytest \
|
||||||
|
-s -vv \
|
||||||
|
.github/scripts/model-sweep/model_sweep.py \
|
||||||
|
--json-report --json-report-file=.github/scripts/model-sweep/model_sweep_report.json --json-report-indent=4
|
||||||
|
|
||||||
|
- name: Convert report to markdown
|
||||||
|
continue-on-error: true
|
||||||
|
# file path args to generate_model_sweep_markdown.py are relative to the script
|
||||||
|
run: |
|
||||||
|
uv run python \
|
||||||
|
.github/scripts/model-sweep/generate_model_sweep_markdown.py \
|
||||||
|
.github/scripts/model-sweep/model_sweep_report.json \
|
||||||
|
.github/scripts/model-sweep/supported-models.mdx
|
||||||
|
echo "Model sweep report saved to .github/scripts/model-sweep/supported-models.mdx"
|
||||||
|
|
||||||
|
- id: date
|
||||||
|
run: echo "date=$(date +%Y-%m-%d)" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: commit and open pull request
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
BRANCH_NAME=model-sweep/${{ inputs.branch-name || format('{0}', steps.date.outputs.date) }}
|
||||||
|
gh auth setup-git
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
git checkout -b $BRANCH_NAME
|
||||||
|
git add .github/scripts/model-sweep/supported-models.mdx
|
||||||
|
git commit -m "Update model sweep report"
|
||||||
|
# only push if changes were made
|
||||||
|
if git diff main --quiet; then
|
||||||
|
echo "No changes detected, skipping push"
|
||||||
|
exit 0
|
||||||
|
else
|
||||||
|
git push origin $BRANCH_NAME
|
||||||
|
gh pr create \
|
||||||
|
--base main \
|
||||||
|
--head $BRANCH_NAME \
|
||||||
|
--title "chore: update model sweep report" \
|
||||||
|
--body "Automated PR to update model sweep report"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Upload model sweep report
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: model-sweep-report
|
||||||
|
path: .github/scripts/model-sweep/model_sweep_report.json
|
||||||
29
.github/workflows/notify-on-update.yaml
vendored
Normal file
29
.github/workflows/notify-on-update.yaml
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# Notifies downstream repos (letta-cloud) whenever main is updated, via a
# repository_dispatch event authenticated with a GitHub App token.
name: Notify Submodule Repos

on:
  push:
    branches: [main]
  workflow_dispatch:

jobs:
  notify:
    runs-on: ubuntu-latest
    steps:
      # Mint a short-lived installation token scoped to the letta-cloud repo.
      - name: Generate GitHub App Token
        id: app-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.NOTIFIER_APP_ID }}
          private-key: ${{ secrets.NOTIFIER_PRIVATE_KEY }}
          repositories: letta-cloud

      # Fire a repository_dispatch so letta-cloud can react to the new commit.
      - name: Repository Dispatch
        uses: peter-evans/repository-dispatch@v3.0.0
        with:
          token: ${{ steps.app-token.outputs.token }}
          repository: letta-ai/letta-cloud
          event-type: letta-main-updated
          client-payload: |
            {
              "commit_sha": "${{ github.sha }}",
              "ref": "${{ github.ref }}"
            }
|
||||||
65
.github/workflows/poetry-publish-nightly.yml
vendored
Normal file
65
.github/workflows/poetry-publish-nightly.yml
vendored
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
# Nightly PyPI publish: rewrites the package as "letta-nightly" with a
# timestamped .dev version and publishes with uv.
#
# Fixes:
# - Replaced the deprecated `::set-output` workflow command (disabled by
#   GitHub in 2023; it silently no-ops on current runners) with $GITHUB_OUTPUT.
# - `check-date` computed a `should_run` output that was never consumed, so
#   the nightly published even when no commits landed in the last 24 hours.
#   The build job now gates on it, per the pattern this check was taken from.
name: uv-publish-nightly
on:
  schedule:
    - cron: '35 10 * * *' # 10:35am UTC, 2:35am PST, 5:35am EST
  release:
    types: [published]
  workflow_dispatch:

jobs:
  # nightly release check from https://stackoverflow.com/a/67527144
  check-date:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.should_run.outputs.should_run }}
    steps:
      - uses: actions/checkout@v4
      - name: print latest_commit
        run: echo ${{ github.sha }}
      - id: should_run
        continue-on-error: true
        name: check latest commit is less than a day
        if: ${{ github.event_name == 'schedule' }}
        # Emits should_run=false when no commit landed in the last 24 hours.
        # NOTE(review): checkout@v4 defaults to a depth-1 clone, so rev-list
        # only inspects HEAD — sufficient for this check, but confirm if the
        # window logic is ever extended.
        run: test -z $(git rev-list --after="24 hours" ${{ github.sha }}) && echo "should_run=false" >> "$GITHUB_OUTPUT"

  build-and-publish-nightly:
    name: Build and Publish to PyPI (nightly)
    # Skip forks, and skip scheduled runs when check-date saw no new commits.
    if: github.repository == 'letta-ai/letta' && needs.check-date.outputs.should_run != 'false' # TODO: if the repo org ever changes, this must be updated
    runs-on: ubuntu-latest
    needs: check-date
    steps:
      - name: Check out the repository
        uses: actions/checkout@v4

      - name: Set up python 3.12
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: 3.12

      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          activate-environment: true

      - name: Set release version
        run: |
          # Extract the version number from pyproject.toml using awk
          CURRENT_VERSION=$(awk -F '"' '/version =/ { print $2 }' pyproject.toml | head -n 1)
          # Export the CURRENT_VERSION with the .dev and current date suffix
          NIGHTLY_VERSION="${CURRENT_VERSION}.dev$(date +%Y%m%d%H%M%S)"
          # Overwrite pyproject.toml with nightly config
          sed -i "0,/version = \"${CURRENT_VERSION}\"/s//version = \"${NIGHTLY_VERSION}\"/" pyproject.toml
          sed -i 's/name = "letta"/name = "letta-nightly"/g' pyproject.toml
          sed -i "s/__version__ = '.*'/__version__ = '${NIGHTLY_VERSION}'/g" letta/__init__.py
          cat pyproject.toml
          cat letta/__init__.py

      - name: Build the Python package
        run: uv build

      - name: Publish the package to PyPI
        env:
          UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }}
        run: uv publish
|
||||||
35
.github/workflows/poetry-publish.yml
vendored
Normal file
35
.github/workflows/poetry-publish.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
# Release-triggered PyPI publish of the "letta" package, built and pushed
# with uv. Runs only on the canonical repo to keep forks from publishing.
name: uv-publish
on:
  release:
    types: [published]
  workflow_dispatch:

jobs:
  build-and-publish:
    name: Build and Publish to PyPI
    if: github.repository == 'letta-ai/letta' # TODO: if the repo org ever changes, this must be updated
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repository
        uses: actions/checkout@v4

      - name: Set up python 3.12
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: 3.12

      - name: Install uv
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          activate-environment: true
          cache-dependency-glob: "uv.lock"

      - name: Build the Python package
        run: uv build

      - name: Publish the package to PyPI
        env:
          UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }}
        run: uv publish
|
||||||
477
.github/workflows/reusable-test-workflow.yml
vendored
Normal file
477
.github/workflows/reusable-test-workflow.yml
vendored
Normal file
@@ -0,0 +1,477 @@
|
|||||||
|
# Shared test harness invoked via workflow_call by the per-suite workflows
# (unit, integration, docker, send-message, sqlite). Callers configure the
# suite through the inputs below.
name: Reusable Test Workflow

on:
  workflow_call:
    inputs:
      test-type:
        description: 'Type of tests to run (unit, integration, docker, send-message, sqlite)'
        required: true
        type: string
      core-directory:
        description: 'Working directory for commands. Uses . (root) by default.'
        required: false
        type: string
        default: '.'
      install-args:
        description: 'uv sync arguments'
        required: true
        type: string
      test-command:
        # Fallback command; only used when test-type matches no known suite.
        description: 'Command to run tests'
        required: false
        type: string
        default: 'uv run --frozen pytest -svv'
      test-path-prefix:
        description: 'Prefix for test path (e.g., tests/)'
        required: false
        type: string
        default: 'tests/'
      timeout-minutes:
        description: 'Timeout in minutes'
        required: false
        type: number
        default: 15
      runner:
        # JSON array of runner labels, parsed with fromJSON at each job.
        description: 'Runner to use'
        required: false
        type: string
        default: '["self-hosted", "small"]'
      matrix-strategy:
        description: 'JSON string for matrix strategy'
        required: false
        type: string
        default: '{}'
      changed-files-pattern:
        description: 'Pattern for changed files detection'
        required: false
        type: string
        default: |
          **
          .github/workflows/reusable-test-workflow.yml
      skip-fern-generation:
        description: 'Skip Fern SDK generation'
        required: false
        type: boolean
        default: false
      use-docker:
        description: 'Use Docker for tests'
        required: false
        type: boolean
        default: false
      ref:
        description: 'Git ref to wait for checks on'
        required: false
        type: string
        default: ${{ github.sha }}
      use-redis:
        description: 'Use Redis for tests'
        required: false
        type: boolean
        default: false
      is-external-pr:
        description: 'Whether this is an external PR that needs protection'
        required: false
        type: boolean
        default: false
|
||||||
|
|
||||||
|
jobs:
  # Detect which files changed so downstream jobs can skip when nothing
  # relevant was touched.
  changed-files:
    runs-on: ${{ fromJSON(inputs.runner) }}
    name: changed-files
    outputs:
      all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
      any_changed: ${{ steps.changed-files.outputs.any_changed }}
    steps:
      - uses: actions/checkout@v4
        with:
          repository: ${{ github.event.pull_request.head.repo.full_name }}
          ref: ${{ github.event.pull_request.head.ref }}
          fetch-depth: 0
      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v46.0.4
        with:
          files: ${{ inputs.changed-files-pattern }}

  # Probe whether a prebuilt Fern SDK preview already exists in the cache so
  # the wait-for-preview job can be skipped on a hit.
  cache-check:
    needs: [changed-files]
    runs-on: ${{ fromJSON(inputs.runner) }}
    name: Check cache key
    outputs:
      cache_key: ${{ steps.cache-key.outputs.key }}
      cache_hit: ${{ steps.cache.outputs.cache-hit }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          repository: ${{ github.event.pull_request.head.repo.full_name }}
          ref: ${{ github.event.pull_request.head.ref }}

      - name: Generate cache key
        if: inputs.skip-fern-generation != true || (!contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi.json') && !contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi-overrides.yml'))
        id: cache-key
        run: |
          echo "key=sdk-${{ github.ref_name }}-${{ hashFiles('fern/*', 'pyproject.toml') }}" >> $GITHUB_OUTPUT

      - name: Restore SDK cache
        # skip if "skip-fern-generation" is true or if the upstream workflow would've generated an sdk preview (changes to openapi files)
        if: inputs.skip-fern-generation != true || (!contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi.json') && !contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi-overrides.yml'))
        id: cache
        uses: actions/cache/restore@v4
        with:
          path: |
            fern/.preview/fern-python-sdk/
          key: ${{ steps.cache-key.outputs.key }}
          fail-on-cache-miss: false
|
||||||
|
|
||||||
|
block-until-sdk-preview-finishes:
|
||||||
|
needs: [changed-files, cache-check]
|
||||||
|
if: |
|
||||||
|
needs.cache-check.outputs.cache_hit != 'true'
|
||||||
|
timeout-minutes: ${{ inputs.timeout-minutes }}
|
||||||
|
runs-on: ${{ fromJSON(inputs.runner) }}
|
||||||
|
name: block-until-sdk-preview-finishes
|
||||||
|
steps:
|
||||||
|
- name: Debug ref information
|
||||||
|
run: |
|
||||||
|
echo "Input ref: ${{ inputs.ref }}"
|
||||||
|
echo "GitHub SHA: ${{ github.sha }}"
|
||||||
|
echo "GitHub ref: ${{ github.ref }}"
|
||||||
|
echo "PR head SHA: ${{ github.event.pull_request.head.sha }}"
|
||||||
|
echo "Event name: ${{ github.event_name }}"
|
||||||
|
|
||||||
|
- name: Wait for Preview SDK workflow
|
||||||
|
if: inputs.skip-fern-generation != true || (!contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi.json') && !contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi-overrides.yml'))
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
echo "Waiting for 'preview-python-sdk' check to complete on ref: ${{ inputs.ref }}"
|
||||||
|
|
||||||
|
# Wait for the check to complete with timeout
|
||||||
|
timeout_seconds=1800
|
||||||
|
interval_seconds=60
|
||||||
|
elapsed=0
|
||||||
|
|
||||||
|
while [ $elapsed -lt $timeout_seconds ]; do
|
||||||
|
echo "Checking status... (elapsed: ${elapsed}s)"
|
||||||
|
|
||||||
|
# Get check runs using pr checks syntax with branch name or PR number
|
||||||
|
if [ "${{ github.event_name }}" = "pull_request" ]; then
|
||||||
|
pr_identifier="${{ github.event.pull_request.number }}"
|
||||||
|
else
|
||||||
|
pr_identifier="${{ github.ref_name }}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
check_info=$(gh pr checks "$pr_identifier" -R ${{ github.repository }} --json name,state,startedAt \
|
||||||
|
| jq -r '.[] | select(.name == "preview-python-sdk") | [.startedAt, .state] | @tsv' | sort -r | head -1 | cut -f2)
|
||||||
|
|
||||||
|
if [ -n "$check_info" ]; then
|
||||||
|
echo "Check state: $check_info"
|
||||||
|
|
||||||
|
if [ "$check_info" = "SUCCESS" ] || [ "$check_info" = "SKIPPED" ]; then
|
||||||
|
echo "Check completed with state: $check_info"
|
||||||
|
exit 0
|
||||||
|
elif [ "$check_info" = "FAILURE" ] || [ "$check_info" = "CANCELLED" ]; then
|
||||||
|
echo "❌ Preview Python SDK build failed with state: $check_info"
|
||||||
|
echo "🚫 Blocking dependent test jobs to prevent extraneous failures"
|
||||||
|
echo "📋 To fix: Check the 'preview-python-sdk' job logs for build errors"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "Check 'preview-python-sdk' not found yet"
|
||||||
|
fi
|
||||||
|
|
||||||
|
sleep $interval_seconds
|
||||||
|
elapsed=$((elapsed + interval_seconds))
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "Timeout waiting for check to complete"
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
test-run:
|
||||||
|
needs: [changed-files, block-until-sdk-preview-finishes]
|
||||||
|
if: |
|
||||||
|
always() &&
|
||||||
|
needs.changed-files.outputs.any_changed == 'true' &&
|
||||||
|
(needs.block-until-sdk-preview-finishes.result == 'success' ||
|
||||||
|
needs.block-until-sdk-preview-finishes.result == 'skipped')
|
||||||
|
|
||||||
|
runs-on: ${{ fromJSON(inputs.runner) }}
|
||||||
|
timeout-minutes: ${{ inputs.timeout-minutes }}
|
||||||
|
strategy: ${{ fromJSON(inputs.matrix-strategy) }}
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: pgvector/pgvector:pg17
|
||||||
|
ports:
|
||||||
|
# avoids conflict with docker postgres
|
||||||
|
- ${{ inputs.use-docker && '9999:5432' || '5432:5432' }}
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST_AUTH_METHOD: trust
|
||||||
|
POSTGRES_DB: postgres
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
redis:
|
||||||
|
image: ${{ inputs.use-redis && 'redis:8-alpine' || '' }}
|
||||||
|
options: >-
|
||||||
|
--health-cmd "redis-cli ping"
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 6379:6379
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
repository: ${{ github.event.pull_request.head.repo.full_name }}
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v6
|
||||||
|
with:
|
||||||
|
enable-cache: true
|
||||||
|
|
||||||
|
- name: Set core directory
|
||||||
|
id: detect-core-dir
|
||||||
|
run: |
|
||||||
|
echo "dir=${{ inputs.core-directory }}" >> $GITHUB_OUTPUT
|
||||||
|
echo "detected=manual" >> $GITHUB_OUTPUT
|
||||||
|
echo "Using core directory: $(cat $GITHUB_OUTPUT | grep '^dir=' | cut -d'=' -f2)"
|
||||||
|
|
||||||
|
- name: Generate cache key
|
||||||
|
if: inputs.skip-fern-generation != true || (!contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi.json') && !contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi-overrides.yml'))
|
||||||
|
id: cache-key
|
||||||
|
run: |
|
||||||
|
echo "key=sdk-${{ github.ref_name }}-${{ hashFiles('fern/*', 'pyproject.toml') }}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Restore SDK cache
|
||||||
|
# skip if "skip-fern-generation" is true or if the upstream workflow would've generated an sdk preview (changes to openapi files)
|
||||||
|
if: inputs.skip-fern-generation != true || (!contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi.json') && !contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi-overrides.yml'))
|
||||||
|
id: restore-sdk-cache
|
||||||
|
uses: actions/cache/restore@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
fern/.preview/fern-python-sdk/
|
||||||
|
key: ${{ steps.cache-key.outputs.key }}
|
||||||
|
fail-on-cache-miss: false
|
||||||
|
|
||||||
|
- name: Check SDK cache availability
|
||||||
|
if: (inputs.skip-fern-generation != true || (!contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi.json') && !contains(needs.changed-files.outputs.all_changed_files, 'fern/openapi-overrides.yml'))) && steps.restore-sdk-cache.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "❌ Preview Python SDK cache expired or missing!"
|
||||||
|
echo "📦 Cache key: ${{ steps.cache-key.outputs.key }}"
|
||||||
|
echo "🔄 To fix: Re-run the 'preview-python-sdk' workflow job to regenerate the SDK"
|
||||||
|
echo "💡 This can happen when:"
|
||||||
|
echo " - The cache entry has expired"
|
||||||
|
echo " - Dependencies in fern/* or pyproject.toml have changed"
|
||||||
|
echo " - The preview-python-sdk job hasn't run successfully for this branch/commit"
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
- name: Install dependencies with retry
|
||||||
|
shell: bash
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
uv sync --no-install-project ${{ inputs.install-args }}
|
||||||
|
|
||||||
|
- name: Install custom SDK
|
||||||
|
if: inputs.skip-fern-generation != true
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
echo "Fixing Fern SDK pyproject.toml for uv compatibility..."
|
||||||
|
SDK_PYPROJECT="fern/.preview/fern-python-sdk/pyproject.toml"
|
||||||
|
VERSION=$(grep -A 10 '^\[tool\.poetry\]' "$SDK_PYPROJECT" | grep '^version' | head -1 | cut -d'"' -f2)
|
||||||
|
head -n 2 < fern/.preview/fern-python-sdk/pyproject.toml > fern/.preview/fern-python-sdk/pyproject.toml.tmp
|
||||||
|
echo "version = \"$VERSION\"" >> fern/.preview/fern-python-sdk/pyproject.toml.tmp
|
||||||
|
tail -n +3 fern/.preview/fern-python-sdk/pyproject.toml >> fern/.preview/fern-python-sdk/pyproject.toml.tmp
|
||||||
|
mv fern/.preview/fern-python-sdk/pyproject.toml.tmp fern/.preview/fern-python-sdk/pyproject.toml
|
||||||
|
|
||||||
|
uv pip install -e fern/.preview/fern-python-sdk/.
|
||||||
|
- name: Migrate database
|
||||||
|
if: inputs.use-docker != true && inputs.test-type != 'sqlite'
|
||||||
|
working-directory: .
|
||||||
|
env:
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: postgres
|
||||||
|
LETTA_PG_PASSWORD: postgres
|
||||||
|
LETTA_PG_DB: postgres
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
run: |
|
||||||
|
psql -h localhost -U postgres -d postgres -c 'CREATE EXTENSION vector'
|
||||||
|
uv run alembic upgrade head
|
||||||
|
- name: Inject env vars into environment
|
||||||
|
if: inputs.is-external-pr != true
|
||||||
|
working-directory: .
|
||||||
|
run: |
|
||||||
|
# Get secrets and mask them before adding to environment
|
||||||
|
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
if [[ -n "$line" ]]; then
|
||||||
|
value=$(echo "$line" | cut -d= -f2-)
|
||||||
|
echo "::add-mask::$value"
|
||||||
|
echo "$line" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
done < <(letta_secrets_helper --env dev --service ci)
|
||||||
|
|
||||||
|
- name: Docker setup for Docker tests
|
||||||
|
if: inputs.use-docker
|
||||||
|
run: |
|
||||||
|
mkdir -p /home/ci-runner/.letta/logs
|
||||||
|
sudo chown -R $USER:$USER /home/ci-runner/.letta/logs
|
||||||
|
chmod -R 755 /home/ci-runner/.letta/logs
|
||||||
|
|
||||||
|
- name: Build and run docker dev server
|
||||||
|
if: inputs.use-docker && inputs.is-external-pr != true
|
||||||
|
env:
|
||||||
|
LETTA_PG_DB: letta
|
||||||
|
LETTA_PG_USER: letta
|
||||||
|
LETTA_PG_PASSWORD: letta
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
OPENAI_API_KEY: ${{ env.OPENAI_API_KEY }}
|
||||||
|
run: |
|
||||||
|
cd libs/config-core-deploy
|
||||||
|
docker compose -f compose.yaml up --build -d
|
||||||
|
|
||||||
|
- name: Wait for Docker service
|
||||||
|
if: inputs.use-docker
|
||||||
|
working-directory: ${{ steps.detect-core-dir.outputs.dir }}
|
||||||
|
run: |
|
||||||
|
bash scripts/wait_for_service.sh localhost:8083 -- echo "Service is ready"
|
||||||
|
|
||||||
|
- name: Run tests
|
||||||
|
if: inputs.is-external-pr != true
|
||||||
|
working-directory: ${{ steps.detect-core-dir.outputs.dir }}
|
||||||
|
env:
|
||||||
|
# Database configuration (shared, but values depend on Docker usage)
|
||||||
|
LETTA_PG_PORT: 5432
|
||||||
|
LETTA_PG_USER: ${{ inputs.use-docker && 'letta' || 'postgres' }}
|
||||||
|
LETTA_PG_PASSWORD: ${{ inputs.use-docker && 'letta' || 'postgres' }}
|
||||||
|
LETTA_PG_DB: ${{ inputs.use-docker && 'letta' || 'postgres' }}
|
||||||
|
LETTA_PG_HOST: localhost
|
||||||
|
|
||||||
|
# Server configuration (conditional)
|
||||||
|
LETTA_SERVER_PASS: test_server_token
|
||||||
|
|
||||||
|
# LLM Provider API Keys (shared across all test types)
|
||||||
|
OPENAI_API_KEY: ${{ env.OPENAI_API_KEY }}
|
||||||
|
ANTHROPIC_API_KEY: ${{ env.ANTHROPIC_API_KEY }}
|
||||||
|
GEMINI_API_KEY: ${{ env.GEMINI_API_KEY }}
|
||||||
|
GROQ_API_KEY: ${{ env.GROQ_API_KEY }}
|
||||||
|
AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
|
||||||
|
AZURE_BASE_URL: ${{ secrets.AZURE_BASE_URL }}
|
||||||
|
DEEPSEEK_API_KEY: ${{ env.DEEPSEEK_API_KEY }}
|
||||||
|
LETTA_MISTRAL_API_KEY: ${{ secrets.LETTA_MISTRAL_API_KEY }}
|
||||||
|
|
||||||
|
# External service API Keys (shared across all test types)
|
||||||
|
COMPOSIO_API_KEY: ${{ env.COMPOSIO_API_KEY }}
|
||||||
|
E2B_API_KEY: ${{ env.E2B_API_KEY }}
|
||||||
|
E2B_SANDBOX_TEMPLATE_ID: ${{ env.E2B_SANDBOX_TEMPLATE_ID }}
|
||||||
|
|
||||||
|
# Turbopuffer flags
|
||||||
|
LETTA_USE_TPUF: true
|
||||||
|
LETTA_TPUF_API_KEY: ${{ env.LETTA_TPUF_API_KEY }}
|
||||||
|
|
||||||
|
# Encryption key
|
||||||
|
LETTA_ENCRYPTION_KEY: ${{ env.LETTA_ENCRYPTION_KEY }}
|
||||||
|
|
||||||
|
# Google Cloud (shared across all test types)
|
||||||
|
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
|
||||||
|
GOOGLE_CLOUD_LOCATION: ${{ secrets.GOOGLE_CLOUD_LOCATION }}
|
||||||
|
|
||||||
|
# Feature flags (shared across all test types)
|
||||||
|
LETTA_ENABLE_BATCH_JOB_POLLING: true
|
||||||
|
|
||||||
|
# Gemini flags
|
||||||
|
GEMINI_FORCE_MINIMUM_THINKING_BUDGET: true
|
||||||
|
GEMINI_MAX_RETRIES: 10
|
||||||
|
|
||||||
|
# Pinecone flags
|
||||||
|
LETTA_PINECONE_API_KEY: ${{ secrets.LETTA_PINECONE_API_KEY }}
|
||||||
|
LETTA_ENABLE_PINECONE: true
|
||||||
|
|
||||||
|
EXA_API_KEY: ${{ env.EXA_API_KEY }}
|
||||||
|
|
||||||
|
# Docker-specific environment variables
|
||||||
|
PYTHONPATH: ${{ inputs.use-docker && format('{0}:{1}', github.workspace, env.PYTHONPATH) || '' }}
|
||||||
|
|
||||||
|
LETTA_REDIS_HOST: localhost
|
||||||
|
run: |
|
||||||
|
set -o xtrace
|
||||||
|
|
||||||
|
# Set LETTA_SERVER_URL only for Docker tests
|
||||||
|
if [[ "${{ inputs.use-docker }}" == "true" ]]; then
|
||||||
|
export LETTA_SERVER_URL="http://localhost:8083"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Set LLM_CONFIG_FILE only for send-message tests
|
||||||
|
if [[ "${{ inputs.test-type }}" == "send-message" ]]; then
|
||||||
|
export LLM_CONFIG_FILE="${{ matrix.config_file }}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Set Ollama base URL only for Ollama tests
|
||||||
|
if [[ "${{ inputs.test-type }}" == "integration" && "${{ inputs.runner }}" == *"ollama"* ]]; then
|
||||||
|
export LLM_CONFIG_FILE="ollama.json"
|
||||||
|
export OLLAMA_BASE_URL="http://localhost:11434"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Set LMStudio base URL only for LMStudio tests
|
||||||
|
if [[ "${{ inputs.test-type }}" == "integration" && "${{ inputs.runner }}" == *"lmstudio"* ]]; then
|
||||||
|
export LLM_CONFIG_FILE="lmstudio.json"
|
||||||
|
export LMSTUDIO_BASE_URL="http://localhost:1234"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Set VLLM base URL only for VLLM tests
|
||||||
|
if [[ "${{ inputs.test-type }}" == "integration" && "${{ inputs.runner }}" == *"vllm"* ]]; then
|
||||||
|
export LLM_CONFIG_FILE="vllm.json"
|
||||||
|
export VLLM_BASE_URL="http://localhost:8000"
|
||||||
|
fi
|
||||||
|
|
||||||
|
uv pip install pytest-github-actions-annotate-failures
|
||||||
|
|
||||||
|
# Handle different matrix variable names and test commands based on test type
|
||||||
|
if [[ "${{ inputs.test-type }}" == "integration" ]]; then
|
||||||
|
uv pip install letta
|
||||||
|
uv pip show letta
|
||||||
|
uv pip show letta-client
|
||||||
|
uv run --frozen pytest -svv ${{ inputs.test-path-prefix }}${{ matrix.test_suite }}
|
||||||
|
elif [[ "${{ inputs.test-type }}" == "unit" ]]; then
|
||||||
|
uv pip show letta-client
|
||||||
|
uv run --frozen pytest -svv ${{ inputs.test-path-prefix }}${{ matrix.test_suite }}
|
||||||
|
elif [[ "${{ inputs.test-type }}" == "send-message" ]]; then
|
||||||
|
uv run --frozen pytest -s -vv tests/integration_test_send_message.py --maxfail=1 --durations=10
|
||||||
|
elif [[ "${{ inputs.test-type }}" == "docker" ]]; then
|
||||||
|
uv run --frozen pytest -s tests/test_client.py
|
||||||
|
elif [[ "${{ inputs.test-type }}" == "sqlite" ]]; then
|
||||||
|
# force sqlite
|
||||||
|
unset LETTA_PG_USER
|
||||||
|
unset LETTA_PG_PASSWORD
|
||||||
|
unset LETTA_PG_DB
|
||||||
|
unset LETTA_PG_HOST
|
||||||
|
uv pip show letta-client
|
||||||
|
uv run alembic upgrade head
|
||||||
|
uv run --frozen pytest -svv ${{ inputs.test-path-prefix }}${{ matrix.test_suite }}
|
||||||
|
else
|
||||||
|
${{ inputs.test-command }}
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Remove sqlite db
|
||||||
|
if: ${{ always() && inputs.test-type == 'sqlite' }}
|
||||||
|
run: sudo rm -rf ~/.letta || true
|
||||||
|
|
||||||
|
- name: Print docker logs if tests fail
|
||||||
|
if: ${{ (failure() || cancelled()) && inputs.use-docker }}
|
||||||
|
working-directory: libs/config-core-deploy
|
||||||
|
run: |
|
||||||
|
echo "Printing Docker Logs..."
|
||||||
|
docker compose -f compose.yaml logs
|
||||||
|
|
||||||
|
- name: Stop docker
|
||||||
|
if: ${{ always() && inputs.use-docker }}
|
||||||
|
working-directory: libs/config-core-deploy
|
||||||
|
run: |
|
||||||
|
docker compose -f compose.yaml down --volumes
|
||||||
|
sudo rm -rf .persist
|
||||||
49
.github/workflows/send-message-integration-tests.yml
vendored
Normal file
49
.github/workflows/send-message-integration-tests.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
# Runs the send-message integration suite across a matrix of LLM configs via
# the shared reusable-test-workflow. External PRs run only after being
# labeled "safe to test".
name: 🐍🧪 [Core] Send Message SDK Tests

on:
  pull_request:
    branches:
      - main
  pull_request_target:
    branches:
      - main
    types: [labeled]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}

jobs:
  send-message-tests:
    # Run on pull_request OR on pull_request_target only when labeled "safe to test"
    if: github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
    uses: ./.github/workflows/reusable-test-workflow.yml
    with:
      test-type: 'send-message'
      is-external-pr: ${{ github.event_name == 'pull_request_target' && !contains(github.event.pull_request.labels.*.name, 'safe to test') }}
      changed-files-pattern: |
        **
        .github/workflows/reusable-test-workflow.yml
        .github/workflows/send-message-integration-tests.yml
      install-args: '--extra dev --extra postgres --extra external-tools --extra cloud-tool-sandbox --extra google --extra redis'
      timeout-minutes: 15
      runner: '["self-hosted", "medium"]'
      ref: ${{ github.event.pull_request.head.sha || github.sha }}
      use-redis: true
      # TODO: "azure-gpt-4o-mini.json" add back later, getting content violation
      matrix-strategy: |
        {
          "fail-fast": false,
          "matrix": {
            "config_file": [
              "openai-gpt-4o-mini.json",
              "claude-4-sonnet-extended.json",
              "claude-3-5-sonnet.json",
              "claude-3-7-sonnet-extended.json",
              "gemini-1.5-pro.json",
              "gemini-2.5-pro.json",
              "gemini-2.5-flash.json"
            ]
          }
        }
    secrets: inherit
|
||||||
48
.github/workflows/test-lmstudio.yml
vendored
Normal file
48
.github/workflows/test-lmstudio.yml
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
# LMStudio provider integration tests on a self-hosted GPU runner, delegated
# to the shared reusable-test-workflow.
#
# Fix: test-command now uses `uv run --frozen pytest` for consistency with
# test-ollama.yml and the reusable workflow's default — without --frozen a
# CI run could silently re-resolve the lockfile.
name: Self-Hosted Provider Integration - LMStudio

on:
  workflow_dispatch:
  # inputs:
  #   ref:
  #     description: 'Git ref to test'
  #     required: false
  #     type: string
  #     default: ${{ github.sha || github.ref || github.event.pull_request.head.sha }}
  pull_request:
    paths:
      - '**'
      - '.github/workflows/test-lmstudio.yml'
      - '.github/workflows/reusable-test-workflow.yml'
  pull_request_target:
    types: [labeled]
    paths:
      - '**'
      - '.github/workflows/test-lmstudio.yml'
      - '.github/workflows/reusable-test-workflow.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}

jobs:
  test-lmstudio:
    # Run on pull_request OR on pull_request_target only when labeled "safe to test"
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
    uses: ./.github/workflows/reusable-test-workflow.yml
    with:
      test-type: "integration"
      is-external-pr: ${{ github.event_name == 'pull_request_target' && !contains(github.event.pull_request.labels.*.name, 'safe to test') }}
      install-args: "--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox --extra google"
      test-command: "uv run --frozen pytest -svv tests/"
      timeout-minutes: 60
      runner: '["self-hosted", "gpu", "lmstudio"]'
      matrix-strategy: |
        {
          "fail-fast": false,
          "matrix": {
            "test_suite": [
              "integration_test_send_message.py"
            ]
          }
        }
    secrets: inherit
|
||||||
49
.github/workflows/test-ollama.yml
vendored
Normal file
49
.github/workflows/test-ollama.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
# Ollama provider integration tests on a self-hosted GPU runner, delegated
# to the shared reusable-test-workflow. External PRs run only after being
# labeled "safe to test".
name: Self-Hosted Provider Integration - Ollama

on:
  workflow_dispatch:
  # inputs:
  #   ref:
  #     description: 'Git ref to test'
  #     required: false
  #     type: string
  #     default: ${{ github.sha || github.ref || github.event.pull_request.head.sha }}
  pull_request:
    paths:
      - '**'
      - '.github/workflows/test-ollama.yml'
      - '.github/workflows/reusable-test-workflow.yml'
  pull_request_target:
    types: [labeled]
    paths:
      - '**'
      - '.github/workflows/test-ollama.yml'
      - '.github/workflows/reusable-test-workflow.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}

jobs:
  test-ollama:
    # Run on pull_request OR on pull_request_target only when labeled "safe to test"
    if: github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
    uses: ./.github/workflows/reusable-test-workflow.yml
    with:
      test-type: "integration"
      is-external-pr: ${{ github.event_name == 'pull_request_target' && !contains(github.event.pull_request.labels.*.name, 'safe to test') }}
      install-args: "--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox --extra google"
      test-command: "uv run --frozen pytest -svv tests/"
      timeout-minutes: 60
      runner: '["self-hosted", "gpu", "ollama"]'
      matrix-strategy: |
        {
          "fail-fast": false,
          "matrix": {
            "test_suite": [
              "test_providers.py::test_ollama",
              "integration_test_send_message.py"
            ]
          }
        }
    secrets: inherit
|
||||||
23
.github/workflows/test-pip-install.yml
vendored
Normal file
23
.github/workflows/test-pip-install.yml
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
name: Test Package Installation
|
||||||
|
|
||||||
|
on: [push, pull_request, workflow_dispatch]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test-install:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: ["3.11", "3.12", "3.13"] # Adjust Python versions as needed
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
|
- name: Install package with extras
|
||||||
|
run: pip install '.[external-tools,postgres,dev,server,ollama]' # Replace 'all' with the key that includes all extras
|
||||||
|
|
||||||
|
- name: Check package installation
|
||||||
|
run: pip list # Or any other command to verify successful installation
|
||||||
45
.github/workflows/test-vllm.yml
vendored
Normal file
45
.github/workflows/test-vllm.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
name: Self-Hosted Provider Integration - vLLM
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
# inputs:
|
||||||
|
# ref:
|
||||||
|
# description: 'Git ref to test'
|
||||||
|
# required: false
|
||||||
|
# type: string
|
||||||
|
# default: ${{ github.sha || github.ref || github.event.pull_request.head.sha }}
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- '**'
|
||||||
|
- '.github/workflows/test-vllm.yml'
|
||||||
|
- '.github/workflows/reusable-test-workflow.yml'
|
||||||
|
pull_request_target:
|
||||||
|
types: [labeled]
|
||||||
|
paths:
|
||||||
|
- '**'
|
||||||
|
- '.github/workflows/test-vllm.yml'
|
||||||
|
- '.github/workflows/reusable-test-workflow.yml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test-vllm:
|
||||||
|
# Run on pull_request OR on pull_request_target only when labeled "safe to test"
|
||||||
|
if: github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request' || (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe to test'))
|
||||||
|
uses: ./.github/workflows/reusable-test-workflow.yml
|
||||||
|
with:
|
||||||
|
test-type: "integration"
|
||||||
|
is-external-pr: ${{ github.event_name == 'pull_request_target' && !contains(github.event.pull_request.labels.*.name, 'safe to test') }}
|
||||||
|
install-args: "--extra postgres --extra external-tools --extra dev --extra cloud-tool-sandbox --extra google"
|
||||||
|
test-command: "uv run --frozen pytest -svv tests/"
|
||||||
|
timeout-minutes: 60
|
||||||
|
runner: '["self-hosted", "gpu", "vllm"]'
|
||||||
|
matrix-strategy: |
|
||||||
|
{
|
||||||
|
"fail-fast": false,
|
||||||
|
"matrix": {
|
||||||
|
"test_suite": [
|
||||||
|
"test_providers.py::test_vllm",
|
||||||
|
"integration_test_send_message.py"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
secrets: inherit
|
||||||
63
.github/workflows/warn_poetry_updates.yml
vendored
Normal file
63
.github/workflows/warn_poetry_updates.yml
vendored
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
name: Check uv Dependencies Changes
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'uv.lock'
|
||||||
|
- 'pyproject.toml'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-uv-changes:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Check for uv.lock changes
|
||||||
|
id: check-uv-lock
|
||||||
|
run: |
|
||||||
|
if git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | grep -q "uv.lock"; then
|
||||||
|
echo "uv_lock_changed=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "uv_lock_changed=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Check for pyproject.toml changes
|
||||||
|
id: check-pyproject
|
||||||
|
run: |
|
||||||
|
if git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | grep -q "pyproject.toml"; then
|
||||||
|
echo "pyproject_changed=true" >> $GITHUB_OUTPUT
|
||||||
|
else
|
||||||
|
echo "pyproject_changed=false" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create PR comment
|
||||||
|
if: steps.check-uv-lock.outputs.uv_lock_changed == 'true' || steps.check-pyproject.outputs.pyproject_changed == 'true'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const uvLockChanged = ${{ steps.check-uv-lock.outputs.uv_lock_changed }};
|
||||||
|
const pyprojectChanged = ${{ steps.check-pyproject.outputs.pyproject_changed }};
|
||||||
|
|
||||||
|
let message = '📦 Dependencies Alert:\n\n';
|
||||||
|
|
||||||
|
if (uvLockChanged && pyprojectChanged) {
|
||||||
|
message += '- Both `uv.lock` and `pyproject.toml` have been modified\n';
|
||||||
|
} else if (uvLockChanged) {
|
||||||
|
message += '- `uv.lock` has been modified\n';
|
||||||
|
} else if (pyprojectChanged) {
|
||||||
|
message += '- `pyproject.toml` has been modified\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
message += '\nPlease review these changes carefully to ensure they are intended (cc @sarahwooders @cpacker).';
|
||||||
|
|
||||||
|
github.rest.issues.createComment({
|
||||||
|
issue_number: context.issue.number,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body: message
|
||||||
|
});
|
||||||
1006
.gitignore
vendored
Normal file
1006
.gitignore
vendored
Normal file
File diff suppressed because it is too large
Load Diff
25
.pre-commit-config.yaml
Normal file
25
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
repos:
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v2.3.0
|
||||||
|
hooks:
|
||||||
|
- id: check-yaml
|
||||||
|
exclude: 'docs/.*|tests/data/.*|configs/.*|helm/.*'
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
exclude: 'docs/.*|tests/data/.*|letta/server/static_files/.*|.*/.*\.(scss|css|html)'
|
||||||
|
- id: trailing-whitespace
|
||||||
|
exclude: 'docs/.*|tests/data/.*|letta/server/static_files/.*'
|
||||||
|
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: trufflehog
|
||||||
|
name: TruffleHog
|
||||||
|
entry: bash -c 'trufflehog git file://. --since-commit HEAD --results=verified,unknown --fail --no-update'
|
||||||
|
language: system
|
||||||
|
stages: ["pre-commit", "pre-push"]
|
||||||
|
|
||||||
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
|
rev: v0.12.11
|
||||||
|
hooks:
|
||||||
|
- id: ruff-check
|
||||||
|
args: [ --fix ]
|
||||||
|
- id: ruff-format
|
||||||
25
CITATION.cff
Normal file
25
CITATION.cff
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
cff-version: 1.2.0
|
||||||
|
message: "If you use this software, please cite it as below."
|
||||||
|
title: "Letta"
|
||||||
|
url: "https://github.com/letta-ai/letta"
|
||||||
|
preferred-citation:
|
||||||
|
type: article
|
||||||
|
authors:
|
||||||
|
- family-names: "Packer"
|
||||||
|
given-names: "Charles"
|
||||||
|
- family-names: "Wooders"
|
||||||
|
given-names: "Sarah"
|
||||||
|
- family-names: "Lin"
|
||||||
|
given-names: "Kevin"
|
||||||
|
- family-names: "Fang"
|
||||||
|
given-names: "Vivian"
|
||||||
|
- family-names: "Patil"
|
||||||
|
given-names: "Shishir G"
|
||||||
|
- family-names: "Stoica"
|
||||||
|
given-names: "Ion"
|
||||||
|
- family-names: "Gonzalez"
|
||||||
|
given-names: "Joseph E"
|
||||||
|
journal: "arXiv preprint arXiv:2310.08560"
|
||||||
|
month: 10
|
||||||
|
title: "MemGPT: Towards LLMs as Operating Systems"
|
||||||
|
year: 2023
|
||||||
160
CONTRIBUTING.md
Normal file
160
CONTRIBUTING.md
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
# 🚀 How to Contribute to Letta
|
||||||
|
|
||||||
|
Thank you for investing time in contributing to our project! Here's a guide to get you started.
|
||||||
|
|
||||||
|
## 1. 🚀 Getting Started
|
||||||
|
|
||||||
|
### 🍴 Fork the Repository
|
||||||
|
|
||||||
|
First things first, let's get you a personal copy of Letta to play with. Think of it as your very own playground. 🎪
|
||||||
|
|
||||||
|
1. Head over to the Letta repository on GitHub.
|
||||||
|
2. In the upper-right corner, hit the 'Fork' button.
|
||||||
|
|
||||||
|
### 🚀 Clone the Repository
|
||||||
|
|
||||||
|
Now, let's bring your new playground to your local machine.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
git clone https://github.com/your-username/letta.git
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🧩 Install dependencies & configure environment
|
||||||
|
|
||||||
|
#### Install uv and dependencies
|
||||||
|
|
||||||
|
First, install uv using [the official instructions here](https://docs.astral.sh/uv/getting-started/installation/).
|
||||||
|
|
||||||
|
Once uv is installed, navigate to the letta directory and install the Letta project with uv:
|
||||||
|
```shell
|
||||||
|
cd letta
|
||||||
|
eval $(uv env activate)
|
||||||
|
uv sync --all-extras
|
||||||
|
```
|
||||||
|
#### Setup PostgreSQL environment (optional)
|
||||||
|
|
||||||
|
If you are planning to develop letta connected to PostgreSQL database, you need to take the following actions.
|
||||||
|
If you are not planning to use PostgreSQL database, you can skip to the step which talks about [running letta](#running-letta-with-uv).
|
||||||
|
|
||||||
|
Assuming you have a running PostgreSQL instance, first you need to create the user, database and ensure the pgvector
|
||||||
|
extension is ready. Here are sample steps for a case where user and database name is letta and assumes no password is set:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
createuser letta
|
||||||
|
createdb letta --owner=letta
|
||||||
|
psql -d letta -c 'CREATE EXTENSION IF NOT EXISTS vector'
|
||||||
|
```
|
||||||
|
Setup the environment variable to tell letta code to contact PostgreSQL database:
|
||||||
|
```shell
|
||||||
|
export LETTA_PG_URI="postgresql://${POSTGRES_USER:-letta}:${POSTGRES_PASSWORD:-letta}@localhost:5432/${POSTGRES_DB:-letta}"
|
||||||
|
```
|
||||||
|
|
||||||
|
After this you need to prep the database with initial content. You can use alembic upgrade to populate the initial
|
||||||
|
contents from template test data.
|
||||||
|
```shell
|
||||||
|
uv run alembic upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Running letta with uv
|
||||||
|
|
||||||
|
Now when you want to use `letta`, you can use `uv run` to run any letta command:
|
||||||
|
```shell
|
||||||
|
uv run letta run
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Installing pre-commit
|
||||||
|
We recommend installing pre-commit to ensure proper formatting during development:
|
||||||
|
```
|
||||||
|
uv run pre-commit install
|
||||||
|
uv run pre-commit run --all-files
|
||||||
|
```
|
||||||
|
If you don't install pre-commit, you will need to run `uv run black .` before submitting a PR.
|
||||||
|
|
||||||
|
## 2. 🛠️ Making Changes
|
||||||
|
|
||||||
|
### 🌟 Create a Branch
|
||||||
|
|
||||||
|
Time to put on your creative hat and make some magic happen. First, let's create a new branch for your awesome changes. 🧙♂️
|
||||||
|
|
||||||
|
```shell
|
||||||
|
git checkout -b feature/your-feature
|
||||||
|
```
|
||||||
|
|
||||||
|
### ✏️ Make your Changes
|
||||||
|
|
||||||
|
Now, the world is your oyster! Go ahead and craft your fabulous changes. 🎨
|
||||||
|
|
||||||
|
|
||||||
|
#### Handling Database Migrations
|
||||||
|
If you are running Letta for the first time, your database will be automatically be setup. If you are updating Letta, you may need to run migrations. To run migrations, use the following command:
|
||||||
|
```shell
|
||||||
|
uv run alembic upgrade head
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Creating a new Database Migration
|
||||||
|
If you have made changes to the database models, you will need to create a new migration. To create a new migration, use the following command:
|
||||||
|
```shell
|
||||||
|
uv run alembic revision --autogenerate -m "Your migration message here"
|
||||||
|
```
|
||||||
|
|
||||||
|
Visit the [Alembic documentation](https://alembic.sqlalchemy.org/en/latest/tutorial.html) for more information on creating and running migrations.
|
||||||
|
|
||||||
|
## 3. ✅ Testing
|
||||||
|
|
||||||
|
Before we hit the 'Wow, I'm Done' button, let's make sure everything works as expected. Run tests and make sure the existing ones don't throw a fit. And if needed, create new tests. 🕵️
|
||||||
|
|
||||||
|
### Run existing tests
|
||||||
|
|
||||||
|
Running tests:
|
||||||
|
```
|
||||||
|
uv run pytest -s tests
|
||||||
|
```
|
||||||
|
|
||||||
|
Running tests if you installed via pip:
|
||||||
|
```
|
||||||
|
pytest -s tests
|
||||||
|
```
|
||||||
|
|
||||||
|
### Creating new tests
|
||||||
|
If you added a major feature change, please add new tests in the `tests/` directory.
|
||||||
|
|
||||||
|
## 4. 🧩 Adding new dependencies
|
||||||
|
If you need to add a new dependency to Letta, please add the package via `uv add <PACKAGE_NAME>`. This will update the `pyproject.toml` and `uv.lock` files. If the dependency does not need to be installed by all users, make sure to mark the dependency as optional in the `pyproject.toml` file and if needed, create a new extra under `[project.optional-dependencies]`.
|
||||||
|
|
||||||
|
## 5. 🚀 Submitting Changes
|
||||||
|
|
||||||
|
### Check Formatting
|
||||||
|
Please ensure your code is formatted correctly by running:
|
||||||
|
```
|
||||||
|
uv run black . -l 140
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🚀 Create a Pull Request
|
||||||
|
|
||||||
|
You're almost there! It's time to share your brilliance with the world. 🌍
|
||||||
|
|
||||||
|
1. Visit [Letta](https://github.com/letta-ai/letta).
|
||||||
|
2. Click "New Pull Request" button.
|
||||||
|
3. Choose the base branch (`main`) and the compare branch (your feature branch).
|
||||||
|
4. Whip up a catchy title and describe your changes in the description. 🪄
|
||||||
|
|
||||||
|
## 6. 🔍 Review and Approval
|
||||||
|
|
||||||
|
The maintainers will take a look and might suggest some cool upgrades or ask for more details. Once they give the thumbs up, your creation becomes part of Letta!
|
||||||
|
|
||||||
|
## 7. 📜 Code of Conduct
|
||||||
|
|
||||||
|
Please be sure to follow the project's Code of Conduct.
|
||||||
|
|
||||||
|
## 8. 📫 Contact
|
||||||
|
|
||||||
|
Need help or just want to say hi? We're here for you. Reach out through filing an issue on this GitHub repository or message us on our [Discord server](https://discord.gg/9GEQrxmVyE).
|
||||||
|
|
||||||
|
Thanks for making Letta even more fantastic!
|
||||||
|
|
||||||
|
## WIP - 🐋 Docker Development
|
||||||
|
If you prefer to keep your resources isolated by developing purely in containers, you can start Letta in development with:
|
||||||
|
```shell
|
||||||
|
docker compose -f compose.yaml -f development.compose.yml up
|
||||||
|
```
|
||||||
|
This will volume mount your local codebase and reload the server on file changes.
|
||||||
89
Dockerfile
Normal file
89
Dockerfile
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
# Start with pgvector base for builder
|
||||||
|
FROM ankane/pgvector:v0.5.1 AS builder
|
||||||
|
|
||||||
|
# Install Python and required packages
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
python3 \
|
||||||
|
python3-venv \
|
||||||
|
python3-full \
|
||||||
|
build-essential \
|
||||||
|
libpq-dev \
|
||||||
|
python3-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
ARG LETTA_ENVIRONMENT=DEV
|
||||||
|
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT} \
|
||||||
|
UV_NO_PROGRESS=1 \
|
||||||
|
UV_PYTHON_PREFERENCE=system \
|
||||||
|
UV_CACHE_DIR=/tmp/uv_cache
|
||||||
|
|
||||||
|
# Set for other builds
|
||||||
|
ARG LETTA_VERSION
|
||||||
|
ENV LETTA_VERSION=${LETTA_VERSION}
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Create and activate virtual environment
|
||||||
|
RUN python3 -m venv /opt/venv
|
||||||
|
ENV PATH="/opt/venv/bin:$PATH"
|
||||||
|
|
||||||
|
# Now install uv and uvx in the virtual environment
|
||||||
|
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /usr/local/bin/
|
||||||
|
|
||||||
|
|
||||||
|
# Copy dependency files first
|
||||||
|
COPY pyproject.toml uv.lock ./
|
||||||
|
# Then copy the rest of the application code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
RUN uv sync --frozen --no-dev --all-extras --python 3.11
|
||||||
|
|
||||||
|
# Runtime stage
|
||||||
|
FROM ankane/pgvector:v0.5.1 AS runtime
|
||||||
|
|
||||||
|
# Overridable Node.js version with --build-arg NODE_VERSION
|
||||||
|
ARG NODE_VERSION=22
|
||||||
|
|
||||||
|
RUN apt-get update && \
|
||||||
|
# Install curl, Python, and PostgreSQL client libraries
|
||||||
|
apt-get install -y curl python3 python3-venv libpq-dev && \
|
||||||
|
# Install Node.js
|
||||||
|
curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash - && \
|
||||||
|
apt-get install -y nodejs && \
|
||||||
|
# Install OpenTelemetry Collector
|
||||||
|
curl -L https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.96.0/otelcol-contrib_0.96.0_linux_amd64.tar.gz -o /tmp/otel-collector.tar.gz && \
|
||||||
|
tar xzf /tmp/otel-collector.tar.gz -C /usr/local/bin && \
|
||||||
|
rm /tmp/otel-collector.tar.gz && \
|
||||||
|
mkdir -p /etc/otel && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Add OpenTelemetry Collector configs
|
||||||
|
COPY otel/otel-collector-config-file.yaml /etc/otel/config-file.yaml
|
||||||
|
COPY otel/otel-collector-config-clickhouse.yaml /etc/otel/config-clickhouse.yaml
|
||||||
|
COPY otel/otel-collector-config-signoz.yaml /etc/otel/config-signoz.yaml
|
||||||
|
|
||||||
|
ARG LETTA_ENVIRONMENT=DEV
|
||||||
|
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT} \
|
||||||
|
VIRTUAL_ENV="/app/.venv" \
|
||||||
|
PATH="/app/.venv/bin:$PATH" \
|
||||||
|
POSTGRES_USER=letta \
|
||||||
|
POSTGRES_PASSWORD=letta \
|
||||||
|
POSTGRES_DB=letta \
|
||||||
|
COMPOSIO_DISABLE_VERSION_CHECK=true
|
||||||
|
|
||||||
|
ARG LETTA_VERSION
|
||||||
|
ENV LETTA_VERSION=${LETTA_VERSION}
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy virtual environment and app from builder
|
||||||
|
COPY --from=builder /app .
|
||||||
|
|
||||||
|
# Copy initialization SQL if it exists
|
||||||
|
COPY init.sql /docker-entrypoint-initdb.d/
|
||||||
|
|
||||||
|
EXPOSE 8283 5432 4317 4318
|
||||||
|
|
||||||
|
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||||
|
CMD ["./letta/server/startup.sh"]
|
||||||
190
LICENSE
Normal file
190
LICENSE
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
Copyright 2023, Letta authors
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
206
PRIVACY.md
Normal file
206
PRIVACY.md
Normal file
@@ -0,0 +1,206 @@
|
|||||||
|
Privacy Policy
|
||||||
|
==============
|
||||||
|
|
||||||
|
Your privacy is critically important to us. As an overview:
|
||||||
|
|
||||||
|
- When you use Letta applications/services/websites, we collect basic (anonymous) telemetry data such as clicks, crashes, etc.
|
||||||
|
- This data helps us understand how our users are using the Letta application(s) and it informs our roadmap of future features and bugfixes.
|
||||||
|
- If you would like to opt-out of basic telemetry, you can modify your configuration file to include `telemetry_disabled = True`.
|
||||||
|
- When you use Letta hosted services (such as the hosted endpoints or Discord Bot), we collect the data that was used to render these services.
|
||||||
|
- For example, for the hosted endpoint, this includes the message request and message response.
|
||||||
|
- We may use this data to improve our services, for example to train new models in the future.
|
||||||
|
- We do NOT collect data on any of your messages or prompts unless you are using our hosted services (for example, if you are running your own model backends, this data will never be collected).
|
||||||
|
|
||||||
|
Below is our full Privacy Policy, which expands the overview in full detail.
|
||||||
|
|
||||||
|
### What This Policy Covers
|
||||||
|
|
||||||
|
This Privacy Policy applies to information that we collect about you when you use:
|
||||||
|
|
||||||
|
- Our websites (including letta.ai, the Letta Discord server, and the repository github.com/cpacker/Letta);
|
||||||
|
- Our applications (including the Python package, Discord Bot, and any other hosted services);
|
||||||
|
- Our other Letta products, services, and features that are available on or through our websites;
|
||||||
|
|
||||||
|
Throughout this Privacy Policy we'll refer to our websites, mobile applications, and other products and services collectively as "Services."
|
||||||
|
|
||||||
|
Below we explain how we collect, use, and share information about you, along with the choices that you have with respect to that information.
|
||||||
|
|
||||||
|
### Information We Collect
|
||||||
|
|
||||||
|
We only collect information about you if we have a reason to do so — for example, to provide our Services, to communicate with you, or to make our Services better.
|
||||||
|
|
||||||
|
We collect this information from three sources: if and when you provide information to us, automatically through operating our Services, and from outside sources. Let's go over the information that we collect.
|
||||||
|
|
||||||
|
#### *Information You Provide to Us*
|
||||||
|
|
||||||
|
It's probably no surprise that we collect information that you provide to us directly. Here are some examples:
|
||||||
|
|
||||||
|
- **Basic account information:** We ask for basic information from you in order to set up your account.
|
||||||
|
- **Public profile information:** If you have an account with us, we collect the information that you provide for your public profile.
|
||||||
|
- **Credentials:** Depending on the Services you use, you may provide us with credentials for your self-hosted website (like SSH, FTP, and SFTP username and password).
|
||||||
|
- **Communications with us (hi there!):** You may also provide us with information when you post on GitHub, Discord, or message us through separate channels.
|
||||||
|
|
||||||
|
#### *Information We Collect Automatically*
|
||||||
|
|
||||||
|
We also collect some information automatically:
|
||||||
|
|
||||||
|
- **Log information:** We collect information that web browsers, mobile devices, and servers typically make available, including the browser type, IP address, unique device identifiers, language preference, referring site, the date and time of access, operating system, and mobile network information. We collect log information when you use our Services.
|
||||||
|
- **Usage information:** We collect information about your usage of our Services. We use this information to, for example, provide our Services to you, get insights on how people use our Services so we can make our Services better, and understand and make predictions about user retention.
|
||||||
|
- **Location information:** We may determine the location of your device from your IP address. We collect and use this information to, for example, calculate how many people visit our Services from certain geographic regions.
|
||||||
|
- **Stored information:** We may access information stored on your devices if you upload this information to our Services.
|
||||||
|
- **Information from cookies & other technologies:** A cookie is a string of information that a website stores on a visitor's computer, and that the visitor's browser provides to the website each time the visitor returns. Pixel tags (also called web beacons) are small blocks of code placed on websites and emails. We may use cookies and other technologies like pixel tags to help us identify and track visitors, usage, and access preferences for our Services.
|
||||||
|
|
||||||
|
#### *Information We Collect from Other Sources*
|
||||||
|
|
||||||
|
We may also get information about you from other sources. For example:
|
||||||
|
|
||||||
|
- **Third Party Login:** If you create or log in to our Services through another service (like Google) we'll receive associated login information (e.g. a connection token, your username, your email address)
|
||||||
|
|
||||||
|
The information we receive depends on which services you use or authorize and what options are available.
|
||||||
|
|
||||||
|
Third-party services may also give us information, like mailing addresses for individuals who are not yet our users (but we hope will be!). We use this information for marketing purposes like postcards and other mailers advertising our Services.
|
||||||
|
|
||||||
|
### How and Why We Use Information
|
||||||
|
|
||||||
|
#### *Purposes for Using Information*
|
||||||
|
|
||||||
|
We use information about you for the purposes listed below:
|
||||||
|
|
||||||
|
- **To provide our Services.** For example, to run a model on our hosted services to deliver a message to your client.
|
||||||
|
- **To ensure quality, maintain safety, and improve our Services.** For example, by providing automatic upgrades and new versions of our Services. Or, for example, by monitoring and analyzing how users interact with our Services so we can create new features that we think our users will enjoy and that will help them create and manage websites more efficiently or make our Services easier to use.
|
||||||
|
- **To protect our Services, our users, and the public.** For example, by detecting security incidents; detecting and protecting against malicious, deceptive, fraudulent, or illegal activity; fighting spam; complying with our legal obligations; and protecting the rights and property of Letta and others, which may result in us, for example, declining a transaction or terminating Services.
|
||||||
|
- **To fix problems with our Services.** For example, by monitoring, debugging, repairing, and preventing issues.
|
||||||
|
- **To customize the user experience.** For example, to personalize your experience by serving you relevant notifications for our Services.
|
||||||
|
|
||||||
|
#### *Legal Bases for Collecting and Using Information*
|
||||||
|
|
||||||
|
A note here for those in the European Union about our legal grounds for processing information about you under EU data protection laws, which is that our use of your information is based on the grounds that:
|
||||||
|
|
||||||
|
(1) The use is necessary in order to fulfill our commitments to you under the applicable terms of service or other agreements with you or is necessary to administer your account — for example, in order to enable access to our website on your device or charge you for a paid plan; or
|
||||||
|
|
||||||
|
(2) The use is necessary for compliance with a legal obligation; or
|
||||||
|
|
||||||
|
(3) The use is necessary in order to protect your vital interests or those of another person; or
|
||||||
|
|
||||||
|
(4) We have a legitimate interest in using your information — for example, to provide and update our Services; to improve our Services so that we can offer you an even better user experience; to safeguard our Services; to communicate with you; to measure, gauge, and improve the effectiveness of our advertising; and to understand our user retention and attrition; to monitor and prevent any problems with our Services; and to personalize your experience; or
|
||||||
|
|
||||||
|
(5) You have given us your consent
|
||||||
|
|
||||||
|
### Sharing Information
|
||||||
|
|
||||||
|
#### *How We Share Information*
|
||||||
|
|
||||||
|
We share information about you in limited circumstances, and with appropriate safeguards on your privacy.
|
||||||
|
|
||||||
|
- **Subsidiaries, independent contractors, and research partners:** We may disclose information about you to our subsidiaries, independent contractors, and/or research partners who need the information to help us provide our Services or process the information on our behalf. We require our subsidiaries and independent contractors to follow this Privacy Policy for any personal information that we share with them. This includes the transfer of data collected on our Services to facilitate model training and refinement.
|
||||||
|
- **Third-party vendors:** We may share information about you with third-party vendors who need the information in order to provide their services to us, or to provide their services to you or your site. This includes vendors that help us provide our Services to you (such as infrastructure or model serving companies); those that help us understand and enhance our Services (like analytics providers); those that make tools to help us run our operations (like programs that help us with task management, scheduling, word processing, email and other communications, and collaboration among our teams); other third-party tools that help us manage operations; and companies that make products available on our websites, who may need information about you in order to, for example, provide technical or other support services to you.
|
||||||
|
- **Legal and regulatory requirements:** We may disclose information about you in response to a subpoena, court order, or other governmental request.
|
||||||
|
- **To protect rights, property, and others:** We may disclose information about you when we believe in good faith that disclosure is reasonably necessary to protect the property or rights of Letta, third parties, or the public at large.
|
||||||
|
- **Asset/IP transfers:** If any transfer of Letta assets were to happen, this Privacy Policy would continue to apply to your information and the party receiving your information may continue to use your information, but only consistent with this Privacy Policy.
|
||||||
|
- **With your consent:** We may share and disclose information with your consent or at your direction.
|
||||||
|
- **Aggregated or de-identified information:** We may share information that has been aggregated or de-identified, so that it can no longer reasonably be used to identify you. For instance, we may publish aggregate statistics about the use of our Services, or share a hashed version of your email address to facilitate customized ad campaigns on other platforms.
|
||||||
|
- **Published support requests:** If you send us a request for assistance (for example, via a support email or one of our other feedback mechanisms), we reserve the right to publish that request in order to clarify or respond to your request, or to help us support other users.
|
||||||
|
|
||||||
|
#### *Information Shared Publicly*
|
||||||
|
|
||||||
|
Information that you choose to make public is — you guessed it — disclosed publicly.
|
||||||
|
|
||||||
|
That means information like your public profile, posts, other content that you make public on your website, and your "Likes" and comments on other websites are all available to others — and we hope they get a lot of views!
|
||||||
|
|
||||||
|
For example, the photo that you upload to your public profile, or a default image if you haven't uploaded one, is your **G**lobally **R**ecognized Avatar, or Gravatar — get it? :) Your Gravatar, along with other public profile information, displays alongside the comments and "Likes" that you make on other users' websites while logged in to your WordPress.com account. Your Gravatar and public profile information may also display with your comments, "Likes," and other interactions on websites that use our Gravatar service, if the email address associated with your account is the same email address you use on the other website.
|
||||||
|
|
||||||
|
Please keep all of this in mind when deciding what you would like to share publicly.
|
||||||
|
|
||||||
|
### How Long We Keep Information
|
||||||
|
|
||||||
|
We generally discard information about you when it's no longer needed for the purposes for which we collect and use it — described in the section above on How and Why We Use Information — and we're not legally required to keep it.
|
||||||
|
|
||||||
|
### Security
|
||||||
|
|
||||||
|
While no online service is 100% secure, we work very hard to protect information about you against unauthorized access, use, alteration, or destruction, and take reasonable measures to do so. We monitor our Services for potential vulnerabilities and attacks. To enhance the security of your account, we encourage you to enable our advanced security settings when available.
|
||||||
|
|
||||||
|
### Choices
|
||||||
|
|
||||||
|
You have several choices available when it comes to information about you:
|
||||||
|
|
||||||
|
- **Opt out of telemetry:** You can opt out of basic telemetry by modifying your configuration file.
|
||||||
|
- **Limit use of hosted services:** We only retain information on model inputs/outputs when you use our hosted services.
|
||||||
|
|
||||||
|
### Your Rights
|
||||||
|
|
||||||
|
If you are located in certain parts of the world, including some US states and countries that fall under the scope of the European General Data Protection Regulation (aka the "GDPR"), you may have certain rights regarding your personal information, like the right to request access to or deletion of your data.
|
||||||
|
|
||||||
|
#### *European General Data Protection Regulation (GDPR)*
|
||||||
|
|
||||||
|
If you are located in a country that falls under the scope of the GDPR, data protection laws give you certain rights with respect to your personal data, subject to any exemptions provided by the law, including the rights to:
|
||||||
|
|
||||||
|
- Request access to your personal data;
|
||||||
|
- Request correction or deletion of your personal data;
|
||||||
|
- Object to our use and processing of your personal data;
|
||||||
|
- Request that we limit our use and processing of your personal data; and
|
||||||
|
- Request portability of your personal data.
|
||||||
|
|
||||||
|
You also have the right to make a complaint to a government supervisory authority.
|
||||||
|
|
||||||
|
#### *US Privacy Laws*
|
||||||
|
|
||||||
|
Laws in some US states, including California, Colorado, Connecticut, Utah, and Virginia, require us to provide residents with additional information about the categories of personal information we collect and share, where we get that personal information, and how and why we use it. You'll find that information in this section (if you are a California resident, please note that this is the Notice at Collection we are required to provide you under California law).
|
||||||
|
|
||||||
|
In the last 12 months, we collected the following categories of personal information, depending on the Services used:
|
||||||
|
|
||||||
|
- Identifiers (like your name, contact information, and device and online identifiers);
|
||||||
|
- Characteristics protected by law (for example, you might provide your gender as part of a research survey for us or you may choose to voluntarily disclose your race or veteran status);
|
||||||
|
- Internet or other electronic network activity information (such as your usage of our Services);
|
||||||
|
- Application and user data (such as model data and user inputs used to render our Services)
|
||||||
|
- Geolocation data (such as your location based on your IP address);
|
||||||
|
- Audio, electronic, visual or similar information (such as your profile picture, if you uploaded one);
|
||||||
|
- Inferences we make (such as likelihood of retention or attrition).
|
||||||
|
|
||||||
|
We collect personal information for the purposes described in the "How and Why We Use Information section". And we share this information with the categories of third parties described in the "Sharing Information section". We retain this information for the length of time described in our "How Long We Keep Information section".
|
||||||
|
|
||||||
|
In some US states you have additional rights subject to any exemptions provided by your state's respective law, including the right to:
|
||||||
|
|
||||||
|
- Request a copy of the specific pieces of information we collect about you and, if you're in California, to know the categories of personal information we collect, the categories of business or commercial purpose for collecting and using it, the categories of sources from which the information came, and the categories of third parties we share it with;
|
||||||
|
- Request deletion of personal information we collect or maintain;
|
||||||
|
- Request correction of personal information we collect or maintain;
|
||||||
|
- Opt out of the sale or sharing of personal information;
|
||||||
|
- Receive a copy of your information in a readily portable format; and
|
||||||
|
- Not receive discriminatory treatment for exercising your rights.
|
||||||
|
|
||||||
|
***Right to Opt Out***
|
||||||
|
|
||||||
|
Our procedures for opting out of data collection on our Services are described in the "Choices" section. We do not collect or process your sensitive (and potentially sensitive) personal information except where it is strictly necessary to provide you with our service or improve our services in the future, where the processing is not for the purpose of inferring characteristics about you, or for other purposes that do not require an option to limit under California law. We don't knowingly sell or share personal information of those under 16.
|
||||||
|
|
||||||
|
#### *Contacting Us About These Rights*
|
||||||
|
|
||||||
|
If you'd like to contact us about one of the other rights, scroll down to "How to Reach Us" to, well, find out how to reach us. When you contact us about one of your rights under this section, we'll need to verify that you are the right person before we disclose or delete anything. For example, if you are a user, we will need you to contact us from the email address associated with your account. You can also designate an authorized agent to make a request on your behalf by giving us written authorization. We may still require you to verify your identity with us.
|
||||||
|
|
||||||
|
#### ***Appeals Process for Rights Requests Denials***
|
||||||
|
|
||||||
|
In some circumstances we may deny your request to exercise one of these rights. For example, if we cannot verify that you are the account owner we may deny your request to access the personal information associated with your account. As another example, if we are legally required to maintain a copy of your personal information we may deny your request to delete your personal information.
|
||||||
|
|
||||||
|
In the event that we deny your request, we will communicate this fact to you in writing. You may appeal our decision by responding in writing to our denial email and stating that you would like to appeal. All appeals will be reviewed by an internal expert who was not involved in your original request. In the event that your appeal is also denied this information will be communicated to you in writing. Please note that the appeal process does not apply to job applicants.
|
||||||
|
|
||||||
|
If your appeal is denied, in some US states (Colorado, Connecticut, and Virginia) you may refer the denied appeal to the state attorney general if you believe the denial is in conflict with your legal rights. The process for how to do this will be communicated to you in writing at the same time we send you our decision about your appeal.
|
||||||
|
|
||||||
|
### How to Reach Us
|
||||||
|
|
||||||
|
If you have a question about this Privacy Policy, please contact us via [email](mailto:contact@charlespacker.com).
|
||||||
|
|
||||||
|
### Other Things You Should Know (Keep Reading!)
|
||||||
|
|
||||||
|
#### *Ads and Analytics Services Provided by Others*
|
||||||
|
|
||||||
|
Ads appearing on any of our Services may be delivered by advertising networks. Other parties may also provide analytics services via our Services. These ad networks and analytics providers may set tracking technologies (like cookies) to collect information about your use of our Services and across other websites and online services. These technologies allow these third parties to recognize your device to compile information about you or others who use your device. This information allows us and other companies to, among other things, analyze and track usage, determine the popularity of certain content, and deliver ads that may be more targeted to your interests. Please note this Privacy Policy only covers the collection of information by Letta and does not cover the collection of information by any third-party advertisers or analytics providers.
|
||||||
|
|
||||||
|
#### *Third-Party Software and Services*
|
||||||
|
|
||||||
|
If you'd like to use third-party software or services (such as forks of our code), please keep in mind that interacting with them may mean providing information about yourself (or your site visitors) to those third parties. For example, some third-party services may request or require access to your (yours, your visitors', or customers') data via a pixel or cookie. Please note that if you use the third-party service or grant access, your data will be handled in accordance with the third party's privacy policy and practices. We don't own or control these third parties, and they have their own rules about information collection, use, and sharing, which you should review before using the software or services.
|
||||||
|
|
||||||
|
### Privacy Policy Changes
|
||||||
|
|
||||||
|
Although most changes are likely to be minor, we may change this Privacy Policy from time to time. We encourage visitors to frequently check this page for any changes to this Privacy Policy. If we make changes, we will notify you by revising the policy in the public repository (the change log is publicly viewable). Your further use of the Services after a change to our Privacy Policy will be subject to the updated policy.
|
||||||
|
|
||||||
|
### Creative Commons Sharealike License
|
||||||
|
|
||||||
|
This privacy policy is derived from the [Automattic Privacy Policy](https://github.com/Automattic/legalmattic) distributed under a Creative Commons Sharealike license. Thank you Automattic!
|
||||||
537
README.md
Normal file
537
README.md
Normal file
@@ -0,0 +1,537 @@
|
|||||||
|
<p align="center">
|
||||||
|
<picture>
|
||||||
|
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/letta-ai/letta/refs/heads/main/assets/Letta-logo-RGB_GreyonTransparent_cropped_small.png">
|
||||||
|
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/letta-ai/letta/refs/heads/main/assets/Letta-logo-RGB_OffBlackonTransparent_cropped_small.png">
|
||||||
|
<img alt="Letta logo" src="https://raw.githubusercontent.com/letta-ai/letta/refs/heads/main/assets/Letta-logo-RGB_GreyonOffBlack_cropped_small.png" width="500">
|
||||||
|
</picture>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
# Letta (formerly MemGPT)
|
||||||
|
|
||||||
|
Letta is the platform for building stateful agents: open AI with advanced memory that can learn and self-improve over time.
|
||||||
|
|
||||||
|
### Quicklinks:
|
||||||
|
* [**Developer Documentation**](https://docs.letta.com): Learn how to create agents that learn using Python / TypeScript
|
||||||
|
* [**Agent Development Environment (ADE)**](https://docs.letta.com/guides/ade/overview): A no-code UI for building stateful agents
|
||||||
|
* [**Letta Desktop**](https://docs.letta.com/guides/ade/desktop): A fully-local version of the ADE, available on MacOS and Windows
|
||||||
|
* [**Letta Cloud**](https://app.letta.com/): The fastest way to try Letta, with agents running in the cloud
|
||||||
|
|
||||||
|
|
||||||
|
## Get started
|
||||||
|
|
||||||
|
### [One-Shot ✨ Vibecoding ⚡️ Prompts](https://github.com/letta-ai/letta/blob/main/fern/pages/getting-started/prompts.mdx)
|
||||||
|
|
||||||
|
Or install the Letta SDK (available for both Python and TypeScript):
|
||||||
|
|
||||||
|
### [Python SDK](https://github.com/letta-ai/letta-python)
|
||||||
|
```sh
|
||||||
|
pip install letta-client
|
||||||
|
```
|
||||||
|
|
||||||
|
### [TypeScript / Node.js SDK](https://github.com/letta-ai/letta-node)
|
||||||
|
```sh
|
||||||
|
npm install @letta-ai/letta-client
|
||||||
|
```
|
||||||
|
|
||||||
|
## Simple Hello World example
|
||||||
|
|
||||||
|
In the example below, we'll create a stateful agent with two memory blocks, one for itself (the `persona` block), and one for the human. We'll initialize the `human` memory block with incorrect information, and correct the agent in our first message - which will trigger the agent to update its own memory with a tool call.
|
||||||
|
|
||||||
|
*To run the examples, you'll need to get a `LETTA_API_KEY` from [Letta Cloud](https://app.letta.com/api-keys), or run your own self-hosted server (see [our guide](https://docs.letta.com/guides/selfhosting))*
|
||||||
|
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
from letta_client import Letta
|
||||||
|
|
||||||
|
client = Letta(token="LETTA_API_KEY")
|
||||||
|
# client = Letta(base_url="http://localhost:8283") # if self-hosting, set your base_url
|
||||||
|
|
||||||
|
agent_state = client.agents.create(
|
||||||
|
model="openai/gpt-4.1",
|
||||||
|
embedding="openai/text-embedding-3-small",
|
||||||
|
memory_blocks=[
|
||||||
|
{
|
||||||
|
"label": "human",
|
||||||
|
"value": "The human's name is Chad. They like vibe coding."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "persona",
|
||||||
|
"value": "My name is Sam, a helpful assistant."
|
||||||
|
}
|
||||||
|
],
|
||||||
|
tools=["web_search", "run_code"]
|
||||||
|
)
|
||||||
|
|
||||||
|
print(agent_state.id)
|
||||||
|
# agent-d9be...0846
|
||||||
|
|
||||||
|
response = client.agents.messages.create(
|
||||||
|
agent_id=agent_state.id,
|
||||||
|
messages=[
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "Hey, nice to meet you, my name is Brad."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
# the agent will think, then edit its memory using a tool
|
||||||
|
for message in response.messages:
|
||||||
|
print(message)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
import { LettaClient } from '@letta-ai/letta-client'
|
||||||
|
|
||||||
|
const client = new LettaClient({ token: "LETTA_API_KEY" });
|
||||||
|
// const client = new LettaClient({ baseUrl: "http://localhost:8283" }); // if self-hosting, set your baseUrl
|
||||||
|
|
||||||
|
const agentState = await client.agents.create({
|
||||||
|
model: "openai/gpt-4.1",
|
||||||
|
embedding: "openai/text-embedding-3-small",
|
||||||
|
memoryBlocks: [
|
||||||
|
{
|
||||||
|
label: "human",
|
||||||
|
value: "The human's name is Chad. They like vibe coding."
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: "persona",
|
||||||
|
value: "My name is Sam, a helpful assistant."
|
||||||
|
}
|
||||||
|
],
|
||||||
|
tools: ["web_search", "run_code"]
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(agentState.id);
|
||||||
|
// agent-d9be...0846
|
||||||
|
|
||||||
|
const response = await client.agents.messages.create(
|
||||||
|
agentState.id, {
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: "Hey, nice to meet you, my name is Brad."
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
// the agent will think, then edit its memory using a tool
|
||||||
|
for (const message of response.messages) {
|
||||||
|
console.log(message);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Core concepts in Letta:
|
||||||
|
|
||||||
|
Letta is made by the creators of [MemGPT](https://arxiv.org/abs/2310.08560), a research paper that introduced the concept of the "LLM Operating System" for memory management. The core concepts in Letta for designing stateful agents follow the MemGPT LLM OS principles:
|
||||||
|
|
||||||
|
1. [**Memory Hierarchy**](https://docs.letta.com/guides/agents/memory): Agents have self-editing memory that is split between in-context memory and out-of-context memory
|
||||||
|
2. [**Memory Blocks**](https://docs.letta.com/guides/agents/memory-blocks): The agent's in-context memory is composed of persistent editable **memory blocks**
|
||||||
|
3. [**Agentic Context Engineering**](https://docs.letta.com/guides/agents/context-engineering): Agents control the context window by using tools to edit, delete, or search for memory
|
||||||
|
4. [**Perpetual Self-Improving Agents**](https://docs.letta.com/guides/agents/overview): Every "agent" is a single entity that has a perpetual (infinite) message history
|
||||||
|
|
||||||
|
## Multi-agent shared memory ([full guide](https://docs.letta.com/guides/agents/multi-agent-shared-memory))
|
||||||
|
|
||||||
|
A single memory block can be attached to multiple agents, allowing for extremely powerful multi-agent shared memory setups.
|
||||||
|
For example, you can create two agents that have their own independent memory blocks in addition to a shared memory block.
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
# create a shared memory block
|
||||||
|
shared_block = client.blocks.create(
|
||||||
|
label="organization",
|
||||||
|
description="Shared information between all agents within the organization.",
|
||||||
|
value="Nothing here yet, we should update this over time."
|
||||||
|
)
|
||||||
|
|
||||||
|
# create a supervisor agent
|
||||||
|
supervisor_agent = client.agents.create(
|
||||||
|
model="anthropic/claude-3-5-sonnet-20241022",
|
||||||
|
embedding="openai/text-embedding-3-small",
|
||||||
|
# blocks created for this agent
|
||||||
|
memory_blocks=[{"label": "persona", "value": "I am a supervisor"}],
|
||||||
|
# pre-existing shared block that is "attached" to this agent
|
||||||
|
block_ids=[shared_block.id],
|
||||||
|
)
|
||||||
|
|
||||||
|
# create a worker agent
|
||||||
|
worker_agent = client.agents.create(
|
||||||
|
model="openai/gpt-4.1-mini",
|
||||||
|
embedding="openai/text-embedding-3-small",
|
||||||
|
# blocks created for this agent
|
||||||
|
memory_blocks=[{"label": "persona", "value": "I am a worker"}],
|
||||||
|
# pre-existing shared block that is "attached" to this agent
|
||||||
|
block_ids=[shared_block.id],
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
// create a shared memory block
|
||||||
|
const sharedBlock = await client.blocks.create({
|
||||||
|
label: "organization",
|
||||||
|
description: "Shared information between all agents within the organization.",
|
||||||
|
value: "Nothing here yet, we should update this over time."
|
||||||
|
});
|
||||||
|
|
||||||
|
// create a supervisor agent
|
||||||
|
const supervisorAgent = await client.agents.create({
|
||||||
|
model: "anthropic/claude-3-5-sonnet-20241022",
|
||||||
|
embedding: "openai/text-embedding-3-small",
|
||||||
|
// blocks created for this agent
|
||||||
|
memoryBlocks: [{ label: "persona", value: "I am a supervisor" }],
|
||||||
|
// pre-existing shared block that is "attached" to this agent
|
||||||
|
blockIds: [sharedBlock.id]
|
||||||
|
});
|
||||||
|
|
||||||
|
// create a worker agent
|
||||||
|
const workerAgent = await client.agents.create({
|
||||||
|
model: "openai/gpt-4.1-mini",
|
||||||
|
embedding: "openai/text-embedding-3-small",
|
||||||
|
// blocks created for this agent
|
||||||
|
memoryBlocks: [{ label: "persona", value: "I am a worker" }],
|
||||||
|
// pre-existing shared block that is "attached" to this agent
|
||||||
|
blockIds: [sharedBlock.id]
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Sleep-time agents ([full guide](https://docs.letta.com/guides/agents/architectures/sleeptime))
|
||||||
|
|
||||||
|
In Letta, you can create special **sleep-time agents** that share the memory of your primary agents, but run in the background (like an agent's "subconscious"). You can think of sleep-time agents as a special form of multi-agent architecture.
|
||||||
|
|
||||||
|
To enable sleep-time agents for your agent, set the `enable_sleeptime` flag to true when creating your agent. This will automatically create a sleep-time agent in addition to your main agent which will handle the memory editing, instead of your primary agent.
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
agent_state = client.agents.create(
|
||||||
|
...
|
||||||
|
enable_sleeptime=True, # <- enable this flag to create a sleep-time agent
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
const agentState = await client.agents.create({
|
||||||
|
...
|
||||||
|
enableSleeptime: true // <- enable this flag to create a sleep-time agent
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Saving and sharing agents with Agent File (`.af`) ([full guide](https://docs.letta.com/guides/agents/agent-file))
|
||||||
|
|
||||||
|
In Letta, all agent data is persisted to disk (Postgres or SQLite), and can be easily imported and exported using the open source [Agent File](https://github.com/letta-ai/agent-file) (`.af`) file format. You can use Agent File to checkpoint your agents, as well as move your agents (and their complete state/memories) between different Letta servers, e.g. between self-hosted Letta and Letta Cloud.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>View code snippets</summary>
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
# Import your .af file from any location
|
||||||
|
agent_state = client.agents.import_agent_serialized(file=open("/path/to/agent/file.af", "rb"))
|
||||||
|
|
||||||
|
print(f"Imported agent: {agent_state.id}")
|
||||||
|
|
||||||
|
# Export your agent into a serialized schema object (which you can write to a file)
|
||||||
|
schema = client.agents.export_agent_serialized(agent_id="<AGENT_ID>")
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
import { readFileSync } from 'fs';
|
||||||
|
import { Blob } from 'buffer';
|
||||||
|
|
||||||
|
// Import your .af file from any location
|
||||||
|
const file = new Blob([readFileSync('/path/to/agent/file.af')])
|
||||||
|
const agentState = await client.agents.importAgentSerialized(file, {})
|
||||||
|
|
||||||
|
console.log(`Imported agent: ${agentState.id}`);
|
||||||
|
|
||||||
|
// Export your agent into a serialized schema object (which you can write to a file)
|
||||||
|
const schema = await client.agents.exportAgentSerialized("<AGENT_ID>");
|
||||||
|
```
|
||||||
|
</details>
|
||||||
|
|
||||||
|
## Model Context Protocol (MCP) and custom tools ([full guide](https://docs.letta.com/guides/mcp/overview))
|
||||||
|
|
||||||
|
Letta has rich support for MCP tools (Letta acts as an MCP client), as well as custom Python tools.
|
||||||
|
MCP servers can be easily added within the Agent Development Environment (ADE) tool manager UI, as well as via the SDK:
|
||||||
|
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>View code snippets</summary>
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
# List tools from an MCP server
|
||||||
|
tools = client.tools.list_mcp_tools_by_server(mcp_server_name="weather-server")
|
||||||
|
|
||||||
|
# Add a specific tool from the MCP server
|
||||||
|
tool = client.tools.add_mcp_tool(
|
||||||
|
mcp_server_name="weather-server",
|
||||||
|
mcp_tool_name="get_weather"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create agent with MCP tool attached
|
||||||
|
agent_state = client.agents.create(
|
||||||
|
model="openai/gpt-4o-mini",
|
||||||
|
embedding="openai/text-embedding-3-small",
|
||||||
|
tool_ids=[tool.id]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Or attach tools to an existing agent
|
||||||
|
client.agents.tools.attach(
|
||||||
|
    agent_id=agent_state.id,
|
||||||
|
tool_id=tool.id
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use the agent with MCP tools
|
||||||
|
response = client.agents.messages.create(
|
||||||
|
agent_id=agent_state.id,
|
||||||
|
messages=[
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "Use the weather tool to check the forecast"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
// List tools from an MCP server
|
||||||
|
const tools = await client.tools.listMcpToolsByServer("weather-server");
|
||||||
|
|
||||||
|
// Add a specific tool from the MCP server
|
||||||
|
const tool = await client.tools.addMcpTool("weather-server", "get_weather");
|
||||||
|
|
||||||
|
// Create agent with MCP tool
|
||||||
|
const agentState = await client.agents.create({
|
||||||
|
model: "openai/gpt-4o-mini",
|
||||||
|
embedding: "openai/text-embedding-3-small",
|
||||||
|
toolIds: [tool.id]
|
||||||
|
});
|
||||||
|
|
||||||
|
// Use the agent with MCP tools
|
||||||
|
const response = await client.agents.messages.create(agentState.id, {
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: "Use the weather tool to check the forecast"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
});
|
||||||
|
```
|
||||||
|
</details>
|
||||||
|
|
||||||
|
## Filesystem ([full guide](https://docs.letta.com/guides/agents/filesystem))
|
||||||
|
|
||||||
|
Letta’s filesystem allows you to easily connect your agents to external files, for example: research papers, reports, medical records, or any other data in common text formats (`.pdf`, `.txt`, `.md`, `.json`, etc).
|
||||||
|
Once you attach a folder to an agent, the agent will be able to use filesystem tools (`open_file`, `grep_file`, `search_file`) to browse the files to search for information.
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>View code snippets</summary>
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
# get an available embedding_config
|
||||||
|
embedding_configs = client.embedding_models.list()
|
||||||
|
embedding_config = embedding_configs[0]
|
||||||
|
|
||||||
|
# create the folder
|
||||||
|
folder = client.folders.create(
|
||||||
|
name="my_folder",
|
||||||
|
embedding_config=embedding_config
|
||||||
|
)
|
||||||
|
|
||||||
|
# upload a file into the folder
|
||||||
|
job = client.folders.files.upload(
|
||||||
|
folder_id=folder.id,
|
||||||
|
file=open("my_file.txt", "rb")
|
||||||
|
)
|
||||||
|
|
||||||
|
# wait until the job is completed
|
||||||
|
while True:
|
||||||
|
job = client.jobs.retrieve(job.id)
|
||||||
|
if job.status == "completed":
|
||||||
|
break
|
||||||
|
elif job.status == "failed":
|
||||||
|
raise ValueError(f"Job failed: {job.metadata}")
|
||||||
|
print(f"Job status: {job.status}")
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
# once you attach a folder to an agent, the agent can see all files in it
|
||||||
|
client.agents.folders.attach(agent_id=agent_state.id, folder_id=folder.id)
|
||||||
|
|
||||||
|
response = client.agents.messages.create(
|
||||||
|
agent_id=agent_state.id,
|
||||||
|
messages=[
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "What data is inside of my_file.txt?"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
for message in response.messages:
|
||||||
|
print(message)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
// get an available embedding_config
|
||||||
|
const embeddingConfigs = await client.embeddingModels.list()
|
||||||
|
const embeddingConfig = embeddingConfigs[0];
|
||||||
|
|
||||||
|
// create the folder
|
||||||
|
const folder = await client.folders.create({
|
||||||
|
name: "my_folder",
|
||||||
|
embeddingConfig: embeddingConfig
|
||||||
|
});
|
||||||
|
|
||||||
|
// upload a file into the folder
|
||||||
|
const uploadJob = await client.folders.files.upload(
|
||||||
|
createReadStream("my_file.txt"),
|
||||||
|
folder.id,
|
||||||
|
);
|
||||||
|
console.log("file uploaded")
|
||||||
|
|
||||||
|
// wait until the job is completed
|
||||||
|
while (true) {
|
||||||
|
const job = await client.jobs.retrieve(uploadJob.id);
|
||||||
|
if (job.status === "completed") {
|
||||||
|
break;
|
||||||
|
} else if (job.status === "failed") {
|
||||||
|
throw new Error(`Job failed: ${job.metadata}`);
|
||||||
|
}
|
||||||
|
console.log(`Job status: ${job.status}`);
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||||
|
}
|
||||||
|
|
||||||
|
// list files in the folder
|
||||||
|
const files = await client.folders.files.list(folder.id);
|
||||||
|
console.log(`Files in folder: ${files}`);
|
||||||
|
|
||||||
|
// list passages in the folder
|
||||||
|
const passages = await client.folders.passages.list(folder.id);
|
||||||
|
console.log(`Passages in folder: ${passages}`);
|
||||||
|
|
||||||
|
// once you attach a folder to an agent, the agent can see all files in it
|
||||||
|
await client.agents.folders.attach(agent.id, folder.id);
|
||||||
|
|
||||||
|
const response = await client.agents.messages.create(
|
||||||
|
agentState.id, {
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: "What data is inside of my_file.txt?"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
for (const message of response.messages) {
|
||||||
|
console.log(message);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
</details>
|
||||||
|
|
||||||
|
## Long-running agents ([full guide](https://docs.letta.com/guides/agents/long-running))
|
||||||
|
|
||||||
|
When agents need to execute multiple tool calls or perform complex operations (like deep research, data analysis, or multi-step workflows), processing time can vary significantly. Letta supports both a background mode (with resumable streaming) as well as an async mode (with polling) to enable robust long-running agent executions.
|
||||||
|
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary>View code snippets</summary>
|
||||||
|
|
||||||
|
### Python
|
||||||
|
```python
|
||||||
|
stream = client.agents.messages.create_stream(
|
||||||
|
agent_id=agent_state.id,
|
||||||
|
messages=[
|
||||||
|
{
|
||||||
|
"role": "user",
|
||||||
|
"content": "Run comprehensive analysis on this dataset"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
stream_tokens=True,
|
||||||
|
background=True,
|
||||||
|
)
|
||||||
|
run_id = None
|
||||||
|
last_seq_id = None
|
||||||
|
for chunk in stream:
|
||||||
|
if hasattr(chunk, "run_id") and hasattr(chunk, "seq_id"):
|
||||||
|
run_id = chunk.run_id # Save this to reconnect if your connection drops
|
||||||
|
last_seq_id = chunk.seq_id # Save this as your resumption point for cursor-based pagination
|
||||||
|
print(chunk)
|
||||||
|
|
||||||
|
# If disconnected, resume from last received seq_id:
|
||||||
|
for chunk in client.runs.stream(run_id, starting_after=last_seq_id):
|
||||||
|
print(chunk)
|
||||||
|
```
|
||||||
|
|
||||||
|
### TypeScript / Node.js
|
||||||
|
```typescript
|
||||||
|
const stream = await client.agents.messages.createStream({
|
||||||
|
agentId: agentState.id,
|
||||||
|
requestBody: {
|
||||||
|
messages: [
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: "Run comprehensive analysis on this dataset"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
streamTokens: true,
|
||||||
|
background: true,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let runId = null;
|
||||||
|
let lastSeqId = null;
|
||||||
|
for await (const chunk of stream) {
|
||||||
|
if (chunk.run_id && chunk.seq_id) {
|
||||||
|
runId = chunk.run_id; // Save this to reconnect if your connection drops
|
||||||
|
lastSeqId = chunk.seq_id; // Save this as your resumption point for cursor-based pagination
|
||||||
|
}
|
||||||
|
console.log(chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If disconnected, resume from last received seq_id
|
||||||
|
for await (const chunk of client.runs.stream(runId, {startingAfter: lastSeqId})) {
|
||||||
|
console.log(chunk);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
</details>
|
||||||
|
|
||||||
|
## Using local models
|
||||||
|
|
||||||
|
Letta is model agnostic and supports using local model providers such as [Ollama](https://docs.letta.com/guides/server/providers/ollama) and [LM Studio](https://docs.letta.com/guides/server/providers/lmstudio). You can also easily swap models inside an agent after the agent has been created, by modifying the agent state with the new model provider via the SDK or in the ADE.
|
||||||
|
|
||||||
|
## Development (only needed if you need to modify the server code)
|
||||||
|
|
||||||
|
*Note: this repository contains the source code for the core Letta service (API server), not the client SDKs. The client SDKs can be found here: [Python](https://github.com/letta-ai/letta-python), [TypeScript](https://github.com/letta-ai/letta-node).*
|
||||||
|
|
||||||
|
To install the Letta server from source, fork the repo, clone your fork, then use [uv](https://docs.astral.sh/uv/getting-started/installation/) to install from inside the main directory:
|
||||||
|
```sh
|
||||||
|
cd letta
|
||||||
|
uv sync --all-extras
|
||||||
|
```
|
||||||
|
|
||||||
|
To run the Letta server from source, use `uv run`:
|
||||||
|
```sh
|
||||||
|
uv run letta server
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Letta is an open source project built by over a hundred contributors. There are many ways to get involved in the Letta OSS project!
|
||||||
|
|
||||||
|
* [**Join the Discord**](https://discord.gg/letta): Chat with the Letta devs and other AI developers.
|
||||||
|
* [**Chat on our forum**](https://forum.letta.com/): If you're not into Discord, check out our developer forum.
|
||||||
|
* **Follow our socials**: [Twitter/X](https://twitter.com/Letta_AI), [LinkedIn](https://www.linkedin.com/in/letta), [YouTube](https://www.youtube.com/@letta-ai)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
***Legal notices**: By using Letta and related Letta services (such as the Letta endpoint or hosted service), you are agreeing to our [privacy policy](https://www.letta.com/privacy-policy) and [terms of service](https://www.letta.com/terms-of-service).*
|
||||||
42
TERMS.md
Normal file
42
TERMS.md
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
Terms of Service
|
||||||
|
================
|
||||||
|
|
||||||
|
**Binding Agreement**. This is a binding contract ("Terms") between you and the developers of Letta and associated services ("we," "us," "our," "Letta developers", "Letta"). These Terms apply whenever you use any of the sites, apps, products, or services ("Services") we offer, in existence now or created in the future. Further, we may automatically upgrade our Services, and these Terms will apply to such upgrades. By accessing or using the Services, you agree to be bound by these Terms. If you use our services on behalf of an organization, you agree to these terms on behalf of that organization. If you do not agree to these Terms, you may not use the Services.
|
||||||
|
|
||||||
|
**Privacy**. See our Privacy Policy for details on how we collect, store, and share user information.
|
||||||
|
|
||||||
|
**Age Restrictions**. The Services are not intended for users who are under the age of 13. In order to create an account for the Services, you must be 13 years of age or older. By registering, you represent and warrant that you are 13 years of age or older. If children between the ages of 13 and 18 wish to use the Services, they must be registered by their parent or guardian.
|
||||||
|
|
||||||
|
**Your Content and Permissions**. Content may be uploaded to, shared with, or generated by Letta -- files, videos, links, music, documents, code, and text ("Your Content"). Your Content is yours. Letta does not claim any right, title, or interest in Your Content.
|
||||||
|
|
||||||
|
You grant us a non-exclusive, worldwide, royalty free license to do the things we need to do to provide the Services, including but not limited to storing, displaying, reproducing, and distributing Your Content. This license extends to trusted third parties we work with.
|
||||||
|
|
||||||
|
**Content Guidelines**. You are fully responsible for Your Content. You may not copy, upload, download, or share Your Content unless you have the appropriate rights to do so. It is your responsibility to ensure that Your Content abides by applicable laws, these Terms, and with our user guidelines. We don't actively review Your Content.
|
||||||
|
|
||||||
|
**Account Security**. You are responsible for safeguarding your password to the Services, making sure that others don't have access to it, and keeping your account information current. You must immediately notify the Letta developers of any unauthorized uses of your account or any other breaches of security. Letta will not be liable for your acts or omissions, including any damages of any kind incurred as a result of your acts or omissions.
|
||||||
|
|
||||||
|
**Changes to these Terms**. We are constantly updating our Services, and that means sometimes we have to change the legal terms under which our Services are offered. If we make changes that are material, we will let you know, for example by posting on one of our blogs, or by sending you an email or other communication before the changes take effect. The notice will designate a reasonable period of time after which the new Terms will take effect. If you disagree with our changes, then you should stop using Letta within the designated notice period. Your continued use of Letta will be subject to the new Terms. However, any dispute that arose before the changes shall be governed by the Terms (including the binding individual arbitration clause) that were in place when the dispute arose.
|
||||||
|
|
||||||
|
You can access archived versions of our policies at our repository.
|
||||||
|
|
||||||
|
**DMCA Policy**. We respond to notices of alleged copyright infringement in accordance with the Digital Millennium Copyright Act ("DMCA"). If you believe that the content of a Letta account infringes your copyrights, you can notify us using the published email in our privacy policy.
|
||||||
|
|
||||||
|
**Our Intellectual Property**: The Services and all materials contained therein, including, without limitation, Letta logo, and all designs, text, graphics, pictures, information, data, software, sound files, other files, and the selection and arrangement thereof (collectively, the "Letta Materials") are the property of Letta or its licensors or users and are protected by U.S. and international intellectual property laws. You are granted a personal, limited, non-sublicensable, non-exclusive, revocable license to access and use Letta Materials in accordance with these Terms for the sole purpose of enabling you to use and enjoy the Services.
|
||||||
|
|
||||||
|
Other trademarks, service marks, graphics and logos used in connection with the Services may be the trademarks of other third parties. Your use of the Services grants you no right or license to reproduce or otherwise use any Letta or third-party trademarks.
|
||||||
|
|
||||||
|
**Termination**. You are free to stop using the Services at any time. We also reserve the right to suspend or end the Services at any time at our discretion and without notice. For example, we may suspend or terminate your use of the Services if you fail to comply with these Terms, or use the Services in a manner that would cause us legal liability, disrupt the Services, or disrupt others' use of the Services.
|
||||||
|
|
||||||
|
**Disclaimer of Warranties**. Letta makes no warranties of any kind with respect to Letta or your use of the Services.
|
||||||
|
|
||||||
|
**Limitation of Liability**. Letta shall not have any liability for any indirect, incidental, consequential, special, exemplary, or punitive damages under any theory of liability arising out of, or relating to, these Terms or your use of Letta. As a condition of access to Letta, you understand and agree that Letta's liability shall not exceed $4.20.
|
||||||
|
|
||||||
|
**Indemnification**. You agree to indemnify and hold harmless Letta, its developers, its contributors, its contractors, and its licensors, and their respective directors, officers, employees, and agents from and against any and all losses, liabilities, demands, damages, costs, claims, and expenses, including attorneys’ fees, arising out of or related to your use of our Services, including but not limited to your violation of the Agreement or any agreement with a provider of third-party services used in connection with the Services or applicable law, Content that you post, and any ecommerce activities conducted through your or another user’s website.
|
||||||
|
|
||||||
|
**Exceptions to Agreement to Arbitrate**. Claims for injunctive or equitable relief or claims regarding intellectual property rights may be brought in any competent court without the posting of a bond.
|
||||||
|
|
||||||
|
**No Class Actions**. You may resolve disputes with us only on an individual basis; you may not bring a claim as a plaintiff or a class member in a class, consolidated, or representative action. **Class arbitrations, class actions, private attorney general actions, and consolidation with other arbitrations are not permitted.**
|
||||||
|
|
||||||
|
**Governing Law**. You agree that these Terms, and your use of Letta, are governed by California law, in the United States of America, without regard to its principles of conflicts of law.
|
||||||
|
|
||||||
|
**Creative Commons Sharealike License**. This document is derived from the [Automattic legalmattic repository](https://github.com/Automattic/legalmattic) distributed under a Creative Commons Sharealike license. Thank you Automattic!
|
||||||
116
alembic.ini
Normal file
116
alembic.ini
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts
|
||||||
|
# Use forward slashes (/) also on windows to provide an os agnostic path
|
||||||
|
script_location = alembic
|
||||||
|
|
||||||
|
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||||
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
|
# for all available tokens
|
||||||
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
|
# defaults to the current working directory.
|
||||||
|
prepend_sys_path = .
|
||||||
|
|
||||||
|
# timezone to use when rendering the date within the migration file
|
||||||
|
# as well as the filename.
|
||||||
|
# If specified, requires the python>=3.9 or backports.zoneinfo library.
|
||||||
|
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
|
||||||
|
# string value is passed to ZoneInfo()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the "slug" field
|
||||||
|
# truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; This defaults
|
||||||
|
# to alembic/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path.
|
||||||
|
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||||
|
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||||
|
|
||||||
|
# version path separator; As mentioned above, this is the character used to split
|
||||||
|
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||||
|
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||||
|
# Valid values for version_path_separator are:
|
||||||
|
#
|
||||||
|
# version_path_separator = :
|
||||||
|
# version_path_separator = ;
|
||||||
|
# version_path_separator = space
|
||||||
|
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||||
|
|
||||||
|
# set to 'true' to search source files recursively
|
||||||
|
# in each "version_locations" directory
|
||||||
|
# new in Alembic version 1.10
|
||||||
|
# recursive_version_locations = false
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||||
|
|
||||||
|
|
||||||
|
[post_write_hooks]
|
||||||
|
# post_write_hooks defines scripts or Python functions that are run
|
||||||
|
# on newly generated revision scripts. See the documentation for further
|
||||||
|
# detail and examples
|
||||||
|
|
||||||
|
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||||
|
# hooks = black
|
||||||
|
# black.type = console_scripts
|
||||||
|
# black.entrypoint = black
|
||||||
|
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||||
|
|
||||||
|
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
|
||||||
|
# hooks = ruff
|
||||||
|
# ruff.type = exec
|
||||||
|
# ruff.executable = %(here)s/.venv/bin/ruff
|
||||||
|
# ruff.options = --fix REVISION_SCRIPT_FILENAME
|
||||||
|
|
||||||
|
# Logging configuration
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARN
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARN
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
||||||
1
alembic/README
Normal file
1
alembic/README
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Generic single-database configuration.
|
||||||
88
alembic/env.py
Normal file
88
alembic/env.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
import os
|
||||||
|
from logging.config import fileConfig
|
||||||
|
|
||||||
|
from sqlalchemy import engine_from_config, pool
|
||||||
|
|
||||||
|
from alembic import context
|
||||||
|
from letta.config import LettaConfig
|
||||||
|
from letta.orm import Base
|
||||||
|
from letta.settings import DatabaseChoice, settings
|
||||||
|
|
||||||
|
letta_config = LettaConfig.load()
|
||||||
|
|
||||||
|
# this is the Alembic Config object, which provides
|
||||||
|
# access to the values within the .ini file in use.
|
||||||
|
config = context.config
|
||||||
|
|
||||||
|
if settings.database_engine is DatabaseChoice.POSTGRES:
|
||||||
|
config.set_main_option("sqlalchemy.url", settings.letta_pg_uri)
|
||||||
|
print("Using database: ", settings.letta_pg_uri)
|
||||||
|
else:
|
||||||
|
config.set_main_option("sqlalchemy.url", "sqlite:///" + os.path.join(letta_config.recall_storage_path, "sqlite.db"))
|
||||||
|
|
||||||
|
# Interpret the config file for Python logging.
|
||||||
|
# This line sets up loggers basically.
|
||||||
|
if config.config_file_name is not None:
|
||||||
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
|
# add your model's MetaData object here
|
||||||
|
# for 'autogenerate' support
|
||||||
|
# from myapp import mymodel
|
||||||
|
# target_metadata = mymodel.Base.metadata
|
||||||
|
|
||||||
|
target_metadata = Base.metadata
|
||||||
|
|
||||||
|
# other values from the config, defined by the needs of env.py,
|
||||||
|
# can be acquired:
|
||||||
|
# my_important_option = config.get_main_option("my_important_option")
|
||||||
|
# ... etc.
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL rather than a
    live Engine, so no DBAPI driver needs to be installed. Calls to
    ``context.execute()`` emit the given SQL to the script output instead
    of executing it against a connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ``[alembic]`` config section and runs the
    migrations over a real database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot migration connection; no pooling needed
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata, include_schemas=True)

        with context.begin_transaction():
            context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
# Entry point: Alembic selects offline mode for SQL-script generation
# (e.g. `alembic upgrade --sql`); otherwise migrate over a live connection.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||||
26
alembic/script.py.mako
Normal file
26
alembic/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = ${repr(up_revision)}
|
||||||
|
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||||
|
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
${downgrades if downgrades else "pass"}
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
"""Add batch_item_id to messages
|
||||||
|
|
||||||
|
Revision ID: 0335b1eb9c40
|
||||||
|
Revises: 373dabcba6cf
|
||||||
|
Create Date: 2025-05-02 10:30:08.156190
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "0335b1eb9c40"
|
||||||
|
down_revision: Union[str, None] = "373dabcba6cf"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the nullable messages.batch_item_id column (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("messages", sa.Column("batch_item_id", sa.String(), nullable=True))
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove messages.batch_item_id (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("messages", "batch_item_id")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
"""add metrics to agent loop runs
|
||||||
|
|
||||||
|
Revision ID: 05c3bc564286
|
||||||
|
Revises: d007f4ca66bf
|
||||||
|
Create Date: 2025-08-06 14:30:48.255538
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "05c3bc564286"
|
||||||
|
down_revision: Union[str, None] = "d007f4ca66bf"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add nanosecond timing metric columns (ttft_ns, total_duration_ns) to jobs."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("jobs", sa.Column("ttft_ns", sa.BigInteger(), nullable=True))
    op.add_column("jobs", sa.Column("total_duration_ns", sa.BigInteger(), nullable=True))
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the jobs timing metric columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("jobs", "total_duration_ns")
    op.drop_column("jobs", "ttft_ns")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,60 @@
|
|||||||
|
"""Add vector_db_provider to archives table
|
||||||
|
|
||||||
|
Revision ID: 068588268b02
|
||||||
|
Revises: 887a4367b560
|
||||||
|
Create Date: 2025-08-27 13:16:29.428231
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "068588268b02"
|
||||||
|
down_revision: Union[str, None] = "887a4367b560"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add archives.vector_db_provider, backfilled to 'NATIVE'.

    PostgreSQL gets a proper enum type and a NOT NULL column; SQLite gets a
    plain string column left nullable (SQLite ALTER TABLE limitation).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    if settings.letta_pg_uri_no_default:
        # PostgreSQL - use enum type
        vectordbprovider = sa.Enum("NATIVE", "TPUF", name="vectordbprovider")
        vectordbprovider.create(op.get_bind(), checkfirst=True)

        # Add column as nullable first
        op.add_column("archives", sa.Column("vector_db_provider", vectordbprovider, nullable=True))

        # Backfill existing rows with NATIVE
        op.execute("UPDATE archives SET vector_db_provider = 'NATIVE' WHERE vector_db_provider IS NULL")

        # Make column non-nullable
        op.alter_column("archives", "vector_db_provider", nullable=False)
    else:
        # SQLite - use string type
        # Add column as nullable first
        op.add_column("archives", sa.Column("vector_db_provider", sa.String(), nullable=True))

        # Backfill existing rows with NATIVE
        op.execute("UPDATE archives SET vector_db_provider = 'NATIVE' WHERE vector_db_provider IS NULL")

        # For SQLite, we need to recreate the table to make column non-nullable
        # This is a limitation of SQLite ALTER TABLE
        # For simplicity, we'll leave it nullable in SQLite
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop archives.vector_db_provider (and the PG enum type when applicable)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("archives", "vector_db_provider")

    if settings.letta_pg_uri_no_default:
        # Drop enum type for PostgreSQL
        vectordbprovider = sa.Enum("NATIVE", "TPUF", name="vectordbprovider")
        vectordbprovider.drop(op.get_bind(), checkfirst=True)
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
"""support for project_id for blocks and groups
|
||||||
|
|
||||||
|
Revision ID: 06fbbf65d4f1
|
||||||
|
Revises: f55542f37641
|
||||||
|
Create Date: 2025-07-21 15:07:32.133538
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "06fbbf65d4f1"
|
||||||
|
down_revision: Union[str, None] = "f55542f37641"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add a nullable project_id column to the block and groups tables.

    The backfill statements are intentionally left commented out; see the
    NOTE below about locking against a running application.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("block", sa.Column("project_id", sa.String(), nullable=True))
    op.add_column("groups", sa.Column("project_id", sa.String(), nullable=True))

    # NOTE: running the backfill on alembic will result in locking with running application.
    # This is okay if okay with downtime. Options also to do rolling migration or dynamic updates.

    # Backfill project_id for blocks table
    # Since all agents for a block have the same project_id, we can just grab the first one
    # op.execute(
    #     text(
    #         """
    #         UPDATE block
    #         SET project_id = (
    #             SELECT a.project_id
    #             FROM blocks_agents ba
    #             JOIN agents a ON ba.agent_id = a.id
    #             WHERE ba.block_id = block.id
    #             AND a.project_id IS NOT NULL
    #             LIMIT 1
    #         )
    #         """
    #     )
    # )

    # Backfill project_id for groups table
    # op.execute(
    #     text(
    #         """
    #         UPDATE groups
    #         SET project_id = (
    #             SELECT a.project_id
    #             FROM groups_agents ga
    #             JOIN agents a ON ga.agent_id = a.id
    #             WHERE ga.group_id = groups.id
    #             AND a.project_id IS NOT NULL
    #             LIMIT 1
    #         )
    #         """
    #     )
    # )
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove project_id from the groups and block tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("groups", "project_id")
    op.drop_column("block", "project_id")
    # ### end Alembic commands ###
|
||||||
58
alembic/versions/08b2f8225812_adding_toolsagents_orm.py
Normal file
58
alembic/versions/08b2f8225812_adding_toolsagents_orm.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
"""adding ToolsAgents ORM
|
||||||
|
|
||||||
|
Revision ID: 08b2f8225812
|
||||||
|
Revises: 3c683a662c82
|
||||||
|
Create Date: 2024-12-05 16:46:51.258831
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "08b2f8225812"
|
||||||
|
down_revision: Union[str, None] = "3c683a662c82"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the tools_agents mapping table (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "tools_agents",
        sa.Column("agent_id", sa.String(), nullable=False),
        sa.Column("tool_id", sa.String(), nullable=False),
        sa.Column("tool_name", sa.String(), nullable=False),
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
        sa.Column("_created_by_id", sa.String(), nullable=True),
        sa.Column("_last_updated_by_id", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["agent_id"],
            ["agents.id"],
        ),
        sa.ForeignKeyConstraint(["tool_id"], ["tools.id"], name="fk_tool_id"),
        sa.PrimaryKeyConstraint("agent_id", "tool_id", "tool_name", "id"),
        # A given tool name may be attached to an agent at most once.
        sa.UniqueConstraint("agent_id", "tool_name", name="unique_tool_per_agent"),
    )
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the tools_agents mapping table (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("tools_agents")
    # ### end Alembic commands ###
|
||||||
63
alembic/versions/0b496eae90de_add_file_agent_table.py
Normal file
63
alembic/versions/0b496eae90de_add_file_agent_table.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
"""Add file agent table
|
||||||
|
|
||||||
|
Revision ID: 0b496eae90de
|
||||||
|
Revises: 341068089f14
|
||||||
|
Create Date: 2025-06-02 15:14:33.730687
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "0b496eae90de"
|
||||||
|
down_revision: Union[str, None] = "341068089f14"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the files_agents association table and its lookup index (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "files_agents",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("file_id", sa.String(), nullable=False),
        sa.Column("agent_id", sa.String(), nullable=False),
        sa.Column("is_open", sa.Boolean(), nullable=False),
        sa.Column("visible_content", sa.Text(), nullable=True),
        sa.Column("last_accessed_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
        sa.Column("_created_by_id", sa.String(), nullable=True),
        sa.Column("_last_updated_by_id", sa.String(), nullable=True),
        sa.Column("organization_id", sa.String(), nullable=False),
        # Rows disappear automatically when the agent or file is deleted.
        sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["file_id"], ["files.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
        ),
        sa.PrimaryKeyConstraint("id", "file_id", "agent_id"),
    )
    op.create_index("ix_files_agents_file_id_agent_id", "files_agents", ["file_id", "agent_id"], unique=False)
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the files_agents table and its index (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("ix_files_agents_file_id_agent_id", table_name="files_agents")
    op.drop_table("files_agents")
    # ### end Alembic commands ###
|
||||||
95
alembic/versions/0ceb975e0063_add_llm_batch_jobs_tables.py
Normal file
95
alembic/versions/0ceb975e0063_add_llm_batch_jobs_tables.py
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
"""Add LLM batch jobs tables
|
||||||
|
|
||||||
|
Revision ID: 0ceb975e0063
|
||||||
|
Revises: 90bb156e71df
|
||||||
|
Create Date: 2025-04-07 15:57:18.475151
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
import letta
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "0ceb975e0063"
|
||||||
|
down_revision: Union[str, None] = "90bb156e71df"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create llm_batch_job and llm_batch_items tables plus their indexes (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "llm_batch_job",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("llm_provider", sa.String(), nullable=False),
        # Provider responses are serialized via project custom column types.
        sa.Column("create_batch_response", letta.orm.custom_columns.CreateBatchResponseColumn(), nullable=False),
        sa.Column("latest_polling_response", letta.orm.custom_columns.PollBatchResponseColumn(), nullable=True),
        sa.Column("last_polled_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
        sa.Column("_created_by_id", sa.String(), nullable=True),
        sa.Column("_last_updated_by_id", sa.String(), nullable=True),
        sa.Column("organization_id", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_llm_batch_job_created_at", "llm_batch_job", ["created_at"], unique=False)
    op.create_index("ix_llm_batch_job_status", "llm_batch_job", ["status"], unique=False)
    op.create_table(
        "llm_batch_items",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("batch_id", sa.String(), nullable=False),
        sa.Column("llm_config", letta.orm.custom_columns.LLMConfigColumn(), nullable=False),
        sa.Column("request_status", sa.String(), nullable=False),
        sa.Column("step_status", sa.String(), nullable=False),
        sa.Column("step_state", letta.orm.custom_columns.AgentStepStateColumn(), nullable=False),
        sa.Column("batch_request_result", letta.orm.custom_columns.BatchRequestResultColumn(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
        sa.Column("_created_by_id", sa.String(), nullable=True),
        sa.Column("_last_updated_by_id", sa.String(), nullable=True),
        sa.Column("organization_id", sa.String(), nullable=False),
        sa.Column("agent_id", sa.String(), nullable=False),
        # Items vanish with their agent or their parent batch job.
        sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["batch_id"], ["llm_batch_job.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["organization_id"],
            ["organizations.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("ix_llm_batch_items_agent_id", "llm_batch_items", ["agent_id"], unique=False)
    op.create_index("ix_llm_batch_items_batch_id", "llm_batch_items", ["batch_id"], unique=False)
    op.create_index("ix_llm_batch_items_status", "llm_batch_items", ["request_status"], unique=False)
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the llm_batch_items and llm_batch_job tables and their indexes (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    # Child table (llm_batch_items) goes first because it references llm_batch_job.
    op.drop_index("ix_llm_batch_items_status", table_name="llm_batch_items")
    op.drop_index("ix_llm_batch_items_batch_id", table_name="llm_batch_items")
    op.drop_index("ix_llm_batch_items_agent_id", table_name="llm_batch_items")
    op.drop_table("llm_batch_items")
    op.drop_index("ix_llm_batch_job_status", table_name="llm_batch_job")
    op.drop_index("ix_llm_batch_job_created_at", table_name="llm_batch_job")
    op.drop_table("llm_batch_job")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,31 @@
|
|||||||
|
"""Add hidden property to agents
|
||||||
|
|
||||||
|
Revision ID: 15b577c62f3f
|
||||||
|
Revises: 4c6c9ef0387d
|
||||||
|
Create Date: 2025-07-30 13:19:15.213121
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "15b577c62f3f"
|
||||||
|
down_revision: Union[str, None] = "4c6c9ef0387d"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add agents.hidden and mark agents in template projects as hidden."""
    op.add_column("agents", sa.Column("hidden", sa.Boolean(), nullable=True))

    # Set hidden=true for existing agents with project names starting with "templates"
    connection = op.get_bind()
    connection.execute(sa.text("UPDATE agents SET hidden = true WHERE project_id LIKE 'templates-%'"))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the agents.hidden column."""
    op.drop_column("agents", "hidden")
|
||||||
47
alembic/versions/167491cfb7a8_add_identities_for_blocks.py
Normal file
47
alembic/versions/167491cfb7a8_add_identities_for_blocks.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
"""add identities for blocks
|
||||||
|
|
||||||
|
Revision ID: 167491cfb7a8
|
||||||
|
Revises: d211df879a5f
|
||||||
|
Create Date: 2025-03-07 17:51:24.843275
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "167491cfb7a8"
|
||||||
|
down_revision: Union[str, None] = "d211df879a5f"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the identities_blocks association table (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "identities_blocks",
        sa.Column("identity_id", sa.String(), nullable=False),
        sa.Column("block_id", sa.String(), nullable=False),
        # Association rows are removed when either side is deleted.
        sa.ForeignKeyConstraint(["block_id"], ["block.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["identity_id"], ["identities.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("identity_id", "block_id"),
    )
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the identities_blocks association table (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("identities_blocks")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
"""add instructions field to sources
|
||||||
|
|
||||||
|
Revision ID: 18e300709530
|
||||||
|
Revises: 878607e41ca4
|
||||||
|
Create Date: 2025-05-08 17:56:20.877183
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "18e300709530"
|
||||||
|
down_revision: Union[str, None] = "878607e41ca4"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the nullable sources.instructions column (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("sources", sa.Column("instructions", sa.String(), nullable=True))
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove sources.instructions (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("sources", "instructions")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
"""add agent_id index to mapping tables
|
||||||
|
|
||||||
|
Revision ID: 18ff61fbc034
|
||||||
|
Revises: b888f21b151f
|
||||||
|
Create Date: 2025-09-10 19:16:39.118760
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "18ff61fbc034"
|
||||||
|
down_revision: Union[str, None] = "b888f21b151f"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add lookup indexes on mapping tables, block.label, and agents.organization_id."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index("ix_blocks_agents_block_id", "blocks_agents", ["block_id"], unique=False)
    op.create_index("ix_block_label", "block", ["label"], unique=False)
    op.create_index("ix_agents_organization_id", "agents", ["organization_id"], unique=False)
    op.create_index("ix_tools_agents_tool_id", "tools_agents", ["tool_id"], unique=False)
    op.create_index("ix_sources_agents_source_id", "sources_agents", ["source_id"], unique=False)
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the lookup indexes added by this revision (reverse order)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("ix_sources_agents_source_id", table_name="sources_agents")
    op.drop_index("ix_tools_agents_tool_id", table_name="tools_agents")
    op.drop_index("ix_agents_organization_id", table_name="agents")
    op.drop_index("ix_block_label", table_name="block")
    op.drop_index("ix_blocks_agents_block_id", table_name="blocks_agents")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,54 @@
|
|||||||
|
"""Fix files_agents constraints
|
||||||
|
|
||||||
|
Revision ID: 1af251a42c06
|
||||||
|
Revises: 51999513bcf1
|
||||||
|
Create Date: 2025-06-30 11:50:42.200885
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "1af251a42c06"
|
||||||
|
down_revision: Union[str, None] = "51999513bcf1"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Rename the files_agents unique constraints and indexes to shorter names (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the old long-named indexes/constraints, then recreate them
    # over the same column sets under the new names.
    op.drop_index("ix_files_agents_agent_file_name", table_name="files_agents")
    op.drop_index("ix_files_agents_file_id_agent_id", table_name="files_agents")
    op.drop_constraint("uq_files_agents_agent_file_name", "files_agents", type_="unique")
    op.drop_constraint("uq_files_agents_file_agent", "files_agents", type_="unique")
    op.create_index("ix_agent_filename", "files_agents", ["agent_id", "file_name"], unique=False)
    op.create_index("ix_file_agent", "files_agents", ["file_id", "agent_id"], unique=False)
    op.create_unique_constraint("uq_agent_filename", "files_agents", ["agent_id", "file_name"])
    op.create_unique_constraint("uq_file_agent", "files_agents", ["file_id", "agent_id"])
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Restore the original files_agents constraint and index names (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("uq_file_agent", "files_agents", type_="unique")
    op.drop_constraint("uq_agent_filename", "files_agents", type_="unique")
    op.drop_index("ix_file_agent", table_name="files_agents")
    op.drop_index("ix_agent_filename", table_name="files_agents")
    op.create_unique_constraint("uq_files_agents_file_agent", "files_agents", ["file_id", "agent_id"], postgresql_nulls_not_distinct=False)
    op.create_unique_constraint(
        "uq_files_agents_agent_file_name", "files_agents", ["agent_id", "file_name"], postgresql_nulls_not_distinct=False
    )
    op.create_index("ix_files_agents_file_id_agent_id", "files_agents", ["file_id", "agent_id"], unique=False)
    op.create_index("ix_files_agents_agent_file_name", "files_agents", ["agent_id", "file_name"], unique=False)
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
"""Add pip requirements to tools
|
||||||
|
|
||||||
|
Revision ID: 1c6b6a38b713
|
||||||
|
Revises: c96263433aef
|
||||||
|
Create Date: 2025-06-12 18:06:54.838510
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "1c6b6a38b713"
|
||||||
|
down_revision: Union[str, None] = "c96263433aef"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the nullable tools.pip_requirements JSON column (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("tools", sa.Column("pip_requirements", sa.JSON(), nullable=True))
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove tools.pip_requirements (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("tools", "pip_requirements")
    # ### end Alembic commands ###
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
"""Make an blocks agents mapping table
|
||||||
|
|
||||||
|
Revision ID: 1c8880d671ee
|
||||||
|
Revises: f81ceea2c08d
|
||||||
|
Create Date: 2024-11-22 15:42:47.209229
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "1c8880d671ee"
|
||||||
|
down_revision: Union[str, None] = "f81ceea2c08d"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Create the blocks_agents mapping table (PostgreSQL only)."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    # The composite FK below references (block.id, block.label), so that
    # pair must first be made unique on the block table.
    op.create_unique_constraint("unique_block_id_label", "block", ["id", "label"])

    op.create_table(
        "blocks_agents",
        sa.Column("agent_id", sa.String(), nullable=False),
        sa.Column("block_id", sa.String(), nullable=False),
        sa.Column("block_label", sa.String(), nullable=False),
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
        sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
        sa.Column("_created_by_id", sa.String(), nullable=True),
        sa.Column("_last_updated_by_id", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["agent_id"],
            ["agents.id"],
        ),
        sa.ForeignKeyConstraint(["block_id", "block_label"], ["block.id", "block.label"], name="fk_block_id_label"),
        sa.PrimaryKeyConstraint("agent_id", "block_id", "block_label", "id"),
        # An agent may hold at most one block per label.
        sa.UniqueConstraint("agent_id", "block_label", name="unique_label_per_agent"),
    )
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint("unique_block_id_label", "block", type_="unique")
|
||||||
|
op.drop_table("blocks_agents")
|
||||||
|
# ### end Alembic commands ###
|
||||||
43
alembic/versions/1dc0fee72dea_add_block_related_indexes.py
Normal file
43
alembic/versions/1dc0fee72dea_add_block_related_indexes.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
"""add block-related indexes

Revision ID: 1dc0fee72dea
Revises: 18e300709530
Create Date: 2025-05-12 17:06:32.055091

"""

from typing import Sequence, Union

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
revision: str = "1dc0fee72dea"
down_revision: Union[str, None] = "18e300709530"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add lookup indexes on ``blocks_agents`` and ``agents_tags``."""
    # Skip this migration for SQLite
    # (falsy letta_pg_uri_no_default means the SQLite backend is in use)
    if not settings.letta_pg_uri_no_default:
        return

    # add index for blocks_agents table
    op.create_index("ix_blocks_agents_block_label_agent_id", "blocks_agents", ["block_label", "agent_id"], unique=False)

    # add index for just block_label
    # NOTE: the index name says "blocks" but it lives on blocks_agents;
    # kept as-is since the name is referenced by the downgrade below.
    op.create_index("ix_blocks_block_label", "blocks_agents", ["block_label"], unique=False)

    # add index for agent_tags for agent_id and tag
    op.create_index("ix_agents_tags_agent_id_tag", "agents_tags", ["agent_id", "tag"], unique=False)


def downgrade() -> None:
    """Drop the indexes created in :func:`upgrade`."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    op.drop_index("ix_blocks_agents_block_label_agent_id", table_name="blocks_agents")
    op.drop_index("ix_blocks_block_label", table_name="blocks_agents")
    op.drop_index("ix_agents_tags_agent_id_tag", table_name="agents_tags")
||||||
40
alembic/versions/1e553a664210_add_metadata_to_tools.py
Normal file
40
alembic/versions/1e553a664210_add_metadata_to_tools.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""Add metadata to Tools

Revision ID: 1e553a664210
Revises: 2cceb07c2384
Create Date: 2025-03-17 15:50:05.562302

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
revision: str = "1e553a664210"
down_revision: Union[str, None] = "2cceb07c2384"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add a nullable JSON ``metadata_`` column to ``tools``."""
    # Skip this migration for SQLite
    # (falsy letta_pg_uri_no_default means the SQLite backend is in use)
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("tools", sa.Column("metadata_", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Remove the ``metadata_`` column from ``tools``."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("tools", "metadata_")
    # ### end Alembic commands ###
44
alembic/versions/220856bbf43b_add_read_only_column.py
Normal file
44
alembic/versions/220856bbf43b_add_read_only_column.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
"""add read-only column

Revision ID: 220856bbf43b
Revises: 1dc0fee72dea
Create Date: 2025-05-13 14:42:17.353614

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
revision: str = "220856bbf43b"
down_revision: Union[str, None] = "1dc0fee72dea"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add a NOT NULL ``read_only`` boolean to ``block``.

    Three-step sequence (order matters): add the column as nullable, backfill
    existing rows with ``False``, then tighten the column to NOT NULL.
    """
    # Skip this migration for SQLite
    # (falsy letta_pg_uri_no_default means the SQLite backend is in use)
    if not settings.letta_pg_uri_no_default:
        return

    # add default value of `False`
    op.add_column("block", sa.Column("read_only", sa.Boolean(), nullable=True))
    # backfill existing rows so the NOT NULL constraint below can be applied
    op.execute(
        """
        UPDATE block
        SET read_only = False
        """
    )
    op.alter_column("block", "read_only", nullable=False)


def downgrade() -> None:
    """Drop the ``read_only`` column from ``block``."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    op.drop_column("block", "read_only")
||||||
40
alembic/versions/22a6e413d89c_remove_module_field_on_tool.py
Normal file
40
alembic/versions/22a6e413d89c_remove_module_field_on_tool.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""Remove module field on tool

Revision ID: 22a6e413d89c
Revises: 88f9432739a9
Create Date: 2025-01-10 17:38:23.811795

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
revision: str = "22a6e413d89c"
down_revision: Union[str, None] = "88f9432739a9"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Drop the unused ``module`` column from ``tools``."""
    # Skip this migration for SQLite
    # (falsy letta_pg_uri_no_default means the SQLite backend is in use)
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("tools", "module")
    # ### end Alembic commands ###


def downgrade() -> None:
    """Re-add the ``module`` column to ``tools``.

    Note: the original column values are not recoverable; the column comes
    back nullable and empty.
    """
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("tools", sa.Column("module", sa.VARCHAR(), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
||||||
52
alembic/versions/25fc99e97839_fix_alembic_check_warnings.py
Normal file
52
alembic/versions/25fc99e97839_fix_alembic_check_warnings.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
"""Remove job_usage_statistics indices and update job_messages

Revision ID: 25fc99e97839
Revises: f595e0e8013e
Create Date: 2025-01-16 16:48:21.000000

"""

from typing import Sequence, Union

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
revision: str = "25fc99e97839"
down_revision: Union[str, None] = "f595e0e8013e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Drop job-related indexes and add the ``job_messages`` -> ``messages`` FK."""
    # Skip this migration for SQLite
    # (falsy letta_pg_uri_no_default means the SQLite backend is in use)
    if not settings.letta_pg_uri_no_default:
        return

    # Remove indices from job_messages
    op.drop_index("ix_job_messages_created_at", table_name="job_messages")
    op.drop_index("ix_job_messages_job_id", table_name="job_messages")

    # Remove indices from job_usage_statistics
    op.drop_index("ix_job_usage_statistics_created_at", table_name="job_usage_statistics")
    op.drop_index("ix_job_usage_statistics_job_id", table_name="job_usage_statistics")

    # Add foreign key constraint for message_id
    # (CASCADE so job_messages rows disappear with their parent message)
    op.create_foreign_key("fk_job_messages_message_id", "job_messages", "messages", ["message_id"], ["id"], ondelete="CASCADE")


def downgrade() -> None:
    """Drop the FK and recreate the indexes removed in :func:`upgrade`."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # Remove the foreign key constraint
    op.drop_constraint("fk_job_messages_message_id", "job_messages", type_="foreignkey")

    # Recreate indices for job_messages
    op.create_index("ix_job_messages_job_id", "job_messages", ["job_id"])
    op.create_index("ix_job_messages_created_at", "job_messages", ["created_at"])

    # Recreate indices for job_usage_statistics
    op.create_index("ix_job_usage_statistics_job_id", "job_usage_statistics", ["job_id"])
    op.create_index("ix_job_usage_statistics_created_at", "job_usage_statistics", ["created_at"])
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
"""add support for structured_outputs in agents

Revision ID: 28b8765bdd0a
Revises: a3c7d62e08ca
Create Date: 2025-04-18 11:43:47.701786

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
revision: str = "28b8765bdd0a"
down_revision: Union[str, None] = "a3c7d62e08ca"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add a nullable JSON ``response_format`` column to ``agents``."""
    # Skip this migration for SQLite
    # (falsy letta_pg_uri_no_default means the SQLite backend is in use)
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("agents", sa.Column("response_format", sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Remove the ``response_format`` column from ``agents``."""
    # Skip this migration for SQLite
    if not settings.letta_pg_uri_no_default:
        return

    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("agents", "response_format")
    # ### end Alembic commands ###
||||||
798
alembic/versions/2c059cad97cc_create_sqlite_baseline_schema.py
Normal file
798
alembic/versions/2c059cad97cc_create_sqlite_baseline_schema.py
Normal file
@@ -0,0 +1,798 @@
|
|||||||
|
"""create_sqlite_baseline_schema

Revision ID: 2c059cad97cc
Revises: 495f3f474131
Create Date: 2025-07-16 14:34:21.280233

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.settings import settings

# revision identifiers, used by Alembic.
# Unlike the other migrations in this chain, this revision runs ONLY on
# SQLite (its upgrade() returns early when a Postgres URI is configured)
# and creates a full baseline schema snapshot for SQLite deployments.
revision: str = "2c059cad97cc"
down_revision: Union[str, None] = "495f3f474131"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Only run this migration for SQLite
|
||||||
|
if settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Create the exact schema that matches the current PostgreSQL state
|
||||||
|
# This is a snapshot of the schema at the time of this migration
|
||||||
|
# Based on the schema provided by Andy
|
||||||
|
|
||||||
|
# Organizations table
|
||||||
|
op.create_table(
|
||||||
|
"organizations",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("privileged_tools", sa.Boolean(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Agents table
|
||||||
|
op.create_table(
|
||||||
|
"agents",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("message_ids", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("system", sa.String(), nullable=True),
|
||||||
|
sa.Column("agent_type", sa.String(), nullable=True),
|
||||||
|
sa.Column("llm_config", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("embedding_config", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("tool_rules", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("project_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("template_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("base_template_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("message_buffer_autoclear", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("enable_sleeptime", sa.Boolean(), nullable=True),
|
||||||
|
sa.Column("response_format", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("last_run_completion", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("last_run_duration_ms", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("timezone", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
)
|
||||||
|
op.create_index("ix_agents_created_at", "agents", ["created_at", "id"])
|
||||||
|
|
||||||
|
# Block history table (created before block table so block can reference it)
|
||||||
|
op.create_table(
|
||||||
|
"block_history",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.Text(), nullable=True),
|
||||||
|
sa.Column("label", sa.String(), nullable=False),
|
||||||
|
sa.Column("value", sa.Text(), nullable=False),
|
||||||
|
sa.Column("limit", sa.BigInteger(), nullable=False),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("actor_type", sa.String(), nullable=True),
|
||||||
|
sa.Column("actor_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("block_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("sequence_number", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
# Note: block_id foreign key will be added later since block table doesn't exist yet
|
||||||
|
)
|
||||||
|
op.create_index("ix_block_history_block_id_sequence", "block_history", ["block_id", "sequence_number"], unique=True)
|
||||||
|
|
||||||
|
# Block table
|
||||||
|
op.create_table(
|
||||||
|
"block",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("value", sa.String(), nullable=False),
|
||||||
|
sa.Column("limit", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("template_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("label", sa.String(), nullable=False),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("is_template", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("current_history_entry_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("version", sa.Integer(), server_default="1", nullable=False),
|
||||||
|
sa.Column("read_only", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("preserve_on_migration", sa.Boolean(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["current_history_entry_id"], ["block_history.id"], name="fk_block_current_history_entry"),
|
||||||
|
sa.UniqueConstraint("id", "label", name="unique_block_id_label"),
|
||||||
|
)
|
||||||
|
op.create_index("created_at_label_idx", "block", ["created_at", "label"])
|
||||||
|
op.create_index("ix_block_current_history_entry_id", "block", ["current_history_entry_id"])
|
||||||
|
|
||||||
|
# Note: Foreign key constraint for block_history.block_id cannot be added in SQLite after table creation
|
||||||
|
# This will be enforced at the ORM level
|
||||||
|
|
||||||
|
# Sources table
|
||||||
|
op.create_table(
|
||||||
|
"sources",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("embedding_config", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("instructions", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("name", "organization_id", name="uq_source_name_organization"),
|
||||||
|
)
|
||||||
|
op.create_index("source_created_at_id_idx", "sources", ["created_at", "id"])
|
||||||
|
|
||||||
|
# Files table
|
||||||
|
op.create_table(
|
||||||
|
"files",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("source_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("file_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("file_path", sa.String(), nullable=True),
|
||||||
|
sa.Column("file_type", sa.String(), nullable=True),
|
||||||
|
sa.Column("file_size", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("file_creation_date", sa.String(), nullable=True),
|
||||||
|
sa.Column("file_last_modified_date", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("processing_status", sa.String(), nullable=False),
|
||||||
|
sa.Column("error_message", sa.Text(), nullable=True),
|
||||||
|
sa.Column("original_file_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("total_chunks", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("chunks_embedded", sa.Integer(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
)
|
||||||
|
# Note: SQLite doesn't support expression indexes, so these are simplified
|
||||||
|
op.create_index("ix_files_org_created", "files", ["organization_id"])
|
||||||
|
op.create_index("ix_files_processing_status", "files", ["processing_status"])
|
||||||
|
op.create_index("ix_files_source_created", "files", ["source_id"])
|
||||||
|
|
||||||
|
# Users table
|
||||||
|
op.create_table(
|
||||||
|
"users",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Jobs table
|
||||||
|
op.create_table(
|
||||||
|
"jobs",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("user_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("status", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("job_type", sa.String(), nullable=False),
|
||||||
|
sa.Column("request_config", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("callback_url", sa.String(), nullable=True),
|
||||||
|
sa.Column("callback_sent_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("callback_status_code", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("callback_error", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
|
||||||
|
)
|
||||||
|
op.create_index("ix_jobs_created_at", "jobs", ["created_at", "id"])
|
||||||
|
|
||||||
|
# Tools table
|
||||||
|
op.create_table(
|
||||||
|
"tools",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("source_type", sa.String(), nullable=False),
|
||||||
|
sa.Column("source_code", sa.String(), nullable=True),
|
||||||
|
sa.Column("json_schema", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("tags", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("return_char_limit", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("tool_type", sa.String(), nullable=False),
|
||||||
|
sa.Column("args_json_schema", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("pip_requirements", sa.JSON(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("name", "organization_id", name="uix_name_organization"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_tools_created_at_name", "tools", ["created_at", "name"])
|
||||||
|
|
||||||
|
# Additional tables based on Andy's schema
|
||||||
|
|
||||||
|
# Agents tags table
|
||||||
|
op.create_table(
|
||||||
|
"agents_tags",
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("tag", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"]),
|
||||||
|
sa.UniqueConstraint("agent_id", "tag", name="unique_agent_tag"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_agents_tags_agent_id_tag", "agents_tags", ["agent_id", "tag"])
|
||||||
|
|
||||||
|
# Sandbox configs table
|
||||||
|
op.create_table(
|
||||||
|
"sandbox_configs",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("type", sa.String(), nullable=False), # sandboxtype in PG
|
||||||
|
sa.Column("config", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("type", "organization_id", name="uix_type_organization"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Sandbox environment variables table
|
||||||
|
op.create_table(
|
||||||
|
"sandbox_environment_variables",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("key", sa.String(), nullable=False),
|
||||||
|
sa.Column("value", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("sandbox_config_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["sandbox_config_id"], ["sandbox_configs.id"]),
|
||||||
|
sa.UniqueConstraint("key", "sandbox_config_id", name="uix_key_sandbox_config"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Blocks agents table
|
||||||
|
op.create_table(
|
||||||
|
"blocks_agents",
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("block_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("block_label", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["block_id", "block_label"], ["block.id", "block.label"], deferrable=True, initially="DEFERRED"),
|
||||||
|
sa.UniqueConstraint("agent_id", "block_label", name="unique_label_per_agent"),
|
||||||
|
sa.UniqueConstraint("agent_id", "block_id", name="unique_agent_block"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_blocks_agents_block_label_agent_id", "blocks_agents", ["block_label", "agent_id"])
|
||||||
|
op.create_index("ix_blocks_block_label", "blocks_agents", ["block_label"])
|
||||||
|
|
||||||
|
# Tools agents table
|
||||||
|
op.create_table(
|
||||||
|
"tools_agents",
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("tool_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["tool_id"], ["tools.id"], ondelete="CASCADE"),
|
||||||
|
sa.UniqueConstraint("agent_id", "tool_id", name="unique_agent_tool"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Sources agents table
|
||||||
|
op.create_table(
|
||||||
|
"sources_agents",
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("source_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("agent_id", "source_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Agent passages table (using BLOB for vectors in SQLite)
|
||||||
|
op.create_table(
|
||||||
|
"agent_passages",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("text", sa.String(), nullable=False),
|
||||||
|
sa.Column("embedding_config", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("embedding", sa.BLOB(), nullable=True), # CommonVector becomes BLOB in SQLite
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
)
|
||||||
|
# Note: agent_passages_org_idx is not created for SQLite as it's expected to be different
|
||||||
|
op.create_index("agent_passages_created_at_id_idx", "agent_passages", ["created_at", "id"])
|
||||||
|
op.create_index("ix_agent_passages_org_agent", "agent_passages", ["organization_id", "agent_id"])
|
||||||
|
|
||||||
|
# Source passages table (using BLOB for vectors in SQLite)
|
||||||
|
op.create_table(
|
||||||
|
"source_passages",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("text", sa.String(), nullable=False),
|
||||||
|
sa.Column("embedding_config", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("embedding", sa.BLOB(), nullable=True), # CommonVector becomes BLOB in SQLite
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("file_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("source_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("file_name", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["file_id"], ["files.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="CASCADE"),
|
||||||
|
)
|
||||||
|
# Note: source_passages_org_idx is not created for SQLite as it's expected to be different
|
||||||
|
op.create_index("source_passages_created_at_id_idx", "source_passages", ["created_at", "id"])
|
||||||
|
|
||||||
|
# Message sequence is handled by the sequence_id field in messages table
|
||||||
|
|
||||||
|
# Messages table
|
||||||
|
op.create_table(
|
||||||
|
"messages",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("role", sa.String(), nullable=False),
|
||||||
|
sa.Column("text", sa.String(), nullable=True),
|
||||||
|
sa.Column("model", sa.String(), nullable=True),
|
||||||
|
sa.Column("name", sa.String(), nullable=True),
|
||||||
|
sa.Column("tool_calls", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("tool_call_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("step_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("otid", sa.String(), nullable=True),
|
||||||
|
sa.Column("tool_returns", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("group_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("content", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("sequence_id", sa.BigInteger(), nullable=False),
|
||||||
|
sa.Column("sender_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("batch_item_id", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["step_id"], ["steps.id"], ondelete="SET NULL"),
|
||||||
|
sa.UniqueConstraint("sequence_id", name="uq_messages_sequence_id"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_messages_agent_created_at", "messages", ["agent_id", "created_at"])
|
||||||
|
op.create_index("ix_messages_created_at", "messages", ["created_at", "id"])
|
||||||
|
op.create_index("ix_messages_agent_sequence", "messages", ["agent_id", "sequence_id"])
|
||||||
|
op.create_index("ix_messages_org_agent", "messages", ["organization_id", "agent_id"])
|
||||||
|
|
||||||
|
# Create sequence table for SQLite message sequence_id generation
|
||||||
|
op.create_table(
|
||||||
|
"message_sequence",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("next_val", sa.Integer(), nullable=False, server_default="1"),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Initialize the sequence table with the next available sequence_id
|
||||||
|
op.execute("INSERT INTO message_sequence (id, next_val) VALUES (1, 1)")
|
||||||
|
|
||||||
|
# Now create the rest of the tables that might reference messages/steps
|
||||||
|
|
||||||
|
# Add missing tables and columns identified from alembic check
|
||||||
|
|
||||||
|
# Identities table
|
||||||
|
op.create_table(
|
||||||
|
"identities",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("identifier_key", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("identity_type", sa.String(), nullable=False),
|
||||||
|
sa.Column("project_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("properties", sa.JSON(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("identifier_key", "project_id", "organization_id", name="unique_identifier_key_project_id_organization_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# MCP Server table
|
||||||
|
op.create_table(
|
||||||
|
"mcp_server",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("server_name", sa.String(), nullable=False),
|
||||||
|
sa.Column("server_type", sa.String(), nullable=False),
|
||||||
|
sa.Column("server_url", sa.String(), nullable=True),
|
||||||
|
sa.Column("stdio_config", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("token", sa.String(), nullable=True),
|
||||||
|
sa.Column("custom_headers", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("server_name", "organization_id", name="uix_name_organization_mcp_server"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Providers table
|
||||||
|
op.create_table(
|
||||||
|
"providers",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("name", sa.String(), nullable=False),
|
||||||
|
sa.Column("api_key", sa.String(), nullable=True),
|
||||||
|
sa.Column("access_key", sa.String(), nullable=True),
|
||||||
|
sa.Column("region", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("provider_type", sa.String(), nullable=True),
|
||||||
|
sa.Column("base_url", sa.String(), nullable=True),
|
||||||
|
sa.Column("provider_category", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("name", "organization_id", name="unique_name_organization_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Agent environment variables table
|
||||||
|
op.create_table(
|
||||||
|
"agent_environment_variables",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("key", sa.String(), nullable=False),
|
||||||
|
sa.Column("value", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.UniqueConstraint("key", "agent_id", name="uix_key_agent"),
|
||||||
|
)
|
||||||
|
op.create_index("idx_agent_environment_variables_agent_id", "agent_environment_variables", ["agent_id"])
|
||||||
|
|
||||||
|
# Groups table
|
||||||
|
op.create_table(
|
||||||
|
"groups",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=False),
|
||||||
|
sa.Column("manager_type", sa.String(), nullable=False),
|
||||||
|
sa.Column("manager_agent_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("termination_token", sa.String(), nullable=True),
|
||||||
|
sa.Column("max_turns", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_ids", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("sleeptime_agent_frequency", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("turns_counter", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("last_processed_message_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("max_message_buffer_length", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("min_message_buffer_length", sa.Integer(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["manager_agent_id"], ["agents.id"], ondelete="RESTRICT"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Steps table
|
||||||
|
op.create_table(
|
||||||
|
"steps",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("job_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("completion_tokens", sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column("prompt_tokens", sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column("total_tokens", sa.Integer(), nullable=False, default=0),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("origin", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("provider_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("provider_name", sa.String(), nullable=True),
|
||||||
|
sa.Column("model", sa.String(), nullable=True),
|
||||||
|
sa.Column("context_window_limit", sa.Integer(), nullable=True),
|
||||||
|
sa.Column("completion_tokens_details", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("tags", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("tid", sa.String(), nullable=True),
|
||||||
|
sa.Column("model_endpoint", sa.String(), nullable=True),
|
||||||
|
sa.Column("trace_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("provider_category", sa.String(), nullable=True),
|
||||||
|
sa.Column("feedback", sa.String(), nullable=True),
|
||||||
|
sa.Column("project_id", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["job_id"], ["jobs.id"], ondelete="SET NULL"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"], ondelete="RESTRICT"),
|
||||||
|
sa.ForeignKeyConstraint(["provider_id"], ["providers.id"], ondelete="RESTRICT"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Note: Foreign key constraint for block.current_history_entry_id -> block_history.id
|
||||||
|
# would need to be added here, but SQLite doesn't support ALTER TABLE ADD CONSTRAINT
|
||||||
|
# This will be handled by the ORM at runtime
|
||||||
|
|
||||||
|
# Add missing columns to existing tables
|
||||||
|
|
||||||
|
# All missing columns have been added to the table definitions above
|
||||||
|
|
||||||
|
# step_id was already added in the messages table creation above
|
||||||
|
# op.add_column('messages', sa.Column('step_id', sa.String(), nullable=True))
|
||||||
|
# op.create_foreign_key('fk_messages_step_id', 'messages', 'steps', ['step_id'], ['id'], ondelete='SET NULL')
|
||||||
|
|
||||||
|
# Add index to source_passages for file_id
|
||||||
|
op.create_index("source_passages_file_id_idx", "source_passages", ["file_id"])
|
||||||
|
|
||||||
|
# Unique constraint for sources was added during table creation above
|
||||||
|
|
||||||
|
# Create remaining association tables
|
||||||
|
|
||||||
|
# Identities agents table
|
||||||
|
op.create_table(
|
||||||
|
"identities_agents",
|
||||||
|
sa.Column("identity_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["identity_id"], ["identities.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("identity_id", "agent_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Identities blocks table
|
||||||
|
op.create_table(
|
||||||
|
"identities_blocks",
|
||||||
|
sa.Column("identity_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("block_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["identity_id"], ["identities.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["block_id"], ["block.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("identity_id", "block_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Files agents table
|
||||||
|
op.create_table(
|
||||||
|
"files_agents",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("file_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("source_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("is_open", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("visible_content", sa.Text(), nullable=True),
|
||||||
|
sa.Column("last_accessed_at", sa.DateTime(timezone=True), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("file_name", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id", "file_id", "agent_id"),
|
||||||
|
sa.ForeignKeyConstraint(["file_id"], ["files.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.UniqueConstraint("file_id", "agent_id", name="uq_file_agent"),
|
||||||
|
sa.UniqueConstraint("agent_id", "file_name", name="uq_agent_filename"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_agent_filename", "files_agents", ["agent_id", "file_name"])
|
||||||
|
op.create_index("ix_file_agent", "files_agents", ["file_id", "agent_id"])
|
||||||
|
|
||||||
|
# Groups agents table
|
||||||
|
op.create_table(
|
||||||
|
"groups_agents",
|
||||||
|
sa.Column("group_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["group_id"], ["groups.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("group_id", "agent_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Groups blocks table
|
||||||
|
op.create_table(
|
||||||
|
"groups_blocks",
|
||||||
|
sa.Column("group_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("block_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["group_id"], ["groups.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["block_id"], ["block.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("group_id", "block_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# LLM batch job table
|
||||||
|
op.create_table(
|
||||||
|
"llm_batch_job",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("status", sa.String(), nullable=False),
|
||||||
|
sa.Column("llm_provider", sa.String(), nullable=False),
|
||||||
|
sa.Column("create_batch_response", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("latest_polling_response", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("last_polled_at", sa.DateTime(timezone=True), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("letta_batch_job_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["letta_batch_job_id"], ["jobs.id"], ondelete="CASCADE"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_llm_batch_job_created_at", "llm_batch_job", ["created_at"])
|
||||||
|
op.create_index("ix_llm_batch_job_status", "llm_batch_job", ["status"])
|
||||||
|
|
||||||
|
# LLM batch items table
|
||||||
|
op.create_table(
|
||||||
|
"llm_batch_items",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("llm_config", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("request_status", sa.String(), nullable=False),
|
||||||
|
sa.Column("step_status", sa.String(), nullable=False),
|
||||||
|
sa.Column("step_state", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("batch_request_result", sa.JSON(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("llm_batch_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["llm_batch_id"], ["llm_batch_job.id"], ondelete="CASCADE"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_llm_batch_items_agent_id", "llm_batch_items", ["agent_id"])
|
||||||
|
op.create_index("ix_llm_batch_items_llm_batch_id", "llm_batch_items", ["llm_batch_id"])
|
||||||
|
op.create_index("ix_llm_batch_items_status", "llm_batch_items", ["request_status"])
|
||||||
|
|
||||||
|
# Job messages table
|
||||||
|
op.create_table(
|
||||||
|
"job_messages",
|
||||||
|
sa.Column("id", sa.Integer(), primary_key=True),
|
||||||
|
sa.Column("job_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("message_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(["job_id"], ["jobs.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["message_id"], ["messages.id"], ondelete="CASCADE"),
|
||||||
|
sa.UniqueConstraint("job_id", "message_id", name="unique_job_message"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# File contents table
|
||||||
|
op.create_table(
|
||||||
|
"file_contents",
|
||||||
|
sa.Column("file_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("text", sa.Text(), nullable=False),
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("file_id", "id"),
|
||||||
|
sa.ForeignKeyConstraint(["file_id"], ["files.id"], ondelete="CASCADE"),
|
||||||
|
sa.UniqueConstraint("file_id", name="uq_file_contents_file_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Provider traces table
|
||||||
|
op.create_table(
|
||||||
|
"provider_traces",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("request_json", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("response_json", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("step_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("(FALSE)"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"]),
|
||||||
|
)
|
||||||
|
op.create_index("ix_step_id", "provider_traces", ["step_id"])
|
||||||
|
|
||||||
|
# Complete the SQLite schema alignment by adding any remaining missing elements
|
||||||
|
try:
|
||||||
|
# Unique constraints for files_agents are already created with correct names in table definition above
|
||||||
|
|
||||||
|
# Foreign key for files_agents.source_id is already created in table definition above
|
||||||
|
# Foreign key for messages.step_id is already created in table definition above
|
||||||
|
pass
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
# Some operations may fail if the column/constraint already exists
|
||||||
|
# This is expected in some cases and we can continue
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Note: The remaining alembic check differences are expected for SQLite:
|
||||||
|
# 1. Type differences (BLOB vs CommonVector) - Expected and handled by ORM
|
||||||
|
# 2. Foreign key constraint differences - SQLite handles these at runtime
|
||||||
|
# 3. Index differences - SQLite doesn't support all PostgreSQL index features
|
||||||
|
# 4. Some constraint naming differences - Cosmetic differences
|
||||||
|
#
|
||||||
|
# These differences do not affect functionality as the ORM handles the abstraction
|
||||||
|
# between SQLite and PostgreSQL appropriately.
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert the initial SQLite schema.

    Raises:
        NotImplementedError: always, for SQLite targets — reverting the
            full initial schema is not supported; start from a fresh
            database instead.
    """
    # Postgres deployments are covered by their own migration chain; this
    # migration is a no-op for them in both directions.
    if not settings.letta_pg_uri_no_default:
        raise NotImplementedError("SQLite downgrade is not supported. Use a fresh database instead.")
|
||||||
@@ -0,0 +1,41 @@
|
|||||||
|
"""add content parts to message
|
||||||
|
|
||||||
|
Revision ID: 2cceb07c2384
|
||||||
|
Revises: 77de976590ae
|
||||||
|
Create Date: 2025-03-13 14:30:53.177061
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.orm.custom_columns import MessageContentColumn
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "2cceb07c2384"
|
||||||
|
down_revision: Union[str, None] = "77de976590ae"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the ``content`` column (structured message parts) to ``messages``."""
    # This migration only targets Postgres; SQLite databases are created
    # with the complete schema elsewhere, so there is nothing to do.
    if not settings.letta_pg_uri_no_default:
        return

    content_column = sa.Column("content", MessageContentColumn(), nullable=True)
    op.add_column("messages", content_column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Drop the ``content`` column from ``messages``."""
    # Only Postgres databases carry this column via migration; skip SQLite.
    if not settings.letta_pg_uri_no_default:
        return

    op.drop_column("messages", "content")
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
"""add otid and tool return to message
|
||||||
|
|
||||||
|
Revision ID: 2f4ede6ae33b
|
||||||
|
Revises: 54f2311edb62
|
||||||
|
Create Date: 2025-03-05 10:04:34.717671
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
import letta.orm
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "2f4ede6ae33b"
|
||||||
|
down_revision: Union[str, None] = "54f2311edb62"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Add the ``otid`` and ``tool_returns`` columns to ``messages``."""
    # Postgres-only migration; SQLite schemas are provisioned in full elsewhere.
    if not settings.letta_pg_uri_no_default:
        return

    # Add the columns in the same order the autogenerated migration did.
    for new_column in (
        sa.Column("otid", sa.String(), nullable=True),
        sa.Column("tool_returns", letta.orm.custom_columns.ToolReturnColumn(), nullable=True),
    ):
        op.add_column("messages", new_column)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Remove the ``tool_returns`` and ``otid`` columns from ``messages``."""
    # Postgres-only migration; nothing to revert on SQLite.
    if not settings.letta_pg_uri_no_default:
        return

    # Drop in reverse order of how upgrade() added them.
    for column_name in ("tool_returns", "otid"):
        op.drop_column("messages", column_name)
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
"""add preserve_on_migration to block
|
||||||
|
|
||||||
|
Revision ID: 341068089f14
|
||||||
|
Revises: 348214cbc081
|
||||||
|
Create Date: 2025-05-29 10:39:44.494643
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "341068089f14"
|
||||||
|
down_revision: Union[str, None] = "348214cbc081"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("block", sa.Column("preserve_on_migration", sa.Boolean(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("block", "preserve_on_migration")
|
||||||
|
# ### end Alembic commands ###
|
||||||
40
alembic/versions/348214cbc081_add_org_agent_id_indices.py
Normal file
40
alembic/versions/348214cbc081_add_org_agent_id_indices.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""add org agent id indices
|
||||||
|
|
||||||
|
Revision ID: 348214cbc081
|
||||||
|
Revises: dd049fbec729
|
||||||
|
Create Date: 2025-05-28 22:43:18.509397
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "348214cbc081"
|
||||||
|
down_revision: Union[str, None] = "dd049fbec729"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_index("ix_agent_passages_org_agent", "agent_passages", ["organization_id", "agent_id"], unique=False)
|
||||||
|
op.create_index("ix_messages_org_agent", "messages", ["organization_id", "agent_id"], unique=False)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_index("ix_messages_org_agent", table_name="messages")
|
||||||
|
op.drop_index("ix_agent_passages_org_agent", table_name="agent_passages")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
"""add byok fields and unique constraint
|
||||||
|
|
||||||
|
Revision ID: 373dabcba6cf
|
||||||
|
Revises: c56081a05371
|
||||||
|
Create Date: 2025-04-30 19:38:25.010856
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "373dabcba6cf"
|
||||||
|
down_revision: Union[str, None] = "c56081a05371"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("providers", sa.Column("provider_type", sa.String(), nullable=True))
|
||||||
|
op.add_column("providers", sa.Column("base_url", sa.String(), nullable=True))
|
||||||
|
op.create_unique_constraint("unique_name_organization_id", "providers", ["name", "organization_id"])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint("unique_name_organization_id", "providers", type_="unique")
|
||||||
|
op.drop_column("providers", "base_url")
|
||||||
|
op.drop_column("providers", "provider_type")
|
||||||
|
# ### end Alembic commands ###
|
||||||
55
alembic/versions/3c683a662c82_migrate_jobs_to_the_orm.py
Normal file
55
alembic/versions/3c683a662c82_migrate_jobs_to_the_orm.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
"""Migrate jobs to the orm
|
||||||
|
|
||||||
|
Revision ID: 3c683a662c82
|
||||||
|
Revises: 5987401b40ae
|
||||||
|
Create Date: 2024-12-04 15:59:41.708396
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "3c683a662c82"
|
||||||
|
down_revision: Union[str, None] = "5987401b40ae"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("jobs", sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True))
|
||||||
|
op.add_column("jobs", sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False))
|
||||||
|
op.add_column("jobs", sa.Column("_created_by_id", sa.String(), nullable=True))
|
||||||
|
op.add_column("jobs", sa.Column("_last_updated_by_id", sa.String(), nullable=True))
|
||||||
|
op.alter_column("jobs", "status", existing_type=sa.VARCHAR(), nullable=False)
|
||||||
|
op.alter_column("jobs", "completed_at", existing_type=postgresql.TIMESTAMP(timezone=True), type_=sa.DateTime(), existing_nullable=True)
|
||||||
|
op.alter_column("jobs", "user_id", existing_type=sa.VARCHAR(), nullable=False)
|
||||||
|
op.create_foreign_key(None, "jobs", "users", ["user_id"], ["id"])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint(None, "jobs", type_="foreignkey")
|
||||||
|
op.alter_column("jobs", "user_id", existing_type=sa.VARCHAR(), nullable=True)
|
||||||
|
op.alter_column("jobs", "completed_at", existing_type=sa.DateTime(), type_=postgresql.TIMESTAMP(timezone=True), existing_nullable=True)
|
||||||
|
op.alter_column("jobs", "status", existing_type=sa.VARCHAR(), nullable=True)
|
||||||
|
op.drop_column("jobs", "_last_updated_by_id")
|
||||||
|
op.drop_column("jobs", "_created_by_id")
|
||||||
|
op.drop_column("jobs", "is_deleted")
|
||||||
|
op.drop_column("jobs", "updated_at")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,60 @@
|
|||||||
|
"""Add per agent environment variables
|
||||||
|
|
||||||
|
Revision ID: 400501b04bf0
|
||||||
|
Revises: e78b4e82db30
|
||||||
|
Create Date: 2025-01-04 20:45:28.024690
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "400501b04bf0"
|
||||||
|
down_revision: Union[str, None] = "e78b4e82db30"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
"agent_environment_variables",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("key", sa.String(), nullable=False),
|
||||||
|
sa.Column("value", sa.String(), nullable=False),
|
||||||
|
sa.Column("description", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["organization_id"],
|
||||||
|
["organizations.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.UniqueConstraint("key", "agent_id", name="uix_key_agent"),
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_table("agent_environment_variables")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,125 @@
|
|||||||
|
"""Repurpose JobUsageStatistics for new Steps table
|
||||||
|
|
||||||
|
Revision ID: 416b9d2db10b
|
||||||
|
Revises: 25fc99e97839
|
||||||
|
Create Date: 2025-01-17 11:27:42.115755
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "416b9d2db10b"
|
||||||
|
down_revision: Union[str, None] = "25fc99e97839"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Rename the table
|
||||||
|
op.rename_table("job_usage_statistics", "steps")
|
||||||
|
|
||||||
|
# Rename the foreign key constraint and drop non-null constraint
|
||||||
|
op.alter_column("steps", "job_id", nullable=True)
|
||||||
|
op.drop_constraint("fk_job_usage_statistics_job_id", "steps", type_="foreignkey")
|
||||||
|
|
||||||
|
# Change id field from int to string
|
||||||
|
op.execute("ALTER TABLE steps RENAME COLUMN id TO old_id")
|
||||||
|
op.add_column("steps", sa.Column("id", sa.String(), nullable=True))
|
||||||
|
op.execute("""UPDATE steps SET id = 'step-' || gen_random_uuid()::text""")
|
||||||
|
op.drop_column("steps", "old_id")
|
||||||
|
op.alter_column("steps", "id", nullable=False)
|
||||||
|
op.create_primary_key("pk_steps_id", "steps", ["id"])
|
||||||
|
|
||||||
|
# Add new columns
|
||||||
|
op.add_column("steps", sa.Column("origin", sa.String(), nullable=True))
|
||||||
|
op.add_column("steps", sa.Column("organization_id", sa.String(), nullable=True))
|
||||||
|
op.add_column("steps", sa.Column("provider_id", sa.String(), nullable=True))
|
||||||
|
op.add_column("steps", sa.Column("provider_name", sa.String(), nullable=True))
|
||||||
|
op.add_column("steps", sa.Column("model", sa.String(), nullable=True))
|
||||||
|
op.add_column("steps", sa.Column("context_window_limit", sa.Integer(), nullable=True))
|
||||||
|
op.add_column(
|
||||||
|
"steps",
|
||||||
|
sa.Column("completion_tokens_details", postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
|
||||||
|
)
|
||||||
|
op.add_column(
|
||||||
|
"steps",
|
||||||
|
sa.Column("tags", postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True),
|
||||||
|
)
|
||||||
|
op.add_column("steps", sa.Column("tid", sa.String(), nullable=True))
|
||||||
|
|
||||||
|
# Add new foreign key constraint for provider_id
|
||||||
|
op.create_foreign_key("fk_steps_organization_id", "steps", "providers", ["provider_id"], ["id"], ondelete="RESTRICT")
|
||||||
|
|
||||||
|
# Add new foreign key constraint for provider_id
|
||||||
|
op.create_foreign_key("fk_steps_provider_id", "steps", "organizations", ["organization_id"], ["id"], ondelete="RESTRICT")
|
||||||
|
|
||||||
|
# Add new foreign key constraint for provider_id
|
||||||
|
op.create_foreign_key("fk_steps_job_id", "steps", "jobs", ["job_id"], ["id"], ondelete="SET NULL")
|
||||||
|
|
||||||
|
# Drop old step_id and step_count columns which aren't in the new model
|
||||||
|
op.drop_column("steps", "step_id")
|
||||||
|
op.drop_column("steps", "step_count")
|
||||||
|
|
||||||
|
# Add step_id to messages table
|
||||||
|
op.add_column("messages", sa.Column("step_id", sa.String(), nullable=True))
|
||||||
|
op.create_foreign_key("fk_messages_step_id", "messages", "steps", ["step_id"], ["id"], ondelete="SET NULL")
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Remove step_id from messages first to avoid foreign key conflicts
|
||||||
|
op.drop_constraint("fk_messages_step_id", "messages", type_="foreignkey")
|
||||||
|
op.drop_column("messages", "step_id")
|
||||||
|
|
||||||
|
# Restore old step_count and step_id column
|
||||||
|
op.add_column("steps", sa.Column("step_count", sa.Integer(), nullable=True))
|
||||||
|
op.add_column("steps", sa.Column("step_id", sa.String(), nullable=True))
|
||||||
|
|
||||||
|
# Drop new columns and constraints
|
||||||
|
op.drop_constraint("fk_steps_provider_id", "steps", type_="foreignkey")
|
||||||
|
op.drop_constraint("fk_steps_organization_id", "steps", type_="foreignkey")
|
||||||
|
op.drop_constraint("fk_steps_job_id", "steps", type_="foreignkey")
|
||||||
|
|
||||||
|
op.drop_column("steps", "tid")
|
||||||
|
op.drop_column("steps", "tags")
|
||||||
|
op.drop_column("steps", "completion_tokens_details")
|
||||||
|
op.drop_column("steps", "context_window_limit")
|
||||||
|
op.drop_column("steps", "model")
|
||||||
|
op.drop_column("steps", "provider_name")
|
||||||
|
op.drop_column("steps", "provider_id")
|
||||||
|
op.drop_column("steps", "organization_id")
|
||||||
|
op.drop_column("steps", "origin")
|
||||||
|
|
||||||
|
# Add constraints back
|
||||||
|
op.execute("DELETE FROM steps WHERE job_id IS NULL")
|
||||||
|
op.alter_column("steps", "job_id", nullable=False)
|
||||||
|
op.create_foreign_key("fk_job_usage_statistics_job_id", "steps", "jobs", ["job_id"], ["id"], ondelete="CASCADE")
|
||||||
|
|
||||||
|
# Change id field from string back to int
|
||||||
|
op.add_column("steps", sa.Column("old_id", sa.Integer(), nullable=True))
|
||||||
|
op.execute("""UPDATE steps SET old_id = CAST(ABS(hashtext(REPLACE(id, 'step-', '')::text)) AS integer)""")
|
||||||
|
op.drop_column("steps", "id")
|
||||||
|
op.execute("ALTER TABLE steps RENAME COLUMN old_id TO id")
|
||||||
|
op.alter_column("steps", "id", nullable=False)
|
||||||
|
op.create_primary_key("pk_steps_id", "steps", ["id"])
|
||||||
|
|
||||||
|
# Rename the table
|
||||||
|
op.rename_table("steps", "job_usage_statistics")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
"""Add start end for agent file
|
||||||
|
|
||||||
|
Revision ID: 4537f0996495
|
||||||
|
Revises: 06fbbf65d4f1
|
||||||
|
Create Date: 2025-07-25 17:44:26.748765
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "4537f0996495"
|
||||||
|
down_revision: Union[str, None] = "06fbbf65d4f1"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("files_agents", sa.Column("start_line", sa.Integer(), nullable=True))
|
||||||
|
op.add_column("files_agents", sa.Column("end_line", sa.Integer(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("files_agents", "end_line")
|
||||||
|
op.drop_column("files_agents", "start_line")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
"""Add unique constraint to source names and also add original file name column
|
||||||
|
|
||||||
|
Revision ID: 46699adc71a7
|
||||||
|
Revises: 1af251a42c06
|
||||||
|
Create Date: 2025-07-01 13:30:48.279151
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "46699adc71a7"
|
||||||
|
down_revision: Union[str, None] = "1af251a42c06"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("files", sa.Column("original_file_name", sa.String(), nullable=True))
|
||||||
|
|
||||||
|
# Handle existing duplicate source names before adding unique constraint
|
||||||
|
connection = op.get_bind()
|
||||||
|
|
||||||
|
# Find duplicates and rename them by appending a suffix
|
||||||
|
result = connection.execute(
|
||||||
|
sa.text(
|
||||||
|
"""
|
||||||
|
WITH duplicates AS (
|
||||||
|
SELECT name, organization_id,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY name, organization_id ORDER BY created_at) as rn,
|
||||||
|
id
|
||||||
|
FROM sources
|
||||||
|
WHERE (name, organization_id) IN (
|
||||||
|
SELECT name, organization_id
|
||||||
|
FROM sources
|
||||||
|
GROUP BY name, organization_id
|
||||||
|
HAVING COUNT(*) > 1
|
||||||
|
)
|
||||||
|
)
|
||||||
|
SELECT id, name, rn
|
||||||
|
FROM duplicates
|
||||||
|
WHERE rn > 1
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Rename duplicates by appending a number suffix
|
||||||
|
for row in result:
|
||||||
|
source_id, original_name, duplicate_number = row
|
||||||
|
new_name = f"{original_name}_{duplicate_number}"
|
||||||
|
connection.execute(
|
||||||
|
sa.text("UPDATE sources SET name = :new_name WHERE id = :source_id"), {"new_name": new_name, "source_id": source_id}
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_unique_constraint("uq_source_name_organization", "sources", ["name", "organization_id"])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint("uq_source_name_organization", "sources", type_="unique")
|
||||||
|
op.drop_column("files", "original_file_name")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
"""Add total_chunks and chunks_embedded to files
|
||||||
|
|
||||||
|
Revision ID: 47d2277e530d
|
||||||
|
Revises: 56254216524f
|
||||||
|
Create Date: 2025-07-03 14:32:08.539280
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "47d2277e530d"
|
||||||
|
down_revision: Union[str, None] = "56254216524f"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("files", sa.Column("total_chunks", sa.Integer(), nullable=True))
|
||||||
|
op.add_column("files", sa.Column("chunks_embedded", sa.Integer(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("files", "chunks_embedded")
|
||||||
|
op.drop_column("files", "total_chunks")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
"""Write source_id directly to files agents
|
||||||
|
|
||||||
|
Revision ID: 495f3f474131
|
||||||
|
Revises: 47d2277e530d
|
||||||
|
Create Date: 2025-07-10 17:14:45.154738
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "495f3f474131"
|
||||||
|
down_revision: Union[str, None] = "47d2277e530d"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Step 1: Add the column as nullable first
|
||||||
|
op.add_column("files_agents", sa.Column("source_id", sa.String(), nullable=True))
|
||||||
|
|
||||||
|
# Step 2: Backfill source_id from files table
|
||||||
|
connection = op.get_bind()
|
||||||
|
connection.execute(
|
||||||
|
sa.text(
|
||||||
|
"""
|
||||||
|
UPDATE files_agents
|
||||||
|
SET source_id = files.source_id
|
||||||
|
FROM files
|
||||||
|
WHERE files_agents.file_id = files.id
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Step 3: Make the column NOT NULL now that it's populated
|
||||||
|
op.alter_column("files_agents", "source_id", nullable=False)
|
||||||
|
|
||||||
|
# Step 4: Add the foreign key constraint
|
||||||
|
op.create_foreign_key(None, "files_agents", "sources", ["source_id"], ["id"], ondelete="CASCADE")
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint(None, "files_agents", type_="foreignkey")
|
||||||
|
op.drop_column("files_agents", "source_id")
|
||||||
|
# ### end Alembic commands ###
|
||||||
55
alembic/versions/4c6c9ef0387d_support_modal_sandbox_type.py
Normal file
55
alembic/versions/4c6c9ef0387d_support_modal_sandbox_type.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
"""support modal sandbox type
|
||||||
|
|
||||||
|
Revision ID: 4c6c9ef0387d
|
||||||
|
Revises: 4537f0996495
|
||||||
|
Create Date: 2025-07-29 15:10:08.996251
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from sqlalchemy import text
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import DatabaseChoice, settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "4c6c9ef0387d"
|
||||||
|
down_revision: Union[str, None] = "4537f0996495"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# SQLite just uses strings
|
||||||
|
if settings.database_engine == DatabaseChoice.POSTGRES:
|
||||||
|
op.execute("ALTER TYPE sandboxtype ADD VALUE 'MODAL' AFTER 'E2B'")
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
if settings.database_engine == DatabaseChoice.POSTGRES:
|
||||||
|
connection = op.get_bind()
|
||||||
|
|
||||||
|
data_conflicts = connection.execute(
|
||||||
|
text(
|
||||||
|
"""
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM sandbox_configs
|
||||||
|
WHERE "type" NOT IN ('E2B', 'LOCAL')
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
).fetchone()
|
||||||
|
if data_conflicts[0]:
|
||||||
|
raise RuntimeError(
|
||||||
|
(
|
||||||
|
"Cannot downgrade enum: Data conflicts are detected in sandbox_configs.sandboxtype.\n"
|
||||||
|
"Please manually handle these records before handling the downgrades.\n"
|
||||||
|
f"{data_conflicts} invalid sandboxtype values"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Postgres does not support dropping enum values. Create a new enum and swap them.
|
||||||
|
op.execute("CREATE TYPE sandboxtype_old AS ENUM ('E2B', 'LOCAL')")
|
||||||
|
op.execute('ALTER TABLE sandbox_configs ALTER COLUMN "type" TYPE sandboxtype_old USING "type"::text::sandboxtype_old')
|
||||||
|
op.execute("DROP TYPE sandboxtype")
|
||||||
|
op.execute("ALTER TYPE sandboxtype_old RENAME to sandboxtype")
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
"""Drop api tokens table in OSS
|
||||||
|
|
||||||
|
Revision ID: 4e88e702f85e
|
||||||
|
Revises: d05669b60ebe
|
||||||
|
Create Date: 2024-12-13 17:19:55.796210
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "4e88e702f85e"
|
||||||
|
down_revision: Union[str, None] = "d05669b60ebe"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_index("tokens_idx_key", table_name="tokens")
|
||||||
|
op.drop_index("tokens_idx_user", table_name="tokens")
|
||||||
|
op.drop_table("tokens")
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
"tokens",
|
||||||
|
sa.Column("id", sa.VARCHAR(), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("user_id", sa.VARCHAR(), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("key", sa.VARCHAR(), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("id", name="tokens_pkey"),
|
||||||
|
)
|
||||||
|
op.create_index("tokens_idx_user", "tokens", ["user_id"], unique=False)
|
||||||
|
op.create_index("tokens_idx_key", "tokens", ["key"], unique=False)
|
||||||
|
# ### end Alembic commands ###
|
||||||
40
alembic/versions/51999513bcf1_steps_feedback_field.py
Normal file
40
alembic/versions/51999513bcf1_steps_feedback_field.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""steps feedback field
|
||||||
|
|
||||||
|
Revision ID: 51999513bcf1
|
||||||
|
Revises: 61ee53ec45a5
|
||||||
|
Create Date: 2025-06-20 14:09:22.993263
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "51999513bcf1"
|
||||||
|
down_revision: Union[str, None] = "c7ac45f69849"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("steps", sa.Column("feedback", sa.String(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("steps", "feedback")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,98 @@
|
|||||||
|
"""update identities unique constraint and properties
|
||||||
|
|
||||||
|
Revision ID: 549eff097c71
|
||||||
|
Revises: a3047a624130
|
||||||
|
Create Date: 2025-02-20 09:53:42.743105
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "549eff097c71"
|
||||||
|
down_revision: Union[str, None] = "a3047a624130"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Update unique constraint on identities table
|
||||||
|
op.drop_constraint("unique_identifier_pid_org_id", "identities", type_="unique")
|
||||||
|
op.create_unique_constraint(
|
||||||
|
"unique_identifier_without_project",
|
||||||
|
"identities",
|
||||||
|
["identifier_key", "project_id", "organization_id"],
|
||||||
|
postgresql_nulls_not_distinct=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add properties column to identities table
|
||||||
|
op.add_column("identities", sa.Column("properties", postgresql.JSONB, nullable=False, server_default="[]"))
|
||||||
|
|
||||||
|
# Create identities_agents table for many-to-many relationship
|
||||||
|
op.create_table(
|
||||||
|
"identities_agents",
|
||||||
|
sa.Column("identity_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["identity_id"], ["identities.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("identity_id", "agent_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Migrate existing relationships
|
||||||
|
# First, get existing relationships where identity_id is not null
|
||||||
|
op.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO identities_agents (identity_id, agent_id)
|
||||||
|
SELECT DISTINCT identity_id, id as agent_id
|
||||||
|
FROM agents
|
||||||
|
WHERE identity_id IS NOT NULL
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Remove old identity_id column from agents
|
||||||
|
op.drop_column("agents", "identity_id")
|
||||||
|
op.drop_column("agents", "identifier_key")
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
# Add back the old columns to agents
|
||||||
|
op.add_column("agents", sa.Column("identity_id", sa.String(), nullable=True))
|
||||||
|
op.add_column("agents", sa.Column("identifier_key", sa.String(), nullable=True))
|
||||||
|
|
||||||
|
# Migrate relationships back
|
||||||
|
op.execute(
|
||||||
|
"""
|
||||||
|
UPDATE agents a
|
||||||
|
SET identity_id = ia.identity_id
|
||||||
|
FROM identities_agents ia
|
||||||
|
WHERE a.id = ia.agent_id
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Drop the many-to-many table
|
||||||
|
op.drop_table("identities_agents")
|
||||||
|
|
||||||
|
# Drop properties column
|
||||||
|
op.drop_column("identities", "properties")
|
||||||
|
|
||||||
|
# Restore old unique constraint
|
||||||
|
op.drop_constraint("unique_identifier_without_project", "identities", type_="unique")
|
||||||
|
op.create_unique_constraint("unique_identifier_pid_org_id", "identities", ["identifier_key", "project_id", "organization_id"])
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,73 @@
|
|||||||
|
"""Add tags to passages and create passage_tags junction table
|
||||||
|
|
||||||
|
Revision ID: 54c76f7cabca
|
||||||
|
Revises: c41c87205254
|
||||||
|
Create Date: 2025-08-28 15:13:01.549590
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "54c76f7cabca"
|
||||||
|
down_revision: Union[str, None] = "c41c87205254"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
|
||||||
|
# Database-specific timestamp defaults
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
# SQLite uses CURRENT_TIMESTAMP
|
||||||
|
timestamp_default = sa.text("(CURRENT_TIMESTAMP)")
|
||||||
|
else:
|
||||||
|
# PostgreSQL uses now()
|
||||||
|
timestamp_default = sa.text("now()")
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
"passage_tags",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("tag", sa.String(), nullable=False),
|
||||||
|
sa.Column("passage_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("archive_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=timestamp_default, nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=timestamp_default, nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["archive_id"], ["archives.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["organization_id"],
|
||||||
|
["organizations.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(["passage_id"], ["archival_passages.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
sa.UniqueConstraint("passage_id", "tag", name="uq_passage_tag"),
|
||||||
|
)
|
||||||
|
op.create_index("ix_passage_tags_archive_id", "passage_tags", ["archive_id"], unique=False)
|
||||||
|
op.create_index("ix_passage_tags_archive_tag", "passage_tags", ["archive_id", "tag"], unique=False)
|
||||||
|
op.create_index("ix_passage_tags_org_archive", "passage_tags", ["organization_id", "archive_id"], unique=False)
|
||||||
|
op.create_index("ix_passage_tags_tag", "passage_tags", ["tag"], unique=False)
|
||||||
|
op.add_column("archival_passages", sa.Column("tags", sa.JSON(), nullable=True))
|
||||||
|
op.add_column("source_passages", sa.Column("tags", sa.JSON(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("source_passages", "tags")
|
||||||
|
op.drop_column("archival_passages", "tags")
|
||||||
|
op.drop_index("ix_passage_tags_tag", table_name="passage_tags")
|
||||||
|
op.drop_index("ix_passage_tags_org_archive", table_name="passage_tags")
|
||||||
|
op.drop_index("ix_passage_tags_archive_tag", table_name="passage_tags")
|
||||||
|
op.drop_index("ix_passage_tags_archive_id", table_name="passage_tags")
|
||||||
|
op.drop_table("passage_tags")
|
||||||
|
# ### end Alembic commands ###
|
||||||
121
alembic/versions/54dec07619c4_divide_passage_table_into_.py
Normal file
121
alembic/versions/54dec07619c4_divide_passage_table_into_.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
"""divide passage table into SourcePassages and AgentPassages
|
||||||
|
|
||||||
|
Revision ID: 54dec07619c4
|
||||||
|
Revises: 4e88e702f85e
|
||||||
|
Create Date: 2024-12-14 17:23:08.772554
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.orm.custom_columns import EmbeddingConfigColumn
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "54dec07619c4"
|
||||||
|
down_revision: Union[str, None] = "4e88e702f85e"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
from pgvector.sqlalchemy import Vector
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
"agent_passages",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("text", sa.String(), nullable=False),
|
||||||
|
sa.Column("embedding_config", EmbeddingConfigColumn(), nullable=False),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("embedding", Vector(dim=4096), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["organization_id"],
|
||||||
|
["organizations.id"],
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index("agent_passages_org_idx", "agent_passages", ["organization_id"], unique=False)
|
||||||
|
op.create_table(
|
||||||
|
"source_passages",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("text", sa.String(), nullable=False),
|
||||||
|
sa.Column("embedding_config", EmbeddingConfigColumn(), nullable=False),
|
||||||
|
sa.Column("metadata_", sa.JSON(), nullable=False),
|
||||||
|
sa.Column("embedding", Vector(dim=4096), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=False),
|
||||||
|
sa.Column("file_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("source_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["file_id"], ["files.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
["organization_id"],
|
||||||
|
["organizations.id"],
|
||||||
|
),
|
||||||
|
sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="CASCADE"),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_index("source_passages_org_idx", "source_passages", ["organization_id"], unique=False)
|
||||||
|
op.drop_table("passages")
|
||||||
|
op.drop_constraint("files_source_id_fkey", "files", type_="foreignkey")
|
||||||
|
op.create_foreign_key(None, "files", "sources", ["source_id"], ["id"], ondelete="CASCADE")
|
||||||
|
op.drop_constraint("messages_agent_id_fkey", "messages", type_="foreignkey")
|
||||||
|
op.create_foreign_key(None, "messages", "agents", ["agent_id"], ["id"], ondelete="CASCADE")
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint(None, "messages", type_="foreignkey")
|
||||||
|
op.create_foreign_key("messages_agent_id_fkey", "messages", "agents", ["agent_id"], ["id"])
|
||||||
|
op.drop_constraint(None, "files", type_="foreignkey")
|
||||||
|
op.create_foreign_key("files_source_id_fkey", "files", "sources", ["source_id"], ["id"])
|
||||||
|
op.create_table(
|
||||||
|
"passages",
|
||||||
|
sa.Column("id", sa.VARCHAR(), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("text", sa.VARCHAR(), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("file_id", sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("agent_id", sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("source_id", sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("embedding", Vector(dim=4096), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("embedding_config", postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("metadata_", postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("created_at", postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("updated_at", postgresql.TIMESTAMP(timezone=True), server_default=sa.text("now()"), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.BOOLEAN(), server_default=sa.text("false"), autoincrement=False, nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.VARCHAR(), autoincrement=False, nullable=True),
|
||||||
|
sa.Column("organization_id", sa.VARCHAR(), autoincrement=False, nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], name="passages_agent_id_fkey"),
|
||||||
|
sa.ForeignKeyConstraint(["file_id"], ["files.id"], name="passages_file_id_fkey", ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"], name="passages_organization_id_fkey"),
|
||||||
|
sa.PrimaryKeyConstraint("id", name="passages_pkey"),
|
||||||
|
)
|
||||||
|
op.drop_index("source_passages_org_idx", table_name="source_passages")
|
||||||
|
op.drop_table("source_passages")
|
||||||
|
op.drop_index("agent_passages_org_idx", table_name="agent_passages")
|
||||||
|
op.drop_table("agent_passages")
|
||||||
|
# ### end Alembic commands ###
|
||||||
40
alembic/versions/54f2311edb62_add_args_schema_to_tools.py
Normal file
40
alembic/versions/54f2311edb62_add_args_schema_to_tools.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""add args schema to tools
|
||||||
|
|
||||||
|
Revision ID: 54f2311edb62
|
||||||
|
Revises: b183663c6769
|
||||||
|
Create Date: 2025-02-27 16:45:50.835081
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "54f2311edb62"
|
||||||
|
down_revision: Union[str, None] = "b183663c6769"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("tools", sa.Column("args_json_schema", sa.JSON(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("tools", "args_json_schema")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
"""add_custom_headers_to_mcp_server
|
||||||
|
|
||||||
|
Revision ID: 56254216524f
|
||||||
|
Revises: 60ed28ee7138
|
||||||
|
Create Date: 2025-07-02 14:08:59.163861
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "56254216524f"
|
||||||
|
down_revision: Union[str, None] = "60ed28ee7138"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("mcp_server", sa.Column("custom_headers", sa.JSON(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("mcp_server", "custom_headers")
|
||||||
|
# ### end Alembic commands ###
|
||||||
43
alembic/versions/5987401b40ae_refactor_agent_memory.py
Normal file
43
alembic/versions/5987401b40ae_refactor_agent_memory.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
"""Refactor agent memory
|
||||||
|
|
||||||
|
Revision ID: 5987401b40ae
|
||||||
|
Revises: 1c8880d671ee
|
||||||
|
Create Date: 2024-11-25 14:35:00.896507
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "5987401b40ae"
|
||||||
|
down_revision: Union[str, None] = "1c8880d671ee"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.alter_column("agents", "tools", new_column_name="tool_names")
|
||||||
|
op.drop_column("agents", "memory")
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.alter_column("agents", "tool_names", new_column_name="tools")
|
||||||
|
op.add_column("agents", sa.Column("memory", postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
"""add_hidden_property_to_groups_and_blocks
|
||||||
|
|
||||||
|
Revision ID: 5b804970e6a0
|
||||||
|
Revises: ddb69be34a72
|
||||||
|
Create Date: 2025-09-03 22:19:03.825077
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "5b804970e6a0"
|
||||||
|
down_revision: Union[str, None] = "ddb69be34a72"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Add hidden column to groups table
|
||||||
|
op.add_column("groups", sa.Column("hidden", sa.Boolean(), nullable=True))
|
||||||
|
|
||||||
|
# Add hidden column to block table
|
||||||
|
op.add_column("block", sa.Column("hidden", sa.Boolean(), nullable=True))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Remove hidden column from block table
|
||||||
|
op.drop_column("block", "hidden")
|
||||||
|
|
||||||
|
# Remove hidden column from groups table
|
||||||
|
op.drop_column("groups", "hidden")
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
"""add organization id to jobs model
|
||||||
|
|
||||||
|
Revision ID: 5d27a719b24d
|
||||||
|
Revises: 18ff61fbc034
|
||||||
|
Create Date: 2025-09-10 23:01:45.214589
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "5d27a719b24d"
|
||||||
|
down_revision: Union[str, None] = "18ff61fbc034"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table("jobs", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(sa.Column("organization_id", sa.String(), nullable=True))
|
||||||
|
batch_op.create_foreign_key("fk_jobs_organization_id", "organizations", ["organization_id"], ["id"])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table("jobs", schema=None) as batch_op:
|
||||||
|
batch_op.drop_constraint("fk_jobs_organization_id", type_="foreignkey")
|
||||||
|
batch_op.drop_column("organization_id")
|
||||||
|
# ### end Alembic commands ###
|
||||||
55
alembic/versions/5fb8bba2c373_add_step_metrics.py
Normal file
55
alembic/versions/5fb8bba2c373_add_step_metrics.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
"""add_step_metrics
|
||||||
|
|
||||||
|
Revision ID: 5fb8bba2c373
|
||||||
|
Revises: f7f757414d20
|
||||||
|
Create Date: 2025-08-07 17:40:11.923402
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "5fb8bba2c373"
|
||||||
|
down_revision: Union[str, None] = "f7f757414d20"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
"step_metrics",
|
||||||
|
sa.Column("id", sa.String(), nullable=False),
|
||||||
|
sa.Column("organization_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("provider_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("job_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("llm_request_ns", sa.BigInteger(), nullable=True),
|
||||||
|
sa.Column("tool_execution_ns", sa.BigInteger(), nullable=True),
|
||||||
|
sa.Column("step_ns", sa.BigInteger(), nullable=True),
|
||||||
|
sa.Column("base_template_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("template_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
|
||||||
|
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
|
||||||
|
sa.Column("_created_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("project_id", sa.String(), nullable=True),
|
||||||
|
sa.Column("agent_id", sa.String(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(["agent_id"], ["agents.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["id"], ["steps.id"], ondelete="CASCADE"),
|
||||||
|
sa.ForeignKeyConstraint(["job_id"], ["jobs.id"], ondelete="SET NULL"),
|
||||||
|
sa.ForeignKeyConstraint(["organization_id"], ["organizations.id"], ondelete="RESTRICT"),
|
||||||
|
sa.ForeignKeyConstraint(["provider_id"], ["providers.id"], ondelete="RESTRICT"),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_table("step_metrics")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
"""add project id to step model
|
||||||
|
|
||||||
|
Revision ID: 60ed28ee7138
|
||||||
|
Revises: 46699adc71a7
|
||||||
|
Create Date: 2025-07-01 13:12:44.485233
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "60ed28ee7138"
|
||||||
|
down_revision: Union[str, None] = "46699adc71a7"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column("steps", sa.Column("project_id", sa.String(), nullable=True))
|
||||||
|
op.execute(
|
||||||
|
"""
|
||||||
|
UPDATE steps
|
||||||
|
SET project_id = agents.project_id
|
||||||
|
FROM agents
|
||||||
|
WHERE steps.agent_id = agents.id
|
||||||
|
AND steps.agent_id IS NOT NULL
|
||||||
|
AND agents.project_id IS NOT NULL
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column("steps", "project_id")
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
"""Add unique constraint to file_id and agent_id on file_agent
|
||||||
|
|
||||||
|
Revision ID: 614c4e53b66e
|
||||||
|
Revises: 0b496eae90de
|
||||||
|
Create Date: 2025-06-02 17:03:58.879839
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from letta.settings import settings
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "614c4e53b66e"
|
||||||
|
down_revision: Union[str, None] = "0b496eae90de"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_unique_constraint("uq_files_agents_file_agent", "files_agents", ["file_id", "agent_id"])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# Skip this migration for SQLite
|
||||||
|
if not settings.letta_pg_uri_no_default:
|
||||||
|
return
|
||||||
|
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint("uq_files_agents_file_agent", "files_agents", type_="unique")
|
||||||
|
# ### end Alembic commands ###
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user