chore: bump letta python sdk version to 0.1.23 (#817)

This commit is contained in:
cthomas
2025-01-28 16:24:29 -08:00
committed by GitHub
parent 93dd40e414
commit 0bd605dfa7
3 changed files with 505 additions and 346 deletions

poetry.lock (generated) — 841 lines changed

File diff suppressed because it is too large Load Diff

View File

@@ -79,7 +79,7 @@ llama-index = "^0.12.2"
llama-index-embeddings-openai = "^0.3.1"
e2b-code-interpreter = {version = "^1.0.3", optional = true}
anthropic = "^0.43.0"
letta_client = "^0.1.16"
letta_client = "^0.1.23"
openai = "^1.60.0"

View File

@@ -10,7 +10,7 @@ from letta_client import CreateBlock
from letta_client import Letta as LettaSDKClient
from letta_client import MessageCreate
from letta_client.core import ApiError
from letta_client.types import AgentState, LettaRequestConfig, ToolReturnMessage
from letta_client.types import AgentState, ToolReturnMessage
# Constants
SERVER_PORT = 8283
@@ -393,7 +393,7 @@ def test_function_return_limit(client: LettaSDKClient, agent: AgentState):
content="call the big_return function",
),
],
config=LettaRequestConfig(use_assistant_message=False),
use_assistant_message=False,
)
response_message = None
@@ -429,7 +429,7 @@ def test_function_always_error(client: LettaSDKClient, agent: AgentState):
content="call the always_error function",
),
],
config=LettaRequestConfig(use_assistant_message=False),
use_assistant_message=False,
)
response_message = None
@@ -495,7 +495,7 @@ def test_send_message_async(client: LettaSDKClient, agent: AgentState):
content=test_message,
),
],
config=LettaRequestConfig(use_assistant_message=False),
use_assistant_message=False,
)
assert run.id is not None
assert run.status == "created"