chore: migrate examples to use latest sdk ver (#690)

This commit is contained in:
cthomas
2025-01-16 17:06:57 -08:00
committed by GitHub
parent 0ddfc1a6d4
commit 8bd695f64b
11 changed files with 575 additions and 381 deletions

View File

@@ -1,29 +1,36 @@
from letta import ChatMemory, EmbeddingConfig, LLMConfig, create_client
from letta_client import CreateBlock, Letta, MessageCreate
from letta.prompts import gpt_system
client = create_client()
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
client = Letta(base_url="http://localhost:8283")
# create a new agent
agent_state = client.create_agent(
agent_state = client.agents.create(
# agent's name (unique per-user, autogenerated if not provided)
name="agent_name",
# in-context memory representation with human/persona blocks
memory=ChatMemory(human="Name: Sarah", persona="You are a helpful assistant that loves emojis"),
memory_blocks=[
CreateBlock(
label="human",
value="Name: Sarah",
),
CreateBlock(
label="persona",
value="You are a helpful assistant that loves emojis",
),
],
# LLM model & endpoint configuration
llm_config=LLMConfig(
model="gpt-4",
model_endpoint_type="openai",
model_endpoint="https://api.openai.com/v1",
context_window=8000, # set to <= max context window
),
llm="openai/gpt-4",
context_window_limit=8000,
# embedding model & endpoint configuration (cannot be changed)
embedding_config=EmbeddingConfig(
embedding_endpoint_type="openai",
embedding_endpoint="https://api.openai.com/v1",
embedding_model="text-embedding-ada-002",
embedding_dim=1536,
embedding_chunk_size=300,
),
embedding="openai/text-embedding-ada-002",
# system instructions for the agent (defaults to `memgpt_chat`)
system=gpt_system.get_system_text("memgpt_chat"),
# whether to include base letta tools (default: True)
@@ -34,14 +41,30 @@ agent_state = client.create_agent(
print(f"Created agent with name {agent_state.name} and unique ID {agent_state.id}")
# message an agent as a user
response = client.send_message(agent_id=agent_state.id, role="user", message="hello")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="hello",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# message a system message (non-user)
response = client.send_message(agent_id=agent_state.id, role="system", message="[system] user has logged in. send a friendly message.")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="system",
text="[system] user has logged in. send a friendly message.",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# delete the agent
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)

View File

@@ -1,29 +1,49 @@
from letta import EmbeddingConfig, LLMConfig, create_client
from letta_client import CreateBlock, Letta, MessageCreate
client = create_client()
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
# set automatic defaults for LLM/embedding config
client.set_default_llm_config(LLMConfig.default_config(model_name="gpt-4"))
client.set_default_embedding_config(EmbeddingConfig.default_config(model_name="text-embedding-ada-002"))
client = Letta(base_url="http://localhost:8283")
# create a new agent
agent_state = client.create_agent()
agent_state = client.agents.create(
memory_blocks=[
CreateBlock(
label="human",
value="Name: Sarah",
),
],
# set automatic defaults for LLM/embedding config
llm="openai/gpt-4",
embedding="openai/text-embedding-ada-002",
)
print(f"Created agent with name {agent_state.name} and unique ID {agent_state.id}")
# Message an agent
response = client.send_message(agent_id=agent_state.id, role="user", message="hello")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="hello",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# list all agents
agents = client.list_agents()
agents = client.agents.list()
# get the agent by ID
agent_state = client.get_agent(agent_id=agent_state.id)
agent_state = client.agents.get(agent_id=agent_state.id)
# get the agent by name
agent_id = client.get_agent_id(agent_name=agent_state.name)
agent_state = client.get_agent(agent_id=agent_id)
agent_state = client.agents.list(name=agent_state.name)[0]
# delete an agent
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)

View File

@@ -1,5 +1,4 @@
from letta import create_client
from letta.schemas.memory import ChatMemory
from letta_client import CreateBlock, Letta, MessageCreate
"""
Make sure you run the Letta server before running this example.
@@ -11,30 +10,47 @@ letta server
def main():
# Connect to the server as a user
client = create_client(base_url="http://localhost:8283")
client = Letta(base_url="http://localhost:8283")
# list available configs on the server
llm_configs = client.list_llm_configs()
llm_configs = client.models.list_llms()
print(f"Available LLM configs: {llm_configs}")
embedding_configs = client.list_embedding_configs()
embedding_configs = client.models.list_embedding_models()
print(f"Available embedding configs: {embedding_configs}")
# Create an agent
agent_state = client.create_agent(
agent_state = client.agents.create(
name="my_agent",
memory=ChatMemory(human="My name is Sarah.", persona="I am a friendly AI."),
embedding_config=embedding_configs[0],
llm_config=llm_configs[0],
memory_blocks=[
CreateBlock(
label="human",
value="My name is Sarah",
),
CreateBlock(
label="persona",
value="I am a friendly AI",
),
],
llm=llm_configs[0].handle,
embedding=embedding_configs[0].handle,
)
print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")
# Send a message to the agent
print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")
response = client.user_message(agent_id=agent_state.id, message="Whats my name?")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="Whats my name?",
)
],
)
print(f"Received response:", response.messages)
# Delete agent
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)
print(f"Deleted agent: {agent_state.name} with ID {str(agent_state.id)}")

View File

@@ -1,11 +1,14 @@
from letta import EmbeddingConfig, LLMConfig, create_client
from letta.schemas.tool_rule import TerminalToolRule
from letta_client import CreateBlock, Letta, MessageCreate
from letta_client.types import TerminalToolRule
client = create_client()
# set automatic defaults for LLM/embedding config
client.set_default_llm_config(LLMConfig.default_config(model_name="gpt-4"))
client.set_default_embedding_config(EmbeddingConfig.default_config(model_name="text-embedding-ada-002"))
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
client = Letta(base_url="http://localhost:8283")
# define a function with a docstring
def roll_d20() -> str:
@@ -30,43 +33,78 @@ def roll_d20() -> str:
# create a tool from the function
tool = client.create_or_update_tool(roll_d20)
tool = client.tools.upsert_from_function(func=roll_d20, name="roll_d20")
print(f"Created tool with name {tool.name}")
# create a new agent
agent_state = client.create_agent(
agent_state = client.agents.create(
memory_blocks=[
CreateBlock(
label="human",
value="Name: Sarah",
),
],
# set automatic defaults for LLM/embedding config
llm="openai/gpt-4",
embedding="openai/text-embedding-ada-002",
# create the agent with an additional tool
tool_ids=[tool.id],
# add tool rules that terminate execution after specific tools
tool_rules=[
# exit after roll_d20 is called
TerminalToolRule(tool_name=tool.name),
# exit after send_message is called (default behavior)
TerminalToolRule(tool_name="send_message"),
],
]
)
print(f"Created agent with name {agent_state.name} with tools {[t.name for t in agent_state.tools]}")
# Message an agent
response = client.send_message(agent_id=agent_state.id, role="user", message="roll a dice")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="roll a dice",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# remove a tool from the agent
client.remove_tool_from_agent(agent_id=agent_state.id, tool_id=tool.id)
client.agents.tools.remove(agent_id=agent_state.id, tool_id=tool.id)
# add a tool to the agent
client.add_tool_to_agent(agent_id=agent_state.id, tool_id=tool.id)
client.agents.tools.add(agent_id=agent_state.id, tool_id=tool.id)
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)
# create an agent with only a subset of default tools
send_message_tool = client.get_tool_id("send_message")
agent_state = client.create_agent(include_base_tools=False, tool_ids=[tool.id, send_message_tool])
send_message_tool = client.tools.get_by_name(tool_name="send_message")
agent_state = client.agents.create(
memory_blocks=[
CreateBlock(
label="human",
value="username: sarah",
),
],
llm="openai/gpt-4",
embedding="openai/text-embedding-ada-002",
include_base_tools=False,
tool_ids=[tool.id, send_message_tool],
)
# message the agent to search archival memory (will be unable to do so)
response = client.send_message(agent_id=agent_state.id, role="user", message="search your archival memory")
client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="search your archival memory",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)

View File

@@ -4,9 +4,17 @@
"cell_type": "markdown",
"id": "ded02088-c568-4c38-b1a8-023eda8bb484",
"metadata": {},
"source": []
},
{
"cell_type": "markdown",
"id": "096e18da",
"metadata": {},
"source": [
"# Agentic RAG with Letta\n",
"\n",
"> Make sure you run the Letta server before running this example using `letta server`\n",
"\n",
"In this lab, we'll go over how to implement agentic RAG in Letta, that is, agents which can connect to external data sources. \n",
"\n",
"In Letta, there are two ways to do this: \n",
@@ -23,22 +31,9 @@
"metadata": {},
"outputs": [],
"source": [
"from letta import create_client \n",
"from letta_client import CreateBlock, Letta, MessageCreate\n",
"\n",
"client = create_client()"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "2458e3fc-234d-4c69-ac9a-55dc9d3c1396",
"metadata": {},
"outputs": [],
"source": [
"from letta import LLMConfig, EmbeddingConfig\n",
"\n",
"client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) \n",
"client.set_default_embedding_config(EmbeddingConfig.default_config(\"text-embedding-ada-002\")) "
"client = Letta(base_url=\"http://localhost:8283\")"
]
},
{
@@ -67,7 +62,7 @@
}
],
"source": [
"source = client.create_source(\"employee_handbook\")\n",
"source = client.sources.create(name=\"employee_handbook\")\n",
"source"
]
},
@@ -78,9 +73,9 @@
"metadata": {},
"outputs": [],
"source": [
"job = client.load_file_to_source(\n",
" filename=\"data/handbook.pdf\", \n",
" source_id=source.id\n",
"job = client.sources.files.upload(\n",
" source_id=source.id,\n",
" file=\"data/handbook.pdf\"\n",
")"
]
},
@@ -106,7 +101,7 @@
}
],
"source": [
"client.get_job(job.id).metadata_"
"client.jobs.get(job_id=job.id).metadata_"
]
},
{
@@ -116,7 +111,16 @@
"metadata": {},
"outputs": [],
"source": [
"agent_state = client.create_agent()"
"agent_state = client.agents.create(\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"Name: Sarah\",\n",
" ),\n",
" ],\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
")"
]
},
{
@@ -134,9 +138,10 @@
}
],
"source": [
"client.attach_source_to_agent(\n",
" agent_id=agent_state.id, \n",
" source_id=source.id\n",
"client.sources.attach(\n",
" source_id=source.id,\n",
" agent_id=agent_state.id\n",
" \n",
")"
]
},
@@ -236,11 +241,15 @@
}
],
"source": [
"response = client.send_message(\n",
" agent_id=agent_state.id, \n",
" message = \"Search archival for our company's vacation policies\", \n",
" role = \"user\"\n",
") \n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"Search archival for our company's vacation policies\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},
@@ -288,7 +297,17 @@
}
],
"source": [
"normal_agent = client.create_agent()\n",
"normal_agent = client.agents.create(\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"Name: Sarah\",\n",
" ),\n",
" ],\n",
" # set automatic defaults for LLM/embedding config\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
")\n",
"normal_agent.tools"
]
},
@@ -318,7 +337,16 @@
}
],
"source": [
"no_tool_agent = client.create_agent(\n",
"no_tool_agent = client.agents.create(\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"Name: Sarah\",\n",
" ),\n",
" ],\n",
" # set automatic defaults for LLM/embedding config\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
" tools=['send_message'], \n",
" include_base_tools=False\n",
")\n",
@@ -370,7 +398,7 @@
"metadata": {},
"outputs": [],
"source": [
"birthday_tool = client.create_or_update_tool(query_birthday_db)"
"birthday_tool = client.tools.upsert_from_function(func=query_birthday_db, name=\"query_birthday_db\")"
]
},
{
@@ -380,20 +408,22 @@
"metadata": {},
"outputs": [],
"source": [
"from letta.schemas.memory import ChatMemory\n",
"\n",
"# delete agent if exists \n",
"if client.get_agent_id(\"birthday_agent\"): \n",
" client.delete_agent(client.get_agent_id(\"birthday_agent\"))\n",
"\n",
"agent_state = client.create_agent(\n",
" name=\"birthday_agent\", \n",
" tools=[birthday_tool.name], \n",
" memory=ChatMemory(\n",
" human=\"My name is Sarah\", \n",
"    persona=\"You are an agent with access to a birthday_db \" \\\n",
" + \"that you use to lookup information about users' birthdays.\"\n",
" )\n",
" tool_ids=[birthday_tool.id],\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"My name is Sarah\",\n",
" ),\n",
" CreateBlock(\n",
" label=\"persona\",\n",
"            value=\"You are an agent with access to a birthday_db \" \\\n",
" + \"that you use to lookup information about users' birthdays.\"\n",
" ),\n",
" ],\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\"\n",
")"
]
},
@@ -493,11 +523,15 @@
}
],
"source": [
"response = client.send_message(\n",
" agent_id=agent_state.id, \n",
" message = \"When is my birthday?\", \n",
" role = \"user\"\n",
") \n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"When is my birthday?\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},
@@ -527,7 +561,7 @@
"metadata": {},
"outputs": [
{
"name": "stdin",
"name": "stdout",
"output_type": "stream",
"text": [
"Tavily API key:\n",
@@ -573,7 +607,6 @@
],
"source": [
"from langchain_community.tools import TavilySearchResults\n",
"from letta.schemas.tool import Tool\n",
"\n",
"search = TavilySearchResults()\n",
"search.run(\"What's Obama's first name?\") "
@@ -595,8 +628,8 @@
}
],
"source": [
"# convert the tool to MemGPT Tool \n",
"search_tool = client.load_langchain_tool(\n",
"# new SDK does not have support for converting langchain tool to MemGPT Tool \n",
"search_tool = client.tools.add_langchain_tool( \n",
" TavilySearchResults(), \n",
" additional_imports_module_attr_map={\"langchain_community.tools\": \"TavilySearchResults\", \"langchain_community.tools\": 'TavilySearchAPIWrapper'}\n",
")"
@@ -630,8 +663,6 @@
"metadata": {},
"outputs": [],
"source": [
"from letta.schemas.memory import ChatMemory\n",
"\n",
"perplexity_agent_persona = f\"\"\"\n",
"You have access to a web via a {search_tool.name} tool. \n",
"Use this tool to respond to users' questions, by summarizing the {search_tool.name} \n",
@@ -649,17 +680,19 @@
"\n",
"\"\"\"\n",
"\n",
"# delete agent if exists \n",
"if client.get_agent_id(\"search_agent\"): \n",
" client.delete_agent(client.get_agent_id(\"search_agent\"))\n",
"\n",
"agent_state = client.create_agent(\n",
" name=\"search_agent\", \n",
" tools=[search_tool.name], \n",
" memory=ChatMemory(\n",
" human=\"My name is Sarah\", \n",
" persona=perplexity_agent_persona\n",
" )\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"My name is Sarah\",\n",
" ),\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=perplexity_agent_persona,\n",
" ),\n",
" ],\n",
" tool_ids=[search_tool.id], \n",
")"
]
},
@@ -776,11 +809,15 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" message = \"Who founded OpenAI? \", \n",
" role = \"user\"\n",
") \n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"Who founded OpenAI?\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},
@@ -803,13 +840,19 @@
"\n",
"\n",
"agent_state = client.create_agent(\n",
" name=\"gpt4_search_agent\", \n",
" tools=[search_tool.name], \n",
" memory=ChatMemory(\n",
" human=\"My name is Sarah\", \n",
" persona=perplexity_agent_persona\n",
" ),\n",
" llm_config=LLMConfig.default_config('gpt-4')\n",
" name=\"search_agent\", \n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"My name is Sarah\",\n",
" ),\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=perplexity_agent_persona,\n",
" ),\n",
" ],\n",
" tool_ids=[search_tool.id], \n",
" llm=\"openai/gpt-4\",\n",
")"
]
},
@@ -904,11 +947,15 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" message = \"Who founded OpenAI? \", \n",
" role = \"user\"\n",
") \n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"Who founded OpenAI?\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},

View File

@@ -6,6 +6,9 @@
"metadata": {},
"source": [
"# Customizing Memory Management \n",
"\n",
"> Make sure you run the Letta server before running this example using `letta server`\n",
"\n",
"This tutorial goes over how to implement a custom memory class in Letta, which allows you to customize how memory is organized (via `Block` objects) and also how memory is maintained (through memory editing tools). \n"
]
},
@@ -24,22 +27,9 @@
"metadata": {},
"outputs": [],
"source": [
"from letta import create_client \n",
"from letta_client import CreateBlock, Letta, MessageCreate\n",
"\n",
"client = create_client() "
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "9a28e38a-7dbe-4530-8260-202322a8458e",
"metadata": {},
"outputs": [],
"source": [
"from letta import LLMConfig, EmbeddingConfig\n",
"\n",
"client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) \n",
"client.set_default_embedding_config(EmbeddingConfig.default_config(\"text-embedding-ada-002\")) "
"client = Letta(base_url=\"http://localhost:8283\")"
]
},
{
@@ -66,7 +56,7 @@
"metadata": {},
"outputs": [],
"source": [
"from letta import ChatMemory "
"from letta_client import ChatMemory "
]
},
{
@@ -76,9 +66,13 @@
"metadata": {},
"outputs": [],
"source": [
"chat_memory = ChatMemory(\n",
" human=\"Name: Bob\", \n",
" persona=\"You are a helpful assistant\"\n",
"human_memory_block = client.blocks.create(\n",
" label=\"human\",\n",
" value=\"Name: Bob\",\n",
")\n",
"persona_memory_block = client.blocks.create(\n",
" label=\"persona\",\n",
" value=\"You are a helpful assistant\",\n",
")"
]
},
@@ -110,7 +104,7 @@
}
],
"source": [
"chat_memory.get_blocks()"
"client.blocks.list()"
]
},
{
@@ -131,7 +125,7 @@
}
],
"source": [
"chat_memory.get_block(\"human\")"
"client.blocks.list(label=\"human\")"
]
},
{
@@ -150,7 +144,8 @@
"metadata": {},
"outputs": [],
"source": [
"import inspect"
"import inspect\n",
"from letta.functions.function_sets.base import core_memory_append"
]
},
{
@@ -183,7 +178,7 @@
}
],
"source": [
"print(inspect.getsource(chat_memory.core_memory_append))"
"print(inspect.getsource(core_memory_append))"
]
},
{

View File

@@ -6,6 +6,7 @@
"metadata": {},
"source": [
"# Introduction to Letta\n",
"> Make sure you run the Letta server before running this example using `letta server`\n",
"This lab will go over: \n",
"1. Creating an agent with Letta\n",
"2. Understand Letta agent state (messages, memories, tools)\n",
@@ -28,22 +29,9 @@
"metadata": {},
"outputs": [],
"source": [
"from letta import create_client \n",
"from letta_client import CreateBlock, Letta, MessageCreate\n",
"\n",
"client = create_client() "
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "9a28e38a-7dbe-4530-8260-202322a8458e",
"metadata": {},
"outputs": [],
"source": [
"from letta import LLMConfig, EmbeddingConfig\n",
"\n",
"client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) \n",
"client.set_default_embedding_config(EmbeddingConfig.default_config(\"text-embedding-ada-002\")) "
"client = Letta(base_url=\"http://localhost:8283\")"
]
},
{
@@ -80,18 +68,18 @@
"metadata": {},
"outputs": [],
"source": [
"from letta.schemas.memory import ChatMemory\n",
"\n",
"# delete agent if exists (duplicate names not allowed)\n",
"if client.get_agent_id(agent_name): \n",
" client.delete_agent(client.get_agent_id(agent_name))\n",
"\n",
"agent_state = client.create_agent(\n",
" name=agent_name, \n",
" memory=ChatMemory(\n",
" human=\"My name is Sarah\", \n",
" persona=\"You are a helpful assistant that loves emojis\"\n",
" )\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"human\",\n",
" value=\"My name is Sarah\",\n",
" ),\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=\"You are a helpful assistant that loves emojis\",\n",
" ),\n",
" ],\n",
")"
]
},
@@ -176,10 +164,14 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" message=\"hello!\", \n",
" role=\"user\" \n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"hello!\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
@@ -323,7 +315,7 @@
"metadata": {},
"outputs": [],
"source": [
"memory = client.get_core_memory(agent_state.id)"
"memory = client.agents.core_memory.get_blocks(agent_id=agent_state.id)"
]
},
{
@@ -365,7 +357,7 @@
}
],
"source": [
"client.get_archival_memory_summary(agent_state.id)"
"client.agents.archival_memory.get_summary(agent_id=agent_state.id)"
]
},
{
@@ -386,7 +378,7 @@
}
],
"source": [
"client.get_recall_memory_summary(agent_state.id)"
"client.agents.recall_memory.get_summary(agent_id=agent_state.id)"
]
},
{
@@ -415,7 +407,7 @@
}
],
"source": [
"client.get_messages(agent_state.id)"
"client.agents.messages.list(agent_id=agent_state.id)"
]
},
{
@@ -532,11 +524,15 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" message = \"My name is actually Bob\", \n",
" role = \"user\"\n",
") \n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"My name is actually Bob\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},
@@ -558,7 +554,7 @@
}
],
"source": [
"client.get_core_memory(agent_state.id)"
"client.agents.core_memory.get_blocks(agent_id=agent_state.id)"
]
},
{
@@ -681,11 +677,15 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" message = \"In the future, never use emojis to communicate\", \n",
" role = \"user\"\n",
") \n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"In the future, never use emojis to communicate\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},
@@ -707,7 +707,7 @@
}
],
"source": [
"client.get_core_memory(agent_state.id).get_block('persona')"
"client.agents.core_memory.get_block(agent_id=agent_state.id, block_label='persona')"
]
},
{
@@ -737,7 +737,7 @@
}
],
"source": [
"client.get_archival_memory(agent_state.id)"
"client.agents.archival_memory.list(agent_id=agent_state.id)"
]
},
{
@@ -758,7 +758,7 @@
}
],
"source": [
"client.get_archival_memory_summary(agent_state.id)"
"client.agents.archival_memory.get_summary(agent_id=agent_state.id)"
]
},
{
@@ -865,11 +865,15 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" message = \"Save the information that 'bob loves cats' to archival\", \n",
" role = \"user\"\n",
") \n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"Save the information that 'bob loves cats' to archival\",\n",
" )\n",
" ],\n",
")\n",
"response"
]
},
@@ -891,7 +895,7 @@
}
],
"source": [
"client.get_archival_memory(agent_state.id)[0].text"
"client.agents.archival_memory.list(agent_id=agent_state.id)[0].text"
]
},
{
@@ -920,9 +924,9 @@
}
],
"source": [
"client.insert_archival_memory(\n",
" agent_state.id, \n",
"    \"Bob loves boston terriers\"\n",
"client.agents.archival_memory.create(\n",
" agent_id=agent_state.id,\n",
"    text=\"Bob loves boston terriers\",\n",
")"
]
},
@@ -1030,10 +1034,14 @@
}
],
"source": [
"response = client.send_message(\n",
"response = client.agents.messages.send(\n",
" agent_id=agent_state.id, \n",
" role=\"user\", \n",
" message=\"What animals do I like? Search archival.\"\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"What animals do I like? Search archival.\",\n",
" )\n",
" ],\n",
")\n",
"response"
]

View File

@@ -6,6 +6,8 @@
"metadata": {},
"source": [
"# Multi-agent recruiting workflow \n",
"> Make sure you run the Letta server before running this example using `letta server`\n",
"\n",
"Last tested with letta version `0.5.3`"
]
},
@@ -24,22 +26,9 @@
"metadata": {},
"outputs": [],
"source": [
"from letta import create_client \n",
"from letta_client import CreateBlock, Letta, MessageCreate\n",
"\n",
"client = create_client() "
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "e9849ebf-1065-4ce1-9676-16fdd82bdd17",
"metadata": {},
"outputs": [],
"source": [
"from letta import LLMConfig, EmbeddingConfig\n",
"\n",
"client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) \n",
"client.set_default_embedding_config(EmbeddingConfig.default_config(\"text-embedding-ada-002\")) "
"client = Letta(base_url=\"http://localhost:8283\")"
]
},
{
@@ -58,13 +47,14 @@
"metadata": {},
"outputs": [],
"source": [
"from letta.schemas.block import Block \n",
"\n",
"org_description = \"The company is called AgentOS \" \\\n",
"+ \"and is building AI tools to make it easier to create \" \\\n",
"+ \"and deploy LLM agents.\"\n",
"\n",
"org_block = Block(label=\"company\", value=org_description )"
"org_block = client.blocks.create(\n",
" label=\"company\",\n",
" value=org_description,\n",
")"
]
},
{
@@ -88,23 +78,6 @@
"org_block"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "3e3ce7a4-cf4d-4d74-8d09-b4a35b8bb439",
"metadata": {},
"outputs": [],
"source": [
"from letta.schemas.memory import BasicBlockMemory\n",
"\n",
"class OrgMemory(BasicBlockMemory): \n",
"\n",
" def __init__(self, persona: str, org_block: Block): \n",
" persona_block = Block(label=\"persona\", value=persona)\n",
" super().__init__(blocks=[persona_block, org_block])\n",
" "
]
},
{
"cell_type": "markdown",
"id": "8448df7b-c321-4d90-ba52-003930a513cb",
@@ -181,8 +154,8 @@
"\n",
"# TODO: add an archival candidate tool (provide justification) \n",
"\n",
"read_resume_tool = client.create_or_update_tool(read_resume) \n",
"submit_evaluation_tool = client.create_or_update_tool(submit_evaluation)"
"read_resume_tool = client.tools.upsert_from_function(name=\"read_resume\", func=read_resume) \n",
"submit_evaluation_tool = client.tools.upsert_from_function(name=\"submit_evaluation\", func=submit_evaluation)"
]
},
{
@@ -199,17 +172,18 @@
"+ f\"Ideal candidates have skills: {skills}. \" \\\n",
"+ \"Submit your candidate evaluation with the submit_evaluation tool. \"\n",
"\n",
"# delete agent if exists \n",
"if client.get_agent_id(\"eval_agent\"): \n",
" client.delete_agent(client.get_agent_id(\"eval_agent\"))\n",
"\n",
"eval_agent = client.create_agent(\n",
"eval_agent = client.agents.create(\n",
" name=\"eval_agent\", \n",
" memory=OrgMemory(\n",
" persona=eval_persona, \n",
" org_block=org_block,\n",
" ), \n",
" tools=[read_resume_tool.name, submit_evaluation_tool.name]\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=eval_persona,\n",
" ),\n",
" ],\n",
" block_ids=[org_block.id],\n",
" tool_ids=[read_resume_tool.id, submit_evaluation_tool.id]\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
")\n"
]
},
@@ -239,7 +213,7 @@
" print(\"Pretend to email:\", content)\n",
" return\n",
"\n",
"email_candidate_tool = client.create_or_update_tool(email_candidate)"
"email_candidate_tool = client.tools.upsert_from_function(name=\"email_candidate\", func=email_candidate)"
]
},
{
@@ -266,19 +240,19 @@
"<your name> \n",
"<company name> \n",
"\"\"\"\n",
"\n",
"\n",
"# delete agent if exists \n",
"if client.get_agent_id(\"outreach_agent\"): \n",
" client.delete_agent(client.get_agent_id(\"outreach_agent\"))\n",
" \n",
"outreach_agent = client.create_agent(\n",
"outreach_agent = client.agents.create(\n",
" name=\"outreach_agent\", \n",
" memory=OrgMemory(\n",
" persona=outreach_persona, \n",
" org_block=org_block\n",
" ), \n",
" tools=[email_candidate_tool.name]\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=outreach_persona,\n",
" ),\n",
" ],\n",
" block_ids=[org_block.id],\n",
" tool_ids=[email_candidate_tool.id]\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
")"
]
},
@@ -297,10 +271,14 @@
"metadata": {},
"outputs": [],
"source": [
"response = client.send_message(\n",
" agent_name=\"eval_agent\", \n",
" role=\"user\", \n",
" message=\"Candidate: Tony Stark\"\n",
"response = client.agents.messages.send(\n",
" agent_id=eval_agent.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=\"Candidate: Tony Stark\",\n",
" )\n",
" ],\n",
")"
]
},
@@ -420,10 +398,14 @@
"outputs": [],
"source": [
"feedback = \"Our company pivoted to foundation model training\"\n",
"response = client.send_message(\n",
" agent_name=\"eval_agent\", \n",
" role=\"user\", \n",
" message=feedback\n",
"response = client.agents.messages.send(\n",
" agent_id=eval_agent.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=feedback,\n",
" )\n",
" ],\n",
")"
]
},
@@ -436,10 +418,14 @@
"source": [
"\n",
"feedback = \"The company is also renamed to FoundationAI\"\n",
"response = client.send_message(\n",
" agent_name=\"eval_agent\", \n",
" role=\"user\", \n",
" message=feedback\n",
"response = client.agents.messages.send(\n",
" agent_id=eval_agent.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"user\",\n",
" text=feedback,\n",
" )\n",
" ],\n",
")"
]
},
@@ -549,10 +535,14 @@
"metadata": {},
"outputs": [],
"source": [
"response = client.send_message(\n",
" agent_name=\"eval_agent\", \n",
" role=\"system\", \n",
" message=\"Candidate: Spongebob Squarepants\"\n",
"response = client.agents.messages.send(\n",
" agent_id=eval_agent.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"system\",\n",
" text=\"Candidate: Spongebob Squarepants\",\n",
" )\n",
" ],\n",
")"
]
},
@@ -574,7 +564,7 @@
}
],
"source": [
"client.get_core_memory(eval_agent.id).get_block(\"company\")"
"client.agents.core_memory.get_block(agent_id=eval_agent.id, block_label=\"company\")"
]
},
{
@@ -595,7 +585,7 @@
}
],
"source": [
"client.get_core_memory(outreach_agent.id).get_block(\"company\")"
"client.agents.core_memory.get_block(agent_id=outreach_agent.id, block_label=\"company\")"
]
},
{
@@ -615,25 +605,40 @@
"outputs": [],
"source": [
"#re-create agents \n",
"client.delete_agent(eval_agent.id)\n",
"client.delete_agent(outreach_agent.id)\n",
"client.agents.delete(eval_agent.id)\n",
"client.agents.delete(outreach_agent.id)\n",
"\n",
"eval_agent = client.create_agent(\n",
" name=\"eval_agent\", \n",
" memory=OrgMemory(\n",
" persona=eval_persona, \n",
" org_block=org_block,\n",
" ), \n",
" tools=[read_resume_tool.name, submit_evaluation_tool.name]\n",
"org_block = client.blocks.create(\n",
" label=\"company\",\n",
" value=org_description,\n",
")\n",
"\n",
"outreach_agent = client.create_agent(\n",
"eval_agent = client.agents.create(\n",
" name=\"eval_agent\", \n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=eval_persona,\n",
" ),\n",
" ],\n",
" block_ids=[org_block.id],\n",
" tool_ids=[read_resume_tool.id, submit_evaluation_tool.id]\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
")\n",
"\n",
"outreach_agent = client.agents.create(\n",
" name=\"outreach_agent\", \n",
" memory=OrgMemory(\n",
" persona=outreach_persona, \n",
" org_block=org_block\n",
" ), \n",
" tools=[email_candidate_tool.name]\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=outreach_persona,\n",
" ),\n",
" ],\n",
" block_ids=[org_block.id],\n",
" tool_ids=[email_candidate_tool.id]\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\",\n",
")"
]
},
@@ -701,41 +706,42 @@
" Args: \n",
" name (str): Candidate name to consider \n",
" \"\"\"\n",
" from letta import create_client \n",
" client = create_client()\n",
" from letta_client import Letta, MessageCreate\n",
" client = Letta(base_url=\"http://localhost:8283\")\n",
" message = f\"Consider candidate {name}\" \n",
" print(\"Sending message to eval agent: \", message)\n",
" response = client.send_message(\n",
" agent_name=\"eval_agent\", \n",
" agent_id=eval_agent.id,\n",
" role=\"user\", \n",
" message=message\n",
" ) \n",
"\n",
"\n",
"# create tools \n",
"search_candidate_tool = client.create_or_update_tool(search_candidates_db)\n",
"consider_candidate_tool = client.create_or_update_tool(consider_candidate)\n",
"\n",
"# delete agent if exists \n",
"if client.get_agent_id(\"recruiter_agent\"): \n",
" client.delete_agent(client.get_agent_id(\"recruiter_agent\"))\n",
"search_candidate_tool = client.tools.upsert_from_function(name=\"search_candidates_db\", func=search_candidates_db)\n",
"consider_candidate_tool = client.tools.upsert_from_function(name=\"consider_candidate\", func=consider_candidate)\n",
"\n",
"# create recruiter agent\n",
"recruiter_agent = client.create_agent(\n",
"recruiter_agent = client.agents.create(\n",
" name=\"recruiter_agent\", \n",
" memory=OrgMemory(\n",
" persona=\"You run a recruiting process for a company. \" \\\n",
" + \"Your job is to continue to pull candidates from the \" \n",
" + \"`search_candidates_db` tool until there are no more \" \\\n",
" + \"candidates left. \" \\\n",
" + \"For each candidate, consider the candidate by calling \"\n",
" + \"the `consider_candidate` tool. \" \\\n",
" + \"You should continue to call `search_candidates_db` \" \\\n",
" + \"followed by `consider_candidate` until there are no more \" \\\n",
" \" candidates. \",\n",
" org_block=org_block\n",
" ), \n",
" tools=[search_candidate_tool.name, consider_candidate_tool.name]\n",
" memory_blocks=[\n",
" CreateBlock(\n",
" label=\"persona\",\n",
" value=\"You run a recruiting process for a company. \" \\\n",
" + \"Your job is to continue to pull candidates from the \" \n",
" + \"`search_candidates_db` tool until there are no more \" \\\n",
" + \"candidates left. \" \\\n",
" + \"For each candidate, consider the candidate by calling \"\n",
" + \"the `consider_candidate` tool. \" \\\n",
" + \"You should continue to call `search_candidates_db` \" \\\n",
" + \"followed by `consider_candidate` until there are no more \" \\\n",
" \" candidates. \",\n",
" ),\n",
" ],\n",
" block_ids=[org_block.id],\n",
" tool_ids=[search_candidate_tool.id, consider_candidate_tool.id],\n",
" llm=\"openai/gpt-4\",\n",
" embedding=\"openai/text-embedding-ada-002\"\n",
")\n",
" \n"
]
@@ -747,10 +753,14 @@
"metadata": {},
"outputs": [],
"source": [
"response = client.send_message(\n",
" agent_name=\"recruiter_agent\", \n",
" role=\"system\", \n",
" message=\"Run generation\"\n",
"response = client.agents.messages.send(\n",
" agent_id=recruiter_agent.id,\n",
" messages=[\n",
" MessageCreate(\n",
" role=\"system\",\n",
" text=\"Run generation\",\n",
" )\n",
" ],\n",
")"
]
},
@@ -845,8 +855,8 @@
"metadata": {},
"outputs": [],
"source": [
"client.delete_agent(eval_agent.id)\n",
"client.delete_agent(outreach_agent.id)"
"client.agents.delete(eval_agent.id)\n",
"client.agents.delete(outreach_agent.id)"
]
},
{
@@ -856,7 +866,7 @@
"metadata": {},
"outputs": [],
"source": [
"client.delete_agent(recruiter_agent.id)"
"client.agents.delete(recruiter_agent.id)"
]
}
],

88
poetry.lock generated
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -392,6 +392,10 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -404,8 +408,14 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -416,8 +426,24 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
{file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
{file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -427,6 +453,10 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -438,6 +468,10 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -450,6 +484,10 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -462,6 +500,10 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -2470,13 +2512,13 @@ langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"]
[[package]]
name = "letta-client"
version = "0.1.15"
version = "0.1.16"
description = ""
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "letta_client-0.1.15-py3-none-any.whl", hash = "sha256:31b4134769f3241736389eac70c3f8f204044ac6346cbd490ef536f003ab2386"},
{file = "letta_client-0.1.15.tar.gz", hash = "sha256:42cf84a0a7f344f1e7d0c809aeea2d1c6e73eccd8c80655b762f696b41f4c8e9"},
{file = "letta_client-0.1.16-py3-none-any.whl", hash = "sha256:e7a03c80ae2840ef4342f3bf777f9281589fed3092d1f617a0e5a14f63166150"},
{file = "letta_client-0.1.16.tar.gz", hash = "sha256:f837855bb8b5c2d9a8dfb0754cf0d396464f6c027ad9627f0ea8c721ab3c9ced"},
]
[package.dependencies]
@@ -4403,28 +4445,28 @@ pytest = {version = ">=6.2.4", markers = "python_version >= \"3.10\""}
[[package]]
name = "python-box"
version = "7.3.1"
version = "7.3.2"
description = "Advanced Python dictionaries with dot notation access"
optional = false
python-versions = ">=3.9"
files = [
{file = "python_box-7.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fadf589c5d37d5bf40d25f6580d500168f2fc825d2f601c25e753ffc8d4bbec0"},
{file = "python_box-7.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d375605b159c174b0d60b6acb3586bc47ba75f542b614e96fac2ef899c08add8"},
{file = "python_box-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:f7fef93deb2695716218f513cc43e665f447a85e41cf58219e42e026c570bd67"},
{file = "python_box-7.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7cdcc0585d5840a04a74e64301d4ec5b0a05bc98a305d0f9516d3e59d265add1"},
{file = "python_box-7.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aa85d0f1f0ea1ef4af33c0f3a133b8cec8f0ad3bfd6868370833efb8b9f86b3"},
{file = "python_box-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:6fd0463e20a4c990591094fbb0f4e3b39f8212d1faf69648df4ffac10912c49e"},
{file = "python_box-7.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3320d3fa83f006ae44bda02f9ee08647ed709506baf5ae85be3eb045683dd12b"},
{file = "python_box-7.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6277ef305fb1cc75e903416e0b4f59952675d55e8ae997924f4e2f6e5abf61b"},
{file = "python_box-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:34d409137b41c15322491f353c331069a07d194573e95e56eae07fe101c04cbe"},
{file = "python_box-7.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e5e0c2bf73ab1020fc62f2a7161b8b0e12ee29872292ec33fb8124aa81adb48e"},
{file = "python_box-7.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fe1e1c705535ec5ab9fa66172cf184a330fd41638aaf638a08e33a12c7c3f71"},
{file = "python_box-7.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:4fccc0b218937a6254219073f945117978f5222eff1bbae8a35b11c6e9651f5d"},
{file = "python_box-7.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a48050391cb4d8dcec4b0f8c860b778821ae013a293d49f0cbaeab5548c46829"},
{file = "python_box-7.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a5bf3264cd4ee9b742aefadb7ff549297dd7eef8826b3a4b922a4a44e9b0751"},
{file = "python_box-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:0ed2024e27d67c5cf1ed1f88d8849aace9234d7a198fd4d5c791ed12e99e7345"},
{file = "python_box-7.3.1-py3-none-any.whl", hash = "sha256:2d77100d0d5ad67e0d062fac4f77f973851db236f4a445c60b02d0415f83b0d6"},
{file = "python_box-7.3.1.tar.gz", hash = "sha256:a0bd9dbb4ddd2842f8d0143b8aa0c87d0e82e39093dd4698a5cbbb2d2ac71361"},
{file = "python_box-7.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d136163294fd61a1554db7dd203f2e3035064798d30c17d67d948f0de5c572de"},
{file = "python_box-7.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d72e96547d8e2c2c333909826e9fae338d9a7e4cde07d5c6058cdd468432c0"},
{file = "python_box-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:3aa52e3b5cc50c80bb7ef4be3e41e81d095310f619454a7ffd61eef3209a6225"},
{file = "python_box-7.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32163b1cb151883de0da62b0cd3572610dc72ccf0762f2447baf1d2562e25bea"},
{file = "python_box-7.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064cb59b41e25aaf7dbd39efe53151a5f6797cc1cb3c68610f0f21a9d406d67e"},
{file = "python_box-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:488f0fba9a6416c3334b602366dcd92825adb0811e07e03753dfcf0ed79cd6f7"},
{file = "python_box-7.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:39009a2da5c20133718b24891a206592adbe09169856aedc450ad1600fc2e511"},
{file = "python_box-7.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2a72e2f6fb97c7e472ff3272da207ecc615aa222e52e98352391428527c469"},
{file = "python_box-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9eead914b9fb7d98a1473f5027dcfe27d26b3a10ffa33b9ba22cf948a23cd280"},
{file = "python_box-7.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dfc3b9b073f3d7cad1fa90de98eaaa684a494d0574bbc0666f74fa8307fd6b6"},
{file = "python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca4685a7f764b5a71b6e08535ce2a96b7964bb63d8cb4df10f6bb7147b6c54b"},
{file = "python_box-7.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e143295f74d47a9ab24562ead2375c9be10629599b57f2e86717d3fff60f82a9"},
{file = "python_box-7.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f3118ab3076b645c76133b8fac51deee30237cecdcafc3af664c4b9000f04db9"},
{file = "python_box-7.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a760074ba12ccc247796f43b6c61f686ada4b8349ab59e2a6303b27f3ae082"},
{file = "python_box-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ea436e7ff5f87bd728472f1e31a9e6e95572c81028c44a8e00097e0968955638"},
{file = "python_box-7.3.2-py3-none-any.whl", hash = "sha256:fd7d74d5a848623f93b5221fd9fb00b8c00ff0e130fa87f396277aa188659c92"},
{file = "python_box-7.3.2.tar.gz", hash = "sha256:028b9917129e67f311932d93347b8a4f1b500d7a5a2870ee3c035f4e7b19403b"},
]
[package.extras]
@@ -6251,4 +6293,4 @@ tests = ["wikipedia"]
[metadata]
lock-version = "2.0"
python-versions = "<3.14,>=3.10"
content-hash = "754d922b20713a9219ef3465aebf8f435d608be996dd55fe48968fa6c3fa7d4d"
content-hash = "f1cb65b567ce1063354e088b2d3b714b2701aa046a38bb6624d35d12a9a0cfc9"

View File

@@ -76,7 +76,7 @@ grpcio-tools = "^1.68.1"
llama-index = "^0.12.2"
llama-index-embeddings-openai = "^0.3.1"
e2b-code-interpreter = {version = "^1.0.3", optional = true}
letta_client = "^0.1.15"
letta_client = "^0.1.16"
[tool.poetry.extras]
postgres = ["pgvector", "pg8000", "psycopg2-binary", "psycopg2"]

View File

@@ -1,6 +1,4 @@
import asyncio
# import json
import os
import threading
import time
@@ -12,8 +10,7 @@ from letta_client import CreateBlock
from letta_client import Letta as LettaSDKClient
from letta_client import MessageCreate
from letta_client.core import ApiError
from letta_client.runs.types import GetRunMessagesResponseItem_ToolCallMessage
from letta_client.types import LettaRequestConfig, LettaResponseMessagesItem_ToolReturnMessage
from letta_client.types import LettaMessageUnion_ToolCallMessage, LettaMessageUnion_ToolReturnMessage, LettaRequestConfig
# Constants
SERVER_PORT = 8283
@@ -94,7 +91,7 @@ def test_shared_blocks(client):
)
# update memory
client.agents.messages.create(
client.agents.messages.send(
agent_id=agent_state1.id,
messages=[
MessageCreate(
@@ -107,7 +104,7 @@ def test_shared_blocks(client):
# check agent 2 memory
assert "charles" in client.blocks.get(block_id=block.id).value.lower(), f"Shared block update failed {client.get_block(block.id).value}"
client.agents.messages.create(
client.agents.messages.send(
agent_id=agent_state2.id,
messages=[
MessageCreate(
@@ -332,7 +329,7 @@ def test_update_agent_memory_limit(client, agent):
def test_messages(client, agent):
send_message_response = client.agents.messages.create(
send_message_response = client.agents.messages.send(
agent_id=agent.id,
messages=[
MessageCreate(
@@ -352,7 +349,7 @@ def test_messages(client, agent):
def test_send_system_message(client, agent):
"""Important unit test since the Letta API exposes sending system messages, but some backends don't natively support it (eg Anthropic)"""
send_system_message_response = client.agents.messages.create(
send_system_message_response = client.agents.messages.send(
agent_id=agent.id,
messages=[
MessageCreate(
@@ -381,7 +378,7 @@ def test_function_return_limit(client, agent):
client.agents.tools.add(agent_id=agent.id, tool_id=tool.id)
# get function response
response = client.agents.messages.create(
response = client.agents.messages.send(
agent_id=agent.id,
messages=[
MessageCreate(
@@ -394,7 +391,7 @@ def test_function_return_limit(client, agent):
response_message = None
for message in response.messages:
if isinstance(message, LettaResponseMessagesItem_ToolReturnMessage):
if isinstance(message, LettaMessageUnion_ToolReturnMessage):
response_message = message
break
@@ -417,7 +414,7 @@ def test_function_always_error(client, agent):
client.agents.tools.add(agent_id=agent.id, tool_id=tool.id)
# get function response
response = client.agents.messages.create(
response = client.agents.messages.send(
agent_id=agent.id,
messages=[
MessageCreate(
@@ -430,7 +427,7 @@ def test_function_always_error(client, agent):
response_message = None
for message in response.messages:
if isinstance(message, LettaResponseMessagesItem_ToolReturnMessage):
if isinstance(message, LettaMessageUnion_ToolReturnMessage):
response_message = message
break
@@ -448,7 +445,7 @@ async def test_send_message_parallel(client, agent):
# Define a coroutine for sending a message using asyncio.to_thread for synchronous calls
async def send_message_task(message: str):
response = await asyncio.to_thread(
client.agents.messages.create,
client.agents.messages.send,
agent_id=agent.id,
messages=[
MessageCreate(
@@ -483,7 +480,7 @@ def test_send_message_async(client, agent):
Test that we can send a message asynchronously and retrieve the messages, along with usage statistics
"""
test_message = "This is a test message, respond to the user with a sentence."
run = client.agents.messages.create_async(
run = client.agents.messages.send_async(
agent_id=agent.id,
messages=[
MessageCreate(
@@ -519,9 +516,7 @@ def test_send_message_async(client, agent):
assert len(tool_messages) > 0
specific_tool_messages = [
message
for message in client.runs.get_run_messages(run_id=run.id)
if isinstance(message, GetRunMessagesResponseItem_ToolCallMessage)
message for message in client.runs.get_run_messages(run_id=run.id) if isinstance(message, LettaMessageUnion_ToolCallMessage)
]
assert specific_tool_messages[0].tool_call.name == "send_message"
assert len(specific_tool_messages) > 0