chore: migrate examples to use the latest SDK version (#690)

This commit is contained in:
cthomas
2025-01-16 17:06:57 -08:00
committed by GitHub
parent 0ddfc1a6d4
commit 8bd695f64b
11 changed files with 575 additions and 381 deletions

View File

@@ -1,29 +1,36 @@
from letta import ChatMemory, EmbeddingConfig, LLMConfig, create_client
from letta_client import CreateBlock, Letta, MessageCreate
from letta.prompts import gpt_system
client = create_client()
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
client = Letta(base_url="http://localhost:8283")
# create a new agent
agent_state = client.create_agent(
agent_state = client.agents.create(
# agent's name (unique per-user, autogenerated if not provided)
name="agent_name",
# in-context memory representation with human/persona blocks
memory=ChatMemory(human="Name: Sarah", persona="You are a helpful assistant that loves emojis"),
memory_blocks=[
CreateBlock(
label="human",
value="Name: Sarah",
),
CreateBlock(
label="persona",
value="You are a helpful assistant that loves emojis",
),
],
# LLM model & endpoint configuration
llm_config=LLMConfig(
model="gpt-4",
model_endpoint_type="openai",
model_endpoint="https://api.openai.com/v1",
context_window=8000, # set to <= max context window
),
llm="openai/gpt-4",
context_window_limit=8000,
# embedding model & endpoint configuration (cannot be changed)
embedding_config=EmbeddingConfig(
embedding_endpoint_type="openai",
embedding_endpoint="https://api.openai.com/v1",
embedding_model="text-embedding-ada-002",
embedding_dim=1536,
embedding_chunk_size=300,
),
embedding="openai/text-embedding-ada-002",
# system instructions for the agent (defaults to `memgpt_chat`)
system=gpt_system.get_system_text("memgpt_chat"),
# whether to include base letta tools (default: True)
@@ -34,14 +41,30 @@ agent_state = client.create_agent(
print(f"Created agent with name {agent_state.name} and unique ID {agent_state.id}")
# message an agent as a user
response = client.send_message(agent_id=agent_state.id, role="user", message="hello")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="hello",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# message a system message (non-user)
response = client.send_message(agent_id=agent_state.id, role="system", message="[system] user has logged in. send a friendly message.")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="system",
text="[system] user has logged in. send a friendly message.",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# delete the agent
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)

View File

@@ -1,29 +1,49 @@
from letta import EmbeddingConfig, LLMConfig, create_client
from letta_client import CreateBlock, Letta, MessageCreate
client = create_client()
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
# set automatic defaults for LLM/embedding config
client.set_default_llm_config(LLMConfig.default_config(model_name="gpt-4"))
client.set_default_embedding_config(EmbeddingConfig.default_config(model_name="text-embedding-ada-002"))
client = Letta(base_url="http://localhost:8283")
# create a new agent
agent_state = client.create_agent()
agent_state = client.agents.create(
memory_blocks=[
CreateBlock(
label="human",
value="Name: Sarah",
),
],
# set automatic defaults for LLM/embedding config
llm="openai/gpt-4",
embedding="openai/text-embedding-ada-002",
)
print(f"Created agent with name {agent_state.name} and unique ID {agent_state.id}")
# Message an agent
response = client.send_message(agent_id=agent_state.id, role="user", message="hello")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="hello",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# list all agents
agents = client.list_agents()
agents = client.agents.list()
# get the agent by ID
agent_state = client.get_agent(agent_id=agent_state.id)
agent_state = client.agents.get(agent_id=agent_state.id)
# get the agent by name
agent_id = client.get_agent_id(agent_name=agent_state.name)
agent_state = client.get_agent(agent_id=agent_id)
agent_state = client.agents.list(name=agent_state.name)[0]
# delete an agent
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)

View File

@@ -1,5 +1,4 @@
from letta import create_client
from letta.schemas.memory import ChatMemory
from letta_client import CreateBlock, Letta, MessageCreate
"""
Make sure you run the Letta server before running this example.
@@ -11,30 +10,47 @@ letta server
def main():
# Connect to the server as a user
client = create_client(base_url="http://localhost:8283")
client = Letta(base_url="http://localhost:8283")
# list available configs on the server
llm_configs = client.list_llm_configs()
llm_configs = client.models.list_llms()
print(f"Available LLM configs: {llm_configs}")
embedding_configs = client.list_embedding_configs()
embedding_configs = client.models.list_embedding_models()
print(f"Available embedding configs: {embedding_configs}")
# Create an agent
agent_state = client.create_agent(
agent_state = client.agents.create(
name="my_agent",
memory=ChatMemory(human="My name is Sarah.", persona="I am a friendly AI."),
embedding_config=embedding_configs[0],
llm_config=llm_configs[0],
memory_blocks=[
CreateBlock(
label="human",
value="My name is Sarah",
),
CreateBlock(
label="persona",
value="I am a friendly AI",
),
],
llm=llm_configs[0].handle,
embedding=embedding_configs[0].handle,
)
print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")
# Send a message to the agent
print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")
response = client.user_message(agent_id=agent_state.id, message="Whats my name?")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="Whats my name?",
)
],
)
print(f"Received response:", response.messages)
# Delete agent
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)
print(f"Deleted agent: {agent_state.name} with ID {str(agent_state.id)}")

View File

@@ -1,11 +1,14 @@
from letta import EmbeddingConfig, LLMConfig, create_client
from letta.schemas.tool_rule import TerminalToolRule
from letta_client import CreateBlock, Letta, MessageCreate
from letta_client.types import TerminalToolRule
client = create_client()
# set automatic defaults for LLM/embedding config
client.set_default_llm_config(LLMConfig.default_config(model_name="gpt-4"))
client.set_default_embedding_config(EmbeddingConfig.default_config(model_name="text-embedding-ada-002"))
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
client = Letta(base_url="http://localhost:8283")
# define a function with a docstring
def roll_d20() -> str:
@@ -30,43 +33,78 @@ def roll_d20() -> str:
# create a tool from the function
tool = client.create_or_update_tool(roll_d20)
tool = client.tools.upsert_from_function(func=roll_d20, name="roll_d20")
print(f"Created tool with name {tool.name}")
# create a new agent
agent_state = client.create_agent(
agent_state = client.agents.create(
memory_blocks=[
CreateBlock(
label="human",
value="Name: Sarah",
),
],
# set automatic defaults for LLM/embedding config
llm="openai/gpt-4",
embedding="openai/text-embedding-ada-002",
# create the agent with an additional tool
tool_ids=[tool.id],
# add tool rules that terminate execution after specific tools
tool_rules=[
# exit after roll_d20 is called
TerminalToolRule(tool_name=tool.name),
# exit after send_message is called (default behavior)
TerminalToolRule(tool_name="send_message"),
],
]
)
print(f"Created agent with name {agent_state.name} with tools {[t.name for t in agent_state.tools]}")
# Message an agent
response = client.send_message(agent_id=agent_state.id, role="user", message="roll a dice")
response = client.agents.messages.send(
agent_id=agent_state.id,
messages=[
MessageCreate(
role="user",
text="roll a dice",
)
],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
# remove a tool from the agent
client.remove_tool_from_agent(agent_id=agent_state.id, tool_id=tool.id)
client.agents.tools.remove(agent_id=agent_state.id, tool_id=tool.id)
# add a tool to the agent
client.add_tool_to_agent(agent_id=agent_state.id, tool_id=tool.id)
client.agents.tools.add(agent_id=agent_state.id, tool_id=tool.id)
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)
# create an agent with only a subset of default tools
send_message_tool = client.get_tool_id("send_message")
agent_state = client.create_agent(include_base_tools=False, tool_ids=[tool.id, send_message_tool])
send_message_tool = client.tools.get_by_name(tool_name="send_message")
agent_state = client.agents.create(
memory_blocks=[
CreateBlock(
label="human",
value="username: sarah",
),
],
llm="openai/gpt-4",
embedding="openai/text-embedding-ada-002",
include_base_tools=False,
tool_ids=[tool.id, send_message_tool],
)
# message the agent to search archival memory (will be unable to do so)
# BUG FIX: the send() result must be bound to `response`; otherwise the
# prints below reuse the stale response from the previous send (or raise
# NameError when this snippet runs on its own).
response = client.agents.messages.send(
    agent_id=agent_state.id,
    messages=[
        MessageCreate(
            role="user",
            text="search your archival memory",
        )
    ],
)
print("Usage", response.usage)
print("Agent messages", response.messages)
client.delete_agent(agent_id=agent_state.id)
client.agents.delete(agent_id=agent_state.id)