feat: add e2e example scripts for documentation (#1995)

This commit is contained in:
Sarah Wooders
2024-11-06 17:39:51 -08:00
committed by GitHub
parent f1ff70a18f
commit f88a0cbfa6
11 changed files with 254 additions and 83 deletions

View File

@@ -0,0 +1,40 @@
from letta import ChatMemory, EmbeddingConfig, LLMConfig, create_client
from letta.prompts import gpt_system

# Connect to the Letta server with default client settings.
client = create_client()

# Explicit model configuration for the new agent.
llm_config = LLMConfig(
    model="gpt-4",
    model_endpoint_type="openai",
    model_endpoint="https://api.openai.com/v1",
    context_window=8000,
)

# Explicit embedding configuration for the new agent.
embedding_config = EmbeddingConfig(
    embedding_endpoint_type="openai",
    embedding_endpoint="https://api.openai.com/v1",
    embedding_model="text-embedding-ada-002",
    embedding_dim=1536,
    embedding_chunk_size=300,
)

# Create a new agent with explicit memory, configs, system prompt, and tools.
agent_state = client.create_agent(
    name="agent_name",
    memory=ChatMemory(human="Name: Sarah", persona="You are a helpful assistant that loves emojis"),
    llm_config=llm_config,
    embedding_config=embedding_config,
    system=gpt_system.get_system_text("memgpt_chat"),
    tools=[],
    include_base_tools=True,
)
print(f"Created agent with name {agent_state.name} and unique ID {agent_state.id}")

# Message the agent as a user and show usage plus the agent's replies.
response = client.send_message(agent_id=agent_state.id, role="user", message="hello")
print("Usage", response.usage)
print("Agent messages", response.messages)

# Send a system-role (non-user) message to the agent.
response = client.send_message(agent_id=agent_state.id, role="system", message="[system] user has logged in. send a friendly message.")
print("Usage", response.usage)
print("Agent messages", response.messages)

# Clean up: remove the agent from the server.
client.delete_agent(agent_id=agent_state.id)

View File

@@ -0,0 +1,40 @@
from letta import EmbeddingConfig, LLMConfig, create_client

# Connect to the Letta server with default client settings.
client = create_client()

# Register defaults so create_agent() below needs no explicit configs.
client.set_default_llm_config(
    LLMConfig(
        model="gpt-4o-mini",
        model_endpoint_type="openai",
        model_endpoint="https://api.openai.com/v1",
        context_window=128000,
    )
)
client.set_default_embedding_config(
    EmbeddingConfig(
        embedding_endpoint_type="openai",
        embedding_endpoint="https://api.openai.com/v1",
        embedding_model="text-embedding-ada-002",
        embedding_dim=1536,
        embedding_chunk_size=300,
    )
)

# Create a new agent using the defaults registered above.
agent_state = client.create_agent()
print(f"Created agent with name {agent_state.name} and unique ID {agent_state.id}")

# Message the agent and show usage plus the agent's replies.
response = client.send_message(agent_id=agent_state.id, role="user", message="hello")
print("Usage", response.usage)
print("Agent messages", response.messages)

# Look the agent up three different ways: list all, by ID, and by name.
agents = client.list_agents()
agent_state = client.get_agent(agent_id=agent_state.id)
agent_id = client.get_agent_id(agent_name=agent_state.name)
agent_state = client.get_agent(agent_id=agent_id)

# Clean up: remove the agent from the server.
client.delete_agent(agent_id=agent_state.id)

0
examples/docs/memory.py Normal file
View File

View File

@@ -0,0 +1,42 @@
"""
Make sure you run the Letta server before running this example.
```
letta server
```
"""
from letta import create_client
from letta.schemas.memory import ChatMemory


def main():
    """Create an agent on a local Letta server, message it, then delete it.

    Raises whatever the client raises if no server is listening on
    http://localhost:8283 (start one with `letta server`).
    """
    # Connect to the server as a user
    client = create_client(base_url="http://localhost:8283")

    # list available configs on the server
    llm_configs = client.list_llm_configs()
    print(f"Available LLM configs: {llm_configs}")
    embedding_configs = client.list_embedding_configs()
    print(f"Available embedding configs: {embedding_configs}")

    # Create an agent using the first available configs
    agent_state = client.create_agent(
        name="my_agent",
        memory=ChatMemory(human="My name is Sarah.", persona="I am a friendly AI."),
        embedding_config=embedding_configs[0],
        llm_config=llm_configs[0],
    )
    print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")

    # Send a message to the agent
    response = client.user_message(agent_id=agent_state.id, message="Whats my name?")
    # Fixed typo ("Recieved") and dropped the pointless f-prefix on a string
    # with no placeholders; also removed a duplicated "Created agent" print.
    print("Received response:", response.messages)

    # Delete agent
    client.delete_agent(agent_id=agent_state.id)
    print(f"Deleted agent: {agent_state.name} with ID {str(agent_state.id)}")


if __name__ == "__main__":
    main()

68
examples/docs/tools.py Normal file
View File

@@ -0,0 +1,68 @@
from letta import EmbeddingConfig, LLMConfig, create_client

# Connect to the Letta server with default client settings.
client = create_client()

# Register server-side defaults so agents created below need no explicit configs.
default_llm = LLMConfig(
    model="gpt-4",
    model_endpoint_type="openai",
    model_endpoint="https://api.openai.com/v1",
    context_window=8000,
)
client.set_default_llm_config(default_llm)

default_embedding = EmbeddingConfig(
    embedding_endpoint_type="openai",
    embedding_endpoint="https://api.openai.com/v1",
    embedding_model="text-embedding-ada-002",
    embedding_dim=1536,
    embedding_chunk_size=300,
)
client.set_default_embedding_config(default_embedding)
# define a function with a docstring (the docstring becomes the tool description)
def roll_d20() -> str:
    """
    Simulate the roll of a 20-sided die (d20).

    This function generates a random integer between 1 and 20, inclusive,
    which represents the outcome of a single roll of a d20.

    Returns:
        str: A message reporting the roll, e.g. "You rolled a 15".

    Example:
        >>> roll_d20()
        'You rolled a 15'  # This is an example output and may vary each time the function is called.
    """
    # Imported inside the function so the tool's source stays self-contained
    # when it is shipped to the server.
    import random

    dice_roll_outcome = random.randint(1, 20)
    output_string = f"You rolled a {dice_roll_outcome}"
    return output_string
# Register the Python function as a server-side tool under the name "roll_dice".
dice_tool = client.create_tool(roll_d20, name="roll_dice")

# Create an agent that has the new tool attached, then exercise it.
agent = client.create_agent(tools=[dice_tool.name])
print(f"Created agent with name {agent.name} with tools {agent.tools}")

reply = client.send_message(agent_id=agent.id, role="user", message="roll a dice")
print("Usage", reply.usage)
print("Agent messages", reply.messages)

# Detach the tool from the agent, re-attach it, then delete the agent.
client.remove_tool_from_agent(agent_id=agent.id, tool_id=dice_tool.id)
client.add_tool_to_agent(agent_id=agent.id, tool_id=dice_tool.id)
client.delete_agent(agent_id=agent.id)

# Create a second agent with only a subset of the default tools.
agent = client.create_agent(include_base_tools=False, tools=[dice_tool.name, "send_message"])

# Ask it to search archival memory (it will be unable to do so).
reply = client.send_message(agent_id=agent.id, role="user", message="search your archival memory")
print("Usage", reply.usage)
print("Agent messages", reply.messages)
client.delete_agent(agent_id=agent.id)