docs: Finish writing example for LangChain tooling (#1810)

Co-authored-by: Matt Zhou <mattzhou@Matts-MacBook-Pro.local>
This commit is contained in:
Matthew Zhou
2024-10-01 13:46:49 -07:00
committed by GitHub
parent 88d18b07db
commit ed7e3d1482
6 changed files with 93 additions and 194 deletions

View File

@@ -1,189 +0,0 @@
import json
from typing import List, Tuple
from letta import create_client
from letta.agent import Agent
from letta.memory import ChatMemory
"""
This example show how you can add a google search custom function to your Letta agent.
First, make sure you run:
```
pip install serpapi
pip install llama-index-readers-web
```
then setup letta with `letta configure`.
"""
def google_search(self: Agent, query: str) -> List[Tuple[str, str]]:
    """
    A tool to search google with the provided query, and return a list of relevant summaries and URLs.

    Args:
        query (str): The search query.

    Returns:
        List[Tuple[str, str]]: A list of up to 5 tuples, each containing a summary of the search result and the URL of the search result in the form (summary, URL)

    Example:
        >>> google_search("How can I make a french 75?")
        [
            (
                "To make a French 75 cocktail, combine 1½ oz. gin, ¾ oz. fresh lemon juice, and ¾ oz. simple syrup in a cocktail shaker with ice. Shake vigorously, then strain into a large flute. Top with 2 oz. Champagne and garnish with a long spiral lemon twist. The recipe prefers gin, but cognac is also traditional. Serve in Champagne flutes for the full effect.",
                "https://www.bonappetit.com/recipe/french-75-3"
            )
        ]
    """
    # imports must be inside the function (the tool's source is captured and executed standalone)
    import os
    import time
    from concurrent.futures import ThreadPoolExecutor

    import serpapi
    from openai import OpenAI

    from letta.credentials import LettaCredentials
    from letta.data_sources.connectors import WebConnector
    from letta.utils import printd

    printd("Starting google search:", query)

    def summarize_text(document_text: str, question: str) -> str:
        """Summarize document_text with respect to question; return None when nothing relevant is found."""
        # NOTE: the sentinel phrase carries a closing quote so the model's echo of it
        # matches the exact substring checked for below (the original prompt left the
        # quote unterminated).
        prompt = (
            f'Given the question "{question}", summarize the text below. If there is no relevant information, say "No relevant information found."'
            + f"\n\n{document_text}"
        )
        credentials = LettaCredentials().load()
        assert credentials.openai_key is not None, credentials.openai_key
        # model = "gpt-4-1106-preview"
        model = "gpt-3.5-turbo-1106"
        client = OpenAI(api_key=credentials.openai_key)
        chat_completion = client.chat.completions.create(
            messages=[
                {"role": "user", "content": prompt},
            ],
            model=model,
        )
        response = chat_completion.choices[0].message.content
        # return None if nothing found
        if "No relevant information found." in response:
            return None
        return response

    params = {
        "engine": "google",
        "q": query,
    }

    # get links from web search, keeping at most the top 5 organic results
    try:
        st = time.time()
        search = serpapi.Client(api_key=os.environ["SERPAPI_API_KEY"]).search(params)
        printd(f"Time taken to retrieve search results: {time.time() - st}")
        results = search["organic_results"]
        links = [result.get("link") for result in results][:5]
    except Exception as e:
        print(f"An error occurred with retrieving results: {e}")
        return []
    print("links", links)

    # retrieve text data from links
    def read_and_summarize_link(link):
        """Fetch one page and return (summary, url), or None when no relevant info was found."""
        connector = WebConnector([link])
        st = time.time()
        for document_text, document_metadata in connector.generate_documents():
            printd(f"Time taken to retrieve text data: {time.time() - st}")
            # summarize text data; truncate to stay within the model's context window
            st = time.time()
            summary = summarize_text(document_text[: 16000 - 500], query)
            printd(f"Time taken to summarize text data: {time.time() - st}, length: {len(document_text)}")
            printd(link)
            if summary is not None:
                return (summary, document_metadata["url"])
        return None

    try:
        st = time.time()
        # fan out the fetch+summarize work; the `with` block waits for all futures
        with ThreadPoolExecutor(max_workers=16) as executor:
            futures = [executor.submit(read_and_summarize_link, link) for link in links]
        # collect each future's result exactly once (the original called .result() twice per future)
        results = [future.result() for future in futures]
        response = [r for r in results if r is not None]
        print(f"Time taken: {time.time() - st}")
        print("Response:", response)
        return response
    except Exception as e:
        print(f"An error occurred with retrieving text data: {e}")
        return []
def main():
    """Register the google_search tool, create an agent that uses it, send one message, then clean up."""
    # Create a `LocalClient` (you can also use a `RESTClient`, see the letta_rest_client.py example)
    client = create_client()

    # create tool
    search_tool = client.create_tool(google_search, name="google_search")
    print(f"Created tool: {search_tool.name} with ID {str(search_tool.id)}")
    print(f"Tool schema: {json.dumps(search_tool.json_schema, indent=4)}")

    # google search persona ("questionien" typo fixed to "questions")
    persona = """
My name is Letta.

I am a personal assistant who answers a user's questions using google web searches. When a user asks me a question and the answer is not in my context, I will use a tool called google_search which will search the web and return relevant summaries and the link they correspond to. It is my job to construct the best query to input into google_search based on the user's question, and to aggregate the response of google_search construct a final answer that also references the original links the information was pulled from. Here is an example:

---
User: Who founded OpenAI?
Letta: OpenAI was founded by Ilya Sutskever, Greg Brockman, Trevor Blackwell, Vicki Cheung, Andrej Karpathy, Durk Kingma, Jessica Livingston, John Schulman, Pamela Vagata, and Wojciech Zaremba, with Sam Altman and Elon Musk serving as the initial Board of Directors members. [1][2]

[1] https://www.britannica.com/topic/OpenAI
[2] https://en.wikipedia.org/wiki/OpenAI
---

Dont forget - inner monologue / inner thoughts should always be different than the contents of send_message! send_message is how you communicate with the user, whereas inner thoughts are your own personal inner thoughts.
"""

    # Create an agent
    agent_state = client.create_agent(
        name="my_agent3", memory=ChatMemory(human="My name is Sarah.", persona=persona), tools=[search_tool.name]
    )
    # single "Created agent" print (the original printed this line twice)
    print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")

    # Send a message to the agent ("Recieved" typo fixed in the output below)
    send_message_response = client.user_message(agent_id=agent_state.id, message="What is the weather in Berkeley?")
    print(f"Received response: \n{json.dumps(send_message_response.messages, indent=4)}")

    # Delete agent
    client.delete_agent(agent_id=agent_state.id)
    print(f"Deleted agent: {agent_state.name} with ID {str(agent_state.id)}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,86 @@
import json
import uuid
from letta import create_client
from letta.schemas.memory import ChatMemory
from letta.schemas.tool import Tool
"""
This example show how you can add LangChain tools .
First, make sure you have LangChain and some of the extras downloaded.
For this specific example, you will need `wikipedia` installed.
```
poetry install --extras "tests"
poetry install langchain
```
then setup letta with `letta configure`.
"""
def main():
    """Wrap a LangChain Wikipedia tool as a Letta tool, run one query through an agent, and clean up."""
    from langchain_community.tools import WikipediaQueryRun
    from langchain_community.utilities import WikipediaAPIWrapper

    wrapper = WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=500)
    wiki_run = WikipediaQueryRun(api_wrapper=wrapper)

    # Translate the LangChain tool into a Letta Tool.
    # Note the additional_imports_module_attr_map: it maps module name -> attribute name
    # for every extra import the generated tool source needs. Because an object of type
    # WikipediaAPIWrapper is passed into WikipediaQueryRun to build the tool, that class
    # must be importable too (langchain_community.utilities.WikipediaAPIWrapper).
    wikipedia_query_tool = Tool.from_langchain(
        wiki_run, additional_imports_module_attr_map={"langchain_community.utilities": "WikipediaAPIWrapper"}
    )
    tool_name = wikipedia_query_tool.name

    # Create a `LocalClient` (you can also use a `RESTClient`, see the letta_rest_client.py example)
    client = create_client()

    # Register the tool and confirm it shows up in the tool listing.
    client.add_tool(wikipedia_query_tool)
    assert tool_name in [t.name for t in client.list_tools()]

    # Deterministic agent name for this example, derived from a fixed DNS namespace UUID.
    agent_uuid = str(uuid.uuid5(uuid.NAMESPACE_DNS, "letta-langchain-tooling-example"))

    # Remove any leftover agents from previous runs of this example.
    for existing in client.list_agents():
        if existing.name != agent_uuid:
            continue
        client.delete_agent(agent_id=existing.id)
        print(f"Deleted agent: {existing.name} with ID {str(existing.id)}")

    # wikipedia search persona
    persona = f"""
My name is Letta.

I am a personal assistant who answers a user's questions using wikipedia searches. When a user asks me a question, I will use a tool called {tool_name} which will search Wikipedia and return a Wikipedia page about the topic. It is my job to construct the best query to input into {tool_name} based on the user's question.

Dont forget - inner monologue / inner thoughts should always be different than the contents of send_message! send_message is how you communicate with the user, whereas inner thoughts are your own personal inner thoughts.
"""

    # Create an agent wired up to the wrapped tool.
    agent_state = client.create_agent(name=agent_uuid, memory=ChatMemory(human="My name is Matt.", persona=persona), tools=[tool_name])
    print(f"Created agent: {agent_state.name} with ID {str(agent_state.id)}")

    # Send a message to the agent and print every message in the response.
    send_message_response = client.user_message(agent_id=agent_state.id, message="How do you pronounce Albert Einstein's name?")
    for message in send_message_response.messages:
        print(f"{json.dumps(message.model_dump(), indent=4)}\n")

    # Delete agent
    client.delete_agent(agent_id=agent_state.id)
    print(f"Deleted agent: {agent_state.name} with ID {str(agent_state.id)}")


if __name__ == "__main__":
    main()