Allow MemGPT to read/write text files + make HTTP requests (#174)

* added file read/write

* added HTTP requests

* black on utils.py
This commit is contained in:
Charles Packer
2023-11-01 01:08:44 -07:00
committed by GitHub
parent f765290cfb
commit f889f24643
4 changed files with 185 additions and 4 deletions

View File

@@ -5,6 +5,7 @@ import glob
import pickle
import math
import os
import requests
import json
import threading
@@ -247,7 +248,6 @@ class Agent(object):
"edit_memory_append": self.edit_memory_append,
"edit_memory_replace": self.edit_memory_replace,
"pause_heartbeats": self.pause_heartbeats,
"message_chatgpt": self.message_chatgpt,
"core_memory_append": self.edit_memory_append,
"core_memory_replace": self.edit_memory_replace,
"recall_memory_search": self.recall_memory_search,
@@ -256,6 +256,10 @@ class Agent(object):
"conversation_search_date": self.recall_memory_search_date,
"archival_memory_insert": self.archival_memory_insert,
"archival_memory_search": self.archival_memory_search,
# extras
"read_from_text_file": self.read_from_text_file,
"append_to_text_file": self.append_to_text_file,
"http_request": self.http_request,
}
@property
@@ -797,6 +801,73 @@ class Agent(object):
reply = response.choices[0].message.content
return reply
def read_from_text_file(self, filename, line_start, num_lines=1, max_chars=500, trunc_message=True):
    """Read up to `num_lines` lines from a text file, starting at `line_start`.

    Args:
        filename (str): Path of the file to read.
        line_start (int): 1-indexed line number to start reading from.
        num_lines (int): Maximum number of lines to read (default 1).
        max_chars (int): Cap on total characters accumulated; the line that
            crosses the cap is truncated and reading stops (default 500).
            Pass None to disable the cap.
        trunc_message (bool): If True, append a system-alert line when the
            max_chars cap cuts the read short.

    Returns:
        str: The lines read, joined with "\\n" (trailing newlines stripped).

    Raises:
        FileNotFoundError: If `filename` does not exist.
        ValueError: If `line_start` or `num_lines` is less than 1.
    """
    if not os.path.exists(filename):
        # Fix: the filename placeholder was dropped from the f-string; interpolate it.
        raise FileNotFoundError(f"The file '{filename}' does not exist.")
    if line_start < 1 or num_lines < 1:
        raise ValueError("Both line_start and num_lines must be positive integers.")
    lines = []
    chars_read = 0
    with open(filename, "r") as file:
        for current_line_number, line in enumerate(file, start=1):
            if line_start <= current_line_number < line_start + num_lines:
                chars_to_add = len(line)
                if max_chars is not None and chars_read + chars_to_add > max_chars:
                    # Adding this line would exceed max_chars: truncate it and stop reading.
                    excess_chars = (chars_read + chars_to_add) - max_chars
                    lines.append(line[:-excess_chars].rstrip("\n"))
                    if trunc_message:
                        lines.append(f"[SYSTEM ALERT - max chars ({max_chars}) reached during file read]")
                    break
                else:
                    lines.append(line.rstrip("\n"))
                    chars_read += chars_to_add
            if current_line_number >= line_start + num_lines - 1:
                # Requested window fully covered; stop scanning the rest of the file.
                break
    return "\n".join(lines)
def append_to_text_file(self, filename, content):
    """Append `content` plus a trailing newline to an existing text file.

    Never creates files: the target must already exist, so the agent cannot
    scatter new files around the filesystem by mistyping a path.

    Args:
        filename (str): Path of the file to append to.
        content (str): Text to append (a "\\n" is added after it).

    Raises:
        FileNotFoundError: If `filename` does not exist.
    """
    if not os.path.exists(filename):
        # Fix: the filename placeholder was dropped from the f-string; interpolate it.
        raise FileNotFoundError(f"The file '{filename}' does not exist.")
    with open(filename, "a") as file:
        file.write(content + "\n")
def http_request(self, method, url, payload_json=None):
    """Perform an HTTP request and return the response as a plain dict.

    Args:
        method (str): HTTP verb, e.g. 'GET' or 'POST' (case-insensitive).
        url (str): Target URL.
        payload_json (str): JSON-encoded request body; ignored for GET.

    Returns:
        dict: On success, ``{"status_code", "headers", "body"}``; on any
        failure (bad JSON, network error, ...), ``{"error": <message>}`` —
        errors are reported, never raised, so the agent loop keeps running.
    """
    try:
        request_headers = {"Content-Type": "application/json"}
        if method.upper() == "GET":
            # GET carries no body, so any supplied payload is ignored.
            print(f"[HTTP] launching GET request to {url}")
            resp = requests.get(url, headers=request_headers)
        else:
            # Decode the JSON payload (empty body when none was provided).
            body = json.loads(payload_json) if payload_json else {}
            print(f"[HTTP] launching {method} request to {url}, payload=\n{json.dumps(body, indent=2)}")
            resp = requests.request(method, url, json=body, headers=request_headers)
        return {"status_code": resp.status_code, "headers": dict(resp.headers), "body": resp.text}
    except Exception as e:
        # Surface the failure to the model instead of crashing the agent.
        return {"error": str(e)}
def pause_heartbeats(self, minutes, max_pause=MAX_PAUSE_HEARTBEATS):
"""Pause timed heartbeats for N minutes"""
minutes = min(max_pause, minutes)

View File

@@ -445,7 +445,7 @@ async def run_agent_loop(memgpt_agent, first, no_verify=False, cfg=None, legacy=
continue
elif user_input.lower() == "/dump":
await print_messages(memgpt_agent.messages)
await memgpt.interface.print_messages(memgpt_agent.messages)
continue
elif user_input.lower() == "/dumpraw":
@@ -453,7 +453,7 @@ async def run_agent_loop(memgpt_agent, first, no_verify=False, cfg=None, legacy=
continue
elif user_input.lower() == "/dump1":
await print_messages(memgpt_agent.messages[-1])
await memgpt.interface.print_messages(memgpt_agent.messages[-1])
continue
elif user_input.lower() == "/memory":

View File

@@ -45,7 +45,7 @@ def use_preset(preset_name, agent_config, model, persona, human, interface, pers
first_message_verify_mono=True if "gpt-4" in model else False,
)
if preset_name == "memgpt_chat_sync": # TODO: remove me after we move the CLI to AgentSync
elif preset_name == "memgpt_chat_sync": # TODO: remove me after we move the CLI to AgentSync
functions = [
"send_message",
"pause_heartbeats",
@@ -77,5 +77,41 @@ def use_preset(preset_name, agent_config, model, persona, human, interface, pers
first_message_verify_mono=True if "gpt-4" in model else False,
)
elif preset_name == "memgpt_extras":
functions = [
"send_message",
"pause_heartbeats",
"core_memory_append",
"core_memory_replace",
"conversation_search",
"conversation_search_date",
"archival_memory_insert",
"archival_memory_search",
# extra for read/write to files
"read_from_text_file",
"append_to_text_file",
# internet access
"http_request",
]
available_functions = [v for k, v in gpt_functions.FUNCTIONS_CHAINING.items() if k in functions]
printd(f"Available functions:\n", [x["name"] for x in available_functions])
assert len(functions) == len(available_functions)
if "gpt-3.5" in model:
# use a different system message for gpt-3.5
preset_name = "memgpt_gpt35_extralong"
return AgentAsync(
model=model,
system=gpt_system.get_system_text("memgpt_chat"),
functions=available_functions,
interface=interface,
persistence_manager=persistence_manager,
persona_notes=persona,
human_notes=human,
# gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now
first_message_verify_mono=True if "gpt-4" in model else False,
)
else:
raise ValueError(preset_name)

View File

@@ -235,4 +235,78 @@ FUNCTIONS_CHAINING = {
"required": ["name", "query", "page", "request_heartbeat"],
},
},
"read_from_text_file": {
"name": "read_from_text_file",
"description": "Read lines from a text file.",
"parameters": {
"type": "object",
"properties": {
"filename": {
"type": "string",
"description": "The name of the file to read.",
},
"line_start": {
"type": "integer",
"description": "Line to start reading from.",
},
"num_lines": {
"type": "integer",
"description": "How many lines to read (defaults to 1).",
},
"request_heartbeat": {
"type": "boolean",
"description": FUNCTION_PARAM_DESCRIPTION_REQ_HEARTBEAT,
},
},
"required": ["filename", "line_start", "request_heartbeat"],
},
},
"append_to_text_file": {
"name": "append_to_text_file",
"description": "Append to a text file.",
"parameters": {
"type": "object",
"properties": {
"filename": {
"type": "string",
"description": "The name of the file to read.",
},
"content": {
"type": "string",
"description": "Content to append to the file.",
},
"request_heartbeat": {
"type": "boolean",
"description": FUNCTION_PARAM_DESCRIPTION_REQ_HEARTBEAT,
},
},
"required": ["filename", "content", "request_heartbeat"],
},
},
"http_request": {
"name": "http_request",
"description": "Generates an HTTP request and returns the response.",
"parameters": {
"type": "object",
"properties": {
"method": {
"type": "string",
"description": "The HTTP method (e.g., 'GET', 'POST').",
},
"url": {
"type": "string",
"description": "The URL for the request",
},
"payload": {
"type": "string",
"description": "A JSON string representing the request payload.",
},
"request_heartbeat": {
"type": "boolean",
"description": FUNCTION_PARAM_DESCRIPTION_REQ_HEARTBEAT,
},
},
"required": ["method", "url", "request_heartbeat"],
},
},
}