feat: Add memgpt quickstart command (#641)

* Revert "Revert "nonfunctional 404 quickstart command w/ some other typo corrections""

This reverts commit 5dbdf31f1ce939843ff97e649554d8bc0556a834.

* Revert "Revert "added example config file""

This reverts commit 72a58f6de31f3ff71847bbaf083a91182469f9af.

* tested and working

* added and tested openai quickstart, added fallback if internet 404's to pull from local copy

* typo

* updated openai key input message to include html link

* renamed --type to --backend; added --latest flag which fetches the config from online (the default is to pull from the local file)

* fixed links
This commit is contained in:
Charles Packer
2023-12-20 00:00:40 -08:00
committed by GitHub
parent dd500e19c3
commit f532ffc41f
6 changed files with 159 additions and 3 deletions

View File

@@ -0,0 +1,12 @@
{
"context_window": 32768,
"model": "ehartford/dolphin-2.5-mixtral-8x7b",
"model_endpoint_type": "vllm",
"model_endpoint": "http://api.memgpt.ai",
"model_wrapper": "airoboros-l2-70b-2.1",
"embedding_endpoint_type": "hugging-face",
"embedding_endpoint": "http://embeddings.memgpt.ai",
"embedding_model": "BAAI/bge-large-en-v1.5",
"embedding_dim": 1024,
"embedding_chunk_size": 300
}

12
configs/openai.json Normal file
View File

@@ -0,0 +1,12 @@
{
"context_window": 8192,
"model": "gpt-4",
"model_endpoint_type": "openai",
"model_endpoint": "https://api.openai.com/v1",
"model_wrapper": null,
"embedding_endpoint_type": "openai",
"embedding_endpoint": "https://api.openai.com/v1",
"embedding_model": null,
"embedding_dim": 1536,
"embedding_chunk_size": 300
}

View File

@@ -1,5 +1,6 @@
import typer
import json
import requests
import sys
import io
import logging
@@ -25,6 +26,136 @@ from memgpt.embeddings import embedding_model
from memgpt.server.constants import WS_DEFAULT_PORT, REST_DEFAULT_PORT
# Backends selectable via the `quickstart` command.
# ("azure" was considered but is not currently offered.)
QuickstartChoice = Enum(
    "QuickstartChoice",
    {
        "openai": "openai",
        # "azure": "azure",
        "memgpt_hosted": "memgpt",
    },
)
def set_config_with_dict(new_config: dict):
    """Overwrite fields of the saved MemGPT base config with values from a dict.

    Loads the existing config from disk, replaces every attribute whose key
    appears in ``new_config`` with a *different* value, and saves the config
    back only if at least one field actually changed (unchanged fields are
    skipped so a no-op call does not rewrite the file).

    Args:
        new_config: Mapping of config attribute names to their new values.
            Keys that do not exist on the loaded config are ignored.
    """
    from memgpt.utils import printd

    old_config = MemGPTConfig.load()
    modified = False
    for k, v in vars(old_config).items():
        if k in new_config:
            if v != new_config[k]:
                printd(f"Replacing config {k}: {v} -> {new_config[k]}")
                modified = True
                # MemGPTConfig is an object, not a dict, so use setattr.
                setattr(old_config, k, new_config[k])
            else:
                printd(f"Skipping new config {k}: {v} == {new_config[k]}")
    if modified:
        # (was an f-string with no placeholders — plain literal is correct)
        printd("Saving new config file.")
        old_config.save()
def quickstart(
    backend: QuickstartChoice = typer.Option("memgpt", help="Quickstart setup backend"),
    latest: bool = typer.Option(False, "--latest", help="Use --latest to pull the latest config from online"),
    debug: bool = typer.Option(False, "--debug", help="Use --debug to enable debugging output"),
):
    """Set the base config file with a single command.

    Applies a ready-made JSON config for the chosen backend. By default the
    config is read from the copy bundled with the package; with ``--latest``
    it is fetched from the GitHub repo first, falling back to the bundled
    copy if the download fails. For the OpenAI backend with ``--latest``,
    prompts for an API key when OPENAI_API_KEY is not set.

    Raises:
        NotImplementedError: If ``backend`` is not a supported choice.
    """

    def _apply_local_config(filename: str, label: str):
        # Load the config JSON shipped with the package (path is relative to
        # this module: <pkg>/../../configs/<filename>) and apply it.
        script_dir = os.path.dirname(__file__)  # directory of this script
        config_path = os.path.join(script_dir, "..", "..", "configs", filename)
        try:
            with open(config_path, "r") as file:
                local_config = json.load(file)
            print(f"Loaded {label} successfully.")
            set_config_with_dict(local_config)
        except FileNotFoundError:
            print(f"{label.capitalize()} not found at {config_path}")

    def _fetch_remote_config(url: str):
        # Return the parsed JSON config on success, or None so the caller can
        # fall back to the bundled copy.
        # timeout added: a plain requests.get() can hang indefinitely.
        response = requests.get(url, timeout=30)
        if response.status_code == 200:
            print("JSON config file downloaded successfully.")
            return response.json()
        print(f"Failed to download config from {url}. Status code:", response.status_code)
        return None

    # Setup logger: silence everything unless --debug was passed.
    utils.DEBUG = debug
    logging.getLogger().setLevel(logging.DEBUG if debug else logging.CRITICAL)

    repo_raw = "https://raw.githubusercontent.com/cpacker/MemGPT/main/configs"

    if backend == QuickstartChoice.memgpt_hosted:
        if latest:
            config = _fetch_remote_config(f"{repo_raw}/memgpt_hosted.json")
            if config is not None:
                set_config_with_dict(config)
            else:
                _apply_local_config("memgpt_hosted.json", "backup config file")
        else:
            _apply_local_config("memgpt_hosted.json", "config file")

    elif backend == QuickstartChoice.openai:
        if latest:
            # Make sure we have an API key before touching the config.
            api_key = os.getenv("OPENAI_API_KEY")
            while api_key is None or len(api_key) == 0:
                # Ask for API key as input
                api_key = questionary.text("Enter your OpenAI API key (starts with 'sk-', see https://platform.openai.com/api-keys):").ask()
            config = _fetch_remote_config(f"{repo_raw}/openai.json")
            if config is not None:
                # Add the API key to the downloaded config before applying it.
                config["openai_key"] = api_key
                set_config_with_dict(config)
            else:
                # NOTE(review): the fallback paths never inject the API key, so
                # any previously saved key is left as-is — confirm intended.
                _apply_local_config("openai.json", "backup config file")
        else:
            _apply_local_config("openai.json", "config file")

    else:
        raise NotImplementedError(backend)
def open_folder():
"""Open a folder viewer of the MemGPT home directory"""
try:

View File

@@ -54,7 +54,7 @@ def get_chat_completion(
# Warn the user that we're using the fallback
if not has_shown_warning:
print(
f"{CLI_WARNING_PREFIX}no wrapper specified for local LLM, using the default wrapper (you can remove this warning by specifying the wrapper with --wrapper)"
f"{CLI_WARNING_PREFIX}no wrapper specified for local LLM, using the default wrapper (you can remove this warning by specifying the wrapper with --model-wrapper)"
)
has_shown_warning = True
if endpoint_type in ["koboldcpp", "llamacpp", "webui"]:

View File

@@ -20,7 +20,7 @@ def get_vllm_completion(endpoint, model, prompt, context_window, user, grammar=N
settings = get_completions_settings()
request = settings
request["prompt"] = prompt
request["max_tokens"] = int(context_window - prompt_tokens)
request["max_tokens"] = 3000 # int(context_window - prompt_tokens)
request["stream"] = False
request["user"] = user

View File

@@ -21,7 +21,7 @@ from memgpt.interface import CLIInterface as interface # for printing to termin
import memgpt.agent as agent
import memgpt.system as system
import memgpt.constants as constants
from memgpt.cli.cli import run, attach, version, server, open_folder
from memgpt.cli.cli import run, attach, version, server, open_folder, quickstart
from memgpt.cli.cli_config import configure, list, add
from memgpt.cli.cli_load import app as load_app
from memgpt.connectors.storage import StorageConnector
@@ -35,6 +35,7 @@ app.command(name="list")(list)
app.command(name="add")(add)
app.command(name="server")(server)
app.command(name="folder")(open_folder)
app.command(name="quickstart")(quickstart)
# load data commands
app.add_typer(load_app, name="load")