feat: one time pass of autoflake + add autoflake to dev extras (#1097)

Co-authored-by: tombedor <tombedor@gmail.com>
This commit is contained in:
Charles Packer
2024-03-05 16:35:12 -08:00
committed by GitHub
parent b665e67b01
commit 637cb9c2b4
47 changed files with 59 additions and 90 deletions

View File

@@ -1,5 +1,4 @@
from openai import OpenAI
import time
"""
This script provides an example of how you can use OpenAI's python client with a MemGPT server.
@@ -33,7 +32,7 @@ def main():
)
# Store the run ID
run_id = run.id
run.id
# Retrieve all messages from the thread
messages = client.beta.threads.messages.list(thread_id=thread.id)

View File

@@ -674,7 +674,7 @@ class Agent(object):
# (if yes) Step 3: call the function
# (if yes) Step 4: send the info on the function call and function response to LLM
response_message = response.choices[0].message
response_message_copy = response_message.copy()
response_message.copy()
all_response_messages, heartbeat_request, function_failed = self._handle_ai_response(response_message)
# Add the extra metadata to the assistant response

View File

@@ -1,7 +1,5 @@
import uuid
import json
import re
from typing import Optional, List, Iterator, Dict, Tuple, cast, Type
from typing import Optional, List, Iterator, Dict, Tuple, cast
import chromadb
from chromadb.api.types import Include, GetResult

View File

@@ -11,9 +11,8 @@ from sqlalchemy_json import mutable_json_type, MutableJson
from sqlalchemy import TypeDecorator, CHAR
import uuid
import re
from tqdm import tqdm
from typing import Optional, List, Iterator, Dict, Tuple
from typing import Optional, List, Iterator, Dict
import numpy as np
from tqdm import tqdm
import pandas as pd
@@ -26,8 +25,6 @@ from memgpt.data_types import Record, Message, Passage, ToolCall, RecordType
from memgpt.constants import MAX_EMBEDDING_DIM
from memgpt.metadata import MetadataStore
from datetime import datetime
# Custom UUID type
class CommonUUID(TypeDecorator):

View File

@@ -3,10 +3,7 @@
We originally tried to use Llama Index VectorIndex, but their limited API was extremely problematic.
"""
from typing import Any, Optional, List, Iterator, Union, Tuple, Type
import re
import pickle
import os
from typing import Optional, List, Iterator, Union, Tuple, Type
import uuid
from abc import abstractmethod

View File

@@ -265,7 +265,7 @@ def load_autogen_memgpt_agent(
# Create the agent object directly from the loaded state (not via preset creation)
try:
memgpt_agent = MemGPTAgent(agent_state=agent_state, interface=interface)
except Exception as e:
except Exception:
print(f"Failed to create an agent object from agent state =\n{agent_state}")
raise
@@ -317,8 +317,8 @@ def create_autogen_memgpt_agent(
embedding_config = config.default_embedding_config
# Overwrite parts of the LLM and embedding configs that were passed into the config dicts
llm_config_was_modified = update_config_from_dict(llm_config, agent_config)
embedding_config_was_modified = update_config_from_dict(embedding_config, agent_config)
update_config_from_dict(llm_config, agent_config)
update_config_from_dict(embedding_config, agent_config)
# Create the default user, or load the specified user
ms = MetadataStore(config)

View File

@@ -3,7 +3,7 @@
import uuid
import typer
import time
from typing import Annotated, Optional
from typing import Annotated
from memgpt import create_client
from memgpt.config import MemGPTConfig

View File

@@ -2,8 +2,6 @@ import uuid
import json
import requests
import sys
import shutil
import io
import logging
from pathlib import Path
import os
@@ -722,7 +720,7 @@ def delete_agent(
try:
ms.delete_agent(agent_id=agent.id)
typer.secho(f"🕊️ Successfully deleted agent '{agent_name}' (id={agent.id})", fg=typer.colors.GREEN)
except Exception as e:
except Exception:
typer.secho(f"Failed to delete agent '{agent_name}' (id={agent.id})", fg=typer.colors.RED)
sys.exit(1)

View File

@@ -1,9 +1,7 @@
import builtins
import json
import os
import shutil
import uuid
from typing import Annotated, Tuple, Optional
from typing import Annotated, Optional
from enum import Enum
from typing import Annotated
@@ -512,7 +510,7 @@ def configure_embedding_endpoint(config: MemGPTConfig, credentials: MemGPTCreden
raise KeyboardInterrupt
try:
embedding_dim = int(embedding_dim)
except Exception as e:
except Exception:
raise ValueError(f"Failed to cast {embedding_dim} to integer.")
else: # local models
embedding_endpoint_type = "local"
@@ -627,7 +625,7 @@ def configure():
# check credentials
credentials = MemGPTCredentials.load()
openai_key = get_openai_credentials()
azure_creds = get_azure_credentials()
get_azure_credentials()
MemGPTConfig.create_config_dir()

View File

@@ -23,7 +23,6 @@ from memgpt.data_types import Source, Passage, Document, User
from memgpt.utils import get_utc_time, suppress_stdout
from memgpt.agent_store.storage import StorageConnector, TableType
from datetime import datetime
app = typer.Typer()

View File

@@ -1,4 +1,3 @@
import os
import datetime
import requests
import uuid

View File

@@ -1,8 +1,5 @@
from memgpt.log import logger
import inspect
import json
import os
import uuid
from dataclasses import dataclass
import configparser
import typer

View File

@@ -4,7 +4,6 @@ from memgpt.agent_store.storage import StorageConnector, TableType
from memgpt.embeddings import embedding_model
from memgpt.data_types import Document, Passage
import uuid
from typing import List, Iterator, Dict, Tuple, Optional
from llama_index.core import Document as LlamaIndexDocument
@@ -102,7 +101,6 @@ class DirectoryConnector(DataConnector):
reader = SimpleDirectoryReader(input_files=[str(f) for f in self.input_files])
llama_index_docs = reader.load_data(show_progress=True)
docs = []
for llama_index_doc in llama_index_docs:
# TODO: add additional metadata?
# doc = Document(text=llama_index_doc.text, metadata=llama_index_doc.metadata)

View File

@@ -2,7 +2,6 @@
import uuid
from datetime import datetime
from abc import abstractmethod
from typing import Optional, List, Dict, TypeVar
import numpy as np

View File

@@ -1,8 +1,6 @@
class LLMError(Exception):
"""Base class for all LLM-related errors."""
pass
class LLMJSONParsingError(LLMError):
"""Exception raised for errors in the JSON parsing process."""

View File

@@ -1,6 +1,5 @@
from typing import Optional
import datetime
import os
import json
import math
@@ -62,7 +61,7 @@ def core_memory_append(self, name: str, content: str) -> Optional[str]:
Returns:
Optional[str]: None is always returned as this function does not produce a response.
"""
new_len = self.memory.edit_append(name, content)
self.memory.edit_append(name, content)
self.rebuild_memory()
return None
@@ -79,7 +78,7 @@ def core_memory_replace(self, name: str, old_content: str, new_content: str) ->
Returns:
Optional[str]: None is always returned as this function does not produce a response.
"""
new_len = self.memory.edit_replace(name, old_content, new_content)
self.memory.edit_replace(name, old_content, new_content)
self.rebuild_memory()
return None

View File

@@ -203,7 +203,6 @@ class CLIInterface(AgentInterface):
except Exception as e:
printd(str(e))
printd(msg_dict)
pass
elif function_name in ["conversation_search", "conversation_search_date"]:
print_function_message("🧠", f"searching memory with {function_name}")
try:
@@ -216,7 +215,6 @@ class CLIInterface(AgentInterface):
except Exception as e:
printd(str(e))
printd(msg_dict)
pass
else:
printd(f"{CLI_WARNING_PREFIX}did not recognize function message")
printd_function_message("", msg)

View File

@@ -2,7 +2,7 @@ import random
import time
import requests
import time
from typing import Callable, TypeVar, Union
from typing import Union
import urllib
from memgpt.credentials import MemGPTCredentials

View File

@@ -8,7 +8,7 @@ from docstring_parser import parse
from pydantic import BaseModel, create_model, Field
from typing import Any, Type, List, get_args, get_origin, Tuple, Union, Optional, _GenericAlias
from enum import Enum
from typing import get_type_hints, Callable
from typing import Callable
import re

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -1,5 +1,4 @@
import json
from typing import List
import yaml

View File

@@ -5,9 +5,7 @@ class LLMChatCompletionWrapper(ABC):
@abstractmethod
def chat_completion_to_prompt(self, messages, functions, function_documentation=None):
"""Go from ChatCompletion to a single prompt string"""
pass
@abstractmethod
def output_to_chat_completion_response(self, raw_llm_output):
"""Turn the LLM output string into a ChatCompletion response"""
pass

View File

@@ -41,7 +41,6 @@ class ZephyrMistralWrapper(LLMChatCompletionWrapper):
prompt = ""
IM_START_TOKEN = "<s>"
IM_END_TOKEN = "</s>"
# System instructions go first
@@ -205,7 +204,6 @@ class ZephyrMistralInnerMonologueWrapper(ZephyrMistralWrapper):
def chat_completion_to_prompt(self, messages, functions, function_documentation=None):
prompt = ""
IM_START_TOKEN = "<s>"
IM_END_TOKEN = "</s>"
# System instructions go first

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -1,4 +1,3 @@
import os
from urllib.parse import urljoin
import requests

View File

@@ -142,7 +142,6 @@ class ArchivalMemory(ABC):
:param memory_string: Memory string to insert
:type memory_string: str
"""
pass
@abstractmethod
def search(self, query_string, count=None, start=None) -> Tuple[List[str], int]:
@@ -157,7 +156,6 @@ class ArchivalMemory(ABC):
:return: Tuple of (list of results, total number of results)
"""
pass
@abstractmethod
def __repr__(self) -> str:
@@ -168,12 +166,10 @@ class RecallMemory(ABC):
@abstractmethod
def text_search(self, query_string, count=None, start=None):
"""Search messages that match query_string in recall memory"""
pass
@abstractmethod
def date_search(self, start_date, end_date, count=None, start=None):
"""Search messages between start_date and end_date in recall memory"""
pass
@abstractmethod
def __repr__(self) -> str:
@@ -182,7 +178,6 @@ class RecallMemory(ABC):
@abstractmethod
def insert(self, message: Message):
"""Insert message into recall memory"""
pass
class DummyRecallMemory(RecallMemory):

View File

@@ -3,8 +3,7 @@
import os
import uuid
import secrets
from typing import Optional, List, Dict
from datetime import datetime
from typing import Optional, List
from memgpt.constants import DEFAULT_HUMAN, DEFAULT_MEMGPT_MODEL, DEFAULT_PERSONA, DEFAULT_PRESET, LLM_MAX_TOKENS
from memgpt.utils import get_local_time, enforce_types

View File

@@ -248,7 +248,7 @@ def migrate_agent(agent_name: str, data_dir: str = MEMGPT_DIR, ms: Optional[Meta
try:
with open(persistence_filename, "rb") as f:
data = pickle.load(f)
except ModuleNotFoundError as e:
except ModuleNotFoundError:
# Patch for stripped openai package
# ModuleNotFoundError: No module named 'openai.openai_object'
with open(persistence_filename, "rb") as f:
@@ -474,7 +474,7 @@ def migrate_agent(agent_name: str, data_dir: str = MEMGPT_DIR, ms: Optional[Meta
interface=None,
)
save_agent(agent, ms=ms)
except Exception as e:
except Exception:
# if "Agent with name" in str(e):
# print(e)
# return
@@ -540,7 +540,7 @@ def migrate_agent(agent_name: str, data_dir: str = MEMGPT_DIR, ms: Optional[Meta
try:
new_agent_folder = os.path.join(data_dir, MIGRATION_BACKUP_FOLDER, "agents", agent_name)
shutil.move(agent_folder, new_agent_folder)
except Exception as e:
except Exception:
print(f"Failed to move agent folder from {agent_folder} to {new_agent_folder}")
raise

View File

@@ -1,7 +1,6 @@
from typing import List, Union, Optional, Dict, Literal
from typing import List, Union, Optional, Dict
from enum import Enum
from pydantic import BaseModel, Field, Json
import uuid
class ImageFile(BaseModel):

View File

@@ -1,5 +1,4 @@
from typing import List, Union, Optional, Dict, Literal
from enum import Enum
from typing import List, Optional, Dict
from pydantic import BaseModel, Field, Json
import uuid
from datetime import datetime

View File

@@ -1,5 +1,4 @@
from abc import ABC, abstractmethod
import pickle
from typing import List
from memgpt.memory import (

View File

@@ -68,8 +68,8 @@ def setup_agents_config_router(server: SyncServer, interface: QueuingInterface,
interface.clear()
agent_state = server.get_agent_config(user_id=user_id, agent_id=agent_id)
# return GetAgentResponse(agent_state=agent_state)
llm_config = LLMConfigModel(**vars(agent_state.llm_config))
embedding_config = EmbeddingConfigModel(**vars(agent_state.embedding_config))
LLMConfigModel(**vars(agent_state.llm_config))
EmbeddingConfigModel(**vars(agent_state.embedding_config))
return GetAgentResponse(
agent_state=AgentStateModel(

View File

@@ -53,7 +53,6 @@ class QueuingInterface(AgentInterface):
def user_message(self, msg: str):
"""Handle reception of a user message"""
pass
def internal_monologue(self, msg: str) -> None:
"""Handle the agent's internal monologue"""

View File

@@ -1,4 +1,3 @@
import uuid
from functools import partial
from typing import List
@@ -19,7 +18,7 @@ class ListModelsResponse(BaseModel):
def setup_models_index_router(server: SyncServer, interface: QueuingInterface, password: str):
get_current_user_with_server = partial(partial(get_current_user, server), password)
partial(partial(get_current_user, server), password)
@router.get("/models", tags=["models"], response_model=ListModelsResponse)
async def list_models():

View File

@@ -1,10 +1,6 @@
import asyncio
from fastapi import FastAPI
from asyncio import AbstractEventLoop
from enum import Enum
import json
import uuid
from typing import List, Optional, Union
from typing import List, Optional
from datetime import datetime
from fastapi import APIRouter, Depends, Body, HTTPException, Query, Path

View File

@@ -435,7 +435,7 @@ class SyncServer(LockingServer):
while len(memgpt_agent.messages) > 0:
if memgpt_agent.messages[-1].get("role") == "user":
# we want to pop up to the last user message and send it again
user_message = memgpt_agent.messages[-1].get("content")
memgpt_agent.messages[-1].get("content")
memgpt_agent.messages.pop()
break
memgpt_agent.messages.pop()

View File

@@ -30,7 +30,6 @@ class AsyncWebSocketInterface(BaseWebSocketInterface):
async def user_message(self, msg):
"""Handle reception of a user message"""
# Logic to process the user message and possibly trigger agent's response
pass
async def internal_monologue(self, msg):
"""Handle the agent's internal monologue"""
@@ -85,7 +84,6 @@ class SyncWebSocketInterface(BaseWebSocketInterface):
def user_message(self, msg):
"""Handle reception of a user message"""
# Logic to process the user message and possibly trigger agent's response
pass
def internal_monologue(self, msg):
"""Handle the agent's internal monologue"""

28
poetry.lock generated
View File

@@ -218,6 +218,21 @@ tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "autoflake"
version = "2.3.0"
description = "Removes unused imports and unused variables"
optional = false
python-versions = ">=3.8"
files = [
{file = "autoflake-2.3.0-py3-none-any.whl", hash = "sha256:79a51eb8c0744759d2efe052455ab20aa6a314763510c3fd897499a402126327"},
{file = "autoflake-2.3.0.tar.gz", hash = "sha256:8c2011fa34701b9d7dcf05b9873bc4859d4fce4e62dfea90dffefd1576f5f01d"},
]
[package.dependencies]
pyflakes = ">=3.0.0"
tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
[[package]]
name = "azure-core"
version = "1.30.1"
@@ -3779,6 +3794,17 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pyflakes"
version = "3.2.0"
description = "passive checker of Python programs"
optional = false
python-versions = ">=3.8"
files = [
{file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
]
[[package]]
name = "pygments"
version = "2.17.2"
@@ -5874,4 +5900,4 @@ server = ["fastapi", "uvicorn", "websockets"]
[metadata]
lock-version = "2.0"
python-versions = "<3.12,>=3.10"
content-hash = "44a3dd2672236f037f34c7859a943d86a8a02297190ac4f826fe41d21955c4ca"
content-hash = "4fe62729e4de4bdacedc6e969aafdecf5897f34762d2c7cdad61bc813de93651"

View File

@@ -56,10 +56,11 @@ python-box = "^7.1.1"
pytest-order = {version = "^1.2.0", optional = true}
sqlmodel = "^0.0.16"
llama-index-embeddings-azure-openai = "^0.1.6"
autoflake = {version = "^2.3.0", optional = true}
[tool.poetry.extras]
postgres = ["pgvector", "pg8000"]
dev = ["pytest", "pytest-asyncio", "pexpect", "black", "pre-commit", "datasets", "pyright", "pytest-order"]
dev = ["pytest", "pytest-asyncio", "pexpect", "black", "pre-commit", "datasets", "pyright", "pytest-order", "autoflake"]
server = ["websockets", "fastapi", "uvicorn"]
autogen = ["pyautogen"]

View File

@@ -1,5 +1,4 @@
import os
import sys
import subprocess
import pytest

View File

@@ -1,6 +1,5 @@
from fastapi import FastAPI
from fastapi.testclient import TestClient
import uuid
from memgpt.server.server import SyncServer
from memgpt.server.rest_api.server import app

View File

@@ -1,5 +1,4 @@
from openai import OpenAI
import time
import uvicorn

View File

@@ -187,8 +187,8 @@ def test_get_recall_memory(server, user_id, agent_id):
cursor1, messages_1 = server.get_agent_recall_cursor(user_id=user_id, agent_id=agent_id, reverse=True, limit=2)
cursor2, messages_2 = server.get_agent_recall_cursor(user_id=user_id, agent_id=agent_id, reverse=True, after=cursor1, limit=1000)
cursor3, messages_3 = server.get_agent_recall_cursor(user_id=user_id, agent_id=agent_id, reverse=True, limit=1000)
ids3 = [m["id"] for m in messages_3]
ids2 = [m["id"] for m in messages_2]
[m["id"] for m in messages_3]
[m["id"] for m in messages_2]
timestamps = [m["created_at"] for m in messages_3]
print("timestamps", timestamps)
assert messages_3[-1]["created_at"] < messages_3[0]["created_at"]