diff --git a/examples/openai_client_assistants.py b/examples/openai_client_assistants.py
index ca21451d..20ab3847 100644
--- a/examples/openai_client_assistants.py
+++ b/examples/openai_client_assistants.py
@@ -1,5 +1,4 @@
from openai import OpenAI
-import time
"""
This script provides an example of how you can use OpenAI's python client with a MemGPT server.
@@ -33,7 +32,6 @@ def main():
)
# Store the run ID
- run_id = run.id
# Retrieve all messages from the thread
messages = client.beta.threads.messages.list(thread_id=thread.id)
diff --git a/memgpt/agent.py b/memgpt/agent.py
index a189e559..86295e92 100644
--- a/memgpt/agent.py
+++ b/memgpt/agent.py
@@ -674,7 +674,6 @@ class Agent(object):
# (if yes) Step 3: call the function
# (if yes) Step 4: send the info on the function call and function response to LLM
response_message = response.choices[0].message
- response_message_copy = response_message.copy()
all_response_messages, heartbeat_request, function_failed = self._handle_ai_response(response_message)
# Add the extra metadata to the assistant response
diff --git a/memgpt/agent_store/chroma.py b/memgpt/agent_store/chroma.py
index 637f9421..bccf1acf 100644
--- a/memgpt/agent_store/chroma.py
+++ b/memgpt/agent_store/chroma.py
@@ -1,7 +1,5 @@
import uuid
-import json
-import re
-from typing import Optional, List, Iterator, Dict, Tuple, cast, Type
+from typing import Optional, List, Iterator, Dict, Tuple, cast
import chromadb
from chromadb.api.types import Include, GetResult
diff --git a/memgpt/agent_store/db.py b/memgpt/agent_store/db.py
index 7441a659..98944336 100644
--- a/memgpt/agent_store/db.py
+++ b/memgpt/agent_store/db.py
@@ -11,9 +11,7 @@ from sqlalchemy_json import mutable_json_type, MutableJson
from sqlalchemy import TypeDecorator, CHAR
import uuid
-import re
from tqdm import tqdm
-from typing import Optional, List, Iterator, Dict, Tuple
+from typing import Optional, List, Iterator, Dict
import numpy as np
-from tqdm import tqdm
import pandas as pd
@@ -26,8 +25,6 @@ from memgpt.data_types import Record, Message, Passage, ToolCall, RecordType
from memgpt.constants import MAX_EMBEDDING_DIM
from memgpt.metadata import MetadataStore
-from datetime import datetime
-
# Custom UUID type
class CommonUUID(TypeDecorator):
diff --git a/memgpt/agent_store/storage.py b/memgpt/agent_store/storage.py
index a670cd93..89ee8eed 100644
--- a/memgpt/agent_store/storage.py
+++ b/memgpt/agent_store/storage.py
@@ -3,10 +3,7 @@
We originally tried to use Llama Index VectorIndex, but their limited API was extremely problematic.
"""
-from typing import Any, Optional, List, Iterator, Union, Tuple, Type
-import re
-import pickle
-import os
+from typing import Optional, List, Iterator, Union, Tuple, Type
import uuid
from abc import abstractmethod
diff --git a/memgpt/autogen/memgpt_agent.py b/memgpt/autogen/memgpt_agent.py
index 51dac335..91e3cd8a 100644
--- a/memgpt/autogen/memgpt_agent.py
+++ b/memgpt/autogen/memgpt_agent.py
@@ -265,7 +265,7 @@ def load_autogen_memgpt_agent(
# Create the agent object directly from the loaded state (not via preset creation)
try:
memgpt_agent = MemGPTAgent(agent_state=agent_state, interface=interface)
- except Exception as e:
+ except Exception:
print(f"Failed to create an agent object from agent state =\n{agent_state}")
raise
@@ -317,8 +317,8 @@ def create_autogen_memgpt_agent(
embedding_config = config.default_embedding_config
# Overwrite parts of the LLM and embedding configs that were passed into the config dicts
- llm_config_was_modified = update_config_from_dict(llm_config, agent_config)
- embedding_config_was_modified = update_config_from_dict(embedding_config, agent_config)
+ update_config_from_dict(llm_config, agent_config)
+ update_config_from_dict(embedding_config, agent_config)
# Create the default user, or load the specified user
ms = MetadataStore(config)
diff --git a/memgpt/benchmark/benchmark.py b/memgpt/benchmark/benchmark.py
index 63c7200c..593ad65f 100644
--- a/memgpt/benchmark/benchmark.py
+++ b/memgpt/benchmark/benchmark.py
@@ -3,7 +3,7 @@
import uuid
import typer
import time
-from typing import Annotated, Optional
+from typing import Annotated
from memgpt import create_client
from memgpt.config import MemGPTConfig
diff --git a/memgpt/cli/cli.py b/memgpt/cli/cli.py
index 68bce6c9..6fe7761d 100644
--- a/memgpt/cli/cli.py
+++ b/memgpt/cli/cli.py
@@ -2,8 +2,6 @@ import uuid
import json
import requests
import sys
-import shutil
-import io
import logging
from pathlib import Path
import os
@@ -722,7 +720,7 @@ def delete_agent(
try:
ms.delete_agent(agent_id=agent.id)
typer.secho(f"🕊️ Successfully deleted agent '{agent_name}' (id={agent.id})", fg=typer.colors.GREEN)
- except Exception as e:
+ except Exception:
typer.secho(f"Failed to delete agent '{agent_name}' (id={agent.id})", fg=typer.colors.RED)
sys.exit(1)
diff --git a/memgpt/cli/cli_config.py b/memgpt/cli/cli_config.py
index 9a7e9a48..912c9f12 100644
--- a/memgpt/cli/cli_config.py
+++ b/memgpt/cli/cli_config.py
@@ -1,9 +1,6 @@
import builtins
-import json
import os
-import shutil
import uuid
-from typing import Annotated, Tuple, Optional
+from typing import Annotated, Optional
from enum import Enum
-from typing import Annotated
@@ -512,7 +510,7 @@ def configure_embedding_endpoint(config: MemGPTConfig, credentials: MemGPTCreden
raise KeyboardInterrupt
try:
embedding_dim = int(embedding_dim)
- except Exception as e:
+ except Exception:
raise ValueError(f"Failed to cast {embedding_dim} to integer.")
else: # local models
embedding_endpoint_type = "local"
@@ -627,7 +625,7 @@ def configure():
# check credentials
credentials = MemGPTCredentials.load()
openai_key = get_openai_credentials()
- azure_creds = get_azure_credentials()
+ get_azure_credentials()
MemGPTConfig.create_config_dir()
diff --git a/memgpt/cli/cli_load.py b/memgpt/cli/cli_load.py
index d74d7c8d..105ec982 100644
--- a/memgpt/cli/cli_load.py
+++ b/memgpt/cli/cli_load.py
@@ -23,7 +23,6 @@ from memgpt.data_types import Source, Passage, Document, User
from memgpt.utils import get_utc_time, suppress_stdout
from memgpt.agent_store.storage import StorageConnector, TableType
-from datetime import datetime
app = typer.Typer()
diff --git a/memgpt/client/client.py b/memgpt/client/client.py
index 0a923aa8..8380a6fe 100644
--- a/memgpt/client/client.py
+++ b/memgpt/client/client.py
@@ -1,4 +1,3 @@
-import os
import datetime
import requests
import uuid
diff --git a/memgpt/credentials.py b/memgpt/credentials.py
index 853ea38e..3464752b 100644
--- a/memgpt/credentials.py
+++ b/memgpt/credentials.py
@@ -1,8 +1,5 @@
from memgpt.log import logger
-import inspect
-import json
import os
-import uuid
from dataclasses import dataclass
import configparser
import typer
diff --git a/memgpt/data_sources/connectors.py b/memgpt/data_sources/connectors.py
index c54a95b8..ce55fc8a 100644
--- a/memgpt/data_sources/connectors.py
+++ b/memgpt/data_sources/connectors.py
@@ -4,7 +4,6 @@ from memgpt.agent_store.storage import StorageConnector, TableType
from memgpt.embeddings import embedding_model
from memgpt.data_types import Document, Passage
-import uuid
from typing import List, Iterator, Dict, Tuple, Optional
from llama_index.core import Document as LlamaIndexDocument
@@ -102,7 +101,6 @@ class DirectoryConnector(DataConnector):
reader = SimpleDirectoryReader(input_files=[str(f) for f in self.input_files])
llama_index_docs = reader.load_data(show_progress=True)
- docs = []
for llama_index_doc in llama_index_docs:
# TODO: add additional metadata?
# doc = Document(text=llama_index_doc.text, metadata=llama_index_doc.metadata)
diff --git a/memgpt/data_types.py b/memgpt/data_types.py
index 7e05f76e..12f58a04 100644
--- a/memgpt/data_types.py
+++ b/memgpt/data_types.py
@@ -2,7 +2,6 @@
import uuid
from datetime import datetime
-from abc import abstractmethod
from typing import Optional, List, Dict, TypeVar
import numpy as np
diff --git a/memgpt/errors.py b/memgpt/errors.py
index aa1f57fc..6ac70181 100644
--- a/memgpt/errors.py
+++ b/memgpt/errors.py
@@ -1,8 +1,6 @@
class LLMError(Exception):
"""Base class for all LLM-related errors."""
- pass
-
class LLMJSONParsingError(LLMError):
"""Exception raised for errors in the JSON parsing process."""
diff --git a/memgpt/functions/function_sets/base.py b/memgpt/functions/function_sets/base.py
index 55ab03dc..a5974f88 100644
--- a/memgpt/functions/function_sets/base.py
+++ b/memgpt/functions/function_sets/base.py
@@ -1,6 +1,5 @@
from typing import Optional
import datetime
-import os
import json
import math
@@ -62,7 +61,7 @@ def core_memory_append(self, name: str, content: str) -> Optional[str]:
Returns:
Optional[str]: None is always returned as this function does not produce a response.
"""
- new_len = self.memory.edit_append(name, content)
+ self.memory.edit_append(name, content)
self.rebuild_memory()
return None
@@ -79,7 +78,7 @@ def core_memory_replace(self, name: str, old_content: str, new_content: str) ->
Returns:
Optional[str]: None is always returned as this function does not produce a response.
"""
- new_len = self.memory.edit_replace(name, old_content, new_content)
+ self.memory.edit_replace(name, old_content, new_content)
self.rebuild_memory()
return None
diff --git a/memgpt/interface.py b/memgpt/interface.py
index a75ea927..cbaec27a 100644
--- a/memgpt/interface.py
+++ b/memgpt/interface.py
@@ -203,7 +203,6 @@ class CLIInterface(AgentInterface):
except Exception as e:
printd(str(e))
printd(msg_dict)
- pass
elif function_name in ["conversation_search", "conversation_search_date"]:
print_function_message("🧠", f"searching memory with {function_name}")
try:
@@ -216,7 +215,6 @@ class CLIInterface(AgentInterface):
except Exception as e:
printd(str(e))
printd(msg_dict)
- pass
else:
printd(f"{CLI_WARNING_PREFIX}did not recognize function message")
printd_function_message("", msg)
diff --git a/memgpt/llm_api_tools.py b/memgpt/llm_api_tools.py
index 3fa8d7ff..d5a53aa8 100644
--- a/memgpt/llm_api_tools.py
+++ b/memgpt/llm_api_tools.py
@@ -2,7 +2,6 @@ import random
import time
import requests
-import time
-from typing import Callable, TypeVar, Union
+from typing import Union
import urllib
from memgpt.credentials import MemGPTCredentials
diff --git a/memgpt/local_llm/grammars/gbnf_grammar_generator.py b/memgpt/local_llm/grammars/gbnf_grammar_generator.py
index 58b73f3f..a1d2f937 100644
--- a/memgpt/local_llm/grammars/gbnf_grammar_generator.py
+++ b/memgpt/local_llm/grammars/gbnf_grammar_generator.py
@@ -8,7 +8,7 @@ from docstring_parser import parse
from pydantic import BaseModel, create_model, Field
from typing import Any, Type, List, get_args, get_origin, Tuple, Union, Optional, _GenericAlias
from enum import Enum
-from typing import get_type_hints, Callable
+from typing import Callable
import re
diff --git a/memgpt/local_llm/koboldcpp/api.py b/memgpt/local_llm/koboldcpp/api.py
index 4b720d4d..a6ac87e0 100644
--- a/memgpt/local_llm/koboldcpp/api.py
+++ b/memgpt/local_llm/koboldcpp/api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/local_llm/llamacpp/api.py b/memgpt/local_llm/llamacpp/api.py
index 2e0bea6b..438fe7d5 100644
--- a/memgpt/local_llm/llamacpp/api.py
+++ b/memgpt/local_llm/llamacpp/api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py b/memgpt/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py
index 36543cdd..0c9be182 100644
--- a/memgpt/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py
+++ b/memgpt/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py
@@ -1,5 +1,4 @@
import json
-from typing import List
import yaml
diff --git a/memgpt/local_llm/llm_chat_completion_wrappers/wrapper_base.py b/memgpt/local_llm/llm_chat_completion_wrappers/wrapper_base.py
index 40b6ab70..a37f73a3 100644
--- a/memgpt/local_llm/llm_chat_completion_wrappers/wrapper_base.py
+++ b/memgpt/local_llm/llm_chat_completion_wrappers/wrapper_base.py
@@ -5,9 +5,7 @@ class LLMChatCompletionWrapper(ABC):
@abstractmethod
def chat_completion_to_prompt(self, messages, functions, function_documentation=None):
"""Go from ChatCompletion to a single prompt string"""
- pass
@abstractmethod
def output_to_chat_completion_response(self, raw_llm_output):
"""Turn the LLM output string into a ChatCompletion response"""
- pass
diff --git a/memgpt/local_llm/llm_chat_completion_wrappers/zephyr.py b/memgpt/local_llm/llm_chat_completion_wrappers/zephyr.py
index a003d50d..60cc6056 100644
--- a/memgpt/local_llm/llm_chat_completion_wrappers/zephyr.py
+++ b/memgpt/local_llm/llm_chat_completion_wrappers/zephyr.py
@@ -41,7 +41,6 @@ class ZephyrMistralWrapper(LLMChatCompletionWrapper):
prompt = ""
- IM_START_TOKEN = ""
IM_END_TOKEN = ""
# System instructions go first
@@ -205,7 +204,6 @@ class ZephyrMistralInnerMonologueWrapper(ZephyrMistralWrapper):
def chat_completion_to_prompt(self, messages, functions, function_documentation=None):
prompt = ""
- IM_START_TOKEN = ""
IM_END_TOKEN = ""
# System insturctions go first
diff --git a/memgpt/local_llm/lmstudio/api.py b/memgpt/local_llm/lmstudio/api.py
index 4aa3db13..a82b827b 100644
--- a/memgpt/local_llm/lmstudio/api.py
+++ b/memgpt/local_llm/lmstudio/api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/local_llm/ollama/api.py b/memgpt/local_llm/ollama/api.py
index a73d4990..b8471328 100644
--- a/memgpt/local_llm/ollama/api.py
+++ b/memgpt/local_llm/ollama/api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/local_llm/vllm/api.py b/memgpt/local_llm/vllm/api.py
index aa5fd6a6..a22a1fb1 100644
--- a/memgpt/local_llm/vllm/api.py
+++ b/memgpt/local_llm/vllm/api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/local_llm/webui/api.py b/memgpt/local_llm/webui/api.py
index 2b617462..5751591d 100644
--- a/memgpt/local_llm/webui/api.py
+++ b/memgpt/local_llm/webui/api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/local_llm/webui/legacy_api.py b/memgpt/local_llm/webui/legacy_api.py
index 6f3d8b2e..6fc99c38 100644
--- a/memgpt/local_llm/webui/legacy_api.py
+++ b/memgpt/local_llm/webui/legacy_api.py
@@ -1,4 +1,3 @@
-import os
from urllib.parse import urljoin
import requests
diff --git a/memgpt/memory.py b/memgpt/memory.py
index 5bc6c928..38f22aa5 100644
--- a/memgpt/memory.py
+++ b/memgpt/memory.py
@@ -142,7 +142,6 @@ class ArchivalMemory(ABC):
:param memory_string: Memory string to insert
:type memory_string: str
"""
- pass
@abstractmethod
def search(self, query_string, count=None, start=None) -> Tuple[List[str], int]:
@@ -157,7 +156,6 @@ class ArchivalMemory(ABC):
:return: Tuple of (list of results, total number of results)
"""
- pass
@abstractmethod
def __repr__(self) -> str:
@@ -168,12 +166,10 @@ class RecallMemory(ABC):
@abstractmethod
def text_search(self, query_string, count=None, start=None):
"""Search messages that match query_string in recall memory"""
- pass
@abstractmethod
def date_search(self, start_date, end_date, count=None, start=None):
"""Search messages between start_date and end_date in recall memory"""
- pass
@abstractmethod
def __repr__(self) -> str:
@@ -182,7 +178,6 @@ class RecallMemory(ABC):
@abstractmethod
def insert(self, message: Message):
"""Insert message into recall memory"""
- pass
class DummyRecallMemory(RecallMemory):
diff --git a/memgpt/metadata.py b/memgpt/metadata.py
index 1cf1d693..36946be5 100644
--- a/memgpt/metadata.py
+++ b/memgpt/metadata.py
@@ -3,8 +3,7 @@
import os
import uuid
import secrets
-from typing import Optional, List, Dict
-from datetime import datetime
+from typing import Optional, List
from memgpt.constants import DEFAULT_HUMAN, DEFAULT_MEMGPT_MODEL, DEFAULT_PERSONA, DEFAULT_PRESET, LLM_MAX_TOKENS
from memgpt.utils import get_local_time, enforce_types
diff --git a/memgpt/migrate.py b/memgpt/migrate.py
index 55b54d10..45efb513 100644
--- a/memgpt/migrate.py
+++ b/memgpt/migrate.py
@@ -248,7 +248,7 @@ def migrate_agent(agent_name: str, data_dir: str = MEMGPT_DIR, ms: Optional[Meta
try:
with open(persistence_filename, "rb") as f:
data = pickle.load(f)
- except ModuleNotFoundError as e:
+ except ModuleNotFoundError:
# Patch for stripped openai package
# ModuleNotFoundError: No module named 'openai.openai_object'
with open(persistence_filename, "rb") as f:
@@ -474,7 +474,7 @@ def migrate_agent(agent_name: str, data_dir: str = MEMGPT_DIR, ms: Optional[Meta
interface=None,
)
save_agent(agent, ms=ms)
- except Exception as e:
+ except Exception:
# if "Agent with name" in str(e):
# print(e)
# return
@@ -540,7 +540,7 @@ def migrate_agent(agent_name: str, data_dir: str = MEMGPT_DIR, ms: Optional[Meta
try:
new_agent_folder = os.path.join(data_dir, MIGRATION_BACKUP_FOLDER, "agents", agent_name)
shutil.move(agent_folder, new_agent_folder)
- except Exception as e:
+ except Exception:
print(f"Failed to move agent folder from {agent_folder} to {new_agent_folder}")
raise
diff --git a/memgpt/models/openai.py b/memgpt/models/openai.py
index 60d59971..5d41c091 100644
--- a/memgpt/models/openai.py
+++ b/memgpt/models/openai.py
@@ -1,7 +1,6 @@
-from typing import List, Union, Optional, Dict, Literal
+from typing import List, Union, Optional, Dict
from enum import Enum
from pydantic import BaseModel, Field, Json
-import uuid
class ImageFile(BaseModel):
diff --git a/memgpt/models/pydantic_models.py b/memgpt/models/pydantic_models.py
index 46bb1657..eb8fac97 100644
--- a/memgpt/models/pydantic_models.py
+++ b/memgpt/models/pydantic_models.py
@@ -1,5 +1,4 @@
-from typing import List, Union, Optional, Dict, Literal
-from enum import Enum
+from typing import List, Optional, Dict
from pydantic import BaseModel, Field, Json
import uuid
from datetime import datetime
diff --git a/memgpt/persistence_manager.py b/memgpt/persistence_manager.py
index 8dd972c2..4615970a 100644
--- a/memgpt/persistence_manager.py
+++ b/memgpt/persistence_manager.py
@@ -1,5 +1,4 @@
from abc import ABC, abstractmethod
-import pickle
from typing import List
from memgpt.memory import (
diff --git a/memgpt/server/rest_api/agents/config.py b/memgpt/server/rest_api/agents/config.py
index dc895a91..05c55466 100644
--- a/memgpt/server/rest_api/agents/config.py
+++ b/memgpt/server/rest_api/agents/config.py
@@ -68,8 +68,8 @@ def setup_agents_config_router(server: SyncServer, interface: QueuingInterface,
interface.clear()
agent_state = server.get_agent_config(user_id=user_id, agent_id=agent_id)
# return GetAgentResponse(agent_state=agent_state)
- llm_config = LLMConfigModel(**vars(agent_state.llm_config))
- embedding_config = EmbeddingConfigModel(**vars(agent_state.embedding_config))
+ LLMConfigModel(**vars(agent_state.llm_config))
+ EmbeddingConfigModel(**vars(agent_state.embedding_config))
return GetAgentResponse(
agent_state=AgentStateModel(
diff --git a/memgpt/server/rest_api/interface.py b/memgpt/server/rest_api/interface.py
index 1755c687..c9abfa28 100644
--- a/memgpt/server/rest_api/interface.py
+++ b/memgpt/server/rest_api/interface.py
@@ -53,7 +53,6 @@ class QueuingInterface(AgentInterface):
def user_message(self, msg: str):
"""Handle reception of a user message"""
- pass
def internal_monologue(self, msg: str) -> None:
"""Handle the agent's internal monologue"""
diff --git a/memgpt/server/rest_api/models/index.py b/memgpt/server/rest_api/models/index.py
index 37d956d5..ad4a23a0 100644
--- a/memgpt/server/rest_api/models/index.py
+++ b/memgpt/server/rest_api/models/index.py
@@ -1,4 +1,3 @@
-import uuid
from functools import partial
from typing import List
@@ -19,7 +18,6 @@
def setup_models_index_router(server: SyncServer, interface: QueuingInterface, password: str):
- get_current_user_with_server = partial(partial(get_current_user, server), password)
@router.get("/models", tags=["models"], response_model=ListModelsResponse)
async def list_models():
diff --git a/memgpt/server/rest_api/openai_assistants/assistants.py b/memgpt/server/rest_api/openai_assistants/assistants.py
index c82c1ce3..fd16e3f0 100644
--- a/memgpt/server/rest_api/openai_assistants/assistants.py
+++ b/memgpt/server/rest_api/openai_assistants/assistants.py
@@ -1,10 +1,6 @@
-import asyncio
from fastapi import FastAPI
-from asyncio import AbstractEventLoop
-from enum import Enum
-import json
import uuid
-from typing import List, Optional, Union
+from typing import List, Optional
from datetime import datetime
from fastapi import APIRouter, Depends, Body, HTTPException, Query, Path
diff --git a/memgpt/server/server.py b/memgpt/server/server.py
index f68e23d7..28f964ce 100644
--- a/memgpt/server/server.py
+++ b/memgpt/server/server.py
@@ -435,7 +435,6 @@ class SyncServer(LockingServer):
while len(memgpt_agent.messages) > 0:
if memgpt_agent.messages[-1].get("role") == "user":
# we want to pop up to the last user message and send it again
- user_message = memgpt_agent.messages[-1].get("content")
memgpt_agent.messages.pop()
break
memgpt_agent.messages.pop()
diff --git a/memgpt/server/ws_api/interface.py b/memgpt/server/ws_api/interface.py
index eae6b3ca..12fa7088 100644
--- a/memgpt/server/ws_api/interface.py
+++ b/memgpt/server/ws_api/interface.py
@@ -30,7 +30,6 @@ class AsyncWebSocketInterface(BaseWebSocketInterface):
async def user_message(self, msg):
"""Handle reception of a user message"""
# Logic to process the user message and possibly trigger agent's response
- pass
async def internal_monologue(self, msg):
"""Handle the agent's internal monologue"""
@@ -85,7 +84,6 @@ class SyncWebSocketInterface(BaseWebSocketInterface):
def user_message(self, msg):
"""Handle reception of a user message"""
# Logic to process the user message and possibly trigger agent's response
- pass
def internal_monologue(self, msg):
"""Handle the agent's internal monologue"""
diff --git a/poetry.lock b/poetry.lock
index 0122b2fb..faedde3e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -218,6 +218,21 @@ tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+[[package]]
+name = "autoflake"
+version = "2.3.0"
+description = "Removes unused imports and unused variables"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "autoflake-2.3.0-py3-none-any.whl", hash = "sha256:79a51eb8c0744759d2efe052455ab20aa6a314763510c3fd897499a402126327"},
+ {file = "autoflake-2.3.0.tar.gz", hash = "sha256:8c2011fa34701b9d7dcf05b9873bc4859d4fce4e62dfea90dffefd1576f5f01d"},
+]
+
+[package.dependencies]
+pyflakes = ">=3.0.0"
+tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
+
[[package]]
name = "azure-core"
version = "1.30.1"
@@ -3779,6 +3794,17 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+[[package]]
+name = "pyflakes"
+version = "3.2.0"
+description = "passive checker of Python programs"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
+ {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
+]
+
[[package]]
name = "pygments"
version = "2.17.2"
@@ -5874,4 +5900,4 @@ server = ["fastapi", "uvicorn", "websockets"]
[metadata]
lock-version = "2.0"
python-versions = "<3.12,>=3.10"
-content-hash = "44a3dd2672236f037f34c7859a943d86a8a02297190ac4f826fe41d21955c4ca"
+content-hash = "4fe62729e4de4bdacedc6e969aafdecf5897f34762d2c7cdad61bc813de93651"
diff --git a/pyproject.toml b/pyproject.toml
index 0ef230ae..ad76e52b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,10 +56,11 @@ python-box = "^7.1.1"
pytest-order = {version = "^1.2.0", optional = true}
sqlmodel = "^0.0.16"
llama-index-embeddings-azure-openai = "^0.1.6"
+autoflake = {version = "^2.3.0", optional = true}
[tool.poetry.extras]
postgres = ["pgvector", "pg8000"]
-dev = ["pytest", "pytest-asyncio", "pexpect", "black", "pre-commit", "datasets", "pyright", "pytest-order"]
+dev = ["pytest", "pytest-asyncio", "pexpect", "black", "pre-commit", "datasets", "pyright", "pytest-order", "autoflake"]
server = ["websockets", "fastapi", "uvicorn"]
autogen = ["pyautogen"]
diff --git a/tests/test_autogen_integration.py b/tests/test_autogen_integration.py
index 4bf0b0aa..1c1b37cd 100644
--- a/tests/test_autogen_integration.py
+++ b/tests/test_autogen_integration.py
@@ -1,5 +1,4 @@
import os
-import sys
import subprocess
import pytest
diff --git a/tests/test_openai_assistant_api.py b/tests/test_openai_assistant_api.py
index 71d6d1a4..8f15e358 100644
--- a/tests/test_openai_assistant_api.py
+++ b/tests/test_openai_assistant_api.py
@@ -1,6 +1,5 @@
from fastapi import FastAPI
from fastapi.testclient import TestClient
-import uuid
from memgpt.server.server import SyncServer
from memgpt.server.rest_api.server import app
diff --git a/tests/test_openai_client.py b/tests/test_openai_client.py
index 15fc72f0..86ea5ebf 100644
--- a/tests/test_openai_client.py
+++ b/tests/test_openai_client.py
@@ -1,5 +1,4 @@
from openai import OpenAI
-import time
import uvicorn
diff --git a/tests/test_server.py b/tests/test_server.py
index 5dc0da07..c14b075b 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -187,8 +187,6 @@ def test_get_recall_memory(server, user_id, agent_id):
cursor1, messages_1 = server.get_agent_recall_cursor(user_id=user_id, agent_id=agent_id, reverse=True, limit=2)
cursor2, messages_2 = server.get_agent_recall_cursor(user_id=user_id, agent_id=agent_id, reverse=True, after=cursor1, limit=1000)
cursor3, messages_3 = server.get_agent_recall_cursor(user_id=user_id, agent_id=agent_id, reverse=True, limit=1000)
- ids3 = [m["id"] for m in messages_3]
- ids2 = [m["id"] for m in messages_2]
timestamps = [m["created_at"] for m in messages_3]
print("timestamps", timestamps)
assert messages_3[-1]["created_at"] < messages_3[0]["created_at"]