feat: add workflow to build + test docker container (#1278)

This commit is contained in:
Sarah Wooders
2024-04-21 17:58:09 -07:00
committed by GitHub
parent 132c789ec5
commit 416ed3fad9
14 changed files with 232 additions and 144 deletions

View File

@@ -0,0 +1,64 @@
# CI workflow: build the dev docker stack, wait for the REST server,
# then run the client test suite against it.
name: Docker integration tests

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'

      # The server container bind-mounts the log directory; make sure the
      # runner user owns it before docker creates it as root.
      - name: Set permissions for log directory
        run: |
          mkdir -p /home/runner/.memgpt/logs
          sudo chown -R $USER:$USER /home/runner/.memgpt/logs
          chmod -R 755 /home/runner/.memgpt/logs

      - name: Build and run docker dev server
        env:
          MEMGPT_PG_DB: memgpt
          MEMGPT_PG_USER: memgpt
          MEMGPT_PG_PASSWORD: memgpt
          MEMGPT_PG_PORT: "8888"  # quoted so YAML keeps it a string
          MEMGPT_SERVER_PASS: test_server_token
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        run: docker compose -f dev-compose.yaml up --build -d

      # - name: "Setup Python, Poetry and Dependencies"
      #   uses: packetcoders/action-setup-cache-python-poetry@v1.2.0
      #   with:
      #     python-version: "3.12"
      #     poetry-version: "1.8.2"
      #     install-args: "--all-extras"

      # Block until the REST server answers on its published port.
      - name: Wait for service
        run: bash scripts/wait_for_service.sh http://localhost:8083 -- echo "Service is ready"

      - name: Run tests with pytest
        env:
          MEMGPT_PG_DB: memgpt
          MEMGPT_PG_USER: memgpt
          MEMGPT_PG_PASSWORD: memgpt
          MEMGPT_PG_PORT: "8888"  # quoted so YAML keeps it a string
          MEMGPT_SERVER_PASS: test_server_token
          MEMGPT_SERVER_URL: http://localhost:8083
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          PYTHONPATH: ${{ github.workspace }}:${{ env.PYTHONPATH }}
        run: |
          pipx install poetry==1.8.2
          poetry install -E dev
          poetry run pytest -s tests/test_client.py

      # Surface container logs on failure so CI runs are debuggable.
      - name: Print docker logs if tests fail
        if: failure()
        run: |
          echo "Printing Docker Logs..."
          docker compose -f dev-compose.yaml logs

View File

@@ -25,8 +25,8 @@ jobs:
- name: "Setup Python, Poetry and Dependencies" - name: "Setup Python, Poetry and Dependencies"
uses: packetcoders/action-setup-cache-python-poetry@main uses: packetcoders/action-setup-cache-python-poetry@main
with: with:
python-version: "3.11" python-version: "3.12"
poetry-version: "1.7.1" poetry-version: "1.8.2"
install-args: "--all-extras" install-args: "--all-extras"
- name: Initialize credentials - name: Initialize credentials

View File

@@ -33,6 +33,7 @@ services:
- MEMGPT_PG_PASSWORD=${MEMGPT_PG_PASSWORD} - MEMGPT_PG_PASSWORD=${MEMGPT_PG_PASSWORD}
- MEMGPT_PG_HOST=pgvector_db - MEMGPT_PG_HOST=pgvector_db
- MEMGPT_PG_PORT=5432 - MEMGPT_PG_PORT=5432
- OPENAI_API_KEY=${OPENAI_API_KEY}
volumes: volumes:
- ./configs/server_config.yaml:/root/.memgpt/config # config file - ./configs/server_config.yaml:/root/.memgpt/config # config file
- ~/.memgpt/credentials:/root/.memgpt/credentials # credentials file - ~/.memgpt/credentials:/root/.memgpt/credentials # credentials file

View File

@@ -7,11 +7,13 @@ human = basic
model = gpt-4 model = gpt-4
model_endpoint = https://api.openai.com/v1 model_endpoint = https://api.openai.com/v1
model_endpoint_type = openai model_endpoint_type = openai
model_wrapper = null
context_window = 8192 context_window = 8192
[embedding] [embedding]
embedding_endpoint_type = openai embedding_endpoint_type = openai
embedding_endpoint = https://api.openai.com/v1 embedding_endpoint = https://api.openai.com/v1
embedding_model = text-embedding-ada-002
embedding_dim = 1536 embedding_dim = 1536
embedding_chunk_size = 300 embedding_chunk_size = 300

View File

@@ -11,7 +11,7 @@ services:
- POSTGRES_PASSWORD=${MEMGPT_PG_PASSWORD} - POSTGRES_PASSWORD=${MEMGPT_PG_PASSWORD}
- POSTGRES_DB=${MEMGPT_PG_DB} - POSTGRES_DB=${MEMGPT_PG_DB}
volumes: volumes:
- ./.persist/pgdata:/var/lib/postgresql/data - ./.persist/pgdata-test:/var/lib/postgresql/data
- ./init.sql:/docker-entrypoint-initdb.d/init.sql - ./init.sql:/docker-entrypoint-initdb.d/init.sql
ports: ports:
- "5432:5432" - "5432:5432"
@@ -27,8 +27,6 @@ services:
ports: ports:
- "8083:8083" - "8083:8083"
- "8283:8283" - "8283:8283"
env_file:
- .env
environment: environment:
- MEMGPT_SERVER_PASS=${MEMGPT_SERVER_PASS} # memgpt server password - MEMGPT_SERVER_PASS=${MEMGPT_SERVER_PASS} # memgpt server password
- MEMGPT_PG_DB=${MEMGPT_PG_DB} - MEMGPT_PG_DB=${MEMGPT_PG_DB}
@@ -36,13 +34,7 @@ services:
- MEMGPT_PG_PASSWORD=${MEMGPT_PG_PASSWORD} - MEMGPT_PG_PASSWORD=${MEMGPT_PG_PASSWORD}
- MEMGPT_PG_HOST=pgvector_db - MEMGPT_PG_HOST=pgvector_db
- MEMGPT_PG_PORT=5432 - MEMGPT_PG_PORT=5432
- OPENAI_API_KEY=${OPENAI_API_KEY}
volumes: volumes:
- ./configs/server_config.yaml:/root/.memgpt/config # config file - ./configs/server_config.yaml:/root/.memgpt/config # config file
- ~/.memgpt/credentials:/root/.memgpt/credentials # credentials file - ~/.memgpt/credentials:/root/.memgpt/credentials # credentials file
memgpt_nginx:
hostname: memgpt-nginx
image: nginx:stable-alpine3.17-slim
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf
ports:
- "80:80"

View File

@@ -256,7 +256,7 @@ class RESTClient(AbstractClient):
} }
response = requests.post(f"{self.base_url}/api/agents", json=payload, headers=self.headers) response = requests.post(f"{self.base_url}/api/agents", json=payload, headers=self.headers)
if response.status_code != 200: if response.status_code != 200:
raise ValueError(f"Failed to create agent: {response.text}") raise ValueError(f"Status {response.status_code} - Failed to create agent: {response.text}")
response_obj = CreateAgentResponse(**response.json()) response_obj = CreateAgentResponse(**response.json())
return self.get_agent_response_to_state(response_obj) return self.get_agent_response_to_state(response_obj)

View File

@@ -16,7 +16,7 @@ class MemGPTCredentials:
# openai config # openai config
openai_auth_type: str = "bearer_token" openai_auth_type: str = "bearer_token"
openai_key: Optional[str] = None openai_key: Optional[str] = os.getenv("OPENAI_API_KEY")
# gemini config # gemini config
google_ai_key: Optional[str] = None google_ai_key: Optional[str] = None

View File

@@ -55,56 +55,56 @@ def setup_agents_index_router(server: SyncServer, interface: QueuingInterface, p
""" """
interface.clear() interface.clear()
try: # try:
agent_state = server.create_agent( agent_state = server.create_agent(
user_id=user_id, user_id=user_id,
# **request.config # **request.config
# TODO turn into a pydantic model # TODO turn into a pydantic model
name=request.config["name"], name=request.config["name"],
preset=request.config["preset"] if "preset" in request.config else None, preset=request.config["preset"] if "preset" in request.config else None,
persona_name=request.config["persona_name"] if "persona_name" in request.config else None, persona_name=request.config["persona_name"] if "persona_name" in request.config else None,
human_name=request.config["human_name"] if "human_name" in request.config else None, human_name=request.config["human_name"] if "human_name" in request.config else None,
persona=request.config["persona"] if "persona" in request.config else None, persona=request.config["persona"] if "persona" in request.config else None,
human=request.config["human"] if "human" in request.config else None, human=request.config["human"] if "human" in request.config else None,
# llm_config=LLMConfigModel( # llm_config=LLMConfigModel(
# model=request.config['model'], # model=request.config['model'],
# ) # )
function_names=request.config["function_names"].split(",") if "function_names" in request.config else None, function_names=request.config["function_names"].split(",") if "function_names" in request.config else None,
) )
llm_config = LLMConfigModel(**vars(agent_state.llm_config)) llm_config = LLMConfigModel(**vars(agent_state.llm_config))
embedding_config = EmbeddingConfigModel(**vars(agent_state.embedding_config)) embedding_config = EmbeddingConfigModel(**vars(agent_state.embedding_config))
# TODO when get_preset returns a PresetModel instead of Preset, we can remove this packing/unpacking line # TODO when get_preset returns a PresetModel instead of Preset, we can remove this packing/unpacking line
preset = server.ms.get_preset(name=agent_state.preset, user_id=user_id) preset = server.ms.get_preset(name=agent_state.preset, user_id=user_id)
return CreateAgentResponse( return CreateAgentResponse(
agent_state=AgentStateModel( agent_state=AgentStateModel(
id=agent_state.id, id=agent_state.id,
name=agent_state.name, name=agent_state.name,
user_id=agent_state.user_id, user_id=agent_state.user_id,
preset=agent_state.preset, preset=agent_state.preset,
persona=agent_state.persona, persona=agent_state.persona,
human=agent_state.human, human=agent_state.human,
llm_config=llm_config, llm_config=llm_config,
embedding_config=embedding_config, embedding_config=embedding_config,
state=agent_state.state, state=agent_state.state,
created_at=int(agent_state.created_at.timestamp()), created_at=int(agent_state.created_at.timestamp()),
functions_schema=agent_state.state["functions"], # TODO: this is very error prone, just lookup the preset instead functions_schema=agent_state.state["functions"], # TODO: this is very error prone, just lookup the preset instead
), ),
preset=PresetModel( preset=PresetModel(
name=preset.name, name=preset.name,
id=preset.id, id=preset.id,
user_id=preset.user_id, user_id=preset.user_id,
description=preset.description, description=preset.description,
created_at=preset.created_at, created_at=preset.created_at,
system=preset.system, system=preset.system,
persona=preset.persona, persona=preset.persona,
human=preset.human, human=preset.human,
functions_schema=preset.functions_schema, functions_schema=preset.functions_schema,
), ),
) )
except Exception as e: # except Exception as e:
print(str(e)) # print(str(e))
raise HTTPException(status_code=500, detail=str(e)) # raise HTTPException(status_code=500, detail=str(e))
return router return router

View File

@@ -39,10 +39,6 @@ Start the server with:
cd memgpt/server/rest_api cd memgpt/server/rest_api
poetry run uvicorn server:app --reload poetry run uvicorn server:app --reload
""" """
config = MemGPTConfig.load()
for memory_type in ("archival", "recall", "metadata"):
setattr(config, f"{memory_type}_storage_uri", settings.pg_uri)
config.save()
interface: QueuingInterface = QueuingInterface() interface: QueuingInterface = QueuingInterface()
server: SyncServer = SyncServer(default_interface=interface) server: SyncServer = SyncServer(default_interface=interface)

View File

@@ -10,6 +10,7 @@ from typing import Callable, List, Optional, Tuple, Union
from fastapi import HTTPException from fastapi import HTTPException
from memgpt.settings import settings
import memgpt.constants as constants import memgpt.constants as constants
import memgpt.presets.presets as presets import memgpt.presets.presets as presets
import memgpt.server.utils as server_utils import memgpt.server.utils as server_utils
@@ -197,9 +198,23 @@ class SyncServer(LockingServer):
assert self.config.persona is not None, "Persona must be set in the config" assert self.config.persona is not None, "Persona must be set in the config"
assert self.config.human is not None, "Human must be set in the config" assert self.config.human is not None, "Human must be set in the config"
# Update storage URI to match passed in settings
# TODO: very hack, fix in the future
for memory_type in ("archival", "recall", "metadata"):
setattr(self.config, f"{memory_type}_storage_uri", settings.pg_uri)
self.config.save()
# TODO figure out how to handle credentials for the server # TODO figure out how to handle credentials for the server
self.credentials = MemGPTCredentials.load() self.credentials = MemGPTCredentials.load()
# check credentials
# TODO: add checks for other providers
if (
self.config.default_embedding_config.embedding_endpoint_type == "openai"
or self.config.default_llm_config.model_endpoint_type == "openai"
):
assert self.credentials.openai_key is not None, "OpenAI key must be set in the credentials file"
# Ensure valid database configuration # Ensure valid database configuration
# TODO: add back once tests are matched # TODO: add back once tests are matched
# assert ( # assert (
@@ -665,25 +680,25 @@ class SyncServer(LockingServer):
preset_override = True preset_override = True
preset_obj.human = human preset_obj.human = human
# This is a check for a common bug where users were providing filenames instead of values # This is a check for a common bug where users were providing filenames instead of values
try: # try:
get_human_text(human) # get_human_text(human)
raise ValueError(human) # raise ValueError(human)
raise UserWarning( # raise UserWarning(
f"It looks like there is a human file named {human} - did you mean to pass the file contents to the `human` arg?" # f"It looks like there is a human file named {human} - did you mean to pass the file contents to the `human` arg?"
) # )
except: # except:
pass # pass
if persona is not None: if persona is not None:
preset_override = True preset_override = True
preset_obj.persona = persona preset_obj.persona = persona
try: # try:
get_persona_text(persona) # get_persona_text(persona)
raise ValueError(persona) # raise ValueError(persona)
raise UserWarning( # raise UserWarning(
f"It looks like there is a persona file named {persona} - did you mean to pass the file contents to the `persona` arg?" # f"It looks like there is a persona file named {persona} - did you mean to pass the file contents to the `persona` arg?"
) # )
except: # except:
pass # pass
if human_name is not None and human_name != preset_obj.human_name: if human_name is not None and human_name != preset_obj.human_name:
preset_override = True preset_override = True
preset_obj.human_name = human_name preset_obj.human_name = human_name
@@ -721,8 +736,6 @@ class SyncServer(LockingServer):
# gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now # gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now
first_message_verify_mono=True if (llm_config.model is not None and "gpt-4" in llm_config.model) else False, first_message_verify_mono=True if (llm_config.model is not None and "gpt-4" in llm_config.model) else False,
) )
save_agent(agent=agent, ms=self.ms)
# FIXME: this is a hacky way to get the system prompts injected into agent into the DB # FIXME: this is a hacky way to get the system prompts injected into agent into the DB
# self.ms.update_agent(agent.agent_state) # self.ms.update_agent(agent.agent_state)
except Exception as e: except Exception as e:

88
poetry.lock generated
View File

@@ -1587,13 +1587,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
[[package]] [[package]]
name = "identify" name = "identify"
version = "2.5.35" version = "2.5.36"
description = "File identification library for Python" description = "File identification library for Python"
optional = true optional = true
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
{file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
] ]
[package.extras] [package.extras]
@@ -3410,13 +3410,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-
[[package]] [[package]]
name = "pluggy" name = "pluggy"
version = "1.4.0" version = "1.5.0"
description = "plugin and hook calling mechanisms for python" description = "plugin and hook calling mechanisms for python"
optional = true optional = true
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
] ]
[package.extras] [package.extras]
@@ -3619,51 +3619,51 @@ files = [
[[package]] [[package]]
name = "pyarrow" name = "pyarrow"
version = "15.0.2" version = "16.0.0"
description = "Python library for Apache Arrow" description = "Python library for Apache Arrow"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, {file = "pyarrow-16.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:22a1fdb1254e5095d629e29cd1ea98ed04b4bbfd8e42cc670a6b639ccc208b60"},
{file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, {file = "pyarrow-16.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:574a00260a4ed9d118a14770edbd440b848fcae5a3024128be9d0274dbcaf858"},
{file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0815d0ddb733b8c1b53a05827a91f1b8bde6240f3b20bf9ba5d650eb9b89cdf"},
{file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df0080339387b5d30de31e0a149c0c11a827a10c82f0c67d9afae3981d1aabb7"},
{file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:edf38cce0bf0dcf726e074159c60516447e4474904c0033f018c1f33d7dac6c5"},
{file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91d28f9a40f1264eab2af7905a4d95320ac2f287891e9c8b0035f264fe3c3a4b"},
{file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, {file = "pyarrow-16.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:99af421ee451a78884d7faea23816c429e263bd3618b22d38e7992c9ce2a7ad9"},
{file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, {file = "pyarrow-16.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d22d0941e6c7bafddf5f4c0662e46f2075850f1c044bf1a03150dd9e189427ce"},
{file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, {file = "pyarrow-16.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:266ddb7e823f03733c15adc8b5078db2df6980f9aa93d6bb57ece615df4e0ba7"},
{file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cc23090224b6594f5a92d26ad47465af47c1d9c079dd4a0061ae39551889efe"},
{file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56850a0afe9ef37249d5387355449c0f94d12ff7994af88f16803a26d38f2016"},
{file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:705db70d3e2293c2f6f8e84874b5b775f690465798f66e94bb2c07bab0a6bb55"},
{file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:5448564754c154997bc09e95a44b81b9e31ae918a86c0fcb35c4aa4922756f55"},
{file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, {file = "pyarrow-16.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:729f7b262aa620c9df8b9967db96c1575e4cfc8c25d078a06968e527b8d6ec05"},
{file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, {file = "pyarrow-16.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fb8065dbc0d051bf2ae2453af0484d99a43135cadabacf0af588a3be81fbbb9b"},
{file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, {file = "pyarrow-16.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:20ce707d9aa390593ea93218b19d0eadab56390311cb87aad32c9a869b0e958c"},
{file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5823275c8addbbb50cd4e6a6839952682a33255b447277e37a6f518d6972f4e1"},
{file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab8b9050752b16a8b53fcd9853bf07d8daf19093533e990085168f40c64d978"},
{file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:42e56557bc7c5c10d3e42c3b32f6cff649a29d637e8f4e8b311d334cc4326730"},
{file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a7abdee4a4a7cfa239e2e8d721224c4b34ffe69a0ca7981354fe03c1328789b"},
{file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, {file = "pyarrow-16.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:ef2f309b68396bcc5a354106741d333494d6a0d3e1951271849787109f0229a6"},
{file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, {file = "pyarrow-16.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ed66e5217b4526fa3585b5e39b0b82f501b88a10d36bd0d2a4d8aa7b5a48e2df"},
{file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, {file = "pyarrow-16.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc8814310486f2a73c661ba8354540f17eef51e1b6dd090b93e3419d3a097b3a"},
{file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c2f5e239db7ed43e0ad2baf46a6465f89c824cc703f38ef0fde927d8e0955f7"},
{file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f293e92d1db251447cb028ae12f7bc47526e4649c3a9924c8376cab4ad6b98bd"},
{file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:dd9334a07b6dc21afe0857aa31842365a62eca664e415a3f9536e3a8bb832c07"},
{file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d91073d1e2fef2c121154680e2ba7e35ecf8d4969cc0af1fa6f14a8675858159"},
{file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, {file = "pyarrow-16.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:71d52561cd7aefd22cf52538f262850b0cc9e4ec50af2aaa601da3a16ef48877"},
{file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, {file = "pyarrow-16.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b93c9a50b965ee0bf4fef65e53b758a7e8dcc0c2d86cebcc037aaaf1b306ecc0"},
{file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, {file = "pyarrow-16.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d831690844706e374c455fba2fb8cfcb7b797bfe53ceda4b54334316e1ac4fa4"},
{file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35692ce8ad0b8c666aa60f83950957096d92f2a9d8d7deda93fb835e6053307e"},
{file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dd3151d098e56f16a8389c1247137f9e4c22720b01c6f3aa6dec29a99b74d80"},
{file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bd40467bdb3cbaf2044ed7a6f7f251c8f941c8b31275aaaf88e746c4f3ca4a7a"},
{file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:00a1dcb22ad4ceb8af87f7bd30cc3354788776c417f493089e0a0af981bc8d80"},
{file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, {file = "pyarrow-16.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fda9a7cebd1b1d46c97b511f60f73a5b766a6de4c5236f144f41a5d5afec1f35"},
{file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, {file = "pyarrow-16.0.0.tar.gz", hash = "sha256:59bb1f1edbbf4114c72415f039f1359f1a57d166a331c3229788ccbfbb31689a"},
] ]
[package.dependencies] [package.dependencies]
numpy = ">=1.16.6,<2" numpy = ">=1.16.6"
[[package]] [[package]]
name = "pyarrow-hotfix" name = "pyarrow-hotfix"

View File

@@ -0,0 +1,16 @@
#!/bin/bash
# wait_for_service.sh — poll an HTTP endpoint until it responds, then exec a command.
#
# Usage: wait_for_service.sh <url> [--] [command ...]
#
# The workflow calls this as:
#   wait_for_service.sh http://localhost:8083 -- echo "Service is ready"
# so an optional `--` separator between the URL and the command is accepted.
set -e

host="$1"
shift

# Drop a leading `--` so it is not treated as the command to exec.
if [ "${1:-}" = "--" ]; then
  shift
fi

# Keep the command as an array: `cmd="$@"` would flatten it into one string
# and re-split on whitespace at exec time, breaking quoted arguments.
cmd=("$@")

until curl -s "$host" > /dev/null; do
  >&2 echo "Service is unavailable - sleeping"
  sleep 1
done

>&2 echo "Service is up - executing command"
exec "${cmd[@]}"

View File

@@ -77,6 +77,8 @@ def run_server():
credentials.save() credentials.save()
# start server # start server
from memgpt.server.rest_api.server import start_server
start_server(debug=True) start_server(debug=True)

View File

@@ -23,8 +23,6 @@ client = None
test_agent_state_post_message = None test_agent_state_post_message = None
test_user_id = uuid.uuid4() test_user_id = uuid.uuid4()
local_service_url = "http://localhost:8283"
docker_compose_url = "http://localhost:8083"
# admin credentials # admin credentials
test_server_token = "test_server_token" test_server_token = "test_server_token"
@@ -98,37 +96,41 @@ def run_server():
# Fixture to create clients with different configurations # Fixture to create clients with different configurations
@pytest.fixture( @pytest.fixture(
params=[ params=[ # whether to use REST API server
{"base_url": local_service_url}, {"server": True},
# {"base_url": docker_compose_url}, # TODO: add when docker compose added to tests # {"server": False} # TODO: add when implemented
# {"base_url": None} # TODO: add when implemented
], ],
scope="module", scope="module",
) )
# @pytest.fixture(params=[{"base_url": test_base_url}], scope="module")
def client(request): def client(request):
print("CLIENT", request.param["base_url"]) if request.param["server"]:
if request.param["base_url"]: # get URL from environment
if request.param["base_url"] == local_service_url: server_url = os.getenv("MEMGPT_SERVER_URL")
# start server if server_url is None:
# run server in thread
# NOTE: must set MEMGPT_SERVER_PASS environment variable
server_url = "http://localhost:8283"
print("Starting server thread") print("Starting server thread")
thread = threading.Thread(target=run_server, daemon=True) thread = threading.Thread(target=run_server, daemon=True)
thread.start() thread.start()
time.sleep(5) time.sleep(5)
print("Running client tests with server:", server_url)
admin = Admin(request.param["base_url"], test_server_token) # create user via admin client
admin = Admin(server_url, test_server_token)
response = admin.create_user(test_user_id) # Adjust as per your client's method response = admin.create_user(test_user_id) # Adjust as per your client's method
response.user_id response.user_id
token = response.api_key token = response.api_key
else: else:
# use local client (no server)
token = None token = None
server_url = None
client = create_client(**request.param, token=token) # This yields control back to the test function client = create_client(base_url=server_url, token=token) # This yields control back to the test function
try: try:
yield client yield client
finally: finally:
# cleanup user # cleanup user
if request.param["base_url"]: if server_url:
admin.delete_user(test_user_id) # Adjust as per your client's method admin.delete_user(test_user_id) # Adjust as per your client's method