chore: migrate package name to letta (#1775)

Co-authored-by: Charles Packer <packercharles@gmail.com>
Co-authored-by: Shubham Naik <shubham.naik10@gmail.com>
Co-authored-by: Shubham Naik <shub@memgpt.ai>
This commit is contained in:
Sarah Wooders
2024-09-23 09:15:18 -07:00
committed by GitHub
parent 9ebbaacc1f
commit 8ae1e64987
337 changed files with 5528 additions and 6795 deletions

View File

@@ -11,11 +11,11 @@ assignees: ''
A clear and concise description of what the bug is.
**Please describe your setup**
- [ ] How did you install memgpt?
- `pip install pymemgpt`? `pip install pymemgpt-nightly`? `git clone`?
- [ ] How did you install letta?
- `pip install letta`? `pip install letta-nightly`? `git clone`?
- [ ] Describe your setup
- What's your OS (Windows/MacOS/Linux)?
- How are you running `memgpt`? (`cmd.exe`/Powershell/Anaconda Shell/Terminal)
- How are you running `letta`? (`cmd.exe`/Powershell/Anaconda Shell/Terminal)
**Screenshots**
If applicable, add screenshots to help explain your problem.
@@ -23,8 +23,8 @@ If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here.
**MemGPT Config**
Please attach your `~/.memgpt/config` file or copy paste it below.
**Letta Config**
Please attach your `~/.letta/config` file or copy paste it below.
---
@@ -32,7 +32,7 @@ If you're not using OpenAI, please provide additional information on your local
**Local LLM details**
If you are trying to run MemGPT with local LLMs, please provide the following information:
If you are trying to run Letta with local LLMs, please provide the following information:
- [ ] The exact model you're trying to use (e.g. `dolphin-2.1-mistral-7b.Q6_K.gguf`)
- [ ] The local LLM backend you are using (web UI? LM Studio?)

View File

@@ -8,7 +8,7 @@ How can we test your PR during review? What commands should we run? What outcome
Have you tested the latest commit on the PR? If so please provide outputs from your tests.
**Related issues or PRs**
Please link any related GitHub [issues](https://github.com/cpacker/MemGPT/issues) or [PRs](https://github.com/cpacker/MemGPT/pulls).
Please link any related GitHub [issues](https://github.com/cpacker/Letta/issues) or [PRs](https://github.com/cpacker/Letta/pulls).
**Is your PR over 500 lines of code?**
If so, please break up your PR into multiple smaller PRs so that we can review them quickly, or provide justification for its length.

View File

@@ -23,6 +23,6 @@ jobs:
run: |
# Extract the version number from pyproject.toml using awk
CURRENT_VERSION=$(awk -F '"' '/version =/ { print $2 }' pyproject.toml | head -n 1)
docker build . --file Dockerfile --tag memgpt/memgpt-server:$CURRENT_VERSION --tag memgpt/memgpt-server:latest
docker push memgpt/memgpt-server:$CURRENT_VERSION
docker push memgpt/memgpt-server:latest
docker build . --file Dockerfile --tag lettaai/letta:$CURRENT_VERSION --tag lettaai/letta:latest
docker push lettaai/letta:$CURRENT_VERSION
docker push lettaai/letta:latest

View File

@@ -20,17 +20,16 @@ jobs:
- name: Set permissions for log directory
run: |
mkdir -p /home/runner/.memgpt/logs
sudo chown -R $USER:$USER /home/runner/.memgpt/logs
chmod -R 755 /home/runner/.memgpt/logs
mkdir -p /home/runner/.letta/logs
sudo chown -R $USER:$USER /home/runner/.letta/logs
chmod -R 755 /home/runner/.letta/logs
- name: Build and run docker dev server
env:
MEMGPT_PG_DB: memgpt
MEMGPT_PG_USER: memgpt
MEMGPT_PG_PASSWORD: memgpt
MEMGPT_PG_PORT: 8888
MEMGPT_SERVER_PASS: test_server_token
LETTA_PG_DB: letta
LETTA_PG_USER: letta
LETTA_PG_PASSWORD: letta
LETTA_PG_PORT: 8888
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
run: docker compose -f dev-compose.yaml up --build -d
@@ -46,12 +45,12 @@ jobs:
- name: Run tests with pytest
env:
MEMGPT_PG_DB: memgpt
MEMGPT_PG_USER: memgpt
MEMGPT_PG_PASSWORD: memgpt
MEMGPT_PG_PORT: 8888
MEMGPT_SERVER_PASS: test_server_token
MEMGPT_SERVER_URL: http://localhost:8083
LETTA_PG_DB: letta
LETTA_PG_USER: letta
LETTA_PG_PASSWORD: letta
LETTA_PG_PORT: 8888
LETTA_SERVER_PASS: test_server_token
LETTA_SERVER_URL: http://localhost:8083
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
PYTHONPATH: ${{ github.workspace }}:${{ env.PYTHONPATH }}
run: |

View File

@@ -45,10 +45,10 @@ jobs:
NIGHTLY_VERSION="${CURRENT_VERSION}.dev$(date +%Y%m%d%H%M%S)"
# Overwrite pyproject.toml with nightly config
sed -i "0,/version = \"${CURRENT_VERSION}\"/s//version = \"${NIGHTLY_VERSION}\"/" pyproject.toml
sed -i 's/name = "pymemgpt"/name = "pymemgpt-nightly"/g' pyproject.toml
sed -i "s/__version__ = '.*'/__version__ = '${NIGHTLY_VERSION}'/g" memgpt/__init__.py
sed -i 's/name = "letta"/name = "letta-nightly"/g' pyproject.toml
sed -i "s/__version__ = '.*'/__version__ = '${NIGHTLY_VERSION}'/g" letta/__init__.py
cat pyproject.toml
cat memgpt/__init__.py
cat letta/__init__.py
- name: Configure poetry
env:

View File

@@ -1,24 +0,0 @@
# This GitHub Actions workflow was auto-generated by the `rdme` cli on 2023-12-18T23:15:45.852Z
# You can view our full documentation here: https://docs.readme.com/docs/rdme
name: ReadMe GitHub Action 🦉
on:
push:
branches:
# This workflow will run every time you push code to the following branch: `migrate-docs`
# Check out GitHub's docs for more info on configuring this:
# https://docs.github.com/actions/using-workflows/events-that-trigger-workflows
- main
jobs:
rdme-docs:
runs-on: ubuntu-latest
if: github.repository == 'cpacker/MemGPT' # TODO: if the repo org ever changes, this must be updated
steps:
- name: Check out repo 📚
uses: actions/checkout@v4
- name: Run `docs` command 🚀
uses: readmeio/rdme@v8
with:
rdme: docs docs --key=${{ secrets.README_API_KEY }} --version=1.0

View File

@@ -17,7 +17,7 @@ on:
jobs:
rdme-openapi:
runs-on: ubuntu-latest
if: github.repository == 'cpacker/MemGPT' # TODO: if the repo org ever changes, this must be updated
if: github.repository == 'cpacker/Letta' # TODO: if the repo org ever changes, this must be updated
steps:
- name: Check out repo 📚
uses: actions/checkout@v4
@@ -31,8 +31,8 @@ jobs:
- name: Generate openapi.json file
run: |
poetry run memgpt quickstart
poetry run memgpt server &
poetry run letta quickstart
poetry run letta server &
CMD_PID=$!
sleep 5 # Wait for 5 seconds
kill -SIGINT $CMD_PID
@@ -40,7 +40,7 @@ jobs:
- name: Run `openapi` command (file 1) 🚀
uses: readmeio/rdme@v8
with:
rdme: openapi openapi_memgpt.json --key=${{ secrets.README_API_KEY }} --id=6581305097523b004eedfd16
rdme: openapi openapi_letta.json --key=${{ secrets.README_API_KEY }} --id=6581305097523b004eedfd16
# - name: Run `openapi` command (file 2) 🚀
# uses: readmeio/rdme@v8

View File

@@ -28,7 +28,7 @@ jobs:
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
run: |
poetry run memgpt quickstart --backend anthropic
poetry run letta quickstart --backend anthropic
- name: Test LLM endpoint
env:

View File

@@ -1,4 +1,4 @@
name: Endpoint (MemGPT)
name: Endpoint (Letta)
on:
push:
@@ -23,8 +23,8 @@ jobs:
- name: Test LLM endpoint
run: |
poetry run pytest -s -vv tests/test_endpoints.py::test_llm_endpoint_memgpt_hosted
poetry run pytest -s -vv tests/test_endpoints.py::test_llm_endpoint_letta_hosted
- name: Test embedding endpoint
run: |
poetry run pytest -s -vv tests/test_endpoints.py::test_embedding_endpoint_memgpt_hosted
poetry run pytest -s -vv tests/test_endpoints.py::test_embedding_endpoint_letta_hosted

View File

@@ -28,7 +28,7 @@ jobs:
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
run: |
poetry run memgpt quickstart --backend openai
poetry run letta quickstart --backend openai
- name: Test LLM endpoint
env:

View File

@@ -1,7 +1,6 @@
name: Run All pytest Tests
env:
MEMGPT_PGURI: ${{ secrets.MEMGPT_PGURI }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
on:
@@ -36,48 +35,40 @@ jobs:
install-args: "-E dev -E postgres -E milvus -E crewai-tools"
- name: Initialize credentials
run: poetry run memgpt quickstart --backend openai
run: poetry run letta quickstart --backend openai
#- name: Run docker compose server
# env:
# MEMGPT_PG_DB: memgpt
# MEMGPT_PG_USER: memgpt
# MEMGPT_PG_PASSWORD: memgpt
# MEMGPT_SERVER_PASS: test_server_token
# MEMGPT_CONFIG_PATH: configs/server_config.yaml
# run: docker compose up -d
#
- name: Run tools tests
env:
MEMGPT_PG_PORT: 8888
MEMGPT_PG_USER: memgpt
MEMGPT_PG_PASSWORD: memgpt
MEMGPT_PG_DB: memgpt
MEMGPT_PG_HOST: localhost
MEMGPT_SERVER_PASS: test_server_token
LETTA_PG_PORT: 8888
LETTA_PG_USER: letta
LETTA_PG_PASSWORD: letta
LETTA_PG_DB: letta
LETTA_PG_HOST: localhost
LETTA_SERVER_PASS: test_server_token
run: |
poetry run pytest -s -vv tests/test_tools.py
- name: Run server tests
env:
MEMGPT_PG_PORT: 8888
MEMGPT_PG_USER: memgpt
MEMGPT_PG_PASSWORD: memgpt
MEMGPT_PG_DB: memgpt
MEMGPT_PG_HOST: localhost
MEMGPT_SERVER_PASS: test_server_token
run: |
poetry run pytest -s -vv tests/test_server.py
- name: Run tests with pytest
env:
MEMGPT_PG_PORT: 8888
MEMGPT_PG_USER: memgpt
MEMGPT_PG_PASSWORD: memgpt
MEMGPT_PG_HOST: localhost
MEMGPT_PG_DB: memgpt
MEMGPT_SERVER_PASS: test_server_token
LETTA_PG_PORT: 8888
LETTA_PG_USER: letta
LETTA_PG_PASSWORD: letta
LETTA_PG_HOST: localhost
LETTA_PG_DB: letta
LETTA_SERVER_PASS: test_server_token
PYTHONPATH: ${{ github.workspace }}:${{ env.PYTHONPATH }}
run: |
poetry run pytest -s -vv -k "not test_tools.py and not test_concurrent_connections.py and not test_quickstart and not test_endpoints and not test_storage and not test_server and not test_openai_client" tests
- name: Run server tests
env:
LETTA_PG_PORT: 8888
LETTA_PG_USER: letta
LETTA_PG_PASSWORD: letta
LETTA_PG_DB: letta
LETTA_PG_HOST: localhost
LETTA_SERVER_PASS: test_server_token
run: |
poetry run pytest -s -vv tests/test_server.py