diff --git a/.env.example b/.env.example
new file mode 100644
index 00000000..48cbd730
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,44 @@
+##########################################################
+Example environment variable configurations for the Letta
+Docker container. Un-comment the sections you want to
+configure with.
+
+Hint: You don't need to have the same LLM and
+Embedding model backends (can mix and match).
+##########################################################
+
+
+##########################################################
+ OpenAI configuration
+##########################################################
+## LLM Model
+#LETTA_LLM_ENDPOINT_TYPE=openai
+#LETTA_LLM_MODEL=gpt-4o-mini
+## Embeddings
+#LETTA_EMBEDDING_ENDPOINT_TYPE=openai
+#LETTA_EMBEDDING_MODEL=text-embedding-ada-002
+
+
+##########################################################
+ Ollama configuration
+##########################################################
+## LLM Model
+#LETTA_LLM_ENDPOINT=http://host.docker.internal:11434
+#LETTA_LLM_ENDPOINT_TYPE=ollama
+#LETTA_LLM_MODEL=dolphin2.2-mistral:7b-q6_K
+#LETTA_LLM_CONTEXT_WINDOW=8192
+## Embeddings
+#LETTA_EMBEDDING_ENDPOINT=http://host.docker.internal:11434
+#LETTA_EMBEDDING_ENDPOINT_TYPE=ollama
+#LETTA_EMBEDDING_MODEL=mxbai-embed-large
+#LETTA_EMBEDDING_DIM=512
+
+
+##########################################################
+ vLLM configuration
+##########################################################
+## LLM Model
+#LETTA_LLM_ENDPOINT=http://host.docker.internal:8000
+#LETTA_LLM_ENDPOINT_TYPE=vllm
+#LETTA_LLM_MODEL=ehartford/dolphin-2.2.1-mistral-7b
+#LETTA_LLM_CONTEXT_WINDOW=8192
diff --git a/compose.yaml b/compose.yaml
index 55c18bbf..142e3495 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -34,11 +34,11 @@ services:
       - LETTA_LLM_ENDPOINT=${LETTA_LLM_ENDPOINT}
       - LETTA_LLM_ENDPOINT_TYPE=${LETTA_LLM_ENDPOINT_TYPE}
       - LETTA_LLM_MODEL=${LETTA_LLM_MODEL:-gpt-4}
-      - LETTA_LLM_CONTEXT_WINDOW=${LETTA_LLM_CONTEXT_WINDOW}
+      - LETTA_LLM_CONTEXT_WINDOW=${LETTA_LLM_CONTEXT_WINDOW:-8192}
       - LETTA_EMBEDDING_ENDPOINT=${LETTA_EMBEDDING_ENDPOINT}
       - LETTA_EMBEDDING_ENDPOINT_TYPE=${LETTA_EMBEDDING_ENDPOINT_TYPE}
-      - LETTA_EMBEDDING_DIM=${LETTA_EMBEDDING_DIM}
-      - LETTA_EMBEDDING_MODEL=${LETTA_EMBEDDING_MODEL}
+      - LETTA_EMBEDDING_DIM=${LETTA_EMBEDDING_DIM:-1536}
+      - LETTA_EMBEDDING_MODEL=${LETTA_EMBEDDING_MODEL:-text-embedding-ada-002}
       - LETTA_DEBUG=True
       - OPENAI_API_KEY=${OPENAI_API_KEY}
     #volumes:
diff --git a/docs/generate_docs.py b/docs/generate_docs.py
index ffc0594d..c315ad4c 100644
--- a/docs/generate_docs.py
+++ b/docs/generate_docs.py
@@ -26,7 +26,9 @@ def generate_modules(config):
     return modules
 
 
-folder = "/Users/sarahwooders/repos/mintlify-docs/python-reference"
+# get PYTHON_DOC_DIR from environment
+folder = os.getenv("PYTHON_DOC_DIR")
+assert folder is not None, "PYTHON_DOC_DIR environment variable must be set"
 
 
 # Generate client documentation. This takes the documentation from the AbstractClient, but then appends the documentation from the LocalClient and RESTClient.