feat: add defaults to compose and .env.example (#1792)
This commit is contained in:
44
.env.example
Normal file
44
.env.example
Normal file
@@ -0,0 +1,44 @@
|
||||
##########################################################
|
||||
Example environment variable configurations for the Letta
|
||||
Docker container. Un-comment the sections you want to
|
||||
configure.
|
||||
|
||||
Hint: You don't need to have the same LLM and
|
||||
Embedding model backends (can mix and match).
|
||||
##########################################################
|
||||
|
||||
|
||||
##########################################################
|
||||
OpenAI configuration
|
||||
##########################################################
|
||||
## LLM Model
|
||||
#LETTA_LLM_ENDPOINT_TYPE=openai
|
||||
#LETTA_LLM_MODEL=gpt-4o-mini
|
||||
## Embeddings
|
||||
#LETTA_EMBEDDING_ENDPOINT_TYPE=openai
|
||||
#LETTA_EMBEDDING_MODEL=text-embedding-ada-002
|
||||
|
||||
|
||||
##########################################################
|
||||
Ollama configuration
|
||||
##########################################################
|
||||
## LLM Model
|
||||
#LETTA_LLM_ENDPOINT=http://host.docker.internal:11434
|
||||
#LETTA_LLM_ENDPOINT_TYPE=ollama
|
||||
#LETTA_LLM_MODEL=dolphin2.2-mistral:7b-q6_K
|
||||
#LETTA_LLM_CONTEXT_WINDOW=8192
|
||||
## Embeddings
|
||||
#LETTA_EMBEDDING_ENDPOINT=http://host.docker.internal:11434
|
||||
#LETTA_EMBEDDING_ENDPOINT_TYPE=ollama
|
||||
#LETTA_EMBEDDING_MODEL=mxbai-embed-large
|
||||
#LETTA_EMBEDDING_DIM=512
|
||||
|
||||
|
||||
##########################################################
|
||||
vLLM configuration
|
||||
##########################################################
|
||||
## LLM Model
|
||||
#LETTA_LLM_ENDPOINT=http://host.docker.internal:8000
|
||||
#LETTA_LLM_ENDPOINT_TYPE=vllm
|
||||
#LETTA_LLM_MODEL=ehartford/dolphin-2.2.1-mistral-7b
|
||||
#LETTA_LLM_CONTEXT_WINDOW=8192
|
||||
@@ -34,11 +34,11 @@ services:
|
||||
- LETTA_LLM_ENDPOINT=${LETTA_LLM_ENDPOINT}
|
||||
- LETTA_LLM_ENDPOINT_TYPE=${LETTA_LLM_ENDPOINT_TYPE}
|
||||
- LETTA_LLM_MODEL=${LETTA_LLM_MODEL:-gpt-4}
|
||||
- LETTA_LLM_CONTEXT_WINDOW=${LETTA_LLM_CONTEXT_WINDOW}
|
||||
- LETTA_LLM_CONTEXT_WINDOW=${LETTA_LLM_CONTEXT_WINDOW:-8192}
|
||||
- LETTA_EMBEDDING_ENDPOINT=${LETTA_EMBEDDING_ENDPOINT}
|
||||
- LETTA_EMBEDDING_ENDPOINT_TYPE=${LETTA_EMBEDDING_ENDPOINT_TYPE}
|
||||
- LETTA_EMBEDDING_DIM=${LETTA_EMBEDDING_DIM}
|
||||
- LETTA_EMBEDDING_MODEL=${LETTA_EMBEDDING_MODEL}
|
||||
- LETTA_EMBEDDING_DIM=${LETTA_EMBEDDING_DIM:-1536}
|
||||
- LETTA_EMBEDDING_MODEL=${LETTA_EMBEDDING_MODEL:-text-embedding-ada-002}
|
||||
- LETTA_DEBUG=True
|
||||
- OPENAI_API_KEY=${OPENAI_API_KEY}
|
||||
#volumes:
|
||||
|
||||
@@ -26,7 +26,9 @@ def generate_modules(config):
|
||||
return modules
|
||||
|
||||
|
||||
folder = "/Users/sarahwooders/repos/mintlify-docs/python-reference"
|
||||
# get PYTHON_DOC_DIR from environment
|
||||
folder = os.getenv("PYTHON_DOC_DIR")
|
||||
assert folder is not None, "PYTHON_DOC_DIR environment variable must be set"
|
||||
|
||||
|
||||
# Generate client documentation. This takes the documentation from the AbstractClient, but then appends the documentation from the LocalClient and RESTClient.
|
||||
|
||||
Reference in New Issue
Block a user