feat: add defaults to compose and .env.example (#1792)
This commit is contained in:
44
.env.example
Normal file
44
.env.example
Normal file
@@ -0,0 +1,44 @@
|
||||
##########################################################
|
||||
Example environment variable configurations for the Letta
|
||||
Docker container. Un-comment the sections you want to
|
||||
configure.
|
||||
|
||||
Hint: You don't need to have the same LLM and
|
||||
Embedding model backends (can mix and match).
|
||||
##########################################################
|
||||
|
||||
|
||||
##########################################################
|
||||
OpenAI configuration
|
||||
##########################################################
|
||||
## LLM Model
|
||||
#LETTA_LLM_ENDPOINT_TYPE=openai
|
||||
#LETTA_LLM_MODEL=gpt-4o-mini
|
||||
## Embeddings
|
||||
#LETTA_EMBEDDING_ENDPOINT_TYPE=openai
|
||||
#LETTA_EMBEDDING_MODEL=text-embedding-ada-002
|
||||
|
||||
|
||||
##########################################################
|
||||
Ollama configuration
|
||||
##########################################################
|
||||
## LLM Model
|
||||
#LETTA_LLM_ENDPOINT=http://host.docker.internal:11434
|
||||
#LETTA_LLM_ENDPOINT_TYPE=ollama
|
||||
#LETTA_LLM_MODEL=dolphin2.2-mistral:7b-q6_K
|
||||
#LETTA_LLM_CONTEXT_WINDOW=8192
|
||||
## Embeddings
|
||||
#LETTA_EMBEDDING_ENDPOINT=http://host.docker.internal:11434
|
||||
#LETTA_EMBEDDING_ENDPOINT_TYPE=ollama
|
||||
#LETTA_EMBEDDING_MODEL=mxbai-embed-large
|
||||
#LETTA_EMBEDDING_DIM=512
|
||||
|
||||
|
||||
##########################################################
|
||||
vLLM configuration
|
||||
##########################################################
|
||||
## LLM Model
|
||||
#LETTA_LLM_ENDPOINT=http://host.docker.internal:8000
|
||||
#LETTA_LLM_ENDPOINT_TYPE=vllm
|
||||
#LETTA_LLM_MODEL=ehartford/dolphin-2.2.1-mistral-7b
|
||||
#LETTA_LLM_CONTEXT_WINDOW=8192
|
||||
Reference in New Issue
Block a user