Files
letta-server/letta/local_llm/lmstudio/settings.py
Shubham Naik 5a743d1dc4 Add 'apps/core/' from commit 'ea2a7395f4023f5b9fab03e6273db3b64a1181d5'
git-subtree-dir: apps/core
git-subtree-mainline: a8963e11e7a5a0059acbc849ce768e1eee80df61
git-subtree-split: ea2a7395f4023f5b9fab03e6273db3b64a1181d5
2024-12-22 20:31:22 -08:00

30 lines
815 B
Python

# Minimal request settings sent to LM Studio's completion endpoint.
# Only stop sequences, streaming mode, and a model placeholder are set;
# everything else is left to LM Studio's defaults.
SIMPLE = {
    # Sequences that terminate generation. Covers the chat-role markers
    # used in Letta prompts (with and without trailing colons, plus
    # truncated prefixes) and the ChatML special tokens.
    "stop": [
        "\nUSER:",
        "\nASSISTANT:",
        "\nFUNCTION RETURN:",
        "\nUSER",
        "\nASSISTANT",
        "\nFUNCTION RETURN",
        "\nFUNCTION",
        "\nFUNC",
        "<|im_start|>",
        "<|im_end|>",
        "<|im_sep|>",
        # Candidate extra stop sequences, intentionally disabled:
        # '\n' +
        # '</s>',
        # '<|',
        # '\n#',
        # '\n\n\n',
    ],
    # "max_tokens" caps how many tokens the model may generate; if enabled,
    # keep it at or below the model context length (e.g. 8k for Mistral 7B).
    # "max_tokens": 8000,
    # "max_tokens": LLM_MAX_TOKENS,
    # "lmstudio.context_overflow_policy" controls LM Studio's own overflow
    # handling; Letta manages context overflow itself, so leave this unset.
    # "lmstudio": {"context_overflow_policy": 2},
    "stream": False,
    "model": "local model",
}