Files
letta-server/memgpt/local_llm/webui/settings.py
Charles Packer 8a7a64c7f9 patch web UI (#484)
* patch web UI

* set truncation_length
2023-11-19 14:56:10 -08:00

27 lines
593 B
Python

from ...constants import LLM_MAX_TOKENS
# Role prefixes that may appear at the start of a new chat turn; generation is
# stopped both with and without the trailing colon.
_ROLE_PREFIXES = ("USER", "ASSISTANT", "FUNCTION RETURN")

# ChatML special tokens (used by e.g. OpenHermes/Dolphin-style templates).
_CHATML_TOKENS = ("<|im_start|>", "<|im_end|>", "<|im_sep|>")

# Request settings for the text-generation-webui completion endpoint.
# Only stop sequences are set here; token limits (max_tokens /
# truncation_length) are intentionally left to the server's own defaults.
SIMPLE = {
    "stop": (
        [f"\n{role}:" for role in _ROLE_PREFIXES]
        + [f"\n{role}" for role in _ROLE_PREFIXES]
        # Truncated variants of the function-call role marker.
        + ["\nFUNCTION", "\nFUNC"]
        + list(_CHATML_TOKENS)
    ),
}