Files
letta-server/memgpt/local_llm/koboldcpp/settings.py
Charles Packer 7f950b05e8 Patch local LLMs with context_window (#416)
* patch

* patch ollama

* patch lmstudio

* patch kobold
2023-11-10 12:06:41 -08:00

26 lines
557 B
Python

from ...constants import LLM_MAX_TOKENS
# Default request settings for the KoboldCpp text-generation backend.
# API reference: https://lite.koboldai.net/koboldcpp_api#/v1/post_v1_generate
SIMPLE = {
    # Sequences that terminate generation. Both colon-suffixed and bare
    # role prefixes are listed so a truncated role header still stops the
    # stream; the <|im_*|> entries are ChatML-style special tokens emitted
    # by some instruction-tuned models.
    "stop_sequence": [
        "\nUSER:",
        "\nASSISTANT:",
        "\nFUNCTION RETURN:",
        "\nUSER",
        "\nASSISTANT",
        "\nFUNCTION RETURN",
        "\nFUNCTION",
        "\nFUNC",
        "<|im_start|>",
        "<|im_end|>",
        "<|im_sep|>",
    ],
    # NOTE(review): "max_context_length" is deliberately absent here — the
    # commented-out default used LLM_MAX_TOKENS, so the context window is
    # presumably injected per-request by the caller; confirm against the
    # request-building code.
    # Maximum number of tokens to generate per request.
    "max_length": 512,
}