From 2b3cfb179775a209e5804c7124e0eacb65339890 Mon Sep 17 00:00:00 2001
From: Ani Tunturi
Date: Sat, 21 Mar 2026 17:41:56 -0400
Subject: [PATCH] =?UTF-8?q?feat:=20breathe=20=E2=80=94=20220K=20context=20?=
 =?UTF-8?q?window,=2025-result=20search=20pages?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Kimi K2.5 can handle it. Let her think bigger.
---
 conf.yaml          | 2 +-
 letta/constants.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/conf.yaml b/conf.yaml
index cbb160c8..2ab73c80 100644
--- a/conf.yaml
+++ b/conf.yaml
@@ -233,7 +233,7 @@ letta:
 
 # =============================================================================
 model:
   # Global settings
-  global_max_context_window_limit: 32000
+  global_max_context_window_limit: 220000
   inner_thoughts_kwarg: thinking
   default_prompt_formatter: chatml
diff --git a/letta/constants.py b/letta/constants.py
index 2e1748e9..df39ed4d 100644
--- a/letta/constants.py
+++ b/letta/constants.py
@@ -247,7 +247,7 @@ CORE_MEMORY_LINE_NUMBER_WARNING = "# NOTE: Line numbers shown below (with arrows
 # Constants to do with summarization / conversation length window
 # The max amount of tokens supported by the underlying model (eg 8k for gpt-4 and Mistral 7B)
 LLM_MAX_CONTEXT_WINDOW = {
-    "DEFAULT": 30000,
+    "DEFAULT": 220000,
     # deepseek
     "deepseek-chat": 64000,
     "deepseek-reasoner": 64000,
@@ -444,7 +444,7 @@ REQ_HEARTBEAT_MESSAGE = f"{NON_USER_MSG_PREFIX}Function called using request_hea
 
 FUNC_FAILED_HEARTBEAT_MESSAGE = f"{NON_USER_MSG_PREFIX}Function call failed, returning control"
 
-RETRIEVAL_QUERY_DEFAULT_PAGE_SIZE = 5
+RETRIEVAL_QUERY_DEFAULT_PAGE_SIZE = 25
 
 MAX_FILENAME_LENGTH = 255
 RESERVED_FILENAMES = {"CON", "PRN", "AUX", "NUL", "COM1", "COM2", "LPT1", "LPT2"}