diff --git a/letta/llm_api/anthropic_client.py b/letta/llm_api/anthropic_client.py
index 3f1b8aa0..8461167a 100644
--- a/letta/llm_api/anthropic_client.py
+++ b/letta/llm_api/anthropic_client.py
@@ -447,8 +447,14 @@ class AnthropicClient(LLMClientBase):
         else:
             max_output_tokens = llm_config.max_tokens
 
+        # Strip provider prefix from model name if present (e.g., "anthropic/claude-..." -> "claude-...")
+        # This handles cases where the handle format was incorrectly passed as the model name
+        model_name = llm_config.model
+        if "/" in model_name:
+            model_name = model_name.split("/", 1)[-1]
+
         data = {
-            "model": llm_config.model,
+            "model": model_name,
             "max_tokens": max_output_tokens,
             "temperature": llm_config.temperature,
         }
diff --git a/letta/schemas/providers/anthropic.py b/letta/schemas/providers/anthropic.py
index a6e267c1..2e2faf6c 100644
--- a/letta/schemas/providers/anthropic.py
+++ b/letta/schemas/providers/anthropic.py
@@ -93,6 +93,16 @@ MODEL_LIST = [
         "name": "claude-3-5-haiku-latest",
         "context_window": 200000,
     },
+    # 4.5
+    {
+        "name": "claude-haiku-4-5-20251001",
+        "context_window": 200000,
+    },
+    # 4.5 latest
+    {
+        "name": "claude-haiku-4-5-latest",
+        "context_window": 200000,
+    },
     ## Opus 4.5
     {
         "name": "claude-opus-4-5-20251101",