fix: update Anthropic Haiku test model after 3.5 retirement (#9569)

* fix: migrate Anthropic Haiku test model off retired release

Update Anthropic Haiku references in integration and usage parsing tests to a supported model id so test requests stop failing with 404 model-not-found errors.

👾 Generated with [Letta Code](https://letta.com)

Co-Authored-By: Letta <noreply@letta.com>

* fix: use canonical Anthropic Haiku handle in tests

Replace dated Anthropic Haiku handle references with the canonical provider handle so handle-based model resolution does not fail in batch and client tests.

👾 Generated with [Letta Code](https://letta.com)

Co-Authored-By: Letta <noreply@letta.com>

---------

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
jnjpng
2026-02-19 18:22:26 -08:00
committed by Caren Thomas
parent ddaf4053f6
commit f10440b49c
8 changed files with 25 additions and 24 deletions

View File

@@ -1,6 +1,6 @@
{
"context_window": 200000,
"model": "claude-3-5-haiku-20241022",
"model": "claude-haiku-4-5-20251001",
"model_endpoint_type": "anthropic",
"model_endpoint": "https://api.anthropic.com/v1",
"model_wrapper": null,

View File

@@ -48,7 +48,7 @@ def _require_clickhouse_env() -> dict[str, str]:
def _anthropic_llm_config() -> LLMConfig:
return LLMConfig(
model="claude-3-5-haiku-20241022",
model="claude-haiku-4-5-20251001",
model_endpoint_type="anthropic",
model_endpoint="https://api.anthropic.com/v1",
context_window=200000,

View File

@@ -94,7 +94,7 @@ def create_send_message(agent_id, organization_id, assistant_text, tool_call_id,
"content": [{"type": "text", "text": f"Assistant reply generated at {timestamp.strftime('%Y-%m-%d %I:%M:%S %p PST-0800')}."}],
"organization_id": organization_id,
"agent_id": agent_id,
"model": "claude-3-5-haiku-20241022",
"model": "claude-haiku-4-5-20251001",
"name": None,
"tool_calls": [
{
@@ -126,7 +126,7 @@ def create_tool_message(agent_id, organization_id, tool_call_id, timestamp):
],
"organization_id": organization_id,
"agent_id": agent_id,
"model": "claude-3-5-haiku-20241022",
"model": "claude-haiku-4-5-20251001",
"name": "send_message",
"tool_calls": None,
"tool_call_id": tool_call_id,

View File

@@ -168,7 +168,7 @@ def agent(client: Letta):
agent_state = client.agents.create(
name="test_client",
memory_blocks=[{"label": "human", "value": ""}, {"label": "persona", "value": ""}],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
@@ -184,7 +184,7 @@ def search_agent_one(client: Letta):
agent_state = client.agents.create(
name="Search Agent One",
memory_blocks=[{"label": "human", "value": ""}, {"label": "persona", "value": ""}],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
@@ -200,7 +200,7 @@ def search_agent_two(client: Letta):
agent_state = client.agents.create(
name="Search Agent Two",
memory_blocks=[{"label": "human", "value": ""}, {"label": "persona", "value": ""}],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
@@ -237,7 +237,7 @@ def test_add_and_manage_tags_for_agent(client: Letta):
# Step 0: create an agent with no tags
agent = client.agents.create(
memory_blocks=[],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
assert len(agent.tags) == 0
@@ -281,21 +281,21 @@ def test_agent_tags(client: Letta, clear_tables):
agent1 = client.agents.create(
name=f"test_agent_{str(uuid.uuid4())}",
tags=["test", "agent1", "production"],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
agent2 = client.agents.create(
name=f"test_agent_{str(uuid.uuid4())}",
tags=["test", "agent2", "development"],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
agent3 = client.agents.create(
name=f"test_agent_{str(uuid.uuid4())}",
tags=["test", "agent3", "production"],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
@@ -348,14 +348,14 @@ def test_shared_blocks(disable_e2b_api_key, client: Letta):
name="agent1",
memory_blocks=[{"label": "persona", "value": "you are agent 1"}],
block_ids=[block.id],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
agent_state2 = client.agents.create(
name="agent2",
memory_blocks=[{"label": "persona", "value": "you are agent 2"}],
block_ids=[block.id],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)
@@ -374,7 +374,7 @@ def test_update_agent_memory_label(client: Letta):
"""Test that we can update the label of a block in an agent's memory"""
agent = client.agents.create(
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
memory_blocks=[{"label": "human", "value": ""}],
)
@@ -426,7 +426,7 @@ def test_update_agent_memory_limit(client: Letta):
"""Test that we can update the limit of a block in an agent's memory"""
agent = client.agents.create(
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
memory_blocks=[
{"label": "human", "value": "username: sarah", "limit": 1000},
@@ -485,7 +485,7 @@ def test_function_always_error(client: Letta):
tool = client.tools.upsert_from_function(func=testing_method)
agent = client.agents.create(
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
memory_blocks=[
{
@@ -687,7 +687,7 @@ def test_agent_creation(client: Letta):
},
{"label": "persona", "value": "you are an assistant"},
],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
tool_ids=[tool1.id, tool2.id],
include_base_tools=False,
@@ -726,7 +726,7 @@ def test_initial_sequence(client: Letta):
# create an agent
agent = client.agents.create(
memory_blocks=[{"label": "human", "value": ""}, {"label": "persona", "value": ""}],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
initial_message_sequence=[
MessageCreateParam(
@@ -758,7 +758,7 @@ def test_initial_sequence(client: Letta):
# def test_timezone(client: Letta):
# agent = client.agents.create(
# memory_blocks=[{"label": "human", "value": ""}, {"label": "persona", "value": ""}],
# model="anthropic/claude-haiku-4-5-20251001",
# model="anthropic/claude-haiku-4-5",
# embedding="openai/text-embedding-3-small",
# timezone="America/Los_Angeles",
# )
@@ -793,7 +793,7 @@ def test_initial_sequence(client: Letta):
def test_attach_sleeptime_block(client: Letta):
agent = client.agents.create(
memory_blocks=[{"label": "human", "value": ""}, {"label": "persona", "value": ""}],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
enable_sleeptime=True,
)

View File

@@ -37,7 +37,7 @@ from tests.utils import create_tool_from_func
# Model identifiers used in tests
MODELS = {
"sonnet": "anthropic/claude-sonnet-4-20250514",
"haiku": "anthropic/claude-haiku-4-5-20251001",
"haiku": "anthropic/claude-haiku-4-5",
"opus": "anthropic/claude-opus-4-1-20250805",
}

View File

@@ -1112,7 +1112,7 @@ def test_include_return_message_types(client: LettaSDKClient, agent: AgentState,
memory_blocks=[
CreateBlockParam(label="user", value="Name: Charles"),
],
model="anthropic/claude-haiku-4-5-20251001",
model="anthropic/claude-haiku-4-5",
embedding="openai/text-embedding-3-small",
)

View File

@@ -200,7 +200,7 @@ async def test_anthropic_usage_via_adapter():
client = AnthropicClient()
llm_config = LLMConfig(
model="claude-3-5-haiku-20241022",
model="claude-haiku-4-5-20251001",
model_endpoint_type="anthropic",
model_endpoint="https://api.anthropic.com/v1",
context_window=200000,
@@ -363,7 +363,7 @@ async def test_anthropic_prefix_caching_via_adapter():
client = AnthropicClient()
llm_config = LLMConfig(
model="claude-3-5-haiku-20241022",
model="claude-haiku-4-5-20251001",
model_endpoint_type="anthropic",
model_endpoint="https://api.anthropic.com/v1",
context_window=200000,