* fix: migrate Anthropic Haiku test model off retired release Update Anthropic Haiku references in integration and usage parsing tests to a supported model id so test requests stop failing with 404 model not found errors. 👾 Generated with [Letta Code](https://letta.com) Co-Authored-By: Letta <noreply@letta.com> * fix: use canonical Anthropic Haiku handle in tests Replace dated Anthropic Haiku handle references with the canonical provider handle so handle-based model resolution does not fail in batch and client tests. 👾 Generated with [Letta Code](https://letta.com) Co-Authored-By: Letta <noreply@letta.com> --------- Co-authored-by: Letta <noreply@letta.com>
40 lines
1.8 KiB
Python
40 lines
1.8 KiB
Python
from typing import Dict
# Mapping of provider name -> {provider model id -> canonical short handle}.
#
# Each inner dict translates a provider-specific (often dated) model
# identifier into the stable, human-friendly handle used elsewhere for
# handle-based model resolution. Model ids not listed here are presumably
# passed through unchanged — TODO confirm against the lookup site.
LLM_HANDLE_OVERRIDES: Dict[str, Dict[str, str]] = {
    "anthropic": {
        "claude-3-5-haiku-20241022": "claude-3-5-haiku",
        "claude-haiku-4-5-20251001": "claude-haiku-4-5",
        "claude-3-5-sonnet-20241022": "claude-3-5-sonnet",
        "claude-3-opus-20240229": "claude-3-opus",
    },
    "openai": {
        "chatgpt-4o-latest": "chatgpt-4o",
        # Identity entries keep the canonical handle explicit for these ids.
        "gpt-3.5-turbo": "gpt-3.5-turbo",
        "gpt-3.5-turbo-0125": "gpt-3.5-turbo-jan",
        "gpt-3.5-turbo-1106": "gpt-3.5-turbo-nov",
        "gpt-3.5-turbo-16k": "gpt-3.5-turbo-16k",
        "gpt-3.5-turbo-instruct": "gpt-3.5-turbo-instruct",
        "gpt-4-0125-preview": "gpt-4-preview-jan",
        "gpt-4-0613": "gpt-4-june",
        "gpt-4-1106-preview": "gpt-4-preview-nov",
        "gpt-4-turbo-2024-04-09": "gpt-4-turbo-apr",
        "gpt-4o-2024-05-13": "gpt-4o-may",
        "gpt-4o-2024-08-06": "gpt-4o-aug",
        "gpt-4o-mini-2024-07-18": "gpt-4o-mini-jul",
    },
    "together": {
        # Together ids are org-prefixed HF-style paths; handles drop the org.
        "Qwen/Qwen2.5-72B-Instruct-Turbo": "qwen-2.5-72b-instruct",
        "meta-llama/Llama-3-70b-chat-hf": "llama-3-70b",
        "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": "llama-3-70b-instruct",
        "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": "llama-3.1-405b-instruct",
        "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": "llama-3.1-70b-instruct",
        "meta-llama/Llama-3.3-70B-Instruct-Turbo": "llama-3.3-70b-instruct",
        "mistralai/Mistral-7B-Instruct-v0.2": "mistral-7b-instruct-v2",
        "mistralai/Mistral-7B-Instruct-v0.3": "mistral-7b-instruct-v3",
        "mistralai/Mixtral-8x22B-Instruct-v0.1": "mixtral-8x22b-instruct",
        "mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
        "mistralai/Mixtral-8x7B-v0.1": "mixtral-8x7b",
        "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": "hermes-2-mixtral",
    },
}