fix: models need to be paginated (#9621)
This commit is contained in:
@@ -182,7 +182,11 @@ class AnthropicProvider(Provider):
|
||||
raise ValueError("No API key provided")
|
||||
|
||||
try:
|
||||
models = await anthropic_client.models.list()
|
||||
# Auto-paginate through all pages to ensure we get every model.
|
||||
# The default page size is 20, and Anthropic now has more models than that.
|
||||
models_data = []
|
||||
async for model in anthropic_client.models.list():
|
||||
models_data.append(model.model_dump())
|
||||
except AttributeError as e:
|
||||
if "_set_private_attributes" in str(e):
|
||||
raise LLMError(
|
||||
@@ -191,10 +195,6 @@ class AnthropicProvider(Provider):
|
||||
)
|
||||
raise
|
||||
|
||||
models_json = models.model_dump()
|
||||
assert "data" in models_json, f"Anthropic model query response missing 'data' field: {models_json}"
|
||||
models_data = models_json["data"]
|
||||
|
||||
return self._list_llm_models(models_data)
|
||||
|
||||
def _list_llm_models(self, models) -> list[LLMConfig]:
|
||||
|
||||
@@ -557,16 +557,31 @@ async def test_server_startup_syncs_base_providers(default_user, default_organiz
|
||||
async def mock_openai_get_model_list_async(*args, **kwargs):
|
||||
return mock_openai_models
|
||||
|
||||
# Mock Anthropic models.list() response
|
||||
from unittest.mock import MagicMock
|
||||
# Mock Anthropic models.list() response as an async iterable
|
||||
# (the real SDK returns an AsyncPage that supports async iteration)
|
||||
|
||||
mock_anthropic_response = MagicMock()
|
||||
mock_anthropic_response.model_dump.return_value = mock_anthropic_models
|
||||
class MockAnthropicModelItem:
    """Stand-in for one model entry from the Anthropic SDK.

    Mirrors the SDK object's ``model_dump()`` accessor by echoing back
    the raw dict it was constructed from.
    """

    def __init__(self, data):
        self._data = data

    def model_dump(self):
        """Return the raw dict this item wraps."""
        return self._data


class MockAnthropicAsyncPage:
    """Stand-in for the SDK's ``AsyncPage``.

    Wraps each raw dict in a :class:`MockAnthropicModelItem` and yields
    the items via async iteration, the same way the real SDK's
    auto-pagination does.
    """

    def __init__(self, items):
        # Wrap eagerly so every yielded element supports .model_dump().
        self._items = [MockAnthropicModelItem(raw) for raw in items]

    async def __aiter__(self):
        # Async-generator form of __aiter__: calling it produces a fresh
        # async iterator each time, equivalent to delegating to a private
        # async-generator helper — just written inline.
        for entry in self._items:
            yield entry
|
||||
|
||||
# Mock the Anthropic AsyncAnthropic client
|
||||
class MockAnthropicModels:
    """Mock of the Anthropic client's ``models`` namespace."""

    async def list(self):
        """Return an async-iterable page of models.

        Matches the real SDK, whose ``models.list()`` result is
        async-iterated to auto-paginate through every model.
        """
        # The stale pre-pagination `return mock_anthropic_response` that
        # preceded this line made the paginated return unreachable dead
        # code; only the async-page return is kept.
        return MockAnthropicAsyncPage(mock_anthropic_models["data"])
|
||||
|
||||
class MockAsyncAnthropic:
|
||||
def __init__(self, *args, **kwargs):
|
||||
@@ -833,12 +848,9 @@ async def test_server_startup_handles_api_errors_gracefully(default_user, defaul
|
||||
async def mock_openai_fail(*args, **kwargs):
|
||||
raise Exception("OpenAI API is down")
|
||||
|
||||
# Mock Anthropic to succeed
|
||||
from unittest.mock import MagicMock
|
||||
# Mock Anthropic to succeed (as async iterable, matching real SDK pagination)
|
||||
|
||||
mock_anthropic_response = MagicMock()
|
||||
mock_anthropic_response.model_dump.return_value = {
|
||||
"data": [
|
||||
mock_anthropic_data = [
|
||||
{
|
||||
"id": "claude-3-5-sonnet-20241022",
|
||||
"type": "model",
|
||||
@@ -846,11 +858,28 @@ async def test_server_startup_handles_api_errors_gracefully(default_user, defaul
|
||||
"created_at": "2024-10-22T00:00:00Z",
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
class MockAnthropicModelItem:
    """Minimal fake of an Anthropic SDK model object.

    Only ``model_dump()`` is needed by the code under test; it simply
    hands back the dict supplied at construction time.
    """

    def __init__(self, data):
        self._data = data

    def model_dump(self):
        """Return the wrapped raw dict unchanged."""
        return self._data


class MockAnthropicAsyncPage:
    """Fake of the SDK's ``AsyncPage`` supporting ``async for``.

    Each raw dict is wrapped as a :class:`MockAnthropicModelItem`, and
    iteration yields the wrapped items one by one, emulating the SDK's
    transparent page-by-page iteration.
    """

    def __init__(self, items):
        # list(map(...)) rather than a comprehension — same result:
        # every element becomes a model item up front.
        self._items = list(map(MockAnthropicModelItem, items))

    def __aiter__(self):
        # Delegate to a private async generator; each __aiter__ call
        # hands out a fresh iterator over the wrapped items.
        return self._iterate()

    async def _iterate(self):
        for model_item in self._items:
            yield model_item
|
||||
|
||||
class MockAnthropicModels:
    """Mock of the Anthropic client's ``models`` namespace."""

    async def list(self):
        """Return an async-iterable page built from ``mock_anthropic_data``.

        Matches the real SDK's pagination contract: callers async-iterate
        the returned page to collect every model.
        """
        # The stale pre-pagination `return mock_anthropic_response` that
        # preceded this line made the paginated return unreachable dead
        # code; only the async-page return is kept.
        return MockAnthropicAsyncPage(mock_anthropic_data)
|
||||
|
||||
class MockAsyncAnthropic:
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
||||
Reference in New Issue
Block a user