feat: Asyncify model listing for Together (#2286)
This commit is contained in:
@@ -1027,14 +1027,24 @@ class TogetherProvider(OpenAIProvider):
|
||||
def list_llm_models(self) -> List[LLMConfig]:
    """Synchronously fetch the Together model list and convert it to LLMConfigs.

    Returns:
        List[LLMConfig]: one config per model reported by the Together
        ``/models`` endpoint.
    """
    from letta.llm_api.openai import openai_get_model_list

    # Fix: the previous version performed the HTTP request twice (a leftover
    # `response = openai_get_model_list(...)` whose result was never used).
    # One round-trip is enough; parsing is shared with the async variant
    # via _list_llm_models.
    models = openai_get_model_list(self.base_url, api_key=self.api_key)
    return self._list_llm_models(models)
||||
async def list_llm_models_async(self) -> List[LLMConfig]:
    """Async counterpart of ``list_llm_models``.

    Fetches the Together model list without blocking the event loop, then
    delegates response parsing to the shared ``_list_llm_models`` helper.

    Returns:
        List[LLMConfig]: one config per model reported by the Together API.
    """
    from letta.llm_api.openai import openai_get_model_list_async

    raw_models = await openai_get_model_list_async(self.base_url, api_key=self.api_key)
    return self._list_llm_models(raw_models)
||||
def _list_llm_models(self, models) -> List[LLMConfig]:
|
||||
pass
|
||||
|
||||
# TogetherAI's response is missing the 'data' field
|
||||
# assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
|
||||
if "data" in response:
|
||||
data = response["data"]
|
||||
if "data" in models:
|
||||
data = models["data"]
|
||||
else:
|
||||
data = response
|
||||
data = models
|
||||
|
||||
configs = []
|
||||
for model in data:
|
||||
|
||||
@@ -178,6 +178,23 @@ def test_together():
|
||||
# assert embedding_models[0].handle == f"{provider.name}/{embedding_models[0].embedding_model}"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_together_async():
    """Async variant of the Together provider test: listing models must work
    through ``list_llm_models_async`` and produce well-formed handles."""
    together_provider = TogetherProvider(
        name="together",
        api_key=model_settings.together_api_key,
        default_prompt_formatter=model_settings.default_prompt_formatter,
    )

    llm_models = await together_provider.list_llm_models_async()
    assert len(llm_models) > 0
    assert llm_models[0].handle == f"{together_provider.name}/{llm_models[0].model}"

    # TODO: We don't have embedding models on together for CI
    # embedding_models = provider.list_embedding_models()
    # assert len(embedding_models) > 0
    # assert embedding_models[0].handle == f"{provider.name}/{embedding_models[0].embedding_model}"
||||
|
||||
|
||||
# TODO: Add back in, difficulty adding this to CI properly, need boto credentials
|
||||
# def test_anthropic_bedrock():
|
||||
# from letta.settings import model_settings
|
||||
|
||||
Reference in New Issue
Block a user