diff --git a/letta/llm_api/llm_client.py b/letta/llm_api/llm_client.py
index 0941d854..5298a974 100644
--- a/letta/llm_api/llm_client.py
+++ b/letta/llm_api/llm_client.py
@@ -58,7 +58,7 @@ class LLMClient:
                     put_inner_thoughts_first=put_inner_thoughts_first,
                     actor=actor,
                 )
-            case ProviderType.openai | ProviderType.ollama:
+            case ProviderType.openai | ProviderType.ollama | ProviderType.hugging_face:
                 from letta.llm_api.openai_client import OpenAIClient
 
                 return OpenAIClient(
diff --git a/letta/schemas/enums.py b/letta/schemas/enums.py
index 899da24b..de4a48ec 100644
--- a/letta/schemas/enums.py
+++ b/letta/schemas/enums.py
@@ -3,21 +3,22 @@ from enum import Enum, StrEnum
 
 class ProviderType(str, Enum):
     anthropic = "anthropic"
+    azure = "azure"
+    bedrock = "bedrock"
+    cerebras = "cerebras"
+    deepseek = "deepseek"
     google_ai = "google_ai"
     google_vertex = "google_vertex"
-    openai = "openai"
+    groq = "groq"
+    hugging_face = "hugging-face"
     letta = "letta"
-    deepseek = "deepseek"
-    cerebras = "cerebras"
     lmstudio_openai = "lmstudio_openai"
-    xai = "xai"
     mistral = "mistral"
     ollama = "ollama"
-    groq = "groq"
+    openai = "openai"
     together = "together"
-    azure = "azure"
     vllm = "vllm"
-    bedrock = "bedrock"
+    xai = "xai"
 
 
 class ProviderCategory(str, Enum):