chore: clean up legacy azure path (#3904)

Authored by cthomas on 2025-08-13 14:59:15 -07:00; committed by GitHub
parent e967f6ed6b · commit 15d8cc224c
2 changed files with 0 additions and 63 deletions

View File

@@ -3,12 +3,6 @@ from collections import defaultdict
import requests
from openai import AzureOpenAI

from letta.llm_api.openai import prepare_openai_payload
from letta.schemas.llm_config import LLMConfig
from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
from letta.schemas.openai.chat_completions import ChatCompletionRequest
from letta.settings import ModelSettings


def get_azure_chat_completions_endpoint(base_url: str, model: str, api_version: str):
    return f"{base_url}/openai/deployments/{model}/chat/completions?api-version={api_version}"
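The endpoint helper above (which survives this cleanup) just interpolates its arguments into the Azure OpenAI deployment URL. As a quick illustration, with placeholder values that are not taken from this diff:

```python
# Hypothetical values, purely to show the URL shape the helper produces.
base_url = "https://my-resource.openai.azure.com"
model = "gpt-4o-deployment"   # the Azure *deployment* name, not the base model name
api_version = "2024-06-01"

url = get_azure_chat_completions_endpoint(base_url, model, api_version)
# -> "https://my-resource.openai.azure.com/openai/deployments/gpt-4o-deployment/chat/completions?api-version=2024-06-01"
```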
@@ -98,21 +92,3 @@ def azure_openai_get_embeddings_model_list(base_url: str, api_key: str, api_vers
    model_options = [m for m in model_list if valid_embedding_model(m)]
    return model_options


def azure_openai_chat_completions_request(
    model_settings: ModelSettings, llm_config: LLMConfig, chat_completion_request: ChatCompletionRequest
) -> ChatCompletionResponse:
    """https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions"""
    assert model_settings.azure_api_key is not None, "Missing required api key field when calling Azure OpenAI"
    assert model_settings.azure_api_version is not None, "Missing required api version field when calling Azure OpenAI"
    assert model_settings.azure_base_url is not None, "Missing required base url field when calling Azure OpenAI"

    data = prepare_openai_payload(chat_completion_request)
    client = AzureOpenAI(
        api_key=model_settings.azure_api_key, api_version=model_settings.azure_api_version, azure_endpoint=model_settings.azure_base_url
    )
    chat_completion = client.chat.completions.create(**data)
    return ChatCompletionResponse(**chat_completion.model_dump())
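The deleted wrapper was a thin layer over the openai SDK's AzureOpenAI client. A minimal standalone sketch of what it did, with placeholder endpoint, deployment name, API version, and message (none of these values come from this diff):

```python
# Rough sketch of the behavior the removed wrapper encapsulated.
# All concrete values below are hypothetical placeholders.
from openai import AzureOpenAI

client = AzureOpenAI(
    api_key="<azure-api-key>",
    api_version="2024-06-01",
    azure_endpoint="https://my-resource.openai.azure.com",
)
chat_completion = client.chat.completions.create(
    model="gpt-4o-deployment",  # Azure deployment name
    messages=[{"role": "user", "content": "Hello"}],
)
print(chat_completion.model_dump())
```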

View File

@@ -13,7 +13,6 @@ from letta.llm_api.anthropic import (
    anthropic_chat_completions_request,
)
from letta.llm_api.aws_bedrock import has_valid_aws_credentials
from letta.llm_api.azure_openai import azure_openai_chat_completions_request
from letta.llm_api.deepseek import build_deepseek_chat_completions_request, convert_deepseek_response_to_chatcompletion
from letta.llm_api.helpers import add_inner_thoughts_to_functions, unpack_all_inner_thoughts_from_kwargs
from letta.llm_api.openai import (
@@ -312,44 +311,6 @@ def create(
        return response

    # azure
    elif llm_config.model_endpoint_type == "azure":
        if stream:
            raise NotImplementedError(f"Streaming not yet implemented for {llm_config.model_endpoint_type}")

        if model_settings.azure_api_key is None:
            raise LettaConfigurationError(
                message="Azure API key is missing. Did you set AZURE_API_KEY in your env?", missing_fields=["azure_api_key"]
            )
        if model_settings.azure_base_url is None:
            raise LettaConfigurationError(
                message="Azure base url is missing. Did you set AZURE_BASE_URL in your env?", missing_fields=["azure_base_url"]
            )
        if model_settings.azure_api_version is None:
            raise LettaConfigurationError(
                message="Azure API version is missing. Did you set AZURE_API_VERSION in your env?", missing_fields=["azure_api_version"]
            )

        # Set the llm config model_endpoint from model_settings
        # For Azure, this model_endpoint is required to be configured via env variable, so users don't need to provide it in the LLM config
        llm_config.model_endpoint = model_settings.azure_base_url

        chat_completion_request = build_openai_chat_completions_request(
            llm_config, messages, user_id, functions, function_call, use_tool_naming
        )

        response = azure_openai_chat_completions_request(
            model_settings=model_settings,
            llm_config=llm_config,
            chat_completion_request=chat_completion_request,
        )

        if llm_config.put_inner_thoughts_in_kwargs:
            response = unpack_all_inner_thoughts_from_kwargs(response=response, inner_thoughts_key=INNER_THOUGHTS_KWARG)

        return response

    elif llm_config.model_endpoint_type == "anthropic":
        if not use_tool_naming:
            raise NotImplementedError("Only tool calling supported on Anthropic API requests")