From 122faa78ea58a344c5b4a3f1d41b5024ecb8507c Mon Sep 17 00:00:00 2001
From: Matthew Zhou
Date: Wed, 27 Nov 2024 15:00:42 -0800
Subject: [PATCH] fix: Remove hard failure on bad `stream_tokens` input (#2115)

---
 letta/server/rest_api/routers/v1/agents.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/letta/server/rest_api/routers/v1/agents.py b/letta/server/rest_api/routers/v1/agents.py
index 470c456b..b04fd909 100644
--- a/letta/server/rest_api/routers/v1/agents.py
+++ b/letta/server/rest_api/routers/v1/agents.py
@@ -1,4 +1,5 @@
 import asyncio
+import warnings
 from datetime import datetime
 from typing import Dict, List, Optional, Union
 
@@ -553,11 +554,11 @@
     # Disable token streaming if not OpenAI
     # TODO: cleanup this logic
     llm_config = letta_agent.agent_state.llm_config
-    if stream_steps and (llm_config.model_endpoint_type != "openai" or "inference.memgpt.ai" in llm_config.model_endpoint):
-        raise HTTPException(
-            status_code=400,
-            detail=f"Token streaming is only supported for models with type 'openai' or `inference.memgpt.ai` in the model_endpoint: agent has endpoint type {llm_config.model_endpoint_type} and {llm_config.model_endpoint}.",
+    if stream_tokens and (llm_config.model_endpoint_type != "openai" or "inference.memgpt.ai" in llm_config.model_endpoint):
+        warnings.warn(
+            f"Token streaming is only supported for models with type 'openai' or `inference.memgpt.ai` in the model_endpoint: agent has endpoint type {llm_config.model_endpoint_type} and {llm_config.model_endpoint}. Setting stream_tokens to False."
         )
+        stream_tokens = False
 
     # Create a new interface per request
     letta_agent.interface = StreamingServerInterface()