feat: Add max tokens exceeded to stop reasons [LET-6480] (#6576)

This commit is contained in:
Kevin Lin
2025-12-14 14:39:23 -08:00
committed by Caren Thomas
parent efac48e9ea
commit 4b9485a484
5 changed files with 33 additions and 1 deletion

View File

@@ -63,6 +63,18 @@ class LettaLLMAdapter(ABC):
"""
raise NotImplementedError
@property
def finish_reason(self) -> str | None:
"""
Get the finish_reason from the LLM response.
Returns:
str | None: The finish_reason if available, None otherwise
"""
if self.chat_completions_response and self.chat_completions_response.choices:
return self.chat_completions_response.choices[0].finish_reason
return None
def supports_token_streaming(self) -> bool:
"""
Check if the adapter supports token-level streaming.