fix: add LLMCallType enum and ensure call_type is set on all provider traces (#9258)
Co-authored-by: Letta <noreply@letta.com>
This commit is contained in the repository history.
Committed by: Caren Thomas
Parent commit: 96c4b7175e
Commit: eaf64fb510
@@ -23,7 +23,7 @@ from letta.local_llm.constants import INNER_THOUGHTS_KWARG
|
||||
from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
|
||||
from letta.orm.user import User
|
||||
from letta.otel.tracing import log_event, trace_method
|
||||
from letta.schemas.enums import ProviderCategory
|
||||
from letta.schemas.enums import LLMCallType, ProviderCategory
|
||||
from letta.schemas.llm_config import LLMConfig
|
||||
from letta.schemas.message import Message
|
||||
from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
|
||||
@@ -245,7 +245,7 @@ def create(
|
||||
request_json=prepare_openai_payload(data),
|
||||
response_json=response.model_json_schema(),
|
||||
step_id=step_id,
|
||||
call_type="agent_step",
|
||||
call_type=LLMCallType.agent_step,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
Reference this commit in a new issue.
Block a user.