feat: track llm provider traces and tracking steps in async agent loop (#2219)

This commit is contained in:
Andy Li
2025-05-19 15:50:56 -07:00
committed by GitHub
parent 969f0d65c8
commit a78abc610e
28 changed files with 920 additions and 82 deletions

View File

@@ -40,6 +40,9 @@ class OpenAIStreamingInterface:
self.letta_assistant_message_id = Message.generate_id()
self.letta_tool_message_id = Message.generate_id()
self.message_id = None
self.model = None
# token counters
self.input_tokens = 0
self.output_tokens = 0
@@ -69,6 +72,10 @@ class OpenAIStreamingInterface:
prev_message_type = None
message_index = 0
async for chunk in stream:
if not self.model or not self.message_id:
self.model = chunk.model
self.message_id = chunk.id
# track usage
if chunk.usage:
self.input_tokens += chunk.usage.prompt_tokens