fix: Fix Anthropic step parallel tool calling and add tests [LET-5438] (#5379)

* Fix Anthropic step parallel tool calling and add tests

* Remove print statements
This commit is contained in:
Matthew Zhou
2025-10-13 11:58:58 -07:00
committed by Caren Thomas
parent 248c7c0b44
commit 25f140bd13
3 changed files with 9 additions and 10 deletions

View File

@@ -71,10 +71,9 @@ class SimpleLLMRequestAdapter(LettaLLMRequestAdapter):
self.content = self.reasoning_content + (self.content or [])
# Extract tool call
if self.chat_completions_response.choices[0].message.tool_calls:
self.tool_call = self.chat_completions_response.choices[0].message.tool_calls[0]
else:
self.tool_call = None
tool_calls = self.chat_completions_response.choices[0].message.tool_calls or []
self.tool_calls = list(tool_calls)
self.tool_call = self.tool_calls[0] if self.tool_calls else None
# Extract usage statistics
self.usage.step_count = 1

View File

@@ -575,7 +575,7 @@ class AnthropicClient(LLMClientBase):
reasoning_content = None
reasoning_content_signature = None
redacted_reasoning_content = None
tool_calls = None
tool_calls: list[ToolCall] = []
if len(response.content) > 0:
for content_part in response.content:
@@ -594,7 +594,7 @@ class AnthropicClient(LLMClientBase):
arguments = str(tool_input["function"]["arguments"])
else:
arguments = json.dumps(tool_input, indent=2)
tool_calls = [
tool_calls.append(
ToolCall(
id=content_part.id,
type="function",
@@ -603,7 +603,7 @@ class AnthropicClient(LLMClientBase):
arguments=arguments,
),
)
]
)
if content_part.type == "thinking":
reasoning_content = content_part.thinking
reasoning_content_signature = content_part.signature
@@ -623,7 +623,7 @@ class AnthropicClient(LLMClientBase):
reasoning_content=reasoning_content,
reasoning_content_signature=reasoning_content_signature,
redacted_reasoning_content=redacted_reasoning_content,
tool_calls=tool_calls,
tool_calls=tool_calls or None,
),
)

View File

@@ -522,9 +522,9 @@ async def test_greeting(
TESTED_LLM_CONFIGS,
ids=[c.model for c in TESTED_LLM_CONFIGS],
)
@pytest.mark.parametrize("send_type", ["stream_tokens"]) # ["step", "stream_steps", "stream_tokens", "stream_tokens_background"])
@pytest.mark.parametrize("send_type", ["step", "stream_tokens"]) # ["step", "stream_steps", "stream_tokens", "stream_tokens_background"])
@pytest.mark.asyncio(loop_scope="function")
async def test_parallel_tool_call_anthropic_streaming(
async def test_parallel_tool_call_anthropic(
disable_e2b_api_key: Any,
client: AsyncLetta,
agent_state: AgentState,