feat: support approve tool call request (#4333)

This commit is contained in:
cthomas
2025-08-31 11:26:21 -07:00
committed by GitHub
parent 0854ba0d01
commit 86b073d726
6 changed files with 891 additions and 770 deletions

View File

@@ -161,6 +161,10 @@ async def _prepare_in_context_messages_no_persist_async(
f"Invalid approval request ID. Expected '{current_in_context_messages[-1].id}' "
f"but received '{input_messages[0].approval_request_id}'."
)
if input_messages[0].approve:
new_in_context_messages = []
else:
raise NotImplementedError("Deny flow not yet supported")
else:
# User is trying to send a regular message
if current_in_context_messages[-1].role == "approval":
@@ -169,10 +173,10 @@ async def _prepare_in_context_messages_no_persist_async(
"Please approve or deny the pending request before continuing."
)
# Create a new user message from the input but dont store it yet
new_in_context_messages = create_input_messages(
input_messages=input_messages, agent_id=agent_state.id, timezone=agent_state.timezone, actor=actor
)
# Create a new user message from the input but dont store it yet
new_in_context_messages = create_input_messages(
input_messages=input_messages, agent_id=agent_state.id, timezone=agent_state.timezone, actor=actor
)
return current_in_context_messages, new_in_context_messages

File diff suppressed because it is too large. (Load Diff)

View File

@@ -769,11 +769,11 @@ class Message(BaseMessage):
"role": self.role,
}
elif self.role == "assistant":
elif self.role == "assistant" or self.role == "approval":
assert self.tool_calls is not None or text_content is not None
openai_message = {
"content": None if (put_inner_thoughts_in_kwargs and self.tool_calls is not None) else text_content,
"role": self.role,
"role": "assistant",
}
if self.tool_calls is not None:

View File

@@ -50,7 +50,7 @@ def cast_message_to_subtype(m_dict: dict) -> ChatMessage:
return SystemMessage(**m_dict)
elif role == "user":
return UserMessage(**m_dict)
elif role == "assistant":
elif role == "assistant" or role == "approval":
return AssistantMessage(**m_dict)
elif role == "tool":
return ToolMessage(**m_dict)

View File

@@ -233,34 +233,36 @@ def create_letta_messages_from_llm_response(
pre_computed_assistant_message_id: Optional[str] = None,
llm_batch_item_id: Optional[str] = None,
step_id: str | None = None,
is_approval: bool | None = None,
) -> List[Message]:
messages = []
# Construct the tool call with the assistant's message
# Force set request_heartbeat in tool_args to calculated continue_stepping
function_arguments[REQUEST_HEARTBEAT_PARAM] = continue_stepping
tool_call = OpenAIToolCall(
id=tool_call_id,
function=OpenAIFunction(
name=function_name,
arguments=json.dumps(function_arguments),
),
type="function",
)
# TODO: Use ToolCallContent instead of tool_calls
# TODO: This helps preserve ordering
assistant_message = Message(
role=MessageRole.assistant,
content=reasoning_content if reasoning_content else [],
agent_id=agent_id,
model=model,
tool_calls=[tool_call],
tool_call_id=tool_call_id,
created_at=get_utc_time(),
batch_item_id=llm_batch_item_id,
)
if pre_computed_assistant_message_id:
assistant_message.id = pre_computed_assistant_message_id
messages.append(assistant_message)
if not is_approval:
# Construct the tool call with the assistant's message
# Force set request_heartbeat in tool_args to calculated continue_stepping
function_arguments[REQUEST_HEARTBEAT_PARAM] = continue_stepping
tool_call = OpenAIToolCall(
id=tool_call_id,
function=OpenAIFunction(
name=function_name,
arguments=json.dumps(function_arguments),
),
type="function",
)
# TODO: Use ToolCallContent instead of tool_calls
# TODO: This helps preserve ordering
assistant_message = Message(
role=MessageRole.assistant,
content=reasoning_content if reasoning_content else [],
agent_id=agent_id,
model=model,
tool_calls=[tool_call],
tool_call_id=tool_call_id,
created_at=get_utc_time(),
batch_item_id=llm_batch_item_id,
)
if pre_computed_assistant_message_id:
assistant_message.id = pre_computed_assistant_message_id
messages.append(assistant_message)
# TODO: Use ToolReturnContent instead of TextContent
# TODO: This helps preserve ordering

View File

@@ -160,6 +160,8 @@ def test_send_message_with_approval_tool(
assert len(response.messages) == 2
assert response.messages[0].message_type == "reasoning_message"
assert response.messages[1].message_type == "approval_request_message"
approval_request_id = response.messages[0].id
tool_call_id = response.messages[1].tool_call.tool_call_id
# Attempt to send user message - should fail
with pytest.raises(ApiError, match="Please approve or deny the pending request before continuing"):
@@ -174,3 +176,22 @@ def test_send_message_with_approval_tool(
agent_id=agent.id,
messages=[ApprovalCreate(approve=True, approval_request_id="fake_id")],
)
response = client.agents.messages.create(
agent_id=agent.id,
messages=[
ApprovalCreate(
approve=True,
approval_request_id=approval_request_id,
),
],
)
# Basic assertion that we got a response with tool call return
assert response.messages is not None
assert len(response.messages) == 3
assert response.messages[0].message_type == "tool_return_message"
assert response.messages[0].tool_call_id == tool_call_id
assert response.messages[0].status == "success"
assert response.messages[1].message_type == "reasoning_message"
assert response.messages[2].message_type == "assistant_message"