refactor(cli): unify turn recovery policy between TUI and headless (#950)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
Charles Packer
2026-02-12 21:52:13 -08:00
committed by GitHub
parent 45bd0a6af9
commit 1f44612a01
7 changed files with 605 additions and 222 deletions

View File

@@ -35,6 +35,7 @@ import {
isApprovalPendingError,
isInvalidToolCallIdsError,
rebuildInputWithFreshDenials,
+  shouldAttemptApprovalRecovery,
} from "../agent/approval-recovery";
import { prefetchAvailableModelHandles } from "../agent/available-models";
import { getResumeData } from "../agent/check-approval";
@@ -3157,8 +3158,12 @@ export default function App({
// Shares llmApiErrorRetriesRef budget with LLM transient-error retries (max 3 per turn).
// Resets on each processConversation entry and on success.
if (
-        preStreamAction === "resolve_approval_pending" &&
-        llmApiErrorRetriesRef.current < LLM_API_ERROR_MAX_RETRIES
+        shouldAttemptApprovalRecovery({
+          approvalPendingDetected:
+            preStreamAction === "resolve_approval_pending",
+          retries: llmApiErrorRetriesRef.current,
+          maxRetries: LLM_API_ERROR_MAX_RETRIES,
+        })
) {
llmApiErrorRetriesRef.current += 1;
try {
@@ -4315,8 +4320,11 @@ export default function App({
isApprovalPendingError(latestErrorText);
if (
-          approvalPendingDetected &&
-          llmApiErrorRetriesRef.current < LLM_API_ERROR_MAX_RETRIES
+          shouldAttemptApprovalRecovery({
+            approvalPendingDetected,
+            retries: llmApiErrorRetriesRef.current,
+            maxRetries: LLM_API_ERROR_MAX_RETRIES,
+          })
) {
llmApiErrorRetriesRef.current += 1;