fix(listen): preserve interrupt error status through next-turn persistence (#1294)

This commit is contained in:
Charles Packer
2026-03-05 22:29:08 -08:00
committed by GitHub
parent cc6f754ca3
commit 52f2cc9924
6 changed files with 918 additions and 58 deletions

View File

@@ -0,0 +1,123 @@
import type { MessageCreate } from "@letta-ai/letta-client/resources/agents/agents";
import type { ApprovalCreate } from "@letta-ai/letta-client/resources/agents/messages";
import { INTERRUPTED_BY_USER } from "../constants";
import type { ApprovalResult } from "./approval-execution";
// Union of the outgoing payload shapes accepted by the conversations API:
// regular message creates and approval responses.
type OutgoingMessage = MessageCreate | ApprovalCreate;
// Options controlling how tool-approval results are normalized before being
// persisted, so an interrupt observed in this turn survives into the next turn.
export type ApprovalNormalizationOptions = {
/**
* Structured interrupt provenance: tool_call_ids known to have been interrupted.
* When provided, these IDs are forced to persist as status=error.
*/
interruptedToolCallIds?: Iterable<string>;
/**
* Temporary fallback guard for legacy drift where tool_return text is the only
* interrupt signal. Keep false by default for strict structured behavior.
*/
allowInterruptTextFallback?: boolean;
};
/**
 * Coerce a tool_return payload of unknown shape into a plain string.
 *
 * - strings pass through unchanged
 * - arrays keep only well-formed `{ type: "text", text: string }` parts,
 *   joined with newlines and trimmed
 * - null/undefined become the empty string
 * - anything else is JSON-stringified, falling back to String() when no
 *   JSON representation exists
 */
function normalizeToolReturnText(value: unknown): string {
  if (typeof value === "string") return value;
  if (Array.isArray(value)) {
    return value
      .filter(
        (part): part is { type: "text"; text: string } =>
          !!part &&
          typeof part === "object" &&
          "type" in part &&
          (part as { type?: unknown }).type === "text" &&
          "text" in part &&
          typeof (part as { text?: unknown }).text === "string",
      )
      .map((part) => part.text)
      .join("\n")
      .trim();
  }
  if (value === null || value === undefined) return "";
  try {
    // JSON.stringify returns undefined (despite its declared string type)
    // for values with no JSON representation, e.g. functions and symbols;
    // fall back to String() so the declared return type is always honored.
    const serialized: string | undefined = JSON.stringify(value);
    return serialized ?? String(value);
  } catch {
    // Circular structures etc. — best-effort stringification.
    return String(value);
  }
}
/**
 * Force tool approvals whose calls are known to have been interrupted to
 * persist with status=error, so the interrupt outcome is not lost when the
 * approvals are written back for the next turn.
 *
 * Interrupt detection is primarily structured (tool_call_id membership in
 * options.interruptedToolCallIds); the legacy text comparison against
 * INTERRUPTED_BY_USER only runs when allowInterruptTextFallback is set.
 * Non-tool entries and approvals already marked status=error pass through
 * untouched. A null/undefined/empty input yields an empty array.
 */
export function normalizeApprovalResultsForPersistence(
  approvals: ApprovalResult[] | null | undefined,
  options: ApprovalNormalizationOptions = {},
): ApprovalResult[] {
  if (!approvals || approvals.length === 0) {
    return approvals ?? [];
  }
  const interruptedIds = new Set(options.interruptedToolCallIds ?? []);
  const normalized: ApprovalResult[] = [];
  for (const approval of approvals) {
    if (
      !approval ||
      typeof approval !== "object" ||
      !("type" in approval) ||
      approval.type !== "tool"
    ) {
      normalized.push(approval);
      continue;
    }
    const callId =
      "tool_call_id" in approval && typeof approval.tool_call_id === "string"
        ? approval.tool_call_id
        : "";
    const structuredHit = callId !== "" && interruptedIds.has(callId);
    let legacyTextHit = false;
    if (options.allowInterruptTextFallback) {
      const returnText = normalizeToolReturnText(
        "tool_return" in approval ? approval.tool_return : "",
      );
      legacyTextHit = returnText === INTERRUPTED_BY_USER;
    }
    const needsErrorStatus =
      (structuredHit || legacyTextHit) &&
      "status" in approval &&
      approval.status !== "error";
    normalized.push(
      needsErrorStatus ? { ...approval, status: "error" as const } : approval,
    );
  }
  return normalized;
}
/**
 * Apply approval-result normalization to every approval message in an
 * outgoing batch, leaving all other messages untouched.
 *
 * Each message whose type is "approval" (and that carries an approvals
 * array) is shallow-copied with its approvals replaced by the output of
 * normalizeApprovalResultsForPersistence. Empty/absent input is returned
 * as-is.
 */
export function normalizeOutgoingApprovalMessages(
  messages: OutgoingMessage[],
  options: ApprovalNormalizationOptions = {},
): OutgoingMessage[] {
  if (!messages || messages.length === 0) {
    return messages;
  }
  const result: OutgoingMessage[] = [];
  for (const outgoing of messages) {
    // Guard structure (not just a boolean) so TS narrows the message shape.
    if (
      !outgoing ||
      typeof outgoing !== "object" ||
      !("type" in outgoing) ||
      outgoing.type !== "approval" ||
      !("approvals" in outgoing)
    ) {
      result.push(outgoing);
      continue;
    }
    const rewritten = {
      ...outgoing,
      approvals: normalizeApprovalResultsForPersistence(
        outgoing.approvals as ApprovalResult[],
        options,
      ),
    } as ApprovalCreate;
    result.push(rewritten);
  }
  return result;
}

View File

@@ -9,10 +9,15 @@ import type {
LettaStreamingResponse,
} from "@letta-ai/letta-client/resources/agents/messages";
import {
type ClientTool,
captureToolExecutionContext,
waitForToolsetReady,
} from "../tools/manager";
import { isTimingsEnabled } from "../utils/timing";
import {
type ApprovalNormalizationOptions,
normalizeOutgoingApprovalMessages,
} from "./approval-result-normalization";
import { getClient } from "./client";
const streamRequestStartTimes = new WeakMap<object, number>();
@@ -43,6 +48,40 @@ export function getStreamRequestContext(
return streamRequestContexts.get(stream as object);
}
// Options for sendMessageStream / buildConversationMessagesCreateRequestBody.
export type SendMessageStreamOptions = {
// Stream individual tokens; the request builder defaults this to true.
streamTokens?: boolean;
// Run the request in background mode; the request builder defaults this to true.
background?: boolean;
agentId?: string; // Required when conversationId is "default"
// Interrupt-provenance options forwarded to normalizeOutgoingApprovalMessages.
approvalNormalization?: ApprovalNormalizationOptions;
};
/**
 * Assemble the request body for a conversation messages.create call.
 *
 * Outgoing approval messages are normalized (interrupted tool calls forced
 * to status=error) before being placed in the body. When the target is the
 * "default" conversation, the agent id is attached to the body and is
 * therefore required in opts.
 *
 * @throws Error when conversationId is "default" but opts.agentId is absent.
 */
export function buildConversationMessagesCreateRequestBody(
  conversationId: string,
  messages: Array<MessageCreate | ApprovalCreate>,
  opts: SendMessageStreamOptions = { streamTokens: true, background: true },
  clientTools: ClientTool[],
) {
  const targetsDefaultConversation = conversationId === "default";
  if (targetsDefaultConversation && !opts.agentId) {
    throw new Error(
      "agentId is required in opts when using default conversation",
    );
  }
  const normalizedMessages = normalizeOutgoingApprovalMessages(
    messages,
    opts.approvalNormalization,
  );
  return {
    messages: normalizedMessages,
    streaming: true,
    stream_tokens: opts.streamTokens ?? true,
    background: opts.background ?? true,
    client_tools: clientTools,
    include_compaction_messages: true,
    // agent_id only belongs on default-conversation requests.
    ...(targetsDefaultConversation ? { agent_id: opts.agentId } : {}),
  };
}
/**
* Send a message to a conversation and return a streaming response.
* Uses the conversations API for all conversations.
@@ -54,11 +93,7 @@ export function getStreamRequestContext(
export async function sendMessageStream(
conversationId: string,
messages: Array<MessageCreate | ApprovalCreate>,
opts: {
streamTokens?: boolean;
background?: boolean;
agentId?: string; // Required when conversationId is "default"
} = { streamTokens: true, background: true },
opts: SendMessageStreamOptions = { streamTokens: true, background: true },
// Disable SDK retries by default - state management happens outside the stream,
// so retries would violate idempotency and create race conditions
requestOptions: { maxRetries?: number; signal?: AbortSignal } = {
@@ -74,24 +109,13 @@ export async function sendMessageStream(
await waitForToolsetReady();
const { clientTools, contextId } = captureToolExecutionContext();
const isDefaultConversation = conversationId === "default";
if (isDefaultConversation && !opts.agentId) {
throw new Error(
"agentId is required in opts when using default conversation",
);
}
const resolvedConversationId = conversationId;
const requestBody = {
const requestBody = buildConversationMessagesCreateRequestBody(
conversationId,
messages,
streaming: true,
stream_tokens: opts.streamTokens ?? true,
background: opts.background ?? true,
client_tools: clientTools,
include_compaction_messages: true,
...(isDefaultConversation ? { agent_id: opts.agentId } : {}),
};
opts,
clientTools,
);
if (process.env.DEBUG) {
console.log(