refactor(cli): unify TUI and headless stream processing (#784)
Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
136
src/cli/helpers/approvalClassification.ts
Normal file
136
src/cli/helpers/approvalClassification.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
import type { ApprovalContext } from "../../permissions/analyzer";
|
||||
import { checkToolPermission, getToolSchema } from "../../tools/manager";
|
||||
import { safeJsonParseOr } from "./safeJsonParse";
|
||||
import type { ApprovalRequest } from "./streamProcessor";
|
||||
// Result shape of checkToolPermission (carries at least `decision`, plus an
// optional `reason` — see the missing-name branch in classifyApprovals below).
type ToolPermission = Awaited<ReturnType<typeof checkToolPermission>>;
||||
/**
 * A single approval request annotated with the permission verdict and the
 * data used to reach it.
 */
export type ClassifiedApproval<TContext = ApprovalContext | null> = {
  /** The original approval request from the stream. */
  approval: ApprovalRequest;
  /** Permission result returned by checkToolPermission for this tool call. */
  permission: ToolPermission;
  /** Caller-supplied context (via ClassifyApprovalsOptions.getContext), or null. */
  context: TContext | null;
  /** Tool arguments parsed from approval.toolArgs ({} on parse failure). */
  parsedArgs: Record<string, unknown>;
  /** Set when auto-denied for missing required args (requireArgsForAutoApprove). */
  missingRequiredArgs?: string[];
  /** Human-readable reason; set only on auto-denied entries. */
  denyReason?: string;
};
||||
/**
 * Outcome of classifyApprovals: every input approval lands in exactly one of
 * these three buckets.
 */
export type ApprovalClassification<TContext = ApprovalContext | null> = {
  /** Decision was "ask" — must be surfaced to the user. */
  needsUserInput: ClassifiedApproval<TContext>[];
  /** Decision was "allow" — safe to execute without prompting. */
  autoAllowed: ClassifiedApproval<TContext>[];
  /** Denied (explicit "deny", missing name/args, or "ask" in headless mode). */
  autoDenied: ClassifiedApproval<TContext>[];
};
||||
/** Knobs that let TUI and headless callers share classifyApprovals. */
export type ClassifyApprovalsOptions<TContext = ApprovalContext | null> = {
  /** Optional async factory for per-tool context attached to each result. */
  getContext?: (
    toolName: string,
    parsedArgs: Record<string, unknown>,
  ) => Promise<TContext>;
  /** Force "allow" decisions for matching tools back to "ask". */
  alwaysRequiresUserInput?: (toolName: string) => boolean;
  /** Headless mode: convert "ask" decisions into auto-denials. */
  treatAskAsDeny?: boolean;
  /** Deny reason used with treatAskAsDeny (defaults to a headless-mode message). */
  denyReasonForAsk?: string;
  /** Deny reason for approvals that arrive without a tool name. */
  missingNameReason?: string;
  /** When true, "allow" additionally requires all schema-required args present. */
  requireArgsForAutoApprove?: boolean;
  /** Custom deny-reason builder for the missing-required-args case. */
  missingArgsReason?: (missing: string[]) => string;
};
||||
export async function getMissingRequiredArgs(
|
||||
toolName: string,
|
||||
parsedArgs: Record<string, unknown>,
|
||||
): Promise<string[]> {
|
||||
const schema = getToolSchema(toolName);
|
||||
const required =
|
||||
(schema?.input_schema?.required as string[] | undefined) || [];
|
||||
return required.filter(
|
||||
(key) => !(key in parsedArgs) || parsedArgs[key] == null,
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Classifies a batch of tool-approval requests into three buckets —
 * needsUserInput ("ask"), autoAllowed ("allow"), and autoDenied — applying
 * the shared TUI/headless policy knobs from ClassifyApprovalsOptions.
 *
 * Deny precedence per approval: missing tool name, then "ask" under
 * treatAskAsDeny, then missing required args under requireArgsForAutoApprove,
 * then the raw permission decision.
 *
 * @param approvals - Approval requests drained from the stream.
 * @param opts - Optional policy overrides; all default to permissive no-ops.
 * @returns Buckets whose entries together cover every input approval.
 */
export async function classifyApprovals<TContext = ApprovalContext | null>(
  approvals: ApprovalRequest[],
  opts: ClassifyApprovalsOptions<TContext> = {},
): Promise<ApprovalClassification<TContext>> {
  const needsUserInput: ClassifiedApproval<TContext>[] = [];
  const autoAllowed: ClassifiedApproval<TContext>[] = [];
  const autoDenied: ClassifiedApproval<TContext>[] = [];
  const denyReasonForAsk =
    opts.denyReasonForAsk ?? "Tool requires approval (headless mode)";
  const missingNameReason =
    opts.missingNameReason ?? "Tool call incomplete - missing name";

  for (const approval of approvals) {
    const toolName = approval.toolName;
    if (!toolName) {
      // No tool name: cannot check permissions at all, so deny with a
      // synthesized permission object rather than calling checkToolPermission.
      autoDenied.push({
        approval,
        permission: { decision: "deny", reason: missingNameReason },
        context: null,
        parsedArgs: {},
        denyReason: missingNameReason,
      });
      continue;
    }

    // Malformed/empty toolArgs degrade to {} instead of throwing.
    const parsedArgs = safeJsonParseOr<Record<string, unknown>>(
      approval.toolArgs || "{}",
      {},
    );
    const permission = await checkToolPermission(toolName, parsedArgs);
    const context = opts.getContext
      ? await opts.getContext(toolName, parsedArgs)
      : null;
    let decision = permission.decision;

    // Some tools must always be confirmed even when policy says "allow".
    if (opts.alwaysRequiresUserInput?.(toolName) && decision === "allow") {
      decision = "ask";
    }

    // Headless mode: "ask" cannot be answered, so treat it as a denial.
    if (decision === "ask" && opts.treatAskAsDeny) {
      autoDenied.push({
        approval,
        permission,
        context,
        parsedArgs,
        denyReason: denyReasonForAsk,
      });
      continue;
    }

    // Refuse to auto-approve a call that is missing schema-required args.
    if (decision === "allow" && opts.requireArgsForAutoApprove) {
      const missingRequiredArgs = await getMissingRequiredArgs(
        toolName,
        parsedArgs,
      );
      if (missingRequiredArgs.length > 0) {
        const denyReason = opts.missingArgsReason
          ? opts.missingArgsReason(missingRequiredArgs)
          : `Missing required parameter${missingRequiredArgs.length > 1 ? "s" : ""}: ${missingRequiredArgs.join(", ")}`;
        autoDenied.push({
          approval,
          permission,
          context,
          parsedArgs,
          missingRequiredArgs,
          denyReason,
        });
        continue;
      }
    }

    const entry: ClassifiedApproval<TContext> = {
      approval,
      permission,
      context,
      parsedArgs,
    };

    // Route by the (possibly escalated) decision; "allow" is the fallthrough.
    if (decision === "ask") {
      needsUserInput.push(entry);
    } else if (decision === "deny") {
      autoDenied.push(entry);
    } else {
      autoAllowed.push(entry);
    }
  }

  return { needsUserInput, autoAllowed, autoDenied };
}
||||
@@ -13,6 +13,7 @@ import {
|
||||
markIncompleteToolsAsCancelled,
|
||||
onChunk,
|
||||
} from "./accumulator";
|
||||
import type { ErrorInfo } from "./streamProcessor";
|
||||
import { StreamProcessor } from "./streamProcessor";
|
||||
|
||||
export type ApprovalRequest = {
|
||||
@@ -21,6 +22,27 @@ export type ApprovalRequest = {
|
||||
toolArgs: string;
|
||||
};
|
||||
|
||||
/** Per-chunk information handed to a DrainStreamHook during drainStream. */
export type DrainStreamHookContext = {
  /** The raw streaming chunk just processed. */
  chunk: LettaStreamingResponse;
  /** Whether the stream processor decided this chunk should be emitted. */
  shouldOutput: boolean;
  /** Error details from processChunk, when the chunk carried an error. */
  errorInfo?: ErrorInfo;
  /** Updated approval request from processChunk, when the chunk changed one. */
  updatedApproval?: ApprovalRequest;
  /** The processor instance, for hooks that need extra stream state. */
  streamProcessor: StreamProcessor;
};
||||
/** Optional overrides a DrainStreamHook may return; omitted fields keep defaults. */
export type DrainStreamHookResult = {
  /** Override whether the chunk is output. */
  shouldOutput?: boolean;
  /** Override whether the chunk is accumulated into the buffers. */
  shouldAccumulate?: boolean;
  /** When set, drainStream stops with this stop reason after the chunk. */
  stopReason?: StopReasonType;
};
||||
/**
 * Hook invoked after each processed chunk; may return (sync or async)
 * overrides for output/accumulation/stop behavior, or undefined to keep the
 * defaults.
 */
export type DrainStreamHook = (
  ctx: DrainStreamHookContext,
) =>
  | DrainStreamHookResult
  | undefined
  | Promise<DrainStreamHookResult | undefined>;
|
||||
type DrainResult = {
|
||||
stopReason: StopReasonType;
|
||||
lastRunId?: string | null;
|
||||
@@ -37,6 +59,7 @@ export async function drainStream(
|
||||
refresh: () => void,
|
||||
abortSignal?: AbortSignal,
|
||||
onFirstMessage?: () => void,
|
||||
onChunkProcessed?: DrainStreamHook,
|
||||
): Promise<DrainResult> {
|
||||
const startTime = performance.now();
|
||||
|
||||
@@ -130,7 +153,8 @@ export async function drainStream(
|
||||
logTiming(`TTFT: ${formatDuration(ttft)} (from POST to first content)`);
|
||||
}
|
||||
|
||||
const { shouldOutput } = streamProcessor.processChunk(chunk);
|
||||
const { shouldOutput, errorInfo, updatedApproval } =
|
||||
streamProcessor.processChunk(chunk);
|
||||
|
||||
// Check abort signal before processing - don't add data after interrupt
|
||||
if (abortSignal?.aborted) {
|
||||
@@ -140,10 +164,40 @@ export async function drainStream(
|
||||
break;
|
||||
}
|
||||
|
||||
if (shouldOutput) {
|
||||
let shouldOutputChunk = shouldOutput;
|
||||
let shouldAccumulate = shouldOutput;
|
||||
|
||||
if (onChunkProcessed) {
|
||||
const hookResult = await onChunkProcessed({
|
||||
chunk,
|
||||
shouldOutput: shouldOutputChunk,
|
||||
errorInfo,
|
||||
updatedApproval,
|
||||
streamProcessor,
|
||||
});
|
||||
if (hookResult?.shouldOutput !== undefined) {
|
||||
shouldOutputChunk = hookResult.shouldOutput;
|
||||
}
|
||||
if (hookResult?.shouldAccumulate !== undefined) {
|
||||
shouldAccumulate = hookResult.shouldAccumulate;
|
||||
} else {
|
||||
shouldAccumulate = shouldOutputChunk;
|
||||
}
|
||||
if (hookResult?.stopReason) {
|
||||
stopReason = hookResult.stopReason;
|
||||
}
|
||||
} else {
|
||||
shouldAccumulate = shouldOutputChunk;
|
||||
}
|
||||
|
||||
if (shouldAccumulate) {
|
||||
onChunk(buffers, chunk);
|
||||
queueMicrotask(refresh);
|
||||
}
|
||||
|
||||
if (stopReason) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// Handle stream errors (e.g., JSON parse errors from SDK, network issues)
|
||||
@@ -270,6 +324,7 @@ export async function drainStream(
|
||||
* @param refresh - Callback to refresh UI
|
||||
* @param abortSignal - Optional abort signal for cancellation
|
||||
* @param onFirstMessage - Optional callback to invoke on first message chunk
|
||||
* @param onChunkProcessed - Optional hook to observe/override per-chunk behavior
|
||||
* @returns Result with stop_reason, approval info, and timing
|
||||
*/
|
||||
export async function drainStreamWithResume(
|
||||
@@ -278,6 +333,7 @@ export async function drainStreamWithResume(
|
||||
refresh: () => void,
|
||||
abortSignal?: AbortSignal,
|
||||
onFirstMessage?: () => void,
|
||||
onChunkProcessed?: DrainStreamHook,
|
||||
): Promise<DrainResult> {
|
||||
const overallStartTime = performance.now();
|
||||
|
||||
@@ -288,6 +344,7 @@ export async function drainStreamWithResume(
|
||||
refresh,
|
||||
abortSignal,
|
||||
onFirstMessage,
|
||||
onChunkProcessed,
|
||||
);
|
||||
|
||||
// If stream ended without proper stop_reason and we have resume info, try once to reconnect
|
||||
@@ -333,6 +390,8 @@ export async function drainStreamWithResume(
|
||||
buffers,
|
||||
refresh,
|
||||
abortSignal,
|
||||
undefined,
|
||||
onChunkProcessed,
|
||||
);
|
||||
|
||||
// Use the resume result (should have proper stop_reason now)
|
||||
|
||||
Reference in New Issue
Block a user