feat: add usage tracking, output mode, and cli exit mode (#7)

This commit is contained in:
Charles Packer
2025-10-25 16:50:15 -07:00
committed by GitHub
parent a8dff2d86e
commit 1d65606697
9 changed files with 261 additions and 15 deletions

View File

@@ -4,6 +4,7 @@ import { Letta } from "@letta-ai/letta-client";
import { Box, Static } from "ink";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { sendMessageStream } from "../agent/message";
import { SessionStats } from "../agent/stats";
import type { ApprovalContext } from "../permissions/analyzer";
import { permissionMode } from "../permissions/mode";
import {
@@ -24,6 +25,7 @@ import { ModelSelector } from "./components/ModelSelector";
import { PlanModeDialog } from "./components/PlanModeDialog";
// import { ReasoningMessage } from "./components/ReasoningMessage";
import { ReasoningMessage } from "./components/ReasoningMessageRich";
import { SessionStats as SessionStatsComponent } from "./components/SessionStats";
// import { ToolCallMessage } from "./components/ToolCallMessage";
import { ToolCallMessage } from "./components/ToolCallMessageRich";
// import { UserMessage } from "./components/UserMessage";
@@ -128,6 +130,12 @@ export default function App({
getRandomThinkingMessage(),
);
// Session stats tracking
const sessionStatsRef = useRef(new SessionStats());
// Show exit stats on exit
const [showExitStats, setShowExitStats] = useState(false);
// Static items (things that are done rendering and can be frozen)
const [staticItems, setStaticItems] = useState<StaticItem[]>([]);
@@ -353,11 +361,16 @@ export default function App({
while (true) {
// Stream one turn
const stream = await sendMessageStream(agentId, currentInput);
const { stopReason, approval } = await drainStream(
const { stopReason, approval, apiDurationMs } = await drainStream(
stream,
buffersRef.current,
refreshDerivedThrottled,
);
// Track API duration
sessionStatsRef.current.endTurn(apiDurationMs);
sessionStatsRef.current.updateUsageFromBuffers(buffersRef.current);
// Immediate refresh after stream completes to show final state
refreshDerived();
@@ -479,6 +492,14 @@ export default function App({
[agentId, appendError, refreshDerived, refreshDerivedThrottled],
);
const handleExit = useCallback(() => {
setShowExitStats(true);
// Give React time to render the stats, then exit
setTimeout(() => {
process.exit(0);
}, 100);
}, []);
const onSubmit = useCallback(
async (message?: string) => {
const msg = message?.trim() ?? "";
@@ -509,6 +530,12 @@ export default function App({
return;
}
// Special handling for /exit command - show stats and exit
if (msg.trim() === "/exit") {
handleExit();
return;
}
// Special handling for /stream command - toggle and save
if (msg.trim() === "/stream") {
const newValue = !tokenStreamingEnabled;
@@ -658,6 +685,7 @@ export default function App({
tokenStreamingEnabled,
refreshDerived,
agentId,
handleExit,
],
);
@@ -1061,10 +1089,21 @@ export default function App({
{/* Ensure 1 blank line above input when there are no live items */}
{liveItems.length === 0 && <Box height={1} />}
{/* Show exit stats when exiting */}
{showExitStats && (
<SessionStatsComponent
stats={sessionStatsRef.current.getSnapshot()}
/>
)}
{/* Input row - always mounted to preserve state */}
<Input
visible={
!pendingApproval && !modelSelectorOpen && !planApprovalPending
!showExitStats &&
!pendingApproval &&
!modelSelectorOpen &&
!planApprovalPending
}
streaming={streaming}
commandRunning={commandRunning}
@@ -1073,6 +1112,7 @@ export default function App({
onSubmit={onSubmit}
permissionMode={uiPermissionMode}
onPermissionModeChange={setUiPermissionMode}
onExit={handleExit}
/>
{/* Model Selector - conditionally mounted as overlay */}

View File

@@ -29,6 +29,13 @@ export const commands: Record<string, Command> = {
return "Toggling token streaming...";
},
},
"/exit": {
desc: "Exit and show session stats",
handler: () => {
// Handled specially in App.tsx to show stats
return "Exiting...";
},
},
};
/**

View File

@@ -26,6 +26,7 @@ export function Input({
onSubmit,
permissionMode: externalMode,
onPermissionModeChange,
onExit,
}: {
visible?: boolean;
streaming: boolean;
@@ -35,6 +36,7 @@ export function Input({
onSubmit: (message?: string) => void;
permissionMode?: PermissionMode;
onPermissionModeChange?: (mode: PermissionMode) => void;
onExit?: () => void;
}) {
const [value, setValue] = useState("");
const [escapePressed, setEscapePressed] = useState(false);
@@ -84,7 +86,8 @@ export function Input({
useInput((input, key) => {
if (input === "c" && key.ctrl) {
if (ctrlCPressed) {
// Second CTRL-C - exit application
// Second CTRL-C - call onExit callback then exit application
if (onExit) onExit();
process.exit(0);
} else {
// First CTRL-C - start 1-second timer
@@ -209,7 +212,7 @@ export function Input({
message={thinkingMessage}
shimmerOffset={shimmerOffset}
/>
{shouldShowTokenCount && <Text dimColor> ({tokenCount})</Text>}
{shouldShowTokenCount && <Text dimColor> ({tokenCount} )</Text>}
</Box>
</Box>
)}

View File

@@ -0,0 +1,34 @@
import { Box, Text } from "ink";
import type { SessionStatsSnapshot } from "../../agent/stats";
interface SessionStatsProps {
stats: SessionStatsSnapshot;
}
// Render a millisecond count as a short human-readable duration:
// sub-second values as whole milliseconds, everything else as seconds
// with one decimal place (e.g. 1500 -> "1.5s").
function formatDuration(ms: number): string {
  const isSubSecond = ms < 1000;
  return isSubSecond ? `${Math.round(ms)}ms` : `${(ms / 1000).toFixed(1)}s`;
}
// Format a token/step count with locale-appropriate digit grouping
// (e.g. 12345 -> "12,345" in en-US). Equivalent to n.toLocaleString():
// ECMA-402 defines toLocaleString in terms of Intl.NumberFormat with
// the same (absent) locale and options.
function formatNumber(n: number): string {
  return new Intl.NumberFormat().format(n);
}
// End-of-session summary panel: time spent inside API calls, total
// wall-clock time, and accumulated token usage for the session.
export function SessionStats({ stats }: SessionStatsProps) {
  const { usage } = stats;
  return (
    <Box flexDirection="column" paddingTop={1}>
      <Text dimColor>Total duration (API): {formatDuration(stats.totalApiMs)}</Text>
      <Text dimColor>Total duration (wall): {formatDuration(stats.totalWallMs)}</Text>
      <Text dimColor>
        Usage: {usage.stepCount} steps,{" "}
        {formatNumber(usage.promptTokens)} input,{" "}
        {formatNumber(usage.completionTokens)} output
      </Text>
    </Box>
  );
}

View File

@@ -55,6 +55,14 @@ export type Buffers = {
toolCallIdToLineId: Map<string, string>;
lastOtid: string | null; // Track the last otid to detect transitions
pendingRefresh?: boolean; // Track throttled refresh state
usage: {
promptTokens: number;
completionTokens: number;
totalTokens: number;
cachedTokens: number;
reasoningTokens: number;
stepCount: number;
};
};
export function createBuffers(): Buffers {
@@ -65,6 +73,14 @@ export function createBuffers(): Buffers {
pendingToolByRun: new Map(),
toolCallIdToLineId: new Map(),
lastOtid: null,
usage: {
promptTokens: 0,
completionTokens: 0,
totalTokens: 0,
cachedTokens: 0,
reasoningTokens: 0,
stepCount: 0,
},
};
}
@@ -339,8 +355,26 @@ export function onChunk(
break;
}
case "usage_statistics": {
// Accumulate usage statistics from the stream
// These messages arrive after stop_reason in the stream
if (chunk.promptTokens !== undefined) {
b.usage.promptTokens += chunk.promptTokens;
}
if (chunk.completionTokens !== undefined) {
b.usage.completionTokens += chunk.completionTokens;
}
if (chunk.totalTokens !== undefined) {
b.usage.totalTokens += chunk.totalTokens;
}
if (chunk.stepCount !== undefined) {
b.usage.stepCount += chunk.stepCount;
}
break;
}
default:
break; // ignore ping/usage/etc
break; // ignore ping/etc
}
}

View File

@@ -16,6 +16,7 @@ type DrainResult = {
lastRunId?: string | null;
lastSeqId?: number | null;
approval?: ApprovalRequest | null; // present only if we ended due to approval
apiDurationMs: number; // time spent in API call
};
export async function drainStream(
@@ -23,6 +24,8 @@ export async function drainStream(
buffers: ReturnType<typeof createBuffers>,
refresh: () => void,
): Promise<DrainResult> {
const startTime = performance.now();
let approvalRequestId: string | null = null;
let toolCallId: string | null = null;
let toolName: string | null = null;
@@ -78,10 +81,15 @@ export async function drainStream(
if (chunk.messageType === "stop_reason") {
stopReason = chunk.stopReason;
break; // end of turn
// Continue reading stream to get usage_statistics that may come after
}
}
// Stream has ended, check if we captured a stop reason
if (!stopReason) {
stopReason = Letta.StopReasonType.Error;
}
// Mark the final line as finished now that stream has ended
markCurrentLineAsFinished(buffers);
queueMicrotask(refresh);
@@ -96,9 +104,7 @@ export async function drainStream(
}
: null;
if (!stopReason) {
stopReason = Letta.StopReasonType.Error;
}
const apiDurationMs = performance.now() - startTime;
return { stopReason, approval, lastRunId, lastSeqId };
return { stopReason, approval, lastRunId, lastSeqId, apiDurationMs };
}