feat(web): add Memory Palace static viewer (#1061)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
Charles Packer
2026-02-20 12:00:55 -08:00
committed by GitHub
parent 2da31bf2f7
commit b622eca198
9 changed files with 2322 additions and 115 deletions

View File

@@ -188,3 +188,11 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Brand Assets Exclusion
The Letta name, Letta Code name, logo, wordmark SVGs, and ASCII art
included in this repository are copyrighted assets of Letta, Inc.
and are not licensed under the Apache License, Version 2.0. These
assets may not be used in derivative works without written permission
from Letta, Inc.

View File

@@ -16,12 +16,12 @@ const __dirname = dirname(__filename);
const pkg = JSON.parse(readFileSync(join(__dirname, "package.json"), "utf-8"));
const version = pkg.version;
const useMagick = Bun.env.USE_MAGICK;
const features = []
const features = [];
console.log(`📦 Building Letta Code v${version}...`);
if (useMagick) {
console.log(`🪄 Using magick variant of imageResize...`);
features.push("USE_MAGICK")
features.push("USE_MAGICK");
}
await Bun.build({
@@ -44,6 +44,7 @@ await Bun.build({
".md": "text",
".mdx": "text",
".txt": "text",
},
features: features,
});

108
src/agent/memoryScanner.ts Normal file
View File

@@ -0,0 +1,108 @@
/**
* Shared memory filesystem scanner.
*
* Recursively scans the on-disk memory directory and returns a flat list of
* TreeNode objects that represent files and directories. Used by both the
* TUI MemfsTreeViewer and the web-based memory viewer generator.
*/
import { readdirSync, readFileSync, statSync } from "node:fs";
import { join, relative } from "node:path";
export interface TreeNode {
  name: string; // Display name (e.g., "git.md" or "dev_workflow/")
  relativePath: string; // Relative path from memory root
  fullPath: string; // Full filesystem path
  isDirectory: boolean;
  depth: number;
  isLast: boolean;
  parentIsLast: boolean[];
}

/**
 * Scan the memory filesystem directory and build tree nodes.
 *
 * Walks `memoryRoot` depth-first, emitting a flat pre-order list of nodes
 * (a directory is immediately followed by its children). Dot-prefixed
 * entries are skipped; entries that cannot be stat'ed are silently dropped.
 *
 * @param memoryRoot Absolute path to the memory root directory.
 * @returns Flat list of TreeNode entries; empty if the root is unreadable.
 */
export function scanMemoryFilesystem(memoryRoot: string): TreeNode[] {
  const nodes: TreeNode[] = [];

  const scanDir = (dir: string, depth: number, parentIsLast: boolean[]) => {
    let entries: string[];
    try {
      entries = readdirSync(dir);
    } catch {
      return; // Unreadable directory: contribute no nodes
    }

    // Filter out hidden files and state file (both are dot-prefixed)
    const visible = entries.filter((name) => !name.startsWith("."));

    // Stat each entry exactly once, up front. The original comparator called
    // statSync on every comparison (O(n log n) syscalls per directory); a
    // single pass gives identical ordering. `null` marks a failed stat.
    const isDirByName = new Map<string, boolean | null>();
    for (const name of visible) {
      try {
        isDirByName.set(name, statSync(join(dir, name)).isDirectory());
      } catch {
        isDirByName.set(name, null);
      }
    }

    // Sort: directories first, "system" always first among dirs (root level
    // only), then alphabetically. Failed stats sort as non-directories.
    const sorted = visible.sort((a, b) => {
      const aIsDir = isDirByName.get(a) === true;
      const bIsDir = isDirByName.get(b) === true;
      if (aIsDir !== bIsDir) return aIsDir ? -1 : 1;
      if (aIsDir && bIsDir && depth === 0) {
        if (a === "system") return -1;
        if (b === "system") return 1;
      }
      return a.localeCompare(b);
    });

    sorted.forEach((name, index) => {
      const isDir = isDirByName.get(name);
      if (isDir == null) return; // Skip if we couldn't stat
      const fullPath = join(dir, name);
      const isLast = index === sorted.length - 1;
      nodes.push({
        name: isDir ? `${name}/` : name,
        relativePath: relative(memoryRoot, fullPath),
        fullPath,
        isDirectory: isDir,
        depth,
        isLast,
        parentIsLast: [...parentIsLast],
      });
      if (isDir) {
        scanDir(fullPath, depth + 1, [...parentIsLast, isLast]);
      }
    });
  };

  scanDir(memoryRoot, 0, []);
  return nodes;
}
/**
 * Get only file nodes (for navigation).
 */
export function getFileNodes(nodes: TreeNode[]): TreeNode[] {
  const files: TreeNode[] = [];
  for (const node of nodes) {
    if (!node.isDirectory) files.push(node);
  }
  return files;
}
/**
 * Read file content safely, returning a placeholder string on failure.
 */
export function readFileContent(fullPath: string): string {
  let text: string;
  try {
    text = readFileSync(fullPath, "utf-8");
  } catch {
    // Unreadable/missing file: surface a placeholder instead of throwing
    text = "(unable to read file)";
  }
  return text;
}

View File

@@ -12564,6 +12564,7 @@ Plan file path: ${planFilePath}`;
(settingsManager.isMemfsEnabled(agentId) ? (
<MemfsTreeViewer
agentId={agentId}
agentName={agentState?.name}
onClose={closeOverlay}
conversationId={conversationId}
/>

View File

@@ -1,9 +1,16 @@
import { existsSync, readdirSync, readFileSync, statSync } from "node:fs";
import { join, relative } from "node:path";
import { existsSync } from "node:fs";
import { Box, useInput } from "ink";
import Link from "ink-link";
import { useMemo, useState } from "react";
import { useEffect, useMemo, useRef, useState } from "react";
import { getMemoryFilesystemRoot } from "../../agent/memoryFilesystem";
import { isGitRepo } from "../../agent/memoryGit";
import {
getFileNodes,
readFileContent,
scanMemoryFilesystem,
type TreeNode,
} from "../../agent/memoryScanner";
import { generateAndOpenMemoryViewer } from "../../web/generate-memory-viewer";
import { useTerminalWidth } from "../hooks/useTerminalWidth";
import { colors } from "./colors";
import { Text } from "./Text";
@@ -16,100 +23,13 @@ const DOTTED_LINE = "╌";
const TREE_VISIBLE_LINES = 15;
const FULL_VIEW_VISIBLE_LINES = 16;
// Tree structure types
interface TreeNode {
name: string; // Display name (e.g., "git.md" or "dev_workflow/")
relativePath: string; // Relative path from memory root
fullPath: string; // Full filesystem path
isDirectory: boolean;
depth: number;
isLast: boolean;
parentIsLast: boolean[];
}
interface MemfsTreeViewerProps {
agentId: string;
agentName?: string;
onClose: () => void;
conversationId?: string;
}
/**
* Scan the memory filesystem directory and build tree nodes
*/
function scanMemoryFilesystem(memoryRoot: string): TreeNode[] {
const nodes: TreeNode[] = [];
const scanDir = (dir: string, depth: number, parentIsLast: boolean[]) => {
let entries: string[];
try {
entries = readdirSync(dir);
} catch {
return;
}
// Filter out hidden files and state file
const filtered = entries.filter((name) => !name.startsWith("."));
// Sort: directories first, "system" always first among dirs, then alphabetically
const sorted = filtered.sort((a, b) => {
const aPath = join(dir, a);
const bPath = join(dir, b);
let aIsDir = false;
let bIsDir = false;
try {
aIsDir = statSync(aPath).isDirectory();
} catch {}
try {
bIsDir = statSync(bPath).isDirectory();
} catch {}
if (aIsDir !== bIsDir) return aIsDir ? -1 : 1;
// "system" directory comes first (only at root level, depth 0)
if (aIsDir && bIsDir && depth === 0) {
if (a === "system") return -1;
if (b === "system") return 1;
}
return a.localeCompare(b);
});
sorted.forEach((name, index) => {
const fullPath = join(dir, name);
let isDir = false;
try {
isDir = statSync(fullPath).isDirectory();
} catch {
return; // Skip if we can't stat
}
const relativePath = relative(memoryRoot, fullPath);
const isLast = index === sorted.length - 1;
nodes.push({
name: isDir ? `${name}/` : name,
relativePath,
fullPath,
isDirectory: isDir,
depth,
isLast,
parentIsLast: [...parentIsLast],
});
if (isDir) {
scanDir(fullPath, depth + 1, [...parentIsLast, isLast]);
}
});
};
scanDir(memoryRoot, 0, []);
return nodes;
}
/**
* Get only file nodes (for navigation)
*/
function getFileNodes(nodes: TreeNode[]): TreeNode[] {
return nodes.filter((n) => !n.isDirectory);
}
/**
* Render tree line prefix based on depth and parent status
*/
@@ -122,19 +42,9 @@ function renderTreePrefix(node: TreeNode): string {
return prefix;
}
/**
* Read file content safely
*/
function readFileContent(fullPath: string): string {
try {
return readFileSync(fullPath, "utf-8");
} catch {
return "(unable to read file)";
}
}
export function MemfsTreeViewer({
agentId,
agentName,
onClose,
conversationId,
}: MemfsTreeViewerProps) {
@@ -148,10 +58,27 @@ export function MemfsTreeViewer({
const [treeScrollOffset, setTreeScrollOffset] = useState(0);
const [viewMode, setViewMode] = useState<"split" | "full">("split");
const [fullViewScrollOffset, setFullViewScrollOffset] = useState(0);
const [status, setStatus] = useState<string | null>(null);
const statusTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
// Get memory filesystem root
const memoryRoot = getMemoryFilesystemRoot(agentId);
const memoryExists = existsSync(memoryRoot);
const hasGitRepo = useMemo(() => isGitRepo(agentId), [agentId]);
function showStatus(msg: string, durationMs: number) {
if (statusTimerRef.current) clearTimeout(statusTimerRef.current);
setStatus(msg);
statusTimerRef.current = setTimeout(() => setStatus(null), durationMs);
}
// Cleanup status timer on unmount
useEffect(
() => () => {
if (statusTimerRef.current) clearTimeout(statusTimerRef.current);
},
[],
);
// Scan filesystem and build tree
const treeNodes = useMemo(
@@ -182,6 +109,20 @@ export function MemfsTreeViewer({
return;
}
// O: open memory viewer in browser (works in both split and full view)
if ((input === "o" || input === "O") && hasGitRepo) {
showStatus("Opening in browser...", 10000);
generateAndOpenMemoryViewer(agentId, { agentName })
.then(() => showStatus("Opened in browser", 3000))
.catch((err: unknown) =>
showStatus(
err instanceof Error ? err.message : "Failed to open viewer",
5000,
),
);
return;
}
// ESC: close or return from full view
if (key.escape) {
if (viewMode === "full") {
@@ -349,7 +290,17 @@ export function MemfsTreeViewer({
{" "}
{charCount.toLocaleString()} chars
</Text>
<Text dimColor>{" "} scroll · Esc back</Text>
{status ? (
<Text dimColor>
{" "}
{status}
</Text>
) : (
<Text dimColor>
{" "} scroll{hasGitRepo ? " · O open in browser" : ""} · Esc
back
</Text>
)}
</Box>
</Box>
);
@@ -507,16 +458,24 @@ export function MemfsTreeViewer({
{/* Footer */}
<Box flexDirection="column" marginTop={1}>
<Box>
<Text dimColor>{" "} navigate · Enter view · </Text>
{!isTmux && (
<Link url={adeUrl}>
<Text dimColor>Edit in ADE</Text>
</Link>
)}
{isTmux && <Text dimColor>Edit in ADE: {adeUrl}</Text>}
<Text dimColor> · Esc close</Text>
</Box>
{status ? (
<Text dimColor>
{" "}
{status}
</Text>
) : (
<Box>
<Text dimColor>{" "} navigate · Enter view · </Text>
{!isTmux && (
<Link url={adeUrl}>
<Text dimColor>Edit in ADE</Text>
</Link>
)}
{isTmux && <Text dimColor>Edit in ADE: {adeUrl}</Text>}
{hasGitRepo && <Text dimColor> · O open in browser</Text>}
<Text dimColor> · Esc close</Text>
</Box>
)}
</Box>
</Box>
);

View File

@@ -0,0 +1,458 @@
/**
* Memory Viewer Generator
*
* Collects data from the git-backed memory filesystem, injects it into the
* self-contained HTML template, writes the result to ~/.letta/viewers/, and
* opens it in the user's browser.
*/
import { execFile as execFileCb } from "node:child_process";
import { chmodSync, existsSync, mkdirSync, writeFileSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { promisify } from "node:util";
import { getClient, getServerUrl } from "../agent/client";
import { getMemoryFilesystemRoot } from "../agent/memoryFilesystem";
import { getMemoryRepoDir, isGitRepo } from "../agent/memoryGit";
import {
getFileNodes,
readFileContent,
scanMemoryFilesystem,
} from "../agent/memoryScanner";
import memoryViewerTemplate from "./memory-viewer-template.txt";
import type {
ContextData,
MemoryCommit,
MemoryFile,
MemoryViewerData,
} from "./types";
const execFile = promisify(execFileCb);
const VIEWERS_DIR = join(homedir(), ".letta", "viewers");
const MAX_COMMITS = 500;
const RECENT_DIFF_COUNT = 50;
const PER_DIFF_CAP = 100_000; // 100KB per diff
const TOTAL_PAYLOAD_CAP = 5_000_000; // 5MB total
const RECORD_SEP = "\x1e";
export interface GenerateResult {
filePath: string;
}
// ---------------------------------------------------------------------------
// Git helpers
// ---------------------------------------------------------------------------
/**
 * Run a git command in `cwd` and return its stdout.
 * Any failure (missing git, non-zero exit, timeout) yields "" — callers
 * treat empty output as "no data" rather than an error.
 */
async function runGitSafe(cwd: string, args: string[]): Promise<string> {
  // 10MB output buffer; a 60s ceiling guards against hung git processes.
  const options = { cwd, maxBuffer: 10 * 1024 * 1024, timeout: 60_000 };
  try {
    const result = await execFile("git", args, options);
    return result.stdout?.toString() ?? "";
  } catch {
    return "";
  }
}
// ---------------------------------------------------------------------------
// Data collectors
// ---------------------------------------------------------------------------
/** Parse frontmatter from a .md file's raw content. */
function parseFrontmatter(raw: string): {
  frontmatter: Record<string, string>;
  body: string;
} {
  // No opening delimiter: the whole document is body.
  if (!raw.startsWith("---")) {
    return { frontmatter: {}, body: raw };
  }
  // Locate the closing "---" on its own line; an unclosed block is body too.
  const closingIdx = raw.indexOf("\n---", 3);
  if (closingIdx === -1) {
    return { frontmatter: {}, body: raw };
  }

  // Parse "key: value" lines between the delimiters.
  const frontmatter: Record<string, string> = {};
  for (const line of raw.slice(4, closingIdx).split("\n")) {
    const sep = line.indexOf(":");
    if (sep <= 0) continue; // no key before the colon
    const key = line.slice(0, sep).trim();
    if (!key) continue;
    frontmatter[key] = line.slice(sep + 1).trim();
  }

  // Body starts after the closing delimiter; drop a single leading newline.
  return {
    frontmatter,
    body: raw.slice(closingIdx + 4).replace(/^\n/, ""),
  };
}
/** Collect memory files from the working tree on disk. */
function collectFiles(memoryRoot: string): MemoryFile[] {
const treeNodes = scanMemoryFilesystem(memoryRoot);
const fileNodes = getFileNodes(treeNodes);
return fileNodes
.filter((n) => n.name.endsWith(".md"))
.map((n) => {
const raw = readFileContent(n.fullPath);
const { frontmatter, body } = parseFrontmatter(raw);
return {
path: n.relativePath,
isSystem:
n.relativePath.startsWith("system/") ||
n.relativePath.startsWith("system\\"),
frontmatter,
content: body,
};
});
}
/** Collect commit metadata via a single git log call. */
async function collectMetadata(repoDir: string): Promise<
Array<{
hash: string;
author: string;
date: string;
subject: string;
body: string;
}>
> {
// Use RECORD_SEP between commits and NUL between fixed fields.
// Body (%b) can be empty, so we use exactly 4 NUL delimiters per record
// and treat everything after the 4th NUL (up to the next RECORD_SEP) as body.
const raw = await runGitSafe(repoDir, [
"log",
"-n",
String(MAX_COMMITS),
"--first-parent",
`--format=${RECORD_SEP}%H%x00%an%x00%aI%x00%s%x00%b`,
]);
if (!raw.trim()) return [];
const records = raw.split(RECORD_SEP).filter((s) => s.trim().length > 0);
const commits: Array<{
hash: string;
author: string;
date: string;
subject: string;
body: string;
}> = [];
for (const record of records) {
const parts = record.replace(/^\n+/, "");
// Split on first 4 NUL bytes only
const nulPositions: number[] = [];
for (let j = 0; j < parts.length && nulPositions.length < 4; j++) {
if (parts[j] === "\0") nulPositions.push(j);
}
if (nulPositions.length < 4) continue;
const [p0, p1, p2, p3] = nulPositions as [number, number, number, number];
const hash = parts.slice(0, p0).trim();
const author = parts.slice(p0 + 1, p1).trim();
const date = parts.slice(p1 + 1, p2).trim();
const subject = parts.slice(p2 + 1, p3).trim();
const body = parts.slice(p3 + 1).trim();
if (!hash || !/^[0-9a-f]{40}$/i.test(hash)) continue;
commits.push({ hash, author, date, subject, body });
}
return commits;
}
/**
 * Shared helper for collectStats/collectDiffs: run `git log` with a
 * RECORD_SEP-prefixed %H format plus `extraArgs`, then split the output into
 * a hash -> payload map, where payload is everything after the hash line.
 * The two callers previously duplicated this parsing verbatim.
 */
async function collectHashKeyedLog(
  repoDir: string,
  count: number,
  extraArgs: string[],
  opts: { trimPayload: boolean },
): Promise<Map<string, string>> {
  const raw = await runGitSafe(repoDir, [
    "log",
    "-n",
    String(count),
    "--first-parent",
    `--format=${RECORD_SEP}%H`,
    ...extraArgs,
  ]);
  const map = new Map<string, string>();
  if (!raw.trim()) return map;
  for (const chunk of raw.split(RECORD_SEP)) {
    if (chunk.trim().length === 0) continue;
    const normalized = chunk.replace(/^\n+/, "");
    const firstNewline = normalized.indexOf("\n");
    if (firstNewline === -1) continue; // hash line only, no payload
    const hash = normalized.slice(0, firstNewline).trim();
    if (!/^[0-9a-f]{40}$/i.test(hash)) continue; // defensive: malformed record
    const payload = normalized.slice(firstNewline + 1);
    // Stats are trimmed for display; diffs keep exact whitespace (patch format).
    map.set(hash, opts.trimPayload ? payload.trim() : payload);
  }
  return map;
}

/** Collect diffstats via a single git log call. Returns a hash -> stat map. */
async function collectStats(repoDir: string): Promise<Map<string, string>> {
  return collectHashKeyedLog(repoDir, MAX_COMMITS, ["--stat"], {
    trimPayload: true,
  });
}

/** Collect full diffs for the most recent N commits. Returns hash -> patch map. */
async function collectDiffs(repoDir: string): Promise<Map<string, string>> {
  return collectHashKeyedLog(repoDir, RECENT_DIFF_COUNT, ["-p"], {
    trimPayload: false,
  });
}
/** Get total commit count (may exceed MAX_COMMITS). */
async function getTotalCommitCount(repoDir: string): Promise<number> {
  const output = await runGitSafe(repoDir, ["rev-list", "--count", "HEAD"]);
  const count = Number.parseInt(output.trim(), 10);
  // Empty/garbage output (e.g. git failed) parses to NaN -> report 0.
  return Number.isNaN(count) ? 0 : count;
}
// Commit subjects produced by reflection/sleeptime runs — matched case-insensitively
// against "(reflection)", the 🔮 emoji, or a "reflection:" prefix.
const REFLECTION_PATTERN = /\(reflection\)|🔮|reflection:/i;

/** Assemble all data into a MemoryViewerData object. */
async function collectMemoryData(
  agentId: string,
  repoDir: string,
  memoryRoot: string,
): Promise<MemoryViewerData> {
  // Filesystem scan (synchronous)
  const files = collectFiles(memoryRoot);

  // Git calls (parallel)
  const [metadata, statsMap, diffsMap, totalCount] = await Promise.all([
    collectMetadata(repoDir),
    collectStats(repoDir),
    collectDiffs(repoDir),
    getTotalCommitCount(repoDir),
  ]);

  // Merge into commits with payload size caps:
  // per-commit diffs over PER_DIFF_CAP are head-truncated with a marker, and
  // once the running total passes TOTAL_PAYLOAD_CAP, later (older) commits
  // lose their diff entirely. Both cases set `truncated`.
  let cumulativeSize = 0;
  const commits: MemoryCommit[] = metadata.map((m) => {
    const message = m.body ? `${m.subject}\n\n${m.body}` : m.subject;
    const stats = statsMap.get(m.hash) ?? "";
    let diff = diffsMap.get(m.hash);
    let truncated = false;
    if (diff !== undefined) {
      if (diff.length > PER_DIFF_CAP) {
        diff = `${diff.slice(0, PER_DIFF_CAP)}\n\n[diff truncated - exceeded ${Math.round(PER_DIFF_CAP / 1024)}KB]`;
        truncated = true;
      }
      cumulativeSize += diff.length;
      if (cumulativeSize > TOTAL_PAYLOAD_CAP) {
        diff = undefined;
        truncated = true;
      }
    }
    return {
      hash: m.hash,
      shortHash: m.hash.slice(0, 7),
      author: m.author,
      date: m.date,
      message,
      stats,
      diff,
      truncated,
      isReflection: REFLECTION_PATTERN.test(m.subject),
    };
  });

  // Resolve server URL, falling back to env var then the hosted default.
  let serverUrl: string;
  try {
    serverUrl = getServerUrl();
  } catch {
    serverUrl = process.env.LETTA_BASE_URL || "https://api.letta.com";
  }

  // Fetch agent info and context breakdown (best-effort, parallel)
  let agentName = agentId;
  let context: ContextData | undefined;
  let model = "unknown";
  // Try SDK client for agent name + model info
  try {
    const client = await getClient();
    const agent = await client.agents.retrieve(agentId);
    if (agent.name) agentName = agent.name;
    model = agent.llm_config?.model ?? "unknown";
    // Fetch context breakdown via raw API (not in SDK)
    // NOTE(review): reaches into the client's private `apiKey` via a cast —
    // assumes the SDK keeps that field name; verify on SDK upgrades.
    const apiKey =
      (client as unknown as { apiKey?: string }).apiKey ||
      process.env.LETTA_API_KEY ||
      "";
    const contextWindow = agent.llm_config?.context_window ?? 0;
    try {
      const contextRes = await fetch(
        `${serverUrl}/v1/agents/${agentId}/context`,
        {
          headers: { Authorization: `Bearer ${apiKey}` },
          signal: AbortSignal.timeout(5000),
        },
      );
      if (contextRes.ok) {
        const overview = (await contextRes.json()) as {
          context_window_size_max: number;
          context_window_size_current: number;
          num_tokens_system: number;
          num_tokens_core_memory: number;
          num_tokens_external_memory_summary: number;
          num_tokens_summary_memory: number;
          num_tokens_functions_definitions: number;
          num_tokens_messages: number;
        };
        context = {
          // Prefer the agent's configured window; fall back to the API's max.
          contextWindow: contextWindow || overview.context_window_size_max,
          usedTokens: overview.context_window_size_current,
          model,
          breakdown: {
            system: overview.num_tokens_system,
            coreMemory: overview.num_tokens_core_memory,
            externalMemory: overview.num_tokens_external_memory_summary,
            summaryMemory: overview.num_tokens_summary_memory,
            tools: overview.num_tokens_functions_definitions,
            messages: overview.num_tokens_messages,
          },
        };
      }
    } catch {
      // Context fetch failed - continue without it
    }
  } catch {
    // SDK client failed - try raw API with env key as fallback
    try {
      const apiKey = process.env.LETTA_API_KEY || "";
      if (apiKey && serverUrl) {
        // Fetch agent info + context in parallel
        const [agentRes, contextRes] = await Promise.all([
          fetch(`${serverUrl}/v1/agents/${agentId}`, {
            headers: { Authorization: `Bearer ${apiKey}` },
            signal: AbortSignal.timeout(5000),
          }).catch(() => null),
          fetch(`${serverUrl}/v1/agents/${agentId}/context`, {
            headers: { Authorization: `Bearer ${apiKey}` },
            signal: AbortSignal.timeout(5000),
          }).catch(() => null),
        ]);
        if (agentRes?.ok) {
          const agentData = (await agentRes.json()) as {
            name?: string;
            llm_config?: { model?: string; context_window?: number };
          };
          if (agentData.name) agentName = agentData.name;
          if (agentData.llm_config?.model) model = agentData.llm_config.model;
        }
        if (contextRes?.ok) {
          const overview = (await contextRes.json()) as {
            context_window_size_max: number;
            context_window_size_current: number;
            num_tokens_system: number;
            num_tokens_core_memory: number;
            num_tokens_external_memory_summary: number;
            num_tokens_summary_memory: number;
            num_tokens_functions_definitions: number;
            num_tokens_messages: number;
          };
          context = {
            contextWindow: overview.context_window_size_max,
            usedTokens: overview.context_window_size_current,
            model,
            breakdown: {
              system: overview.num_tokens_system,
              coreMemory: overview.num_tokens_core_memory,
              externalMemory: overview.num_tokens_external_memory_summary,
              summaryMemory: overview.num_tokens_summary_memory,
              tools: overview.num_tokens_functions_definitions,
              messages: overview.num_tokens_messages,
            },
          };
        }
      }
    } catch {
      // All API calls failed - continue without context
    }
  }

  return {
    agent: { id: agentId, name: agentName, serverUrl },
    generatedAt: new Date().toISOString(),
    // git rev-list count when available, else the number of parsed commits.
    totalCommitCount: totalCount || commits.length,
    files,
    commits,
    context,
  };
}
// ---------------------------------------------------------------------------
// Public API
// ---------------------------------------------------------------------------
/**
 * Generate the self-contained memory viewer HTML for `agentId`, write it to
 * ~/.letta/viewers/ with owner-only permissions, and open it in the browser.
 *
 * @throws if memfs is not enabled for the agent, or the browser launch fails
 *         (the file is still written in that case; the message includes its path).
 */
export async function generateAndOpenMemoryViewer(
  agentId: string,
  options?: { agentName?: string },
): Promise<GenerateResult> {
  const repoDir = getMemoryRepoDir(agentId);
  const memoryRoot = getMemoryFilesystemRoot(agentId);
  if (!isGitRepo(agentId)) {
    throw new Error("Memory viewer requires memfs. Run /memfs enable first.");
  }

  // Step 1: gather files, commits, and context into a single payload.
  const data = await collectMemoryData(agentId, repoDir, memoryRoot);
  if (options?.agentName) {
    // The caller-supplied name wins over whatever the API lookup returned.
    data.agent.name = options.agentName;
  }

  // Step 2: embed the payload. Escaping "<" as \u003c keeps a literal
  // "</script>" inside the JSON from terminating the inline script tag.
  const escapedJson = JSON.stringify(data).replace(/</g, "\\u003c");
  const html = memoryViewerTemplate.replace(
    "<!--LETTA_DATA_PLACEHOLDER-->",
    escapedJson,
  );

  // Step 3: persist under ~/.letta/viewers/ readable only by the owner.
  if (!existsSync(VIEWERS_DIR)) {
    mkdirSync(VIEWERS_DIR, { recursive: true, mode: 0o700 });
  }
  try {
    chmodSync(VIEWERS_DIR, 0o700);
  } catch {}
  const outputPath = join(
    VIEWERS_DIR,
    `memory-${encodeURIComponent(agentId)}.html`,
  );
  writeFileSync(outputPath, html);
  chmodSync(outputPath, 0o600);

  // Step 4: hand off to the default browser.
  try {
    const { default: openUrl } = await import("open");
    await openUrl(outputPath, { wait: false });
  } catch (err) {
    throw new Error(
      `Failed to open browser. File saved to: ${outputPath}${err instanceof Error ? ` (${err.message})` : ""}`,
    );
  }
  return { filePath: outputPath };
}

4
src/web/html.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
/**
 * Ambient module declaration so TypeScript accepts imports matching
 * "*memory-viewer-template.txt" as a plain string default export
 * (the build's ".txt": "text" loader inlines the file contents).
 */
declare module "*memory-viewer-template.txt" {
  const content: string;
  export default content;
}

File diff suppressed because one or more lines are too long

41
src/web/types.ts Normal file
View File

@@ -0,0 +1,41 @@
/** Context-window usage snapshot for an agent (see MemoryViewerData.context). */
export interface ContextData {
  contextWindow: number; // max tokens
  usedTokens: number; // current total
  model: string;
  // Token counts per context-window component.
  breakdown: {
    system: number;
    coreMemory: number;
    externalMemory: number;
    summaryMemory: number;
    tools: number;
    messages: number;
  };
}

/** Top-level payload embedded into the generated memory viewer HTML. */
export interface MemoryViewerData {
  agent: { id: string; name: string; serverUrl: string };
  generatedAt: string; // ISO 8601 timestamp
  totalCommitCount: number; // total commits in repo (may exceed commits.length)
  files: MemoryFile[];
  commits: MemoryCommit[];
  context?: ContextData; // from GET /v1/agents/{id}/context
}

/** One markdown file from the on-disk memory filesystem. */
export interface MemoryFile {
  path: string; // e.g. "system/persona/soul.md"
  isSystem: boolean; // under system/ directory
  frontmatter: Record<string, string>;
  content: string; // raw markdown body (after frontmatter)
}

/** One commit from the git-backed memory repo. */
export interface MemoryCommit {
  hash: string;
  shortHash: string;
  author: string;
  date: string; // ISO 8601
  message: string;
  stats: string; // diffstat summary
  diff?: string; // full unified diff patch (only for recent N commits)
  truncated?: boolean; // diff was truncated due to size cap
  isReflection: boolean; // commit message matches reflection/sleeptime pattern
}