fix: centralize directory and memfs cap tuning (#1377)

Co-authored-by: Letta Code <noreply@letta.com>
This commit is contained in:
Cameron
2026-03-19 11:42:48 -06:00
committed by GitHub
parent cf576bca8d
commit 3ae4f6feac
6 changed files with 707 additions and 41 deletions

View File

@@ -10,11 +10,25 @@
import { existsSync, mkdirSync } from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import {
DIRECTORY_LIMIT_DEFAULTS,
getDirectoryLimits,
} from "../utils/directoryLimits";
export const MEMORY_FS_ROOT = ".letta";
export const MEMORY_FS_AGENTS_DIR = "agents";
export const MEMORY_FS_MEMORY_DIR = "memory";
export const MEMORY_SYSTEM_DIR = "system";
export const MEMORY_TREE_MAX_LINES = DIRECTORY_LIMIT_DEFAULTS.memfsTreeMaxLines;
export const MEMORY_TREE_MAX_CHARS = DIRECTORY_LIMIT_DEFAULTS.memfsTreeMaxChars;
export const MEMORY_TREE_MAX_CHILDREN_PER_DIR =
DIRECTORY_LIMIT_DEFAULTS.memfsTreeMaxChildrenPerDir;
// Optional per-call overrides for memory-tree rendering caps; any value
// omitted falls back to the centralized directory limits (env-tunable).
export interface MemoryTreeRenderOptions {
  // Max rendered lines, including the "/memory/" root line (renderer floors this at 2).
  maxLines?: number;
  // Max total characters in the rendered tree, newlines included (floored at 128).
  maxChars?: number;
  // Max entries shown per directory before an "… (N more entries)" marker (floored at 1).
  maxChildrenPerDir?: number;
}
// ----- Directory helpers -----
@@ -69,6 +83,7 @@ export function labelFromRelativePath(relativePath: string): string {
export function renderMemoryFilesystemTree(
systemLabels: string[],
detachedLabels: string[],
options: MemoryTreeRenderOptions = {},
): string {
type TreeNode = { children: Map<string, TreeNode>; isFile: boolean };
@@ -112,22 +127,99 @@ export function renderMemoryFilesystemTree(
});
};
const lines: string[] = ["/memory/"];
const limits = getDirectoryLimits();
const maxLines = Math.max(2, options.maxLines ?? limits.memfsTreeMaxLines);
const maxChars = Math.max(128, options.maxChars ?? limits.memfsTreeMaxChars);
const maxChildrenPerDir = Math.max(
1,
options.maxChildrenPerDir ?? limits.memfsTreeMaxChildrenPerDir,
);
const render = (node: TreeNode, prefix: string) => {
const entries = sortedEntries(node);
entries.forEach(([name, child], index) => {
const isLast = index === entries.length - 1;
const branch = isLast ? "└──" : "├──";
lines.push(`${prefix}${branch} ${name}${child.isFile ? "" : "/"}`);
const rootLine = "/memory/";
const lines: string[] = [rootLine];
let totalChars = rootLine.length;
const countTreeEntries = (node: TreeNode): number => {
let total = 0;
for (const [, child] of node.children) {
total += 1;
if (child.children.size > 0) {
const nextPrefix = `${prefix}${isLast ? " " : "│ "}`;
render(child, nextPrefix);
total += countTreeEntries(child);
}
});
}
return total;
};
render(root, "");
const canAppendLine = (line: string): boolean => {
const nextLineCount = lines.length + 1;
const nextCharCount = totalChars + 1 + line.length;
return nextLineCount <= maxLines && nextCharCount <= maxChars;
};
const render = (node: TreeNode, prefix: string): boolean => {
const entries = sortedEntries(node);
const visibleEntries = entries.slice(0, maxChildrenPerDir);
const omittedEntries = Math.max(0, entries.length - visibleEntries.length);
const renderItems: Array<
| { kind: "entry"; name: string; child: TreeNode }
| { kind: "omitted"; omittedCount: number }
> = visibleEntries.map(([name, child]) => ({
kind: "entry",
name,
child,
}));
if (omittedEntries > 0) {
renderItems.push({ kind: "omitted", omittedCount: omittedEntries });
}
for (const [index, item] of renderItems.entries()) {
const isLast = index === renderItems.length - 1;
const branch = isLast ? "└──" : "├──";
const line =
item.kind === "entry"
? `${prefix}${branch} ${item.name}${item.child.isFile ? "" : "/"}`
: `${prefix}${branch} … (${item.omittedCount.toLocaleString()} more entries)`;
if (!canAppendLine(line)) {
return false;
}
lines.push(line);
totalChars += 1 + line.length;
if (item.kind === "entry" && item.child.children.size > 0) {
const nextPrefix = `${prefix}${isLast ? " " : "│ "}`;
if (!render(item.child, nextPrefix)) {
return false;
}
}
}
return true;
};
const totalEntries = countTreeEntries(root);
const fullyRendered = render(root, "");
if (!fullyRendered) {
while (lines.length > 1) {
const shownEntries = Math.max(0, lines.length - 1); // Exclude /memory/
const omittedEntries = Math.max(1, totalEntries - shownEntries);
const notice = `[Tree truncated: showing ${shownEntries.toLocaleString()} of ${totalEntries.toLocaleString()} entries. ${omittedEntries.toLocaleString()} omitted.]`;
if (canAppendLine(notice)) {
lines.push(notice);
break;
}
const removed = lines.pop();
if (removed) {
totalChars -= 1 + removed.length;
}
}
}
return lines.join("\n");
}

View File

@@ -1,7 +1,7 @@
---
label: memory_filesystem
description: Filesystem view of memory blocks (system + user)
limit: 20000
limit: 8000
---
/memory/

View File

@@ -13,6 +13,23 @@ import {
labelFromRelativePath,
renderMemoryFilesystemTree,
} from "../../agent/memoryFilesystem";
import { DIRECTORY_LIMIT_ENV } from "../../utils/directoryLimits";
const DIRECTORY_LIMIT_ENV_KEYS = Object.values(DIRECTORY_LIMIT_ENV);
const ORIGINAL_DIRECTORY_ENV = Object.fromEntries(
DIRECTORY_LIMIT_ENV_KEYS.map((key) => [key, process.env[key]]),
) as Record<string, string | undefined>;
function restoreDirectoryLimitEnv(): void {
for (const key of DIRECTORY_LIMIT_ENV_KEYS) {
const original = ORIGINAL_DIRECTORY_ENV[key];
if (original === undefined) {
delete process.env[key];
} else {
process.env[key] = original;
}
}
}
// Helper to create a mock client
function createMockClient(options: {
@@ -114,6 +131,10 @@ describe("labelFromRelativePath", () => {
});
describe("renderMemoryFilesystemTree", () => {
afterEach(() => {
restoreDirectoryLimitEnv();
});
test("renders empty tree", () => {
const tree = renderMemoryFilesystemTree([], []);
expect(tree).toContain("/memory/");
@@ -145,6 +166,123 @@ describe("renderMemoryFilesystemTree", () => {
// Should NOT have user/ directory anymore
expect(tree).not.toContain("user/");
});
test("truncates very large trees and includes a notice", () => {
const detachedLabels = Array.from({ length: 2_000 }, (_, idx) => {
return `notes/topic-${String(idx).padStart(4, "0")}`;
});
const tree = renderMemoryFilesystemTree([], detachedLabels, {
maxLines: 50,
maxChars: 2_000,
});
const lines = tree.split("\n");
expect(lines.length).toBeLessThanOrEqual(50);
expect(tree.length).toBeLessThanOrEqual(2_000);
expect(tree).toContain("[Tree truncated: showing");
expect(tree).toContain("omitted.");
});
test("truncates within wide folders and adds an omission marker", () => {
const detachedLabels = Array.from({ length: 200 }, (_, idx) => {
return `notes/topic-${String(idx).padStart(4, "0")}`;
});
const tree = renderMemoryFilesystemTree([], detachedLabels, {
maxLines: 500,
maxChars: 20_000,
maxChildrenPerDir: 5,
});
expect(tree).toContain("… (195 more entries)");
expect(tree).not.toContain("topic-0199.md");
expect(tree).not.toContain("[Tree truncated: showing");
});
test("uses env overrides for per-folder child caps", () => {
process.env[DIRECTORY_LIMIT_ENV.memfsTreeMaxChildrenPerDir] = "3";
const detachedLabels = Array.from({ length: 10 }, (_, idx) => {
return `notes/topic-${String(idx).padStart(4, "0")}`;
});
const tree = renderMemoryFilesystemTree([], detachedLabels, {
maxLines: 500,
maxChars: 20_000,
});
expect(tree).toContain("… (7 more entries)");
expect(tree).not.toContain("topic-0009.md");
});
test("applies leaf truncation within nested system folders", () => {
const systemLabels = Array.from({ length: 60 }, (_, idx) => {
return `project/notes/item-${String(idx).padStart(4, "0")}`;
});
const tree = renderMemoryFilesystemTree(systemLabels, [], {
maxLines: 500,
maxChars: 20_000,
maxChildrenPerDir: 5,
});
expect(tree).toContain("notes/");
expect(tree).toContain("item-0000.md");
expect(tree).toContain("… (55 more entries)");
expect(tree).not.toContain("item-0059.md");
});
test("retains leaf omission markers when global caps also truncate", () => {
const detachedLabels = [
...Array.from({ length: 200 }, (_, idx) => {
return `journal/entry-${String(idx).padStart(4, "0")}`;
}),
...Array.from({ length: 200 }, (_, idx) => {
return `notes/topic-${String(idx).padStart(4, "0")}`;
}),
];
const tree = renderMemoryFilesystemTree([], detachedLabels, {
maxLines: 13,
maxChars: 5_000,
maxChildrenPerDir: 5,
});
expect(tree).toContain("… (195 more entries)");
expect(tree).toContain("[Tree truncated: showing");
});
test("uses env overrides for default tree limits", () => {
process.env[DIRECTORY_LIMIT_ENV.memfsTreeMaxLines] = "20";
process.env[DIRECTORY_LIMIT_ENV.memfsTreeMaxChars] = "500";
const detachedLabels = Array.from({ length: 2_000 }, (_, idx) => {
return `notes/topic-${String(idx).padStart(4, "0")}`;
});
const tree = renderMemoryFilesystemTree([], detachedLabels);
const lines = tree.split("\n");
expect(lines.length).toBeLessThanOrEqual(20);
expect(tree.length).toBeLessThanOrEqual(500);
expect(tree).toContain("[Tree truncated: showing");
});
test("falls back to defaults for invalid env overrides", () => {
process.env[DIRECTORY_LIMIT_ENV.memfsTreeMaxLines] = "invalid";
process.env[DIRECTORY_LIMIT_ENV.memfsTreeMaxChars] = "-1";
const detachedLabels = Array.from({ length: 2_000 }, (_, idx) => {
return `notes/topic-${String(idx).padStart(4, "0")}`;
});
const tree = renderMemoryFilesystemTree([], detachedLabels, {
maxChildrenPerDir: 10_000,
});
// Default max lines is 500; ensure invalid env did not force tiny values.
expect(tree.split("\n").length).toBeGreaterThan(100);
expect(tree).toContain("[Tree truncated: showing");
});
});
describe("syncMemoryFilesystem", () => {

View File

@@ -1,16 +1,56 @@
import { describe, expect, test } from "bun:test";
import { afterEach, describe, expect, test } from "bun:test";
import { promises as fs } from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { list_dir } from "../tools/impl/ListDirCodex.js";
import { DIRECTORY_LIMIT_ENV } from "../utils/directoryLimits";
const DIRECTORY_LIMIT_ENV_KEYS = Object.values(DIRECTORY_LIMIT_ENV);
const ORIGINAL_DIRECTORY_ENV = Object.fromEntries(
DIRECTORY_LIMIT_ENV_KEYS.map((key) => [key, process.env[key]]),
) as Record<string, string | undefined>;
function restoreDirectoryLimitEnv(): void {
for (const key of DIRECTORY_LIMIT_ENV_KEYS) {
const original = ORIGINAL_DIRECTORY_ENV[key];
if (original === undefined) {
delete process.env[key];
} else {
process.env[key] = original;
}
}
}
describe("list_dir codex tool", () => {
let tempDir: string;
let tempDirs: string[] = [];
afterEach(async () => {
restoreDirectoryLimitEnv();
await Promise.all(
tempDirs.map((dir) => fs.rm(dir, { recursive: true, force: true })),
);
tempDirs = [];
});
test("uses env overrides for per-folder child cap", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxChildrenPerDir] = "3";
const structure: Record<string, string | null> = {};
for (let i = 0; i < 10; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({ dir_path: dir, limit: 200, depth: 2 });
expect(result.content).toContain("… (7 more entries)");
expect(result.content).not.toContain("file-0009.txt");
});
async function setupTempDir(): Promise<string> {
if (!tempDir) {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "list-dir-test-"));
}
const tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "list-dir-test-"));
tempDirs.push(tempDir);
return tempDir;
}
@@ -61,12 +101,15 @@ describe("list_dir codex tool", () => {
// Skip first 2 entries
const result = await list_dir({ dir_path: dir, offset: 3, limit: 10 });
// Should not contain first two entries (when sorted alphabetically)
const lines = result.content.split("\n");
// First line is "Absolute path: ..."
const lines = result.content.split("\n");
expect(lines[0]).toContain("Absolute path:");
// Remaining lines should be limited entries
expect(lines.length).toBeGreaterThan(1);
// Entries should start at ccc.txt after skipping aaa/bbb
expect(result.content).toContain("ccc.txt");
expect(result.content).toContain("ddd.txt");
expect(result.content).not.toContain("aaa.txt");
expect(result.content).not.toContain("bbb.txt");
});
test("respects limit parameter", async () => {
@@ -80,8 +123,9 @@ describe("list_dir codex tool", () => {
const result = await list_dir({ dir_path: dir, limit: 2 });
// Should have "More than 2 entries found" message
expect(result.content).toContain("More than 2 entries found");
expect(result.content).toContain(
"More entries available. Use offset=3 to continue.",
);
});
test("respects depth parameter", async () => {
@@ -136,21 +180,49 @@ describe("list_dir codex tool", () => {
test("throws error for offset < 1", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, offset: 0 })).rejects.toThrow(
"offset must be a 1-indexed entry number",
"offset must be a positive integer (1-indexed)",
);
});
test("throws error for non-integer offset", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, offset: 1.5 })).rejects.toThrow(
"offset must be a positive integer (1-indexed)",
);
});
test("throws error for very large offset", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, offset: 10_001 })).rejects.toThrow(
"offset must be less than or equal to 10,000",
);
});
test("throws error for limit < 1", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, limit: 0 })).rejects.toThrow(
"limit must be greater than zero",
"limit must be a positive integer",
);
});
test("throws error for non-integer limit", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, limit: 2.5 })).rejects.toThrow(
"limit must be a positive integer",
);
});
test("throws error for depth < 1", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, depth: 0 })).rejects.toThrow(
"depth must be greater than zero",
"depth must be a positive integer",
);
});
test("throws error for non-integer depth", async () => {
const dir = await setupTempDir();
await expect(list_dir({ dir_path: dir, depth: 1.2 })).rejects.toThrow(
"depth must be a positive integer",
);
});
@@ -164,4 +236,153 @@ describe("list_dir codex tool", () => {
const lines = result.content.split("\n").filter((l) => l.trim() !== "");
expect(lines.length).toBe(1);
});
test("caps oversized limit/depth requests and reports capping", async () => {
const structure: Record<string, string | null> = {};
for (let i = 0; i < 260; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({
dir_path: dir,
limit: 1000,
depth: 99,
});
expect(result.content).toContain(
"[Request capped: limit=1000->200, depth=99->5]",
);
expect(result.content).toMatch(/… \([\d,]+ more entries\)/);
expect(result.content).toContain(
"More entries may exist beyond the current truncated view.",
);
});
test("truncates large folders in-place with omission markers", async () => {
const structure: Record<string, string | null> = {};
for (let i = 0; i < 60; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({ dir_path: dir, limit: 200, depth: 2 });
expect(result.content).toContain("… (10 more entries)");
expect(result.content).not.toContain("file-0059.txt");
});
test("truncates nested folder children in-place", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxChildrenPerDir] = "5";
const structure: Record<string, string | null> = {};
for (let i = 0; i < 60; i++) {
structure[`parent/child-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({ dir_path: dir, limit: 200, depth: 3 });
expect(result.content).toContain("parent/");
expect(result.content).toContain(" child-0000.txt");
expect(result.content).toContain(" … (55 more entries)");
expect(result.content).not.toContain("child-0059.txt");
});
test("offset paginates truncated view with stable omission marker ordering", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxChildrenPerDir] = "3";
const structure: Record<string, string | null> = {};
for (let i = 0; i < 10; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({
dir_path: dir,
offset: 4,
limit: 2,
depth: 2,
});
const lines = result.content.split("\n").slice(1);
expect(lines[0]).toBe("… (7 more entries)");
expect(result.content).toContain(
"More entries may exist beyond the current truncated view.",
);
});
test("offset beyond truncated view is rejected", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxChildrenPerDir] = "3";
const structure: Record<string, string | null> = {};
for (let i = 0; i < 10; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
await expect(
list_dir({ dir_path: dir, offset: 5, limit: 1, depth: 2 }),
).rejects.toThrow(
"offset exceeds available entries in current view (max offset: 4)",
);
});
test("does not traverse subdirectories omitted by per-folder cap", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxChildrenPerDir] = "1";
const dir = await createStructure({
"alpha/visible.txt": "visible",
"zeta/deep/hidden.txt": "hidden",
});
const result = await list_dir({ dir_path: dir, limit: 200, depth: 5 });
expect(result.content).toContain("alpha/");
expect(result.content).toContain(" visible.txt");
expect(result.content).toContain("… (1 more entries)");
expect(result.content).not.toContain("zeta/");
expect(result.content).not.toContain("hidden.txt");
});
test("uses env overrides for list_dir caps", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxLimit] = "3";
process.env[DIRECTORY_LIMIT_ENV.listDirMaxDepth] = "2";
process.env[DIRECTORY_LIMIT_ENV.listDirMaxOffset] = "99";
process.env[DIRECTORY_LIMIT_ENV.listDirMaxCollectedEntries] = "99";
const structure: Record<string, string | null> = {};
for (let i = 0; i < 25; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({ dir_path: dir, limit: 50, depth: 10 });
expect(result.content).toContain(
"[Request capped: limit=50->3, depth=10->2]",
);
expect(result.content).toContain(
"More entries available. Use offset=4 to continue.",
);
});
test("falls back to defaults for invalid list_dir env overrides", async () => {
process.env[DIRECTORY_LIMIT_ENV.listDirMaxLimit] = "invalid";
process.env[DIRECTORY_LIMIT_ENV.listDirMaxDepth] = "-1";
const structure: Record<string, string | null> = {};
for (let i = 0; i < 260; i++) {
structure[`file-${String(i).padStart(4, "0")}.txt`] = String(i);
}
const dir = await createStructure(structure);
const result = await list_dir({ dir_path: dir, limit: 1000, depth: 99 });
// Defaults should still apply when env values are invalid.
expect(result.content).toContain(
"[Request capped: limit=1000->200, depth=99->5]",
);
});
});

View File

@@ -1,9 +1,13 @@
import { promises as fs } from "node:fs";
import * as path from "node:path";
import { getDirectoryLimits } from "../../utils/directoryLimits.js";
import { validateRequiredParams } from "./validation.js";
const MAX_ENTRY_LENGTH = 500;
const INDENTATION_SPACES = 2;
const DEFAULT_OFFSET = 1;
const DEFAULT_LIMIT = 25;
const DEFAULT_DEPTH = 2;
interface ListDirCodexArgs {
dir_path: string;
@@ -20,39 +24,77 @@ interface DirEntry {
name: string; // Full relative path for sorting
displayName: string; // Just the filename for display
depth: number; // Indentation depth
kind: "directory" | "file" | "symlink" | "other";
kind: "directory" | "file" | "symlink" | "other" | "omitted";
}
interface CollectEntriesResult {
hitCollectionCap: boolean;
hitFolderTruncation: boolean;
}
/**
* Codex-style list_dir tool.
* Lists entries with pagination (offset/limit) and depth control.
* Lists entries with pagination and depth control.
*
* Defaults:
* - offset: 1 (1-indexed)
* - limit: 25
* - depth: 2 (immediate children + one nested level)
*/
export async function list_dir(
args: ListDirCodexArgs,
): Promise<ListDirCodexResult> {
validateRequiredParams(args, ["dir_path"], "list_dir");
const limits = getDirectoryLimits();
const { dir_path, offset = 1, limit = 25, depth = 2 } = args;
const {
dir_path,
offset = DEFAULT_OFFSET,
limit = DEFAULT_LIMIT,
depth = DEFAULT_DEPTH,
} = args;
const userCwd = process.env.USER_CWD || process.cwd();
const resolvedPath = path.isAbsolute(dir_path)
? dir_path
: path.resolve(userCwd, dir_path);
if (offset < 1) {
throw new Error("offset must be a 1-indexed entry number");
if (!Number.isInteger(offset) || offset < 1) {
throw new Error("offset must be a positive integer (1-indexed)");
}
if (limit < 1) {
throw new Error("limit must be greater than zero");
if (offset > limits.listDirMaxOffset) {
throw new Error(
`offset must be less than or equal to ${limits.listDirMaxOffset.toLocaleString()}`,
);
}
if (depth < 1) {
throw new Error("depth must be greater than zero");
if (!Number.isInteger(limit) || limit < 1) {
throw new Error("limit must be a positive integer");
}
const entries = await listDirSlice(resolvedPath, offset, limit, depth);
if (!Number.isInteger(depth) || depth < 1) {
throw new Error("depth must be a positive integer");
}
const effectiveLimit = Math.min(limit, limits.listDirMaxLimit);
const effectiveDepth = Math.min(depth, limits.listDirMaxDepth);
const entries = await listDirSlice(
resolvedPath,
offset,
effectiveLimit,
effectiveDepth,
limits.listDirMaxCollectedEntries,
limits.listDirMaxChildrenPerDir,
);
const output = [`Absolute path: ${resolvedPath}`, ...entries];
if (effectiveLimit !== limit || effectiveDepth !== depth) {
output.push(
`[Request capped: limit=${limit}->${effectiveLimit}, depth=${depth}->${effectiveDepth}]`,
);
}
return { content: output.join("\n") };
}
@@ -64,9 +106,21 @@ async function listDirSlice(
offset: number,
limit: number,
maxDepth: number,
maxCollectedEntries: number,
maxChildrenPerDir: number,
): Promise<string[]> {
const entries: DirEntry[] = [];
await collectEntries(dirPath, "", maxDepth, entries);
// Collect one extra entry when possible so callers can tell if more data exists.
const maxEntriesToCollect = Math.min(offset + limit, maxCollectedEntries);
const { hitCollectionCap, hitFolderTruncation } = await collectEntries(
dirPath,
"",
maxDepth,
entries,
maxEntriesToCollect,
maxChildrenPerDir,
);
if (entries.length === 0) {
return [];
@@ -74,7 +128,9 @@ async function listDirSlice(
const startIndex = offset - 1;
if (startIndex >= entries.length) {
throw new Error("offset exceeds directory entry count");
throw new Error(
`offset exceeds available entries in current view (max offset: ${entries.length.toLocaleString()})`,
);
}
const remainingEntries = entries.length - startIndex;
@@ -91,7 +147,11 @@ async function listDirSlice(
}
if (endIndex < entries.length) {
formatted.push(`More than ${cappedLimit} entries found`);
formatted.push(
`More entries available. Use offset=${endIndex + 1} to continue.`,
);
} else if (hitCollectionCap || hitFolderTruncation) {
formatted.push("More entries may exist beyond the current truncated view.");
}
return formatted;
@@ -105,12 +165,19 @@ async function collectEntries(
relativePrefix: string,
remainingDepth: number,
entries: DirEntry[],
): Promise<void> {
maxEntriesToCollect: number,
maxChildrenPerDir: number,
): Promise<CollectEntriesResult> {
const queue: Array<{ absPath: string; prefix: string; depth: number }> = [
{ absPath: dirPath, prefix: relativePrefix, depth: remainingDepth },
];
let hitFolderTruncation = false;
while (queue.length > 0) {
if (entries.length >= maxEntriesToCollect) {
return { hitCollectionCap: true, hitFolderTruncation };
}
const current = queue.shift();
if (!current) break;
const { absPath, prefix, depth } = current;
@@ -162,7 +229,38 @@ async function collectEntries(
// Sort entries alphabetically
dirEntries.sort((a, b) => a.entry.name.localeCompare(b.entry.name));
for (const item of dirEntries) {
const visibleEntries = dirEntries.slice(0, maxChildrenPerDir);
const omittedEntries = Math.max(
0,
dirEntries.length - visibleEntries.length,
);
if (omittedEntries > 0) {
hitFolderTruncation = true;
const omittedSortKey = formatEntryName(
`${prefix ? `${prefix}/` : ""}\uffff-omitted`,
);
const omittedDepth = prefix ? prefix.split(path.sep).length : 0;
visibleEntries.push({
absPath,
relativePath: prefix,
kind: "omitted",
entry: {
name: omittedSortKey,
displayName: `… (${omittedEntries.toLocaleString()} more entries)`,
depth: omittedDepth,
kind: "omitted",
},
});
}
for (const item of visibleEntries) {
if (entries.length >= maxEntriesToCollect) {
return { hitCollectionCap: true, hitFolderTruncation };
}
// Queue subdirectories for traversal if depth allows
if (item.kind === "directory" && depth > 1) {
queue.push({
@@ -174,6 +272,8 @@ async function collectEntries(
entries.push(item.entry);
}
}
return { hitCollectionCap: false, hitFolderTruncation };
}
/**
@@ -214,6 +314,8 @@ function formatEntryLine(entry: DirEntry): string {
case "other":
name += "?";
break;
case "omitted":
break;
default:
// "file" type has no suffix
break;

View File

@@ -0,0 +1,113 @@
/**
* Centralized directory/memfs limits with env overrides for rapid testing.
*/
/**
 * Environment-variable names for overriding directory/memfs limits at
 * runtime. Keys mirror the fields of DirectoryLimits; values are the
 * env var names read by getDirectoryLimits.
 */
export const DIRECTORY_LIMIT_ENV = {
  // Memory-filesystem tree rendering caps.
  memfsTreeMaxLines: "LETTA_MEMFS_TREE_MAX_LINES",
  memfsTreeMaxChars: "LETTA_MEMFS_TREE_MAX_CHARS",
  memfsTreeMaxChildrenPerDir: "LETTA_MEMFS_TREE_MAX_CHILDREN_PER_DIR",
  // list_dir tool request/traversal caps.
  listDirMaxLimit: "LETTA_LIST_DIR_MAX_LIMIT",
  listDirMaxDepth: "LETTA_LIST_DIR_MAX_DEPTH",
  listDirMaxOffset: "LETTA_LIST_DIR_MAX_OFFSET",
  listDirMaxCollectedEntries: "LETTA_LIST_DIR_MAX_COLLECTED_ENTRIES",
  listDirMaxChildrenPerDir: "LETTA_LIST_DIR_MAX_CHILDREN_PER_DIR",
} as const;
/**
 * Default limit values, used when the corresponding DIRECTORY_LIMIT_ENV
 * variable is unset, non-integer, or out of its accepted range.
 */
export const DIRECTORY_LIMIT_DEFAULTS = {
  // Memory-filesystem tree rendering: total lines, total characters,
  // and entries shown per directory before an omission marker.
  memfsTreeMaxLines: 500,
  memfsTreeMaxChars: 20_000,
  memfsTreeMaxChildrenPerDir: 50,
  // list_dir: hard caps applied to caller-supplied limit/depth/offset,
  // the total entries collected per call, and per-directory children.
  listDirMaxLimit: 200,
  listDirMaxDepth: 5,
  listDirMaxOffset: 10_000,
  listDirMaxCollectedEntries: 12_000,
  listDirMaxChildrenPerDir: 50,
} as const;
/**
 * Resolved directory/memfs limit values: the defaults with any valid
 * env overrides applied. Field names correspond one-to-one with the
 * keys of DIRECTORY_LIMIT_ENV and DIRECTORY_LIMIT_DEFAULTS.
 */
export interface DirectoryLimits {
  memfsTreeMaxLines: number;
  memfsTreeMaxChars: number;
  memfsTreeMaxChildrenPerDir: number;
  listDirMaxLimit: number;
  listDirMaxDepth: number;
  listDirMaxOffset: number;
  listDirMaxCollectedEntries: number;
  listDirMaxChildrenPerDir: number;
}
/**
 * Parse an env-var override as a base-10 integer clamped to [min, max].
 *
 * Returns `fallback` when the value is missing, blank, not an exact
 * integer, or outside the inclusive [min, max] range.
 *
 * Note: unlike Number.parseInt, this rejects partial matches such as
 * "50abc" and non-integer forms such as "3.5" instead of silently
 * truncating them — invalid overrides fall back to the default, which
 * is the documented behavior for bad env values.
 *
 * @param value - Raw env-var string (may be undefined).
 * @param fallback - Value returned for missing/invalid/out-of-range input.
 * @param min - Smallest accepted value (inclusive).
 * @param max - Largest accepted value (inclusive).
 */
function parsePositiveIntEnv(
  value: string | undefined,
  fallback: number,
  min: number,
  max: number,
): number {
  if (!value || value.trim() === "") {
    return fallback;
  }
  // Number() rejects trailing junk ("50abc" -> NaN) that parseInt would
  // silently accept; Number.isInteger also screens out NaN/Infinity/floats.
  const parsed = Number(value.trim());
  if (!Number.isInteger(parsed)) {
    return fallback;
  }
  if (parsed < min || parsed > max) {
    return fallback;
  }
  return parsed;
}
/**
 * Resolve the effective directory/memfs limits: start from
 * DIRECTORY_LIMIT_DEFAULTS and apply any valid env overrides, each
 * clamped to a sane [min, max] range.
 *
 * @param env - Environment to read overrides from (defaults to process.env).
 * @returns The fully resolved limit set.
 */
export function getDirectoryLimits(
  env: NodeJS.ProcessEnv = process.env,
): DirectoryLimits {
  // Resolve one limit: env override when present and within [min, max],
  // otherwise the compiled-in default for that key.
  const resolve = (
    key: keyof DirectoryLimits,
    min: number,
    max: number,
  ): number =>
    parsePositiveIntEnv(
      env[DIRECTORY_LIMIT_ENV[key]],
      DIRECTORY_LIMIT_DEFAULTS[key],
      min,
      max,
    );
  return {
    memfsTreeMaxLines: resolve("memfsTreeMaxLines", 2, 50_000),
    memfsTreeMaxChars: resolve("memfsTreeMaxChars", 128, 5_000_000),
    memfsTreeMaxChildrenPerDir: resolve("memfsTreeMaxChildrenPerDir", 1, 5_000),
    listDirMaxLimit: resolve("listDirMaxLimit", 1, 10_000),
    listDirMaxDepth: resolve("listDirMaxDepth", 1, 100),
    listDirMaxOffset: resolve("listDirMaxOffset", 1, 1_000_000),
    listDirMaxCollectedEntries: resolve(
      "listDirMaxCollectedEntries",
      10,
      2_000_000,
    ),
    listDirMaxChildrenPerDir: resolve("listDirMaxChildrenPerDir", 1, 5_000),
  };
}