feat(tools): add client-side memory_apply_patch for Codex toolsets (#1485)

Co-authored-by: Letta Code <noreply@letta.com>
This commit is contained in:
Sarah Wooders
2026-03-23 14:55:37 -07:00
committed by GitHub
parent 457ccd8988
commit 7b29a0005f
17 changed files with 1290 additions and 37 deletions

View File

@@ -202,7 +202,7 @@ const DynamicPreview: React.FC<DynamicPreviewProps> = ({
);
}
if (t === "apply_patch" || t === "applypatch") {
if (t === "apply_patch" || t === "applypatch" || t === "memory_apply_patch") {
const inputVal = parsedArgs?.input;
if (typeof inputVal === "string") {
const operations = parsePatchOperations(inputVal);
@@ -683,7 +683,12 @@ export const ApprovalDialog = memo(function ApprovalDialog({
// For Patch tools - parse hunks directly (patches ARE diffs, no need to recompute)
const t = approvalRequest.toolName.toLowerCase();
if ((t === "apply_patch" || t === "applypatch") && parsedArgs?.input) {
if (
(t === "apply_patch" ||
t === "applypatch" ||
t === "memory_apply_patch") &&
parsedArgs?.input
) {
const operations = parsePatchOperations(parsedArgs.input as string);
for (const op of operations) {
const key = `${toolCallId}:${op.path}`;
@@ -747,7 +752,11 @@ export const ApprovalDialog = memo(function ApprovalDialog({
if (!approvalRequest) return "";
const t = approvalRequest.toolName.toLowerCase();
// For patch tools, determine header from operation type
if (t === "apply_patch" || t === "applypatch") {
if (
t === "apply_patch" ||
t === "applypatch" ||
t === "memory_apply_patch"
) {
if (parsedArgs?.input && typeof parsedArgs.input === "string") {
const operations = parsePatchOperations(parsedArgs.input);
if (operations.length > 0) {
@@ -819,7 +828,12 @@ export const ApprovalDialog = memo(function ApprovalDialog({
}
}
// For patch tools, show file path(s) being modified
if ((t === "apply_patch" || t === "applypatch") && parsedArgs.input) {
if (
(t === "apply_patch" ||
t === "applypatch" ||
t === "memory_apply_patch") &&
parsedArgs.input
) {
const operations = parsePatchOperations(parsedArgs.input as string);
if (operations.length > 0) {
const { relative } = require("node:path");
@@ -958,6 +972,7 @@ function getHeaderLabel(toolName: string): string {
if (t === "list_dir") return "List Files";
if (t === "grep_files") return "Search in Files";
if (t === "apply_patch") return "Apply Patch";
if (t === "memory_apply_patch") return "Memory Patch";
if (t === "update_plan") return "Plan update";
// Codex toolset (PascalCase → lowercased)
if (t === "shellcommand") return "Shell command";

View File

@@ -161,7 +161,8 @@ export const ApprovalPreview = memo(
toolName === "str_replace_editor" ||
toolName === "str_replace_based_edit_tool" ||
toolName === "apply_patch" ||
toolName === "ApplyPatch"
toolName === "ApplyPatch" ||
toolName === "memory_apply_patch"
) {
const headerText = getFileEditHeader(toolName, toolArgs);
@@ -171,7 +172,9 @@ export const ApprovalPreview = memo(
// Handle patch tools (can have multiple files)
if (
args.input &&
(toolName === "apply_patch" || toolName === "ApplyPatch")
(toolName === "apply_patch" ||
toolName === "ApplyPatch" ||
toolName === "memory_apply_patch")
) {
const operations = parsePatchOperations(args.input);

View File

@@ -59,7 +59,7 @@ function getHeaderText(fileEdit: FileEditInfo): string {
const t = fileEdit.toolName.toLowerCase();
// Handle patch tools (multi-file)
if (t === "apply_patch" || t === "applypatch") {
if (t === "apply_patch" || t === "applypatch" || t === "memory_apply_patch") {
if (fileEdit.patchInput) {
const operations = parsePatchOperations(fileEdit.patchInput);
if (operations.length > 1) {

View File

@@ -22,12 +22,11 @@ export function MemoryDiffRenderer({
try {
const args = JSON.parse(argsText);
// Handle memory_apply_patch tool (unified diff format)
// Handle memory_apply_patch tool (codex-style apply_patch input)
if (toolName === "memory_apply_patch") {
const label = args.label || "unknown";
const patch = args.patch || "";
const patch = typeof args.input === "string" ? args.input : "";
return (
<PatchDiffRenderer label={label} patch={patch} columns={columns} />
<PatchDiffRenderer label="memory" patch={patch} columns={columns} />
);
}

View File

@@ -35,6 +35,7 @@ export function getDisplayToolName(rawName: string): string {
if (rawName === "list_dir") return "LS";
if (rawName === "grep_files") return "Search";
if (rawName === "apply_patch") return "Patch";
if (rawName === "memory_apply_patch") return "Memory Patch";
// Codex toolset (PascalCase)
if (rawName === "UpdatePlan") return "Planning";

View File

@@ -189,6 +189,16 @@ export async function computeDiffPreviews(
}
// Delete operations don't produce diffs
}
} else if (toolName === "memory_apply_patch" && toolArgs.input) {
const operations = parsePatchOperations(toolArgs.input as string);
for (const op of operations) {
if (op.kind === "add" || op.kind === "update") {
const result = parsePatchToAdvancedDiff(op.patchLines, op.path);
if (result) {
previews.push(toDiffPreview(result, basename(op.path)));
}
}
}
}
} catch {
// Ignore diff computation errors — return whatever we have so far

View File

@@ -282,6 +282,7 @@ class PermissionModeManager {
"MultiEdit",
"NotebookEdit",
"apply_patch",
"memory_apply_patch",
"replace",
"write_file",
].includes(toolName)
@@ -339,6 +340,7 @@ class PermissionModeManager {
// Codex toolset (snake_case and PascalCase)
"apply_patch",
"ApplyPatch",
"memory_apply_patch",
// Gemini toolset (snake_case and PascalCase)
"write_file_gemini",
"WriteFileGemini",
@@ -362,7 +364,9 @@ class PermissionModeManager {
// ApplyPatch/apply_patch: extract all file directives.
if (
(toolName === "ApplyPatch" || toolName === "apply_patch") &&
(toolName === "ApplyPatch" ||
toolName === "apply_patch" ||
toolName === "memory_apply_patch") &&
toolArgs?.input
) {
const input = toolArgs.input as string;

View File

@@ -5,7 +5,7 @@ import { createAgentWithBaseToolsRecovery } from "../../agent/create";
function missingBaseToolsError(): Error & { status: number } {
return Object.assign(
new Error(
`400 {"detail":"Tools not found by name: {'fetch_webpage', 'memory'}"}`,
`400 {"detail":"Tools not found by name: {'fetch_webpage', 'memory_apply_patch'}"}`,
),
{ status: 400 },
);
@@ -25,7 +25,7 @@ describe("createAgentWithBaseToolsRecovery", () => {
const agent = await createAgentWithBaseToolsRecovery(
createWithTools,
["memory", "web_search", "fetch_webpage"],
["memory_apply_patch", "web_search", "fetch_webpage"],
addBaseTools,
);
@@ -33,12 +33,12 @@ describe("createAgentWithBaseToolsRecovery", () => {
expect(addBaseTools).toHaveBeenCalledTimes(1);
expect(createWithTools).toHaveBeenCalledTimes(2);
expect(createWithTools.mock.calls[0]?.[0]).toEqual([
"memory",
"memory_apply_patch",
"web_search",
"fetch_webpage",
]);
expect(createWithTools.mock.calls[1]?.[0]).toEqual([
"memory",
"memory_apply_patch",
"web_search",
"fetch_webpage",
]);
@@ -59,7 +59,7 @@ describe("createAgentWithBaseToolsRecovery", () => {
const agent = await createAgentWithBaseToolsRecovery(
createWithTools,
["memory", "web_search", "fetch_webpage"],
["memory_apply_patch", "web_search", "fetch_webpage"],
addBaseTools,
);

View File

@@ -5,6 +5,12 @@ import {
isShellOutputTool,
} from "../../cli/helpers/toolNameMapping";
describe("toolNameMapping display mappings", () => {
test("maps memory_apply_patch to a friendly label", () => {
expect(getDisplayToolName("memory_apply_patch")).toBe("Memory Patch");
});
});
describe("toolNameMapping.isMemoryTool", () => {
test("recognizes all supported memory tool names", () => {
expect(isMemoryTool("memory")).toBe(true);

View File

@@ -217,6 +217,25 @@ describe("computeDiffPreviews", () => {
expect(previews.map((p) => p.fileName).sort()).toEqual(["a.txt", "b.txt"]);
});
it("returns one preview per file for memory_apply_patch", async () => {
const patch = [
"*** Begin Patch",
"*** Update File: system/a.md",
"@@ -1 +1 @@",
"-old",
"+new",
"*** Add File: reference/b.md",
"+hello",
"*** End Patch",
].join("\n");
const previews = await computeDiffPreviews("memory_apply_patch", {
input: patch,
});
expect(previews).toHaveLength(2);
expect(previews.map((p) => p.fileName).sort()).toEqual(["a.md", "b.md"]);
});
it("resolves relative file paths against the provided working directory", async () => {
const tempRoot = await mkdtemp(
path.join(os.tmpdir(), "letta-diff-preview-"),

View File

@@ -0,0 +1,259 @@
import { afterEach, beforeEach, describe, expect, mock, test } from "bun:test";
import { execFile as execFileCb } from "node:child_process";
import { mkdtempSync, writeFileSync } from "node:fs";
import { rm } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { promisify } from "node:util";
const execFile = promisify(execFileCb);
const TEST_AGENT_ID = "agent-test-memory-apply-patch";
const TEST_AGENT_NAME = "Bob";
mock.module("../../agent/context", () => ({
getCurrentAgentId: () => TEST_AGENT_ID,
}));
mock.module("../../agent/client", () => ({
getClient: mock(() =>
Promise.resolve({
agents: {
retrieve: mock(() => Promise.resolve({ name: TEST_AGENT_NAME })),
},
}),
),
getServerUrl: () => "http://localhost:8283",
}));
const { memory_apply_patch } = await import(
"../../tools/impl/MemoryApplyPatch"
);
async function runGit(cwd: string, args: string[]): Promise<string> {
const { stdout } = await execFile("git", args, { cwd });
return String(stdout ?? "").trim();
}
describe("memory_apply_patch tool", () => {
let tempRoot: string;
let memoryDir: string;
let remoteDir: string;
const originalMemoryDir = process.env.MEMORY_DIR;
const originalAgentId = process.env.AGENT_ID;
const originalAgentName = process.env.AGENT_NAME;
beforeEach(async () => {
tempRoot = mkdtempSync(join(tmpdir(), "letta-memory-apply-patch-"));
memoryDir = join(tempRoot, "memory");
remoteDir = join(tempRoot, "remote.git");
await execFile("git", ["init", "--bare", remoteDir]);
await execFile("git", ["init", "-b", "main", memoryDir]);
await runGit(memoryDir, ["config", "user.name", "setup"]);
await runGit(memoryDir, ["config", "user.email", "setup@example.com"]);
await runGit(memoryDir, ["remote", "add", "origin", remoteDir]);
writeFileSync(join(memoryDir, ".gitkeep"), "", "utf8");
await runGit(memoryDir, ["add", ".gitkeep"]);
await runGit(memoryDir, ["commit", "-m", "initial"]);
await runGit(memoryDir, ["push", "-u", "origin", "main"]);
process.env.MEMORY_DIR = memoryDir;
process.env.AGENT_ID = TEST_AGENT_ID;
process.env.AGENT_NAME = TEST_AGENT_NAME;
});
afterEach(async () => {
if (originalMemoryDir === undefined) delete process.env.MEMORY_DIR;
else process.env.MEMORY_DIR = originalMemoryDir;
if (originalAgentId === undefined) delete process.env.AGENT_ID;
else process.env.AGENT_ID = originalAgentId;
if (originalAgentName === undefined) delete process.env.AGENT_NAME;
else process.env.AGENT_NAME = originalAgentName;
if (tempRoot) {
await rm(tempRoot, { recursive: true, force: true });
}
});
test("requires reason and input", async () => {
await expect(
memory_apply_patch({
input: "*** Begin Patch\n*** End Patch",
} as Parameters<typeof memory_apply_patch>[0]),
).rejects.toThrow(/missing required parameter/i);
});
test("adds and updates memory files with commit reason and agent author", async () => {
const seedPatch = [
"*** Begin Patch",
"*** Add File: system/contacts.md",
"+---",
"+description: Contacts",
"+---",
"+Sarah: cofounder",
"*** End Patch",
].join("\n");
await memory_apply_patch({
reason: "Create contacts memory via patch",
input: seedPatch,
});
const updatePatch = [
"*** Begin Patch",
"*** Update File: system/contacts.md",
"@@",
"-Sarah: cofounder",
"+Sarah: Letta cofounder",
"*** End Patch",
].join("\n");
await memory_apply_patch({
reason: "Refine contacts memory via patch",
input: updatePatch,
});
const content = await runGit(memoryDir, [
"show",
"HEAD:system/contacts.md",
]);
expect(content).toContain("Sarah: Letta cofounder");
const logOutput = await runGit(memoryDir, [
"log",
"-1",
"--pretty=format:%s%n%an%n%ae",
]);
const [subject, authorName, authorEmail] = logOutput.split("\n");
expect(subject).toBe("Refine contacts memory via patch");
expect(authorName).toBe(TEST_AGENT_NAME);
expect(authorEmail).toBe(`${TEST_AGENT_ID}@letta.com`);
});
test("rejects absolute paths outside MEMORY_DIR", async () => {
const patch = [
"*** Begin Patch",
"*** Add File: /tmp/outside.md",
"+hello",
"*** End Patch",
].join("\n");
await expect(
memory_apply_patch({
reason: "should fail",
input: patch,
}),
).rejects.toThrow(/only be used to modify files/i);
});
test("rejects editing read_only memory files", async () => {
await memory_apply_patch({
reason: "seed read only",
input: [
"*** Begin Patch",
"*** Add File: system/ro.md",
"+---",
"+description: Read only",
"+read_only: true",
"+---",
"+keep",
"*** End Patch",
].join("\n"),
});
await expect(
memory_apply_patch({
reason: "attempt edit ro",
input: [
"*** Begin Patch",
"*** Update File: system/ro.md",
"@@",
"-keep",
"+change",
"*** End Patch",
].join("\n"),
}),
).rejects.toThrow(/read_only/i);
});
test("returns error when push fails but keeps local commit", async () => {
await memory_apply_patch({
reason: "seed notes",
input: [
"*** Begin Patch",
"*** Add File: reference/history/notes.md",
"+old",
"*** End Patch",
].join("\n"),
});
await runGit(memoryDir, [
"remote",
"set-url",
"origin",
join(tempRoot, "missing-remote.git"),
]);
const reason = "Update notes with failing push";
await expect(
memory_apply_patch({
reason,
input: [
"*** Begin Patch",
"*** Update File: reference/history/notes.md",
"@@",
"-old",
"+new",
"*** End Patch",
].join("\n"),
}),
).rejects.toThrow(/committed .* but push failed/i);
const subject = await runGit(memoryDir, [
"log",
"-1",
"--pretty=format:%s",
]);
expect(subject).toBe(reason);
});
test("updates files that omit frontmatter limit", async () => {
await memory_apply_patch({
reason: "seed no-limit memory",
input: [
"*** Begin Patch",
"*** Add File: system/no-limit.md",
"+---",
"+description: No limit",
"+---",
"+before",
"*** End Patch",
].join("\n"),
});
await memory_apply_patch({
reason: "update no-limit memory",
input: [
"*** Begin Patch",
"*** Update File: system/no-limit.md",
"@@",
"-before",
"+after",
"*** End Patch",
].join("\n"),
});
const content = await runGit(memoryDir, [
"show",
"HEAD:system/no-limit.md",
]);
expect(content).toContain("description: No limit");
expect(content).not.toContain("limit:");
expect(content).toContain("after");
});
});

View File

@@ -0,0 +1,46 @@
Apply a codex-style patch to memory files in `$MEMORY_DIR`, then automatically commit and push the change.
This is similar to `apply_patch`, but scoped to the memory filesystem and with memory-aware guardrails.
- Required args:
- `reason` — git commit message for the memory change
- `input` — patch text using the standard apply_patch format
Patch format:
- `*** Begin Patch`
- `*** Add File: <path>`
- `*** Update File: <path>`
- optional `*** Move to: <path>`
- one or more `@@` hunks with ` `, `-`, `+` lines
- `*** Delete File: <path>`
- `*** End Patch`
Path rules:
- Relative paths are interpreted relative to the memory repo root
- Absolute paths are allowed only when under `$MEMORY_DIR`
- Paths outside the memory repo are rejected
Memory rules:
- Operates on markdown memory files (`.md`)
- Updated/deleted files must be valid memory files with frontmatter
- `read_only: true` files cannot be modified
- If adding a file without frontmatter, frontmatter is created automatically
Git behavior:
- Stages changed memory paths
- Commits with `reason`
- Uses agent identity author (`<agent_id>@letta.com`)
- Pushes to remote
Example:
```python
memory_apply_patch(
reason="Refine coding preferences",
input="""*** Begin Patch
*** Update File: system/human/prefs/coding.md
@@
-Use broad abstractions
+Prefer small focused helpers
*** End Patch"""
)
```

View File

@@ -0,0 +1,871 @@
import { execFile as execFileCb } from "node:child_process";
import { existsSync } from "node:fs";
import {
access,
mkdir,
readFile,
rm,
stat,
unlink,
writeFile,
} from "node:fs/promises";
import { homedir } from "node:os";
import { dirname, isAbsolute, relative, resolve } from "node:path";
import { promisify } from "node:util";
import { getClient } from "../../agent/client";
import { getCurrentAgentId } from "../../agent/context";
import { validateRequiredParams } from "./validation";
const execFile = promisify(execFileCb);
type ParsedPatchOp =
| {
kind: "add";
targetLabel: string;
targetRelPath: string;
contentLines: string[];
}
| {
kind: "update";
sourceLabel: string;
sourceRelPath: string;
targetLabel: string;
targetRelPath: string;
hunks: Hunk[];
}
| {
kind: "delete";
targetLabel: string;
targetRelPath: string;
};
interface Hunk {
lines: string[];
}
interface ParsedMemoryFile {
frontmatter: {
description: string;
read_only?: string;
};
body: string;
}
interface MemoryApplyPatchArgs {
reason: string;
input: string;
}
interface MemoryApplyPatchResult {
message: string;
}
async function getAgentIdentity(): Promise<{
agentId: string;
agentName: string;
}> {
const envAgentId = (
process.env.AGENT_ID ||
process.env.LETTA_AGENT_ID ||
""
).trim();
const contextAgentId = (() => {
try {
return getCurrentAgentId().trim();
} catch {
return "";
}
})();
const agentId = contextAgentId || envAgentId;
if (!agentId) {
throw new Error(
"memory_apply_patch: unable to resolve agent id for git author email",
);
}
let agentName = "";
try {
const client = await getClient();
const agent = await client.agents.retrieve(agentId);
agentName = (agent.name || "").trim();
} catch {
// best-effort fallback below
}
if (!agentName) {
agentName = (process.env.AGENT_NAME || "").trim() || agentId;
}
return { agentId, agentName };
}
export async function memory_apply_patch(
args: MemoryApplyPatchArgs,
): Promise<MemoryApplyPatchResult> {
validateRequiredParams(args, ["reason", "input"], "memory_apply_patch");
const reason = args.reason.trim();
if (!reason) {
throw new Error("memory_apply_patch: 'reason' must be a non-empty string");
}
const input = args.input;
if (typeof input !== "string" || !input.trim()) {
throw new Error("memory_apply_patch: 'input' must be a non-empty string");
}
const memoryDir = resolveMemoryDir();
ensureMemoryRepo(memoryDir);
const ops = parsePatchOperations(memoryDir, input);
if (ops.length === 0) {
throw new Error("memory_apply_patch: no file operations found in patch");
}
const pendingWrites = new Map<string, string>();
const pendingDeletes = new Set<string>();
const affectedPaths = new Set<string>();
const loadCurrentContent = async (
relPath: string,
sourcePathForErrors: string,
): Promise<string> => {
const absPath = resolveMemoryPath(memoryDir, relPath);
if (pendingDeletes.has(absPath) && !pendingWrites.has(absPath)) {
throw new Error(
`memory_apply_patch: file not found for update: ${sourcePathForErrors}`,
);
}
const pending = pendingWrites.get(absPath);
if (pending !== undefined) {
return pending;
}
const content = await readFile(absPath, "utf8").catch((error) => {
const message = error instanceof Error ? error.message : String(error);
throw new Error(
`memory_apply_patch: failed to read ${sourcePathForErrors}: ${message}`,
);
});
return content.replace(/\r\n/g, "\n");
};
for (const op of ops) {
if (op.kind === "add") {
const absPath = resolveMemoryFilePath(memoryDir, op.targetLabel);
if (pendingWrites.has(absPath)) {
throw new Error(
`memory_apply_patch: duplicate add/update target in patch: ${op.targetRelPath}`,
);
}
if (!(await isMissing(absPath))) {
throw new Error(
`memory_apply_patch: cannot add existing memory file: ${op.targetRelPath}`,
);
}
const rawContent = op.contentLines.join("\n");
const rendered = normalizeAddedContent(op.targetLabel, rawContent);
pendingWrites.set(absPath, rendered);
pendingDeletes.delete(absPath);
affectedPaths.add(toRepoRelative(memoryDir, absPath));
continue;
}
if (op.kind === "delete") {
const absPath = resolveMemoryFilePath(memoryDir, op.targetLabel);
await loadEditableMemoryFile(absPath, op.targetRelPath);
pendingWrites.delete(absPath);
pendingDeletes.add(absPath);
affectedPaths.add(toRepoRelative(memoryDir, absPath));
continue;
}
const sourceAbsPath = resolveMemoryFilePath(memoryDir, op.sourceLabel);
const targetAbsPath = resolveMemoryFilePath(memoryDir, op.targetLabel);
const currentContent = await loadCurrentContent(
op.sourceRelPath,
op.sourceRelPath,
);
const currentParsed = parseMemoryFile(currentContent);
if (currentParsed.frontmatter.read_only === "true") {
throw new Error(
`memory_apply_patch: ${op.sourceRelPath} is read_only and cannot be modified`,
);
}
let nextContent = currentContent;
for (const hunk of op.hunks) {
nextContent = applyHunk(nextContent, hunk.lines, op.sourceRelPath);
}
const validated = parseMemoryFile(nextContent);
if (validated.frontmatter.read_only === "true") {
throw new Error(
`memory_apply_patch: ${op.targetRelPath} cannot be written with read_only=true`,
);
}
pendingWrites.set(targetAbsPath, nextContent);
pendingDeletes.delete(targetAbsPath);
affectedPaths.add(toRepoRelative(memoryDir, targetAbsPath));
if (sourceAbsPath !== targetAbsPath) {
if (!pendingDeletes.has(sourceAbsPath)) {
pendingWrites.delete(sourceAbsPath);
pendingDeletes.add(sourceAbsPath);
}
affectedPaths.add(toRepoRelative(memoryDir, sourceAbsPath));
}
}
for (const [absPath, content] of pendingWrites.entries()) {
await mkdir(dirname(absPath), { recursive: true });
await writeFile(absPath, content, "utf8");
}
for (const absPath of pendingDeletes) {
if (pendingWrites.has(absPath)) continue;
if (await isMissing(absPath)) continue;
const stats = await stat(absPath);
if (stats.isDirectory()) {
await rm(absPath, { recursive: true, force: false });
} else {
await unlink(absPath);
}
}
const pathspecs = Array.from(affectedPaths).filter((p) => p.length > 0);
if (pathspecs.length === 0) {
return { message: "memory_apply_patch completed with no changed paths." };
}
const commitResult = await commitAndPush(memoryDir, pathspecs, reason);
if (!commitResult.committed) {
return {
message:
"memory_apply_patch made no effective changes; skipped commit and push.",
};
}
return {
message: `memory_apply_patch applied and pushed (${commitResult.sha?.slice(0, 7) ?? "unknown"}).`,
};
}
function parsePatchOperations(
memoryDir: string,
input: string,
): ParsedPatchOp[] {
const lines = input.split(/\r?\n/);
const beginIndex = lines.findIndex(
(line) => line.trim() === "*** Begin Patch",
);
if (beginIndex !== 0) {
throw new Error(
'memory_apply_patch: patch must start with "*** Begin Patch"',
);
}
const endIndex = lines.findIndex((line) => line.trim() === "*** End Patch");
if (endIndex === -1) {
throw new Error('memory_apply_patch: patch must end with "*** End Patch"');
}
for (let tail = endIndex + 1; tail < lines.length; tail += 1) {
if ((lines[tail] ?? "").trim().length > 0) {
throw new Error(
"memory_apply_patch: unexpected content after *** End Patch",
);
}
}
const ops: ParsedPatchOp[] = [];
let i = 1;
while (i < endIndex) {
const line = lines[i]?.trim();
if (!line) {
i += 1;
continue;
}
if (line.startsWith("*** Add File:")) {
const rawPath = line.replace("*** Add File:", "").trim();
const label = normalizeMemoryLabel(memoryDir, rawPath, "Add File path");
const targetRelPath = `${label}.md`;
i += 1;
const contentLines: string[] = [];
while (i < endIndex) {
const raw = lines[i];
if (raw === undefined || raw.startsWith("*** ")) {
break;
}
if (!raw.startsWith("+")) {
throw new Error(
`memory_apply_patch: invalid Add File line at ${i + 1}: expected '+' prefix`,
);
}
contentLines.push(raw.slice(1));
i += 1;
}
if (contentLines.length === 0) {
throw new Error(
`memory_apply_patch: Add File for ${rawPath} must include at least one + line`,
);
}
ops.push({
kind: "add",
targetLabel: label,
targetRelPath,
contentLines,
});
continue;
}
if (line.startsWith("*** Update File:")) {
const rawSourcePath = line.replace("*** Update File:", "").trim();
const sourceLabel = normalizeMemoryLabel(
memoryDir,
rawSourcePath,
"Update File path",
);
let targetLabel = sourceLabel;
i += 1;
if (i < endIndex) {
const moveLine = lines[i];
if (moveLine?.startsWith("*** Move to:")) {
const rawTargetPath = moveLine.replace("*** Move to:", "").trim();
targetLabel = normalizeMemoryLabel(
memoryDir,
rawTargetPath,
"Move to path",
);
i += 1;
}
}
const hunks: Hunk[] = [];
while (i < endIndex) {
const hLine = lines[i];
if (hLine === undefined || hLine.startsWith("*** ")) {
break;
}
if (!hLine.startsWith("@@")) {
throw new Error(
`memory_apply_patch: invalid Update File body at ${i + 1}: expected '@@' hunk header`,
);
}
i += 1;
const hunkLines: string[] = [];
while (i < endIndex) {
const l = lines[i];
if (l === undefined || l.startsWith("@@") || l.startsWith("*** ")) {
break;
}
if (l === "*** End of File") {
i += 1;
break;
}
if (
l.startsWith(" ") ||
l.startsWith("+") ||
l.startsWith("-") ||
l === ""
) {
hunkLines.push(l);
} else {
throw new Error(
`memory_apply_patch: invalid hunk line at ${i + 1}: expected one of ' ', '+', '-'`,
);
}
i += 1;
}
hunks.push({ lines: hunkLines });
}
if (hunks.length === 0) {
throw new Error(
`memory_apply_patch: Update File for ${rawSourcePath} has no hunks`,
);
}
ops.push({
kind: "update",
sourceLabel,
sourceRelPath: `${sourceLabel}.md`,
targetLabel,
targetRelPath: `${targetLabel}.md`,
hunks,
});
continue;
}
if (line.startsWith("*** Delete File:")) {
const rawPath = line.replace("*** Delete File:", "").trim();
const label = normalizeMemoryLabel(
memoryDir,
rawPath,
"Delete File path",
);
ops.push({
kind: "delete",
targetLabel: label,
targetRelPath: `${label}.md`,
});
i += 1;
continue;
}
throw new Error(
`memory_apply_patch: unknown patch directive at line ${i + 1}: ${line}`,
);
}
return ops;
}
function normalizeAddedContent(label: string, rawContent: string): string {
try {
const parsed = parseMemoryFile(rawContent);
return renderMemoryFile(parsed.frontmatter, parsed.body);
} catch {
return renderMemoryFile(
{
description: `Memory block ${label}`,
},
rawContent,
);
}
}
function resolveMemoryDir(): string {
const direct = process.env.MEMORY_DIR || process.env.LETTA_MEMORY_DIR;
if (direct && direct.trim().length > 0) {
return resolve(direct);
}
const contextAgentId = (() => {
try {
return getCurrentAgentId().trim();
} catch {
return "";
}
})();
const agentId =
contextAgentId ||
(process.env.AGENT_ID || process.env.LETTA_AGENT_ID || "").trim();
if (agentId && agentId.trim().length > 0) {
return resolve(homedir(), ".letta", "agents", agentId, "memory");
}
throw new Error(
"memory_apply_patch: unable to resolve memory directory. Ensure MEMORY_DIR (or AGENT_ID) is available.",
);
}
function ensureMemoryRepo(memoryDir: string): void {
if (!existsSync(memoryDir)) {
throw new Error(
`memory_apply_patch: memory directory does not exist: ${memoryDir}`,
);
}
if (!existsSync(resolve(memoryDir, ".git"))) {
throw new Error(
`memory_apply_patch: ${memoryDir} is not a git repository. This tool requires a git-backed memory filesystem.`,
);
}
}
function normalizeMemoryLabel(
memoryDir: string,
inputPath: string,
fieldName: string,
): string {
const raw = inputPath.trim();
if (!raw) {
throw new Error(
`memory_apply_patch: '${fieldName}' must be a non-empty string`,
);
}
if (raw.startsWith("~/") || raw.startsWith("$HOME/")) {
throw new Error(
`memory_apply_patch: '${fieldName}' must be a memory-relative file path, not a home-relative filesystem path`,
);
}
const isWindowsAbsolute = /^[a-zA-Z]:[\\/]/.test(raw);
if (isAbsolute(raw) || isWindowsAbsolute) {
const absolutePath = resolve(raw);
const relToMemory = relative(memoryDir, absolutePath);
if (
relToMemory &&
!relToMemory.startsWith("..") &&
!isAbsolute(relToMemory)
) {
return normalizeRelativeMemoryLabel(relToMemory, fieldName);
}
throw new Error(memoryPrefixError(memoryDir));
}
return normalizeRelativeMemoryLabel(raw, fieldName);
}
function normalizeRelativeMemoryLabel(
inputPath: string,
fieldName: string,
): string {
const raw = inputPath.trim();
if (!raw) {
throw new Error(
`memory_apply_patch: '${fieldName}' must be a non-empty string`,
);
}
const normalized = raw.replace(/\\/g, "/");
if (normalized.startsWith("/")) {
throw new Error(
`memory_apply_patch: '${fieldName}' must be a relative path like system/contacts.md`,
);
}
let label = normalized;
label = label.replace(/^memory\//, "");
label = label.replace(/\.md$/, "");
if (!label) {
throw new Error(
`memory_apply_patch: '${fieldName}' resolves to an empty memory label`,
);
}
const segments = label.split("/").filter(Boolean);
if (segments.length === 0) {
throw new Error(
`memory_apply_patch: '${fieldName}' resolves to an empty memory label`,
);
}
for (const segment of segments) {
if (segment === "." || segment === "..") {
throw new Error(
`memory_apply_patch: '${fieldName}' contains invalid path traversal segment`,
);
}
if (segment.includes("\0")) {
throw new Error(
`memory_apply_patch: '${fieldName}' contains invalid null bytes`,
);
}
}
return segments.join("/");
}
function memoryPrefixError(memoryDir: string): string {
return `The memory_apply_patch tool can only be used to modify files in {${memoryDir}} or provided as a relative path`;
}
function resolveMemoryPath(memoryDir: string, path: string): string {
const absolute = resolve(memoryDir, path);
const rel = relative(memoryDir, absolute);
if (rel.startsWith("..") || isAbsolute(rel)) {
throw new Error(
"memory_apply_patch: resolved path escapes memory directory",
);
}
return absolute;
}
function resolveMemoryFilePath(memoryDir: string, label: string): string {
return resolveMemoryPath(memoryDir, `${label}.md`);
}
function toRepoRelative(memoryDir: string, absolutePath: string): string {
const rel = relative(memoryDir, absolutePath);
if (!rel || rel.startsWith("..") || isAbsolute(rel)) {
throw new Error("memory_apply_patch: path is outside memory repository");
}
return rel.replace(/\\/g, "/");
}
async function loadEditableMemoryFile(
filePath: string,
sourcePath: string,
): Promise<ParsedMemoryFile> {
const content = await readFile(filePath, "utf8").catch((error) => {
const message = error instanceof Error ? error.message : String(error);
throw new Error(
`memory_apply_patch: failed to read ${sourcePath}: ${message}`,
);
});
const parsed = parseMemoryFile(content);
if (parsed.frontmatter.read_only === "true") {
throw new Error(
`memory_apply_patch: ${sourcePath} is read_only and cannot be modified`,
);
}
return parsed;
}
function parseMemoryFile(content: string): ParsedMemoryFile {
const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n?([\s\S]*)$/);
if (!match) {
throw new Error(
"memory_apply_patch: target file is missing required frontmatter",
);
}
const frontmatterText = match[1] ?? "";
const body = match[2] ?? "";
let description: string | undefined;
let readOnly: string | undefined;
for (const line of frontmatterText.split(/\r?\n/)) {
const idx = line.indexOf(":");
if (idx <= 0) continue;
const key = line.slice(0, idx).trim();
const value = line.slice(idx + 1).trim();
if (key === "description") {
description = value;
} else if (key === "read_only") {
readOnly = value;
}
}
if (!description || !description.trim()) {
throw new Error(
"memory_apply_patch: target file frontmatter is missing 'description'",
);
}
return {
frontmatter: {
description,
...(readOnly !== undefined ? { read_only: readOnly } : {}),
},
body,
};
}
function renderMemoryFile(
frontmatter: { description: string; read_only?: string },
body: string,
): string {
const description = frontmatter.description.trim();
if (!description) {
throw new Error("memory_apply_patch: 'description' must not be empty");
}
const lines = [
"---",
`description: ${sanitizeFrontmatterValue(description)}`,
];
if (frontmatter.read_only !== undefined) {
lines.push(`read_only: ${frontmatter.read_only}`);
}
lines.push("---");
const header = lines.join("\n");
if (!body) {
return `${header}\n`;
}
return `${header}\n${body}`;
}
function sanitizeFrontmatterValue(value: string): string {
return value.replace(/\r?\n/g, " ").trim();
}
/**
 * Runs a git command inside the memory directory and captures its output.
 *
 * Paging is disabled via PAGER/GIT_PAGER so commands can never block on an
 * interactive pager; arguments are passed directly (no shell interpolation).
 *
 * @param memoryDir - Working directory for the git invocation.
 * @param args - Arguments handed to `git` verbatim.
 * @returns stdout and stderr of the successful command as strings.
 * @throws Error summarizing stderr (or stdout, or the raw message) when git
 *   exits non-zero.
 */
async function runGit(
  memoryDir: string,
  args: string[],
): Promise<{ stdout: string; stderr: string }> {
  try {
    const { stdout, stderr } = await execFile("git", args, {
      cwd: memoryDir,
      maxBuffer: 10 * 1024 * 1024,
      env: {
        ...process.env,
        PAGER: "cat",
        GIT_PAGER: "cat",
      },
    });
    return {
      stdout: stdout?.toString() ?? "",
      stderr: stderr?.toString() ?? "",
    };
  } catch (error) {
    // execFile rejections carry the child's captured streams; pull them out
    // defensively since the error shape is not guaranteed.
    const pick = (key: "stdout" | "stderr"): string =>
      typeof error === "object" && error !== null && key in error
        ? String((error as Record<string, string | undefined>)[key] ?? "")
        : "";
    const stderr = pick("stderr");
    const stdout = pick("stdout");
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(
      `git ${args.join(" ")} failed: ${stderr || stdout || message}`.trim(),
    );
  }
}
/**
 * Stages the given pathspecs, commits them if anything changed, and pushes.
 *
 * @param memoryDir - Git working tree containing the memory files.
 * @param pathspecs - Paths (passed after `--`) to stage and commit.
 * @param reason - Commit message for this memory change.
 * @returns `{ committed: false }` when staging produced no changes;
 *   otherwise `{ committed: true, sha }` with the new HEAD hash.
 * @throws Error when any git step fails — including the case where the
 *   commit succeeded locally but the push failed (the short sha is embedded
 *   in the message so the commit can be recovered).
 */
async function commitAndPush(
  memoryDir: string,
  pathspecs: string[],
  reason: string,
): Promise<{ committed: boolean; sha?: string }> {
  // Stage additions, modifications, and deletions limited to the pathspecs.
  await runGit(memoryDir, ["add", "-A", "--", ...pathspecs]);
  // Short-circuit when nothing actually changed, so repeat calls are no-ops.
  const status = await runGit(memoryDir, [
    "status",
    "--porcelain",
    "--",
    ...pathspecs,
  ]);
  if (!status.stdout.trim()) {
    return { committed: false };
  }
  // Attribute the commit to the agent; fall back to the id when the name is blank.
  const { agentId, agentName } = await getAgentIdentity();
  const authorName = agentName.trim() || agentId;
  const authorEmail = `${agentId}@letta.com`;
  // `-c user.name/user.email` scopes the identity to this single invocation
  // without touching the repository's git config.
  await runGit(memoryDir, [
    "-c",
    `user.name=${authorName}`,
    "-c",
    `user.email=${authorEmail}`,
    "commit",
    "-m",
    reason,
  ]);
  const head = await runGit(memoryDir, ["rev-parse", "HEAD"]);
  const sha = head.stdout.trim();
  try {
    await runGit(memoryDir, ["push"]);
  } catch (error) {
    // The commit exists locally even though push failed; surface the sha so
    // the caller (or a human) can retry or recover it.
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(
      `Memory changes were committed (${sha.slice(0, 7)}) but push failed: ${message}`,
    );
  }
  return {
    committed: true,
    sha,
  };
}
/**
 * Reports whether a path is absent (or otherwise inaccessible): `access`
 * resolving means the file exists, rejecting means it is treated as missing.
 */
async function isMissing(filePath: string): Promise<boolean> {
  return access(filePath).then(
    () => false,
    () => true,
  );
}
/**
 * Applies one unified-diff hunk to `content` by exact substring matching.
 *
 * The hunk's context and deletion lines form the "old" chunk that must
 * appear verbatim in `content`; it is replaced with the context and
 * addition lines. A second lookup without the trailing newline handles
 * files whose final line lacks a newline terminator.
 *
 * @param content - Current file contents.
 * @param hunkLines - Hunk body lines, each prefixed with " ", "-", or "+".
 * @param filePath - Path used only in error messages.
 * @returns The patched file contents.
 * @throws Error when the hunk has no anchor text or its context is absent.
 */
function applyHunk(
  content: string,
  hunkLines: string[],
  filePath: string,
): string {
  const { oldChunk, newChunk } = buildOldNewChunks(hunkLines);
  if (oldChunk.length === 0) {
    throw new Error(
      `memory_apply_patch: failed to apply hunk to ${filePath}: hunk has no anchor/context`,
    );
  }
  // Replace `oldText` found at `at` with `newText`, leaving the rest intact.
  const splice = (at: number, oldText: string, newText: string): string =>
    content.slice(0, at) + newText + content.slice(at + oldText.length);
  const exactIndex = content.indexOf(oldChunk);
  if (exactIndex !== -1) {
    return splice(exactIndex, oldChunk, newChunk);
  }
  // Retry without the trailing newline to match a file that does not end
  // with one; drop the replacement's trailing newline to mirror the file.
  if (oldChunk.endsWith("\n")) {
    const trimmedOld = oldChunk.slice(0, -1);
    const trimmedIndex = content.indexOf(trimmedOld);
    if (trimmedIndex !== -1) {
      const trimmedNew = newChunk.endsWith("\n")
        ? newChunk.slice(0, -1)
        : newChunk;
      return splice(trimmedIndex, trimmedOld, trimmedNew);
    }
  }
  throw new Error(
    `memory_apply_patch: failed to apply hunk to ${filePath}: context not found`,
  );
}
/**
 * Converts prefixed hunk lines into before ("old") and after ("new") text.
 * Context lines (" ") contribute to both chunks, deletions ("-") only to
 * the old chunk, additions ("+") only to the new chunk. A fully empty line
 * is treated as a blank context line; lines with any other prefix are
 * dropped (e.g. `\` no-newline markers).
 */
function buildOldNewChunks(lines: string[]): {
  oldChunk: string;
  newChunk: string;
} {
  let oldChunk = "";
  let newChunk = "";
  for (const raw of lines) {
    if (raw === "") {
      oldChunk += "\n";
      newChunk += "\n";
      continue;
    }
    const text = `${raw.slice(1)}\n`;
    switch (raw[0]) {
      case " ":
        oldChunk += text;
        newChunk += text;
        break;
      case "-":
        oldChunk += text;
        break;
      case "+":
        newChunk += text;
        break;
    }
  }
  return { oldChunk, newChunk };
}

View File

@@ -35,6 +35,7 @@ const FILE_MODIFYING_TOOLS = new Set([
"ShellCommand",
"shell_command",
"apply_patch",
"memory_apply_patch",
// Gemini toolset
"Replace",
"replace",
@@ -120,7 +121,7 @@ export const OPENAI_DEFAULT_TOOLS: ToolName[] = [
// TODO(codex-parity): add once request_user_input tool exists in raw codex path.
// "request_user_input",
"apply_patch",
"memory",
"memory_apply_patch",
"update_plan",
"view_image",
];
@@ -146,7 +147,7 @@ export const OPENAI_PASCAL_TOOLS: ToolName[] = [
"AskUserQuestion",
"EnterPlanMode",
"ExitPlanMode",
"memory",
"memory_apply_patch",
"Task",
"TaskOutput",
"TaskStop",
@@ -193,6 +194,7 @@ const TOOL_PERMISSIONS: Record<ToolName, { requiresApproval: boolean }> = {
TaskStop: { requiresApproval: true },
LS: { requiresApproval: false },
memory: { requiresApproval: true },
memory_apply_patch: { requiresApproval: true },
MultiEdit: { requiresApproval: true },
Read: { requiresApproval: false },
view_image: { requiresApproval: false },

View File

@@ -0,0 +1,16 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"reason": {
"type": "string",
"description": "Required commit message for this memory patch change. Used as the git commit message."
},
"input": {
"type": "string",
"description": "The entire contents of the apply_patch command, constrained to memory files under MEMORY_DIR."
}
},
"required": ["reason", "input"],
"additionalProperties": false
}

View File

@@ -15,6 +15,7 @@ import ListDirCodexDescription from "./descriptions/ListDirCodex.md";
import ListDirectoryGeminiDescription from "./descriptions/ListDirectoryGemini.md";
import LSDescription from "./descriptions/LS.md";
import MemoryDescription from "./descriptions/Memory.md";
import MemoryApplyPatchDescription from "./descriptions/MemoryApplyPatch.md";
import MultiEditDescription from "./descriptions/MultiEdit.md";
import ReadDescription from "./descriptions/Read.md";
import ReadFileCodexDescription from "./descriptions/ReadFileCodex.md";
@@ -53,6 +54,7 @@ import { list_dir } from "./impl/ListDirCodex";
import { list_directory } from "./impl/ListDirectoryGemini";
import { ls } from "./impl/LS";
import { memory } from "./impl/Memory";
import { memory_apply_patch } from "./impl/MemoryApplyPatch";
import { multi_edit } from "./impl/MultiEdit";
import { read } from "./impl/Read";
import { read_file } from "./impl/ReadFileCodex";
@@ -91,6 +93,7 @@ import ListDirCodexSchema from "./schemas/ListDirCodex.json";
import ListDirectoryGeminiSchema from "./schemas/ListDirectoryGemini.json";
import LSSchema from "./schemas/LS.json";
import MemorySchema from "./schemas/Memory.json";
import MemoryApplyPatchSchema from "./schemas/MemoryApplyPatch.json";
import MultiEditSchema from "./schemas/MultiEdit.json";
import ReadSchema from "./schemas/Read.json";
import ReadFileCodexSchema from "./schemas/ReadFileCodex.json";
@@ -187,6 +190,11 @@ const toolDefinitions = {
description: MemoryDescription.trim(),
impl: memory as unknown as ToolImplementation,
},
memory_apply_patch: {
schema: MemoryApplyPatchSchema,
description: MemoryApplyPatchDescription.trim(),
impl: memory_apply_patch as unknown as ToolImplementation,
},
MultiEdit: {
schema: MultiEditSchema,
description: MultiEditDescription.trim(),

View File

@@ -50,27 +50,23 @@ export function deriveToolsetFromModel(
}
/**
* Ensures the correct memory tool is attached to the agent based on the model.
* - OpenAI/Codex models use memory_apply_patch
* - Claude/Gemini models use memory
* Ensures the server-side memory tool is attached to the agent.
* Client toolsets may use memory_apply_patch, but server-side base memory tool remains memory.
*
* This is a server-side tool swap - client tools are passed via client_tools per-request.
*
* @param agentId - The agent ID to update
* @param modelIdentifier - Model handle to determine which memory tool to use
* @param useMemoryPatch - Optional override: true = use memory_apply_patch, false = use memory
* @param modelIdentifier - Model handle (kept for API compatibility)
* @param useMemoryPatch - Unused compatibility parameter
*/
export async function ensureCorrectMemoryTool(
agentId: string,
modelIdentifier: string,
useMemoryPatch?: boolean,
): Promise<void> {
const resolvedModel = resolveModel(modelIdentifier) ?? modelIdentifier;
void resolveModel(modelIdentifier);
void useMemoryPatch;
const client = await getClient();
const shouldUsePatch =
useMemoryPatch !== undefined
? useMemoryPatch
: isOpenAIModel(resolvedModel);
try {
// Need full agent state for tool_rules, so use retrieve with include
@@ -89,8 +85,8 @@ export async function ensureCorrectMemoryTool(
}
// Determine which memory tool we want
// Only OpenAI (Codex) uses memory_apply_patch; Claude and Gemini use memory
const desiredMemoryTool = shouldUsePatch ? "memory_apply_patch" : "memory";
// OpenAI/Codex models use client-side memory_apply_patch now; keep server memory tool as "memory" for all models
const desiredMemoryTool = "memory";
const otherMemoryTool =
desiredMemoryTool === "memory" ? "memory_apply_patch" : "memory";
@@ -184,9 +180,8 @@ export async function reattachMemoryTool(
agentId: string,
modelIdentifier: string,
): Promise<void> {
const resolvedModel = resolveModel(modelIdentifier) ?? modelIdentifier;
void resolveModel(modelIdentifier);
const client = await getClient();
const shouldUsePatch = isOpenAIModel(resolvedModel);
try {
const agentWithTools = await client.agents.retrieve(agentId, {
@@ -196,7 +191,7 @@ export async function reattachMemoryTool(
const mapByName = new Map(currentTools.map((t) => [t.name, t.id]));
// Determine which memory tool we want
const desiredMemoryTool = shouldUsePatch ? "memory_apply_patch" : "memory";
const desiredMemoryTool = "memory";
// Already has the tool?
if (mapByName.has(desiredMemoryTool)) {
@@ -303,8 +298,7 @@ export async function forceToolsetSwitch(
modelForLoading = "anthropic/claude-sonnet-4";
}
// Ensure base memory tool is correct for the toolset
// Codex uses memory_apply_patch; Claude and Gemini use memory
// Ensure base server memory tool is correct for the toolset
const useMemoryPatch =
toolsetName === "codex" || toolsetName === "codex_snake";
await ensureCorrectMemoryTool(agentId, modelForLoading, useMemoryPatch);
@@ -345,7 +339,7 @@ export async function switchToolsetForModel(
}
}
// Ensure base memory tool is correct for the model
// Ensure base server memory tool is attached
await ensureCorrectMemoryTool(agentId, resolvedModel);
const toolsetName = deriveToolsetFromModel(resolvedModel);