feat: memory filesystem sync (#905)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
Charles Packer
2026-02-10 18:06:05 -08:00
committed by GitHub
parent eaa813ddb9
commit d1a6eeb40a
13 changed files with 1085 additions and 3079 deletions

File diff suppressed because it is too large Load Diff

435
src/agent/memoryGit.ts Normal file
View File

@@ -0,0 +1,435 @@
/**
* Git operations for git-backed agent memory.
*
* When memFS is enabled, the agent's memory is stored in a git repo
* on the server at $LETTA_BASE_URL/v1/git/$AGENT_ID/state.git.
* This module provides the CLI harness helpers: clone on first run,
* pull on startup, and status check for system reminders.
*
* The agent itself handles commit/push via Bash tool calls.
*/
import { execFile as execFileCb } from "node:child_process";
import {
chmodSync,
existsSync,
mkdirSync,
renameSync,
rmSync,
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { join } from "node:path";
import { promisify } from "node:util";
import { debugLog, debugWarn } from "../utils/debug";
import { getClient, getServerUrl } from "./client";
// Promise-based wrapper around child_process.execFile (used by runGit below).
const execFile = promisify(execFileCb);
// Agent tag that triggers the backend to create the state git repo.
const GIT_MEMORY_ENABLED_TAG = "git-memory-enabled";
/** Resolve the root directory for a single agent (~/.letta/agents/{id}/). */
export function getAgentRootDir(agentId: string): string {
  const lettaHome = join(homedir(), ".letta");
  return join(lettaHome, "agents", agentId);
}
/** Resolve the git-backed memory directory (~/.letta/agents/{id}/memory/). */
export function getMemoryRepoDir(agentId: string): string {
  const agentRoot = getAgentRootDir(agentId);
  return join(agentRoot, "memory");
}
/** Build the remote URL of the agent's state repo on the Letta server. */
function getGitRemoteUrl(agentId: string): string {
  return `${getServerUrl()}/v1/git/${agentId}/state.git`;
}
/**
 * Resolve a fresh auth token for git operations.
 * Delegates to getClient(), which performs the standard token
 * resolution flow (env var → settings → OAuth refresh), then reads
 * the resolved key off the client's internal options.
 */
async function getAuthToken(): Promise<string> {
  const client = await getClient();
  // biome-ignore lint/suspicious/noExplicitAny: accessing internal client options
  const internalOptions = (client as any)._options;
  return internalOptions?.apiKey ?? "";
}
/**
 * Execute a git command inside `cwd`.
 *
 * When a token is supplied, it is injected as a Basic-auth
 * `http.extraHeader` (`letta:<token>` base64-encoded) so remote
 * operations authenticate against the Letta server.
 */
async function runGit(
  cwd: string,
  args: string[],
  token?: string,
): Promise<{ stdout: string; stderr: string }> {
  const fullArgs: string[] = [];
  if (token) {
    const basicCredentials = Buffer.from(`letta:${token}`).toString("base64");
    fullArgs.push(
      "-c",
      `http.extraHeader=Authorization: Basic ${basicCredentials}`,
    );
  }
  fullArgs.push(...args);
  debugLog("memfs-git", `git ${args.join(" ")} (in ${cwd})`);
  const result = await execFile("git", fullArgs, {
    cwd,
    maxBuffer: 10 * 1024 * 1024, // 10MB
    timeout: 60_000, // 60s
  });
  return {
    stdout: result.stdout?.toString() ?? "",
    stderr: result.stderr?.toString() ?? "",
  };
}
/**
* Configure a local credential helper in the repo's .git/config
* so plain `git push` / `git pull` work without auth prefixes.
*/
async function configureLocalCredentialHelper(
dir: string,
token: string,
): Promise<void> {
const baseUrl = getServerUrl();
const helper = `!f() { echo "username=letta"; echo "password=${token}"; }; f`;
await runGit(dir, ["config", `credential.${baseUrl}.helper`, helper]);
debugLog("memfs-git", "Configured local credential helper");
}
/**
 * Bash pre-commit hook that validates frontmatter in memory .md files.
 *
 * Rules:
 * - Frontmatter is REQUIRED (must start with ---)
 * - Must be properly closed with ---
 * - Required fields: description (non-empty string), limit (positive integer)
 * - read_only is a PROTECTED field: agent cannot add, remove, or change it.
 *   Files where HEAD has read_only: true cannot be modified at all.
 * - Only allowed agent-editable keys: description, limit
 * - read_only may exist (from server) but agent must not change it
 *
 * The script is installed verbatim by installPreCommitHook(); it runs in
 * the memory repo root and inspects staged content via `git show :<file>`,
 * comparing protected fields against the HEAD version of each file.
 *
 * NOTE(review): the hook filters staged paths with `^memory/.*\.md$`, but
 * getMemoryRepoDir() makes the memory directory itself the repo root, so
 * staged paths would start with `system/` (or a bare `<label>.md`), not
 * `memory/` — confirm against the server-side repo layout; if the repo has
 * no `memory/` subdirectory this filter matches nothing and the hook is a
 * no-op.
 */
export const PRE_COMMIT_HOOK_SCRIPT = `#!/usr/bin/env bash
# Validate frontmatter in staged memory .md files
# Installed by Letta Code CLI
AGENT_EDITABLE_KEYS="description limit"
PROTECTED_KEYS="read_only"
ALL_KNOWN_KEYS="description limit read_only"
errors=""
# Helper: extract a frontmatter value from content
get_fm_value() {
local content="$1" key="$2"
local closing_line
closing_line=$(echo "$content" | tail -n +2 | grep -n '^---$' | head -1 | cut -d: -f1)
[ -z "$closing_line" ] && return
echo "$content" | tail -n +2 | head -n $((closing_line - 1)) | grep "^$key:" | cut -d: -f2- | sed 's/^ *//;s/ *$//'
}
for file in $(git diff --cached --name-only --diff-filter=ACM | grep '^memory/.*\\.md$'); do
staged=$(git show ":$file")
# Frontmatter is required
first_line=$(echo "$staged" | head -1)
if [ "$first_line" != "---" ]; then
errors="$errors\\n  $file: missing frontmatter (must start with ---)"
continue
fi
# Check frontmatter is properly closed
closing_line=$(echo "$staged" | tail -n +2 | grep -n '^---$' | head -1 | cut -d: -f1)
if [ -z "$closing_line" ]; then
errors="$errors\\n  $file: frontmatter opened but never closed (missing closing ---)"
continue
fi
# Check read_only protection against HEAD version
head_content=$(git show "HEAD:$file" 2>/dev/null || true)
if [ -n "$head_content" ]; then
head_ro=$(get_fm_value "$head_content" "read_only")
if [ "$head_ro" = "true" ]; then
errors="$errors\\n  $file: file is read_only and cannot be modified"
continue
fi
fi
# Extract frontmatter lines
frontmatter=$(echo "$staged" | tail -n +2 | head -n $((closing_line - 1)))
# Track required fields
has_description=false
has_limit=false
# Validate each line
while IFS= read -r line; do
[ -z "$line" ] && continue
key=$(echo "$line" | cut -d: -f1 | tr -d ' ')
value=$(echo "$line" | cut -d: -f2- | sed 's/^ *//;s/ *$//')
# Check key is known
known=false
for k in $ALL_KNOWN_KEYS; do
if [ "$key" = "$k" ]; then
known=true
break
fi
done
if [ "$known" = "false" ]; then
errors="$errors\\n  $file: unknown frontmatter key '$key' (allowed: $ALL_KNOWN_KEYS)"
continue
fi
# Check if agent is trying to modify a protected key
for k in $PROTECTED_KEYS; do
if [ "$key" = "$k" ]; then
# Compare against HEAD — if value changed (or key was added), reject
if [ -n "$head_content" ]; then
head_val=$(get_fm_value "$head_content" "$key")
if [ "$value" != "$head_val" ]; then
errors="$errors\\n  $file: '$key' is a protected field and cannot be changed by the agent"
fi
else
# New file with read_only — agent shouldn't set this
errors="$errors\\n  $file: '$key' is a protected field and cannot be set by the agent"
fi
fi
done
# Validate value types
case "$key" in
limit)
has_limit=true
if ! echo "$value" | grep -qE '^[0-9]+$' || [ "$value" = "0" ]; then
errors="$errors\\n  $file: 'limit' must be a positive integer, got '$value'"
fi
;;
description)
has_description=true
if [ -z "$value" ]; then
errors="$errors\\n  $file: 'description' must not be empty"
fi
;;
esac
done <<< "$frontmatter"
# Check required fields
if [ "$has_description" = "false" ]; then
errors="$errors\\n  $file: missing required field 'description'"
fi
if [ "$has_limit" = "false" ]; then
errors="$errors\\n  $file: missing required field 'limit'"
fi
# Check if protected keys were removed (existed in HEAD but not in staged)
if [ -n "$head_content" ]; then
for k in $PROTECTED_KEYS; do
head_val=$(get_fm_value "$head_content" "$k")
if [ -n "$head_val" ]; then
staged_val=$(get_fm_value "$staged" "$k")
if [ -z "$staged_val" ]; then
errors="$errors\\n  $file: '$k' is a protected field and cannot be removed by the agent"
fi
fi
done
fi
done
if [ -n "$errors" ]; then
echo "Frontmatter validation failed:"
echo -e "$errors"
exit 1
fi
`;
/**
 * Write the frontmatter-validation pre-commit hook into the repo's
 * .git/hooks directory (creating it if missing) and mark it executable.
 */
function installPreCommitHook(dir: string): void {
  const hooksDir = join(dir, ".git", "hooks");
  if (!existsSync(hooksDir)) {
    mkdirSync(hooksDir, { recursive: true });
  }
  const hookPath = join(hooksDir, "pre-commit");
  writeFileSync(hookPath, PRE_COMMIT_HOOK_SCRIPT, "utf-8");
  chmodSync(hookPath, 0o755);
  debugLog("memfs-git", "Installed pre-commit hook");
}
/** True when the agent's memory directory already contains a .git folder. */
export function isGitRepo(agentId: string): boolean {
  const gitDir = join(getMemoryRepoDir(agentId), ".git");
  return existsSync(gitDir);
}
/**
 * Clone the agent's state repo into the memory directory.
 *
 * Git root is ~/.letta/agents/{id}/memory/ (not the agent root).
 *
 * Handles two layouts:
 * - memory directory missing → create it and clone fresh
 * - memory directory exists but is not a git repo (legacy local layout)
 *   → clone into a temp dir, adopt its .git/, then checkout to restore
 *     tracked files; the temp dir is always cleaned up
 *
 * Always finishes by configuring the local credential helper and
 * installing the frontmatter pre-commit hook.
 */
export async function cloneMemoryRepo(agentId: string): Promise<void> {
  const token = await getAuthToken();
  const url = getGitRemoteUrl(agentId);
  const dir = getMemoryRepoDir(agentId);
  // Fix: original interpolated `${url}${dir}` with no separator, which
  // produced a garbled debug message ("...state.git/home/...").
  debugLog("memfs-git", `Cloning ${url} into ${dir}`);
  if (!existsSync(dir)) {
    // Fresh clone into new memory directory
    mkdirSync(dir, { recursive: true });
    await runGit(dir, ["clone", url, "."], token);
  } else if (!existsSync(join(dir, ".git"))) {
    // Directory exists but isn't a git repo (legacy local layout)
    // Clone to temp, move .git/ into existing dir, then checkout files.
    const tmpDir = `${dir}-git-clone-tmp`;
    try {
      if (existsSync(tmpDir)) {
        rmSync(tmpDir, { recursive: true, force: true });
      }
      mkdirSync(tmpDir, { recursive: true });
      await runGit(tmpDir, ["clone", url, "."], token);
      // Move .git into the existing memory directory
      renameSync(join(tmpDir, ".git"), join(dir, ".git"));
      // Reset working tree to match remote state
      await runGit(dir, ["checkout", "--", "."], token);
      debugLog("memfs-git", "Migrated existing memory directory to git repo");
    } finally {
      if (existsSync(tmpDir)) {
        rmSync(tmpDir, { recursive: true, force: true });
      }
    }
  }
  // Configure local credential helper so the agent can do plain
  // `git push` / `git pull` without auth prefixes.
  await configureLocalCredentialHelper(dir, token);
  // Install pre-commit hook to validate frontmatter
  installPreCommitHook(dir);
}
/**
 * Pull the latest memory state from the server.
 * Called on startup to ensure local state is current.
 *
 * Tries a fast-forward pull first; if that fails (diverged history),
 * falls back to a rebase pull. Also re-applies the credential helper
 * and pre-commit hook before pulling (self-healing).
 */
export async function pullMemory(
  agentId: string,
): Promise<{ updated: boolean; summary: string }> {
  const token = await getAuthToken();
  const dir = getMemoryRepoDir(agentId);
  // Self-healing: ensure credential helper and pre-commit hook are configured
  await configureLocalCredentialHelper(dir, token);
  installPreCommitHook(dir);
  try {
    const { stdout, stderr } = await runGit(dir, ["pull", "--ff-only"]);
    const combined = stdout + stderr;
    if (combined.includes("Already up to date")) {
      return { updated: false, summary: "Already up to date" };
    }
    return { updated: true, summary: combined.trim() };
  } catch {
    // If ff-only fails (diverged), try rebase
    debugWarn("memfs-git", "Fast-forward pull failed, trying rebase");
    try {
      const { stdout, stderr } = await runGit(dir, ["pull", "--rebase"]);
      return { updated: true, summary: (stdout + stderr).trim() };
    } catch (rebaseErr) {
      const msg =
        rebaseErr instanceof Error ? rebaseErr.message : String(rebaseErr);
      debugWarn("memfs-git", `Pull failed: ${msg}`);
      return { updated: false, summary: `Pull failed: ${msg}` };
    }
  }
}
/** Result of a git status probe over the agent's memory repo. */
export interface MemoryGitStatus {
  /** Uncommitted changes in working tree */
  dirty: boolean;
  /** Local commits not pushed to remote */
  aheadOfRemote: boolean;
  /** Human-readable summary for system reminder */
  summary: string;
}
/**
 * Inspect the git state of the memory directory.
 * Used to decide whether to inject a sync reminder.
 */
export async function getMemoryGitStatus(
  agentId: string,
): Promise<MemoryGitStatus> {
  const dir = getMemoryRepoDir(agentId);
  // Any porcelain output means the working tree has uncommitted changes.
  const { stdout: statusOut } = await runGit(dir, ["status", "--porcelain"]);
  const porcelain = statusOut.trim();
  const dirty = porcelain.length > 0;
  // Count commits ahead of the upstream branch, if one is configured.
  let aheadOfRemote = false;
  try {
    const { stdout: revListOut } = await runGit(dir, [
      "rev-list",
      "--count",
      "@{u}..HEAD",
    ]);
    aheadOfRemote = parseInt(revListOut.trim(), 10) > 0;
  } catch {
    // No upstream configured or other error - ignore
  }
  // Build summary
  const parts: string[] = [];
  if (dirty) {
    const changedCount = porcelain
      .split("\n")
      .filter((line) => line.trim()).length;
    parts.push(`${changedCount} uncommitted change(s)`);
  }
  if (aheadOfRemote) {
    parts.push("local commits not pushed to remote");
  }
  const summary = parts.length > 0 ? parts.join(", ") : "clean";
  return { dirty, aheadOfRemote, summary };
}
/**
 * Ensure the agent carries the git-memory-enabled tag.
 * This triggers the backend to create the git repo.
 * Failures are logged and swallowed — tagging is best-effort.
 */
export async function addGitMemoryTag(agentId: string): Promise<void> {
  const client = await getClient();
  try {
    const agent = await client.agents.retrieve(agentId);
    const existingTags = agent.tags || [];
    if (existingTags.includes(GIT_MEMORY_ENABLED_TAG)) {
      return; // already tagged — nothing to do
    }
    await client.agents.update(agentId, {
      tags: [...existingTags, GIT_MEMORY_ENABLED_TAG],
    });
    debugLog("memfs-git", `Added ${GIT_MEMORY_ENABLED_TAG} tag`);
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    debugWarn("memfs-git", `Failed to add git-memory tag: ${msg}`);
  }
}

View File

@@ -1,65 +1,44 @@
## Memory Filesystem (memFS)
Your memory blocks are mirrored as Markdown files on disk at:
`~/.letta/agents/<agent-id>/memory/`
This provides:
- **Persistent storage**: memory edits survive restarts and can be version-controlled
- **Two-way sync**: edits in files sync to memory blocks, and edits in blocks sync to files
- **Visibility**: a `memory_filesystem` block shows the current on-disk tree
## Memory Filesystem
Your memory is stored in a git repository at `~/.letta/agents/<agent-id>/memory/`. This provides full version control, sync with the server, and branching for parallel edits.
### Structure
```
~/.letta/agents/<agent-id>/memory/
├── system/ # Attached system blocks (in system prompt)
├── persona/ # Namespaced blocks (e.g. persona/git_safety.md)
── human.md
│ └── ...
├── <label>.md # Detached blocks live at the memory root (NOT in system prompt)
└── .sync-state.json # Internal sync state (do not edit)
~/.letta/agents/<agent-id>/
── memory/ # Git repo root
├── .git/
├── system/ # Blocks in your system prompt
├── persona/
├── human/
└── ...
```
### How It Works
1. Each `.md` file in `memory/system/` maps to a memory block label (e.g., `memory/system/persona/soul.md` → label `system/persona/soul`)
2. Files contain raw block content (no frontmatter)
3. Changes pushed to git sync to the API within seconds
4. API changes sync to git automatically
5. The `memory_filesystem` block auto-updates with the current tree view
### Label mapping
- A file path maps to a block label by stripping the `.md` extension and using `/` as the separator.
- Example: `system/persona/git_safety.md` → label `persona/git_safety`
- Example: `human_notes.md` → label `human_notes`
### Sync behavior
- **Startup**: automatic sync when the CLI starts
- **After memory edits**: automatic sync after using memory tools
- **Manual**: `/memfs sync` triggers an interactive sync inside the CLI UI
### Git-like memFS commands (inspect/resolve)
When you need to inspect status, view diffs, or resolve conflicts non-interactively, use the CLI subcommands:
### Syncing
```bash
letta memfs status [--agent <id>]
letta memfs diff [--agent <id>]
letta memfs resolve --resolutions '<JSON>' [--agent <id>]
cd ~/.letta/agents/<agent-id>/memory
# See what changed
git status
# Commit and push your changes
git add system/
git commit -m "<type>: <what changed>" # e.g. "fix: update user prefs", "refactor: reorganize persona blocks"
git push
# Get latest from server
git pull
```
- Requires agent id via `--agent`/`--agent-id` or `LETTA_AGENT_ID`
- Output is **JSON only**
- `letta memfs diff` writes a Markdown diff file and returns its `diffPath` in JSON
The system will remind you when your memory has uncommitted changes. Sync when convenient.
### Conflicts
A **conflict** occurs when **both** the file and the corresponding block were modified since the last sync.
- Non-conflicting changes (only one side changed) are auto-resolved on the next sync
- If conflicts are detected, you will receive a system reminder with conflicting labels
- Use `letta memfs diff` to review both versions, then resolve each label to either:
- `"file"` (keep the on-disk Markdown file)
- `"block"` (keep the in-API block value)
### Read-only blocks
Some blocks are read-only in the API (e.g., certain system-managed blocks). For read-only blocks:
- Sync is **API → file** only
- File edits for those blocks are ignored
### History
```bash
git -C ~/.letta/agents/<agent-id>/memory log --oneline
```

View File

@@ -43,16 +43,8 @@ import { getLettaCodeHeaders } from "../agent/http-headers";
import { ISOLATED_BLOCK_LABELS } from "../agent/memory";
import {
checkMemoryFilesystemStatus,
detachMemoryFilesystemBlock,
ensureMemoryFilesystemBlock,
ensureMemoryFilesystemDirs,
formatMemorySyncSummary,
getMemoryFilesystemRoot,
type MemorySyncConflict,
type MemorySyncResolution,
syncMemoryFilesystem,
updateMemoryFilesystemBlock,
} from "../agent/memoryFilesystem";
import { sendMessageStream } from "../agent/message";
import { getModelInfo, getModelShortName } from "../agent/model";
@@ -125,7 +117,6 @@ import { EventMessage } from "./components/EventMessage";
import { FeedbackDialog } from "./components/FeedbackDialog";
import { HelpDialog } from "./components/HelpDialog";
import { HooksManager } from "./components/HooksManager";
import { InlineQuestionApproval } from "./components/InlineQuestionApproval";
import { Input } from "./components/InputRich";
import { McpConnectFlow } from "./components/McpConnectFlow";
import { McpSelector } from "./components/McpSelector";
@@ -233,7 +224,6 @@ import {
import {
isFileEditTool,
isFileWriteTool,
isMemoryTool,
isPatchTool,
isShellTool,
} from "./helpers/toolNameMapping";
@@ -1137,20 +1127,16 @@ export default function App({
openingOutput: string;
dismissOutput: string;
} | null>(null);
const [memorySyncConflicts, setMemorySyncConflicts] = useState<
MemorySyncConflict[] | null
>(null);
const memorySyncProcessedToolCallsRef = useRef<Set<string>>(new Set());
const memorySyncCommandIdRef = useRef<string | null>(null);
const memorySyncCommandInputRef = useRef<string>("/memfs sync");
const memorySyncInFlightRef = useRef(false);
const memoryFilesystemInitializedRef = useRef(false);
const pendingMemfsConflictsRef = useRef<MemorySyncConflict[] | null>(null);
const memfsDirtyRef = useRef(false);
const memfsWatcherRef = useRef<ReturnType<
typeof import("node:fs").watch
> | null>(null);
const memfsConflictCheckInFlightRef = useRef(false);
const memfsGitCheckInFlightRef = useRef(false);
const pendingGitReminderRef = useRef<{
dirty: boolean;
aheadOfRemote: boolean;
summary: string;
} | null>(null);
const [feedbackPrefill, setFeedbackPrefill] = useState("");
const [searchQuery, setSearchQuery] = useState("");
const [modelSelectorOptions, setModelSelectorOptions] = useState<{
@@ -2606,156 +2592,32 @@ export default function App({
[refreshDerived],
);
const runMemoryFilesystemSync = useCallback(
async (source: "startup" | "auto" | "command", commandId?: string) => {
if (!agentId || agentId === "loading") {
return;
}
if (memorySyncInFlightRef.current) {
// If called from a command while another sync is in flight, update the UI
if (source === "command" && commandId) {
updateMemorySyncCommand(
commandId,
"Sync already in progress — try again in a moment",
false,
);
}
return;
}
memorySyncInFlightRef.current = true;
try {
await ensureMemoryFilesystemBlock(agentId);
const result = await syncMemoryFilesystem(agentId);
if (result.conflicts.length > 0) {
if (source === "command") {
// User explicitly ran /memfs sync — show the interactive overlay
memorySyncCommandIdRef.current = commandId ?? null;
setMemorySyncConflicts(result.conflicts);
setActiveOverlay("memfs-sync");
if (commandId) {
updateMemorySyncCommand(
commandId,
`Memory sync paused — resolve ${result.conflicts.length} conflict${
result.conflicts.length === 1 ? "" : "s"
} to continue.`,
false,
"/memfs sync",
true, // keepRunning - don't commit until conflicts resolved
);
}
} else {
// Auto or startup sync — queue conflicts for agent-driven resolution
debugLog(
"memfs",
`${source} sync found ${result.conflicts.length} conflict(s), queuing for agent`,
);
pendingMemfsConflictsRef.current = result.conflicts;
}
return;
}
await updateMemoryFilesystemBlock(agentId);
if (commandId) {
updateMemorySyncCommand(
commandId,
formatMemorySyncSummary(result),
true,
);
}
} catch (error) {
const errorText = formatErrorDetails(error, agentId);
if (commandId) {
updateMemorySyncCommand(commandId, `Failed: ${errorText}`, false);
} else if (source !== "startup") {
appendError(`Memory sync failed: ${errorText}`);
} else {
console.error(`Memory sync failed: ${errorText}`);
}
} finally {
memorySyncInFlightRef.current = false;
}
},
[agentId, appendError, updateMemorySyncCommand],
);
const maybeSyncMemoryFilesystemAfterTurn = useCallback(async () => {
// Only auto-sync if memfs is enabled for this agent
const maybeCheckMemoryGitStatus = useCallback(async () => {
// Only check if memfs is enabled for this agent
if (!agentId || agentId === "loading") return;
if (!settingsManager.isMemfsEnabled(agentId)) return;
// Check for memory tool calls that need syncing (legacy path — memory tools
// are detached when memfs is enabled, but kept for backwards compatibility)
const newToolCallIds: string[] = [];
for (const line of buffersRef.current.byId.values()) {
if (line.kind !== "tool_call") continue;
if (!line.toolCallId || !line.name) continue;
if (!isMemoryTool(line.name)) continue;
if (memorySyncProcessedToolCallsRef.current.has(line.toolCallId))
continue;
newToolCallIds.push(line.toolCallId);
}
if (newToolCallIds.length > 0) {
for (const id of newToolCallIds) {
memorySyncProcessedToolCallsRef.current.add(id);
}
await runMemoryFilesystemSync("auto");
}
// Agent-driven conflict detection (fire-and-forget, non-blocking).
// Check when: (a) fs.watch detected a file change, or (b) every N turns
// to catch block-only changes (e.g. user manually editing blocks via the API).
const isDirty = memfsDirtyRef.current;
// Git-backed memory: check status periodically (fire-and-forget).
// Runs every N turns to detect uncommitted changes or unpushed commits.
const isIntervalTurn =
turnCountRef.current > 0 &&
turnCountRef.current % MEMFS_CONFLICT_CHECK_INTERVAL === 0;
if ((isDirty || isIntervalTurn) && !memfsConflictCheckInFlightRef.current) {
memfsDirtyRef.current = false;
memfsConflictCheckInFlightRef.current = true;
if (isIntervalTurn && !memfsGitCheckInFlightRef.current) {
memfsGitCheckInFlightRef.current = true;
// Fire-and-forget — don't await, don't block the turn
debugLog(
"memfs",
`Conflict check triggered (dirty=${isDirty}, interval=${isIntervalTurn}, turn=${turnCountRef.current})`,
);
checkMemoryFilesystemStatus(agentId)
.then(async (status) => {
if (status.conflicts.length > 0) {
debugLog(
"memfs",
`Found ${status.conflicts.length} conflict(s): ${status.conflicts.map((c) => c.label).join(", ")}`,
);
pendingMemfsConflictsRef.current = status.conflicts;
} else if (
status.newFiles.length > 0 ||
status.pendingFromFile.length > 0 ||
status.locationMismatches.length > 0
) {
// New files, file changes, or location mismatches detected - auto-sync
debugLog(
"memfs",
`Auto-syncing: ${status.newFiles.length} new, ${status.pendingFromFile.length} changed, ${status.locationMismatches.length} location mismatches`,
);
pendingMemfsConflictsRef.current = null;
await runMemoryFilesystemSync("auto");
} else {
pendingMemfsConflictsRef.current = null;
}
})
.catch((err) => {
debugWarn("memfs", "Conflict check failed", err);
import("../agent/memoryGit")
.then(({ getMemoryGitStatus }) => getMemoryGitStatus(agentId))
.then((status) => {
pendingGitReminderRef.current =
status.dirty || status.aheadOfRemote ? status : null;
})
.catch(() => {})
.finally(() => {
memfsConflictCheckInFlightRef.current = false;
memfsGitCheckInFlightRef.current = false;
});
}
}, [agentId, runMemoryFilesystemSync]);
}, [agentId]);
useEffect(() => {
if (loadingState !== "ready") {
@@ -2773,8 +2635,32 @@ export default function App({
}
memoryFilesystemInitializedRef.current = true;
runMemoryFilesystemSync("startup");
}, [agentId, loadingState, runMemoryFilesystemSync]);
// Git-backed memory: clone or pull on startup
(async () => {
try {
const { isGitRepo, cloneMemoryRepo, pullMemory } = await import(
"../agent/memoryGit"
);
if (!isGitRepo(agentId)) {
await cloneMemoryRepo(agentId);
} else {
await pullMemory(agentId);
}
} catch (err) {
const errMsg = err instanceof Error ? err.message : String(err);
debugWarn("memfs-git", `Startup sync failed: ${errMsg}`);
// Warn user visually
appendError(`Memory git sync failed: ${errMsg}`);
// Inject reminder so the agent also knows memory isn't synced
pendingGitReminderRef.current = {
dirty: false,
aheadOfRemote: false,
summary: `Git memory sync failed on startup: ${errMsg}\nMemory may be stale. Try running: git -C ~/.letta/agents/${agentId}/memory pull`,
};
}
})();
}, [agentId, loadingState, appendError]);
// Set up fs.watch on the memory directory to detect external file edits.
// When a change is detected, set a dirty flag — the actual conflict check
@@ -2793,7 +2679,8 @@ export default function App({
if (!existsSync(memRoot)) return;
watcher = watch(memRoot, { recursive: true }, () => {
memfsDirtyRef.current = true;
// Git-backed memory: no auto-sync on file changes.
// Agent handles commit/push. Status checked on interval.
});
memfsWatcherRef.current = watcher;
debugLog("memfs", `Watching memory directory: ${memRoot}`);
@@ -2824,113 +2711,8 @@ export default function App({
};
}, [agentId]);
const handleMemorySyncConflictSubmit = useCallback(
async (answers: Record<string, string>) => {
if (!agentId || agentId === "loading" || !memorySyncConflicts) {
return;
}
const commandId = memorySyncCommandIdRef.current;
const commandInput = memorySyncCommandInputRef.current;
memorySyncCommandIdRef.current = null;
memorySyncCommandInputRef.current = "/memfs sync";
const resolutions: MemorySyncResolution[] = memorySyncConflicts.map(
(conflict) => {
const answer = answers[`Conflict for ${conflict.label}`];
return {
label: conflict.label,
resolution: answer === "Use file version" ? "file" : "block",
};
},
);
setMemorySyncConflicts(null);
setActiveOverlay(null);
if (memorySyncInFlightRef.current) {
return;
}
memorySyncInFlightRef.current = true;
try {
const result = await syncMemoryFilesystem(agentId, {
resolutions,
});
if (result.conflicts.length > 0) {
setMemorySyncConflicts(result.conflicts);
setActiveOverlay("memfs-sync");
if (commandId) {
updateMemorySyncCommand(
commandId,
`Memory sync paused — resolve ${result.conflicts.length} conflict${
result.conflicts.length === 1 ? "" : "s"
} to continue.`,
false,
commandInput,
true, // keepRunning - don't commit until all conflicts resolved
);
}
return;
}
await updateMemoryFilesystemBlock(agentId);
// Format resolution summary (align with formatMemorySyncSummary which uses "⎿ " prefix)
const resolutionSummary = resolutions
.map(
(r) =>
`${r.label}: used ${r.resolution === "file" ? "file" : "block"} version`,
)
.join("\n");
if (commandId) {
updateMemorySyncCommand(
commandId,
`${formatMemorySyncSummary(result)}\nConflicts resolved:\n${resolutionSummary}`,
true,
commandInput,
);
}
} catch (error) {
const errorText = formatErrorDetails(error, agentId);
if (commandId) {
updateMemorySyncCommand(
commandId,
`Failed: ${errorText}`,
false,
commandInput,
);
} else {
appendError(`Memory sync failed: ${errorText}`);
}
} finally {
memorySyncInFlightRef.current = false;
}
},
[agentId, appendError, memorySyncConflicts, updateMemorySyncCommand],
);
const handleMemorySyncConflictCancel = useCallback(() => {
const commandId = memorySyncCommandIdRef.current;
const commandInput = memorySyncCommandInputRef.current;
memorySyncCommandIdRef.current = null;
memorySyncCommandInputRef.current = "/memfs sync";
memorySyncInFlightRef.current = false;
setMemorySyncConflicts(null);
setActiveOverlay(null);
if (commandId) {
updateMemorySyncCommand(
commandId,
"Memory sync cancelled.",
false,
commandInput,
);
}
}, [updateMemorySyncCommand]);
// Note: Old memFS conflict resolution overlay (handleMemorySyncConflictSubmit/Cancel)
// removed. Git-backed memory uses standard git merge conflict resolution via the agent.
// Core streaming function - iterative loop that processes conversation turns
const processConversation = useCallback(
@@ -3709,7 +3491,7 @@ export default function App({
queueSnapshotRef.current = [];
}
await maybeSyncMemoryFilesystemAfterTurn();
await maybeCheckMemoryGitStatus();
// === RALPH WIGGUM CONTINUATION CHECK ===
// Check if ralph mode is active and should auto-continue
@@ -4702,7 +4484,7 @@ export default function App({
queueApprovalResults,
consumeQueuedMessages,
appendTaskNotificationEvents,
maybeSyncMemoryFilesystemAfterTurn,
maybeCheckMemoryGitStatus,
openTrajectorySegment,
syncTrajectoryTokenBase,
syncTrajectoryElapsedBase,
@@ -7118,33 +6900,20 @@ export default function App({
);
await updateAgentSystemPromptMemfs(agentId, true);
// 4. Run initial sync (creates files from blocks)
await ensureMemoryFilesystemBlock(agentId);
const result = await syncMemoryFilesystem(agentId);
if (result.conflicts.length > 0) {
// Handle conflicts - show overlay (keep running so it stays in liveItems)
memorySyncCommandIdRef.current = cmdId;
memorySyncCommandInputRef.current = msg;
setMemorySyncConflicts(result.conflicts);
setActiveOverlay("memfs-sync");
updateMemorySyncCommand(
cmdId,
`Memory filesystem enabled with ${result.conflicts.length} conflict${result.conflicts.length === 1 ? "" : "s"} to resolve.`,
false,
msg,
true, // keepRunning - don't commit until conflict resolved
);
} else {
await updateMemoryFilesystemBlock(agentId);
const memoryDir = getMemoryFilesystemRoot(agentId);
updateMemorySyncCommand(
cmdId,
`Memory filesystem enabled.\nPath: ${memoryDir}\n${formatMemorySyncSummary(result)}`,
true,
msg,
);
// 4. Add git-memory-enabled tag and clone repo
const { addGitMemoryTag, isGitRepo, cloneMemoryRepo } =
await import("../agent/memoryGit");
await addGitMemoryTag(agentId);
if (!isGitRepo(agentId)) {
await cloneMemoryRepo(agentId);
}
const memoryDir = getMemoryFilesystemRoot(agentId);
updateMemorySyncCommand(
cmdId,
`Memory filesystem enabled (git-backed).\nPath: ${memoryDir}`,
true,
msg,
);
} catch (error) {
const errorText =
error instanceof Error ? error.message : String(error);
@@ -7172,7 +6941,7 @@ export default function App({
updateMemorySyncCommand(
cmdId,
"Syncing memory filesystem...",
"Pulling latest memory from server...",
true,
msg,
true,
@@ -7181,10 +6950,10 @@ export default function App({
setCommandRunning(true);
try {
await runMemoryFilesystemSync("command", cmdId);
const { pullMemory } = await import("../agent/memoryGit");
const result = await pullMemory(agentId);
updateMemorySyncCommand(cmdId, result.summary, true, msg);
} catch (error) {
// runMemoryFilesystemSync has its own error handling, but catch any
// unexpected errors that slip through
const errorText =
error instanceof Error ? error.message : String(error);
updateMemorySyncCommand(cmdId, `Failed: ${errorText}`, false);
@@ -7258,41 +7027,18 @@ export default function App({
setCommandRunning(true);
try {
// 1. Run final sync to ensure blocks are up-to-date
const result = await syncMemoryFilesystem(agentId);
if (result.conflicts.length > 0) {
// Handle conflicts - show overlay (keep running so it stays in liveItems)
memorySyncCommandIdRef.current = cmdId;
memorySyncCommandInputRef.current = msg;
setMemorySyncConflicts(result.conflicts);
setActiveOverlay("memfs-sync");
updateMemorySyncCommand(
cmdId,
`Cannot disable: resolve ${result.conflicts.length} conflict${result.conflicts.length === 1 ? "" : "s"} first.`,
false,
msg,
true, // keepRunning - don't commit until conflict resolved
);
return { submitted: true };
}
// 2. Re-attach memory tool
// 1. Re-attach memory tool
const { reattachMemoryTool } = await import("../tools/toolset");
// Use current model or default to Claude
const modelId = currentModelId || "anthropic/claude-sonnet-4";
await reattachMemoryTool(agentId, modelId);
// 3. Detach memory_filesystem block
await detachMemoryFilesystemBlock(agentId);
// 4. Update system prompt to remove memfs section
// 2. Update system prompt to remove memfs section
const { updateAgentSystemPromptMemfs } = await import(
"../agent/modify"
);
await updateAgentSystemPromptMemfs(agentId, false);
// 5. Update settings
// 3. Update settings
settingsManager.setMemfsEnabled(agentId, false);
updateMemorySyncCommand(
@@ -7766,43 +7512,26 @@ ${SYSTEM_REMINDER_CLOSE}
// Increment turn count for next iteration
turnCountRef.current += 1;
// Build memfs conflict reminder if conflicts were detected after the last turn
let memfsConflictReminder = "";
if (
pendingMemfsConflictsRef.current &&
pendingMemfsConflictsRef.current.length > 0
) {
const conflicts = pendingMemfsConflictsRef.current;
const conflictRows = conflicts
.map((c) => `| ${c.label} | Both file and block modified |`)
.join("\n");
memfsConflictReminder = `${SYSTEM_REMINDER_OPEN}
## Memory Filesystem: Sync Conflicts Detected
// Build git memory sync reminder if uncommitted changes or unpushed commits
let memoryGitReminder = "";
const gitStatus = pendingGitReminderRef.current;
if (gitStatus) {
memoryGitReminder = `${SYSTEM_REMINDER_OPEN}
MEMORY SYNC: Your memory directory has uncommitted changes or is ahead of the remote.
${conflicts.length} memory block${conflicts.length === 1 ? "" : "s"} ha${conflicts.length === 1 ? "s" : "ve"} conflicts (both the file and the in-memory block were modified since last sync):
${gitStatus.summary}
| Block | Status |
|-------|--------|
${conflictRows}
To see the full diff for each conflict, run:
To sync:
\`\`\`bash
letta memfs diff --agent $LETTA_AGENT_ID
cd ~/.letta/agents/${agentId}/memory
git add system/
git commit -m "<type>: <what changed>"
git push
\`\`\`
The diff will be written to a file for review. After reviewing, resolve all conflicts at once:
\`\`\`bash
letta memfs resolve --agent $LETTA_AGENT_ID --resolutions '<JSON array of {label, resolution}>'
\`\`\`
Resolution options: \`"file"\` (overwrite block with file) or \`"block"\` (overwrite file with block).
You MUST resolve all conflicts. They will not be synced automatically until resolved.
For more context, load the \`syncing-memory-filesystem\` skill.
${SYSTEM_REMINDER_CLOSE}
`;
// Clear after injecting so it doesn't repeat on subsequent turns
pendingMemfsConflictsRef.current = null;
// Clear after injecting so it doesn't repeat
pendingGitReminderRef.current = null;
}
// Build permission mode change alert if mode changed since last notification
@@ -7871,7 +7600,7 @@ ${SYSTEM_REMINDER_CLOSE}
pushReminder(bashCommandPrefix);
pushReminder(userPromptSubmitHookFeedback);
pushReminder(memoryReminderContent);
pushReminder(memfsConflictReminder);
pushReminder(memoryGitReminder);
const messageContent =
reminderParts.length > 0
? [...reminderParts, ...contentParts]
@@ -11196,29 +10925,8 @@ Plan file path: ${planFilePath}`;
/>
))}
{/* Memory Sync Conflict Resolver */}
{activeOverlay === "memfs-sync" && memorySyncConflicts && (
<InlineQuestionApproval
questions={memorySyncConflicts.map((conflict) => ({
header: "Memory sync",
question: `Conflict for ${conflict.label}`,
options: [
{
label: "Use file version",
description: "Overwrite memory block with file contents",
},
{
label: "Use block version",
description: "Overwrite file with memory block contents",
},
],
multiSelect: false,
allowOther: false, // Only file or block - no custom option
}))}
onSubmit={handleMemorySyncConflictSubmit}
onCancel={handleMemorySyncConflictCancel}
/>
)}
{/* Memory sync conflict overlay removed - git-backed memory
uses standard git merge conflicts resolved by the agent */}
{/* MCP Server Selector - conditionally mounted as overlay */}
{activeOverlay === "mcp" && (

View File

@@ -3,10 +3,7 @@ import { join, relative } from "node:path";
import { Box, useInput } from "ink";
import Link from "ink-link";
import { useMemo, useState } from "react";
import {
getMemoryFilesystemRoot,
MEMORY_FS_STATE_FILE,
} from "../../agent/memoryFilesystem";
import { getMemoryFilesystemRoot } from "../../agent/memoryFilesystem";
import { useTerminalWidth } from "../hooks/useTerminalWidth";
import { colors } from "./colors";
import { Text } from "./Text";
@@ -51,9 +48,7 @@ function scanMemoryFilesystem(memoryRoot: string): TreeNode[] {
}
// Filter out hidden files and state file
const filtered = entries.filter(
(name) => !name.startsWith(".") && name !== MEMORY_FS_STATE_FILE,
);
const filtered = entries.filter((name) => !name.startsWith("."));
// Sort: directories first, "system" always first among dirs, then alphabetically
const sorted = filtered.sort((a, b) => {

View File

@@ -1,35 +1,14 @@
import { createHash, randomUUID } from "node:crypto";
import {
cpSync,
existsSync,
mkdirSync,
readFileSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import { readdir, readFile } from "node:fs/promises";
import { cpSync, existsSync, mkdirSync, rmSync, statSync } from "node:fs";
import { readdir } from "node:fs/promises";
import { homedir } from "node:os";
import { join, normalize, relative } from "node:path";
import { join } from "node:path";
import { parseArgs } from "node:util";
import { getClient } from "../../agent/client";
import { parseMdxFrontmatter } from "../../agent/memory";
import { READ_ONLY_BLOCK_LABELS } from "../../agent/memoryConstants";
import {
ensureMemoryFilesystemDirs,
syncMemoryFilesystem,
} from "../../agent/memoryFilesystem";
// Name of the file (at the memory root) that records hashes from the last sync.
const MEMORY_FS_STATE_FILE = ".sync-state.json";
// Labels managed by the memfs machinery itself; always excluded from sync.
const MEMFS_MANAGED_LABELS = new Set(["memory_filesystem"]);
// Labels always treated as read-only, regardless of the block's own flag.
const READ_ONLY_LABELS = new Set(READ_ONLY_BLOCK_LABELS as readonly string[]);

// Snapshot persisted after each sync, used to detect which side changed since.
type SyncState = {
  blockHashes: Record<string, string>; // label -> hash of block value at last sync
  fileHashes: Record<string, string>; // label -> hash of full file content at last sync
  blockIds: Record<string, string>; // label -> server block id
  lastSync: string | null; // timestamp of last sync, null if never synced
};
getMemoryGitStatus,
getMemoryRepoDir,
isGitRepo,
pullMemory,
} from "../../agent/memoryGit";
function printUsage(): void {
console.log(
@@ -37,23 +16,21 @@ function printUsage(): void {
Usage:
letta memfs status [--agent <id>]
letta memfs diff [--agent <id>]
letta memfs resolve --resolutions '<JSON>' [--agent <id>]
letta memfs backup [--agent <id>]
letta memfs backups [--agent <id>]
letta memfs restore --from <backup> --force [--agent <id>]
letta memfs export --agent <id> --out <dir>
letta memfs pull [--agent <id>]
Notes:
- Requires agent id via --agent or LETTA_AGENT_ID.
- Output is JSON only.
- Memory is git-backed. Use git commands for commit/push.
Examples:
LETTA_AGENT_ID=agent-123 letta memfs status
letta memfs diff --agent agent-123
letta memfs resolve --agent agent-123 --resolutions '[{"label":"human/prefs","resolution":"file"}]'
letta memfs pull --agent agent-123
letta memfs backup --agent agent-123
letta memfs backups --agent agent-123
letta memfs restore --agent agent-123 --from memory-backup-20260131-204903 --force
letta memfs export --agent agent-123 --out /tmp/letta-memfs-agent-123
`.trim(),
);
@@ -63,46 +40,6 @@ function getAgentId(agentFromArgs?: string, agentIdFromArgs?: string): string {
return agentFromArgs || agentIdFromArgs || process.env.LETTA_AGENT_ID || "";
}
/** SHA-256 hex digest of a string; used as content identity for sync comparison. */
function hashContent(content: string): string {
  const digest = createHash("sha256");
  digest.update(content);
  return digest.digest("hex");
}
/** Hash only the markdown body of a memory file, ignoring its YAML frontmatter. */
function hashFileBody(content: string): string {
  return hashContent(parseMdxFrontmatter(content).body);
}
/**
 * Load the persisted sync state for an agent from `.sync-state.json` at the
 * memory root. A missing or unparsable state file degrades to an empty
 * snapshot (first-sync semantics) instead of throwing.
 */
function loadSyncState(agentId: string): SyncState {
  // Factory so every call site gets a fresh, independently mutable object.
  const emptyState = (): SyncState => ({
    blockHashes: {},
    fileHashes: {},
    blockIds: {},
    lastSync: null,
  });
  const statePath = join(getMemoryRoot(agentId), MEMORY_FS_STATE_FILE);
  if (!existsSync(statePath)) {
    return emptyState();
  }
  try {
    const parsed = JSON.parse(readFileSync(statePath, "utf-8"));
    return {
      blockHashes: parsed.blockHashes || {},
      fileHashes: parsed.fileHashes || {},
      blockIds: parsed.blockIds || {},
      lastSync: parsed.lastSync || null,
    };
  } catch {
    // Corrupt state file: treat as if no sync has ever happened.
    return emptyState();
  }
}
/** Absolute path of the agent's on-disk memory root: ~/.letta/agents/<id>/memory. */
function getMemoryRoot(agentId: string): string {
  const agentDir = join(homedir(), ".letta", "agents", agentId);
  return join(agentDir, "memory");
}
@@ -153,448 +90,12 @@ async function listBackups(
}
function resolveBackupPath(agentId: string, from: string): string {
if (from.startsWith("/") || /^[A-Za-z]:[\\/]/.test(from)) {
if (from.startsWith("/") || /^[A-Za-z]:[/\\]/.test(from)) {
return from;
}
return join(getAgentRoot(agentId), from);
}
/**
 * Recursively collect the paths of `.md` files under `dir`, returned relative
 * to `baseDir`. Directories named in `excludeDirs` are skipped at every depth.
 * A nonexistent `dir` yields an empty list.
 */
async function scanMdFiles(
  dir: string,
  baseDir = dir,
  excludeDirs: string[] = [],
): Promise<string[]> {
  if (!existsSync(dir)) return [];
  const found: string[] = [];
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!excludeDirs.includes(entry.name)) {
        found.push(...(await scanMdFiles(entryPath, baseDir, excludeDirs)));
      }
    } else if (entry.isFile() && entry.name.endsWith(".md")) {
      found.push(relative(baseDir, entryPath));
    }
  }
  return found;
}
/**
 * Convert a relative `.md` file path into a block label: normalize Windows
 * separators to `/` and strip the trailing `.md` extension.
 */
function labelFromPath(relativePath: string): string {
  const posixPath = relativePath.split("\\").join("/");
  return posixPath.replace(/\.md$/, "");
}
/**
 * Read every `.md` file under `dir` (honoring `excludeDirs`) into a map keyed
 * by block label, each entry holding the raw file content.
 */
async function readMemoryFiles(
  dir: string,
  excludeDirs: string[] = [],
): Promise<Map<string, { content: string }>> {
  const entries = new Map<string, { content: string }>();
  for (const relPath of await scanMdFiles(dir, dir, excludeDirs)) {
    entries.set(labelFromPath(relPath), {
      content: await readFile(join(dir, relPath), "utf-8"),
    });
  }
  return entries;
}
/**
 * Per-project overflow directory for oversized tool output, derived from the
 * current working directory: one leading path separator is dropped, then
 * separators/colons and whitespace runs are replaced with underscores.
 */
function getOverflowDirectory(): string {
  const projectKey = normalize(process.cwd())
    .replace(/^[/\\]/, "")
    .replace(/[/\\:]/g, "_")
    .replace(/\s+/g, "_");
  return join(homedir(), ".letta", "projects", projectKey, "agent-tools");
}
// A label whose file and block versions both changed since the last sync;
// carries both contents so a resolution can be chosen.
type Conflict = {
  label: string;
  fileContent: string;
  blockContent: string;
};

// A label where only the file's frontmatter changed while the body still
// matches the block. Same shape as Conflict; kept distinct for call-site clarity.
type MetadataChange = {
  label: string;
  fileContent: string;
  blockContent: string;
};
/**
 * Compute the sync status between on-disk memory files and server-side
 * memory blocks for an agent, without modifying either side.
 *
 * Three sources of truth are compared:
 *   - files under `<memory root>/system/` (attached) and the memory root
 *     itself (detached; the "system" and "user" subdirectories excluded),
 *   - blocks attached to the agent vs. blocks tagged `owner:<agentId>`
 *     that are not attached,
 *   - the hashes recorded at the last sync (see loadSyncState).
 *
 * @param agentId Agent whose memory is inspected.
 * @returns Labels bucketed by pending sync direction, plus an `isClean`
 *          flag and the timestamp of the last recorded sync.
 */
async function computeStatus(agentId: string) {
  const client = await getClient();
  const root = getMemoryRoot(agentId);
  const systemDir = join(root, "system");
  const detachedDir = root;
  // Create both directories up front so the scans below work on a fresh agent.
  for (const dir of [root, systemDir]) {
    if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
  }
  const systemFiles = await readMemoryFiles(systemDir);
  const detachedFiles = await readMemoryFiles(detachedDir, ["system", "user"]);
  // Blocks currently attached to the agent.
  const blocksResponse = await client.agents.blocks.list(agentId, {
    limit: 1000,
  });
  // The client may return a bare array or a paginated { items } wrapper.
  const attachedBlocks = Array.isArray(blocksResponse)
    ? blocksResponse
    : ((blocksResponse as { items?: unknown[] }).items as Array<{
        id?: string;
        label?: string;
        value?: string;
        read_only?: boolean;
      }>) || [];
  const systemBlockMap = new Map<
    string,
    { value: string; id: string; read_only?: boolean }
  >();
  for (const block of attachedBlocks) {
    if (block.label && block.id) {
      systemBlockMap.set(block.label, {
        value: block.value || "",
        id: block.id,
        read_only: block.read_only,
      });
    }
  }
  // Blocks owned by this agent (via the owner tag) but possibly detached.
  const ownedBlocksResponse = await client.blocks.list({
    tags: [`owner:${agentId}`],
    limit: 1000,
  });
  const ownedBlocks = Array.isArray(ownedBlocksResponse)
    ? ownedBlocksResponse
    : ((ownedBlocksResponse as { items?: unknown[] }).items as Array<{
        id?: string;
        label?: string;
        value?: string;
        read_only?: boolean;
      }>) || [];
  const attachedIds = new Set(attachedBlocks.map((b) => b.id));
  const detachedBlockMap = new Map<
    string,
    { value: string; id: string; read_only?: boolean }
  >();
  for (const block of ownedBlocks) {
    if (block.label && block.id && !attachedIds.has(block.id)) {
      // On label collisions, the attached block wins.
      if (!systemBlockMap.has(block.label)) {
        detachedBlockMap.set(block.label, {
          value: block.value || "",
          id: block.id,
          read_only: block.read_only,
        });
      }
    }
  }
  const lastState = loadSyncState(agentId);
  // NOTE(review): `conflicts` is never populated anywhere in this function;
  // it appears to be kept only for a stable return shape — confirm.
  const conflicts: Array<{ label: string }> = [];
  const pendingFromFile: string[] = [];
  const pendingFromBlock: string[] = [];
  const newFiles: string[] = [];
  const newBlocks: string[] = [];
  const locationMismatches: string[] = [];
  // Union of every label seen on disk, on the server, or in the last sync state.
  const allLabels = new Set<string>([
    ...systemFiles.keys(),
    ...detachedFiles.keys(),
    ...systemBlockMap.keys(),
    ...detachedBlockMap.keys(),
    ...Object.keys(lastState.blockHashes),
    ...Object.keys(lastState.fileHashes),
  ]);
  for (const label of [...allLabels].sort()) {
    // Labels managed by the memfs machinery itself are never synced.
    if (MEMFS_MANAGED_LABELS.has(label)) continue;
    const systemFile = systemFiles.get(label);
    const detachedFile = detachedFiles.get(label);
    const attachedBlock = systemBlockMap.get(label);
    const detachedBlock = detachedBlockMap.get(label);
    const fileEntry = systemFile || detachedFile;
    const fileInSystem = !!systemFile;
    const blockEntry = attachedBlock || detachedBlock;
    const isAttached = !!attachedBlock;
    const effectiveReadOnly =
      !!blockEntry?.read_only || READ_ONLY_LABELS.has(label);
    if (fileEntry && blockEntry) {
      // File location (system/ vs root) should mirror block attachment.
      const locationMismatch =
        (fileInSystem && !isAttached) || (!fileInSystem && isAttached);
      if (locationMismatch) locationMismatches.push(label);
    }
    const fileHash = fileEntry ? hashContent(fileEntry.content) : null;
    const fileBodyHash = fileEntry ? hashFileBody(fileEntry.content) : null;
    const blockHash = blockEntry ? hashContent(blockEntry.value) : null;
    const lastFileHash = lastState.fileHashes[label] ?? null;
    const lastBlockHash = lastState.blockHashes[label] ?? null;
    // "Changed" means changed relative to the last recorded sync.
    const fileChanged = fileHash !== lastFileHash;
    const blockChanged = blockHash !== lastBlockHash;
    if (fileEntry && !blockEntry) {
      // File exists with no block. Read-only labels are server-managed, and
      // an unchanged file whose block hash was recorded before means the
      // block was deleted remotely — neither counts as a "new" file.
      if (READ_ONLY_LABELS.has(label)) continue;
      if (lastBlockHash && !fileChanged) continue;
      newFiles.push(label);
      continue;
    }
    if (!fileEntry && blockEntry) {
      if (effectiveReadOnly) {
        // NOTE(review): a read-only block with no file is queued under
        // pendingFromFile — confirm this is the intended direction.
        pendingFromFile.push(label);
        continue;
      }
      // Unchanged block whose file hash was recorded before: the file was
      // deleted locally, so don't resurrect it as a "new" block.
      if (lastFileHash && !blockChanged) continue;
      newBlocks.push(label);
      continue;
    }
    if (!fileEntry || !blockEntry) continue;
    if (effectiveReadOnly) {
      // Read-only pairs only ever sync block -> file.
      if (blockChanged) pendingFromBlock.push(label);
      continue;
    }
    if (fileBodyHash === blockHash) {
      // Bodies already match; only file metadata (frontmatter) differs.
      if (fileChanged) pendingFromFile.push(label);
      continue;
    }
    if (fileChanged) {
      // "FS wins" policy: when both sides changed, the file takes precedence.
      pendingFromFile.push(label);
      continue;
    }
    if (blockChanged) {
      pendingFromBlock.push(label);
    }
  }
  const isClean =
    conflicts.length === 0 &&
    pendingFromFile.length === 0 &&
    pendingFromBlock.length === 0 &&
    newFiles.length === 0 &&
    newBlocks.length === 0 &&
    locationMismatches.length === 0;
  return {
    conflicts,
    pendingFromFile,
    pendingFromBlock,
    newFiles,
    newBlocks,
    locationMismatches,
    isClean,
    lastSync: lastState.lastSync,
  };
}
/**
 * Compute content-level divergence between on-disk memory files and server
 * blocks for an agent, without modifying either side.
 *
 * Only labels present on BOTH sides are considered. Results:
 *   - `conflicts`: both the file and the block changed since the last sync
 *     and their contents differ.
 *   - `metadataOnly`: the file body still equals the block value, but the
 *     file (frontmatter) changed since the last sync.
 * Read-only and memfs-managed labels are skipped.
 *
 * @param agentId Agent whose memory is compared.
 */
async function computeDiff(agentId: string): Promise<{
  conflicts: Conflict[];
  metadataOnly: MetadataChange[];
}> {
  const client = await getClient();
  const root = getMemoryRoot(agentId);
  const systemDir = join(root, "system");
  const detachedDir = root;
  // Create both directories up front so the scans below work on a fresh agent.
  for (const dir of [root, systemDir]) {
    if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
  }
  const systemFiles = await readMemoryFiles(systemDir);
  const detachedFiles = await readMemoryFiles(detachedDir, ["system", "user"]);
  // Blocks currently attached to the agent.
  const blocksResponse = await client.agents.blocks.list(agentId, {
    limit: 1000,
  });
  // The client may return a bare array or a paginated { items } wrapper.
  const attachedBlocks = Array.isArray(blocksResponse)
    ? blocksResponse
    : ((blocksResponse as { items?: unknown[] }).items as Array<{
        id?: string;
        label?: string;
        value?: string;
        read_only?: boolean;
      }>) || [];
  const systemBlockMap = new Map<
    string,
    { value: string; id: string; read_only?: boolean }
  >();
  for (const block of attachedBlocks) {
    if (block.label && block.id) {
      systemBlockMap.set(block.label, {
        value: block.value || "",
        id: block.id,
        read_only: block.read_only,
      });
    }
  }
  // Blocks owned by this agent (via the owner tag) but possibly detached.
  const ownedBlocksResponse = await client.blocks.list({
    tags: [`owner:${agentId}`],
    limit: 1000,
  });
  const ownedBlocks = Array.isArray(ownedBlocksResponse)
    ? ownedBlocksResponse
    : ((ownedBlocksResponse as { items?: unknown[] }).items as Array<{
        id?: string;
        label?: string;
        value?: string;
        read_only?: boolean;
      }>) || [];
  const attachedIds = new Set(attachedBlocks.map((b) => b.id));
  const detachedBlockMap = new Map<
    string,
    { value: string; id: string; read_only?: boolean }
  >();
  for (const block of ownedBlocks) {
    if (block.label && block.id && !attachedIds.has(block.id)) {
      // On label collisions, the attached block wins.
      if (!systemBlockMap.has(block.label)) {
        detachedBlockMap.set(block.label, {
          value: block.value || "",
          id: block.id,
          read_only: block.read_only,
        });
      }
    }
  }
  const lastState = loadSyncState(agentId);
  const conflicts: Conflict[] = [];
  const metadataOnly: MetadataChange[] = [];
  // Union of every label seen on disk, on the server, or in the last sync state.
  const allLabels = new Set<string>([
    ...systemFiles.keys(),
    ...detachedFiles.keys(),
    ...systemBlockMap.keys(),
    ...detachedBlockMap.keys(),
    ...Object.keys(lastState.blockHashes),
    ...Object.keys(lastState.fileHashes),
  ]);
  for (const label of [...allLabels].sort()) {
    // Labels managed by the memfs machinery itself are never synced.
    if (MEMFS_MANAGED_LABELS.has(label)) continue;
    const systemFile = systemFiles.get(label);
    const detachedFile = detachedFiles.get(label);
    const attachedBlock = systemBlockMap.get(label);
    const detachedBlock = detachedBlockMap.get(label);
    const fileEntry = systemFile || detachedFile;
    const blockEntry = attachedBlock || detachedBlock;
    // Only file/block pairs can diverge; one-sided labels are not diffed here.
    if (!fileEntry || !blockEntry) continue;
    const effectiveReadOnly =
      !!blockEntry.read_only || READ_ONLY_LABELS.has(label);
    if (effectiveReadOnly) continue;
    const fileHash = hashContent(fileEntry.content);
    const fileBodyHash = hashFileBody(fileEntry.content);
    const blockHash = hashContent(blockEntry.value);
    const lastFileHash = lastState.fileHashes[label] ?? null;
    const lastBlockHash = lastState.blockHashes[label] ?? null;
    // "Changed" means changed relative to the last recorded sync.
    const fileChanged = fileHash !== lastFileHash;
    const blockChanged = blockHash !== lastBlockHash;
    if (fileBodyHash === blockHash) {
      // Body already matches the block: any file change is frontmatter-only.
      if (fileChanged) {
        metadataOnly.push({
          label,
          fileContent: fileEntry.content,
          blockContent: blockEntry.value,
        });
      }
      continue;
    }
    if (fileChanged && blockChanged) {
      conflicts.push({
        label,
        fileContent: fileEntry.content,
        blockContent: blockEntry.value,
      });
    }
  }
  return { conflicts, metadataOnly };
}
/**
 * Render a markdown report of memory sync divergence: one section per
 * conflict (both versions in fenced blocks), followed by an optional
 * section for metadata-only changes.
 *
 * @param conflicts    Labels where both file and block changed.
 * @param metadataOnly Labels where only frontmatter differs (file wins).
 * @param agentId      Agent the report describes.
 * @returns The complete report as a single string.
 */
function formatDiffFile(
  conflicts: Conflict[],
  metadataOnly: MetadataChange[],
  agentId: string,
): string {
  const fence = "```";
  const out: string[] = [
    "# Memory Filesystem Diff",
    "",
    `Agent: ${agentId}`,
    `Generated: ${new Date().toISOString()}`,
    `Conflicts: ${conflicts.length}`,
    `Metadata-only changes: ${metadataOnly.length}`,
    "",
    "---",
    "",
  ];
  for (const { label, fileContent, blockContent } of conflicts) {
    out.push(
      `## Conflict: ${label}`,
      "",
      "### File Version",
      fence,
      fileContent,
      fence,
      "",
      "### Block Version",
      fence,
      blockContent,
      fence,
      "",
      "---",
      "",
    );
  }
  if (metadataOnly.length > 0) {
    out.push(
      "## Metadata-only Changes",
      "",
      "Frontmatter changed while body content stayed the same (file wins).",
      "",
    );
    for (const { label, fileContent, blockContent } of metadataOnly) {
      out.push(
        `### ${label}`,
        "",
        "#### File Version (with frontmatter)",
        fence,
        fileContent,
        fence,
        "",
        "#### Block Version (body only)",
        fence,
        blockContent,
        fence,
        "",
        "---",
        "",
      );
    }
  }
  return out.join("\n");
}
export async function runMemfsSubcommand(argv: string[]): Promise<number> {
let parsed: ReturnType<typeof parseArgs>;
try {
@@ -607,7 +108,6 @@ export async function runMemfsSubcommand(argv: string[]): Promise<number> {
from: { type: "string" },
force: { type: "boolean" },
out: { type: "string" },
resolutions: { type: "string" },
},
strict: true,
allowPositionals: true,
@@ -640,82 +140,49 @@ export async function runMemfsSubcommand(argv: string[]): Promise<number> {
try {
if (action === "status") {
ensureMemoryFilesystemDirs(agentId);
const status = await computeStatus(agentId);
if (!isGitRepo(agentId)) {
console.log(
JSON.stringify({ error: "Not a git repo", gitEnabled: false }),
);
return 1;
}
const status = await getMemoryGitStatus(agentId);
console.log(JSON.stringify(status, null, 2));
return status.isClean ? 0 : 2;
return status.dirty || status.aheadOfRemote ? 2 : 0;
}
if (action === "diff") {
ensureMemoryFilesystemDirs(agentId);
const { conflicts, metadataOnly } = await computeDiff(agentId);
if (conflicts.length === 0 && metadataOnly.length === 0) {
console.log(
JSON.stringify(
{ conflicts: [], metadataOnly: [], diffPath: null, clean: true },
null,
2,
),
);
return 0;
if (!isGitRepo(agentId)) {
console.error("Not a git repo. Enable git-backed memory first.");
return 1;
}
const diffContent = formatDiffFile(conflicts, metadataOnly, agentId);
const overflowDir = getOverflowDirectory();
if (!existsSync(overflowDir)) {
mkdirSync(overflowDir, { recursive: true });
const { execFile: execFileCb } = await import("node:child_process");
const { promisify } = await import("node:util");
const execFile = promisify(execFileCb);
const dir = getMemoryRepoDir(agentId);
const { stdout } = await execFile("git", ["diff"], { cwd: dir });
if (stdout.trim()) {
console.log(stdout);
return 2;
}
const filename = `memfs-diff-${randomUUID()}.md`;
const diffPath = join(overflowDir, filename);
writeFileSync(diffPath, diffContent, "utf-8");
console.log(
JSON.stringify(
{ conflicts, metadataOnly, diffPath, clean: false },
null,
2,
),
);
return conflicts.length > 0 ? 2 : 0;
console.log("No changes.");
return 0;
}
if (action === "resolve") {
ensureMemoryFilesystemDirs(agentId);
const resolutionsRaw = parsed.values.resolutions as string | undefined;
if (!resolutionsRaw) {
console.error("Missing --resolutions JSON.");
if (action === "pull") {
if (!isGitRepo(agentId)) {
console.error("Not a git repo. Enable git-backed memory first.");
return 1;
}
let resolutions: Array<{ label: string; resolution: "file" | "block" }> =
[];
try {
const parsedResolutions = JSON.parse(resolutionsRaw);
if (!Array.isArray(parsedResolutions)) {
throw new Error("resolutions must be an array");
}
resolutions = parsedResolutions;
} catch (error) {
console.error(
`Invalid --resolutions JSON: ${error instanceof Error ? error.message : String(error)}`,
);
return 1;
}
const result = await syncMemoryFilesystem(agentId, {
resolutions,
});
const result = await pullMemory(agentId);
console.log(JSON.stringify(result, null, 2));
return result.conflicts.length > 0 ? 2 : 0;
return 0;
}
if (action === "backup") {
const root = getMemoryRoot(agentId);
if (!existsSync(root)) {
console.error(
`Memory directory not found for agent ${agentId}. Run memfs sync first.`,
);
console.error(`Memory directory not found for agent ${agentId}.`);
return 1;
}
const agentRoot = getAgentRoot(agentId);
@@ -771,9 +238,7 @@ export async function runMemfsSubcommand(argv: string[]): Promise<number> {
}
const root = getMemoryRoot(agentId);
if (!existsSync(root)) {
console.error(
`Memory directory not found for agent ${agentId}. Run memfs sync first.`,
);
console.error(`Memory directory not found for agent ${agentId}.`);
return 1;
}
if (existsSync(out)) {

View File

@@ -18,11 +18,7 @@ import { getClient } from "./agent/client";
import { setAgentContext, setConversationId } from "./agent/context";
import { createAgent } from "./agent/create";
import { ISOLATED_BLOCK_LABELS } from "./agent/memory";
import {
ensureMemoryFilesystemBlock,
syncMemoryFilesystem,
updateMemoryFilesystemBlock,
} from "./agent/memoryFilesystem";
import { sendMessageStream } from "./agent/message";
import { getModelUpdateArgs } from "./agent/model";
import { SessionStats } from "./agent/stats";
@@ -713,22 +709,26 @@ export async function handleHeadlessCommand(
);
}
// Sync filesystem-backed memory before creating conversations (only if memfs is enabled)
// Git-backed memory: clone or pull on startup (only if memfs is enabled)
if (settingsManager.isMemfsEnabled(agent.id)) {
try {
await ensureMemoryFilesystemBlock(agent.id);
const syncResult = await syncMemoryFilesystem(agent.id);
if (syncResult.conflicts.length > 0) {
console.error(
`Memory filesystem sync conflicts detected (${syncResult.conflicts.length}). Run in interactive mode to resolve.`,
);
process.exit(1);
const { isGitRepo, cloneMemoryRepo, pullMemory } = await import(
"./agent/memoryGit"
);
if (!isGitRepo(agent.id)) {
await cloneMemoryRepo(agent.id);
} else {
const result = await pullMemory(agent.id);
if (result.summary.includes("CONFLICT")) {
console.error(
"Memory has merge conflicts. Run in interactive mode to resolve.",
);
process.exit(1);
}
}
await updateMemoryFilesystemBlock(agent.id);
// Note: Sync summary intentionally not logged in headless mode to keep output clean
} catch (error) {
console.error(
`Memory filesystem sync failed: ${error instanceof Error ? error.message : String(error)}`,
`Memory git sync failed: ${error instanceof Error ? error.message : String(error)}`,
);
process.exit(1);
}

View File

@@ -1708,6 +1708,17 @@ async function main(): Promise<void> {
);
}
// Git-backed memory: ensure tag + repo are set up
if (settingsManager.isMemfsEnabled(agent.id)) {
const { addGitMemoryTag, isGitRepo, cloneMemoryRepo } = await import(
"./agent/memoryGit"
);
await addGitMemoryTag(agent.id);
if (!isGitRepo(agent.id)) {
await cloneMemoryRepo(agent.id);
}
}
// Check if we're resuming an existing agent
// We're resuming if:
// 1. We specified an agent ID via --agent flag (agentIdArg)

View File

@@ -9,81 +9,150 @@ Agents with the `git-memory-enabled` tag have their memory blocks stored in git
**Features:**
- Stored in cloud (GCS)
- Accessible via `https://api.letta.com/v1/git/<agent-id>/state.git`
- Bidirectional sync: API Git (webhook-triggered, ~2-3s delay)
- Accessible via `$LETTA_BASE_URL/v1/git/<agent-id>/state.git`
- Bidirectional sync: API <-> Git (webhook-triggered, ~2-3s delay)
- Structure: `memory/system/*.md` for system blocks
## Setup Authentication (One-Time)
## What the CLI Harness Does Automatically
Configure git credential helper to authenticate with Letta API:
When memfs is enabled, the Letta Code CLI automatically:
1. Adds the `git-memory-enabled` tag to the agent (triggers backend to create the git repo)
2. Clones the repo into `~/.letta/agents/<agent-id>/memory/` (git root is the memory directory)
3. Configures a **local** credential helper in `memory/.git/config` (so `git push`/`git pull` work without auth ceremony)
4. Installs a **pre-commit hook** that validates frontmatter before each commit (see below)
5. On subsequent startups: pulls latest changes, reconfigures credentials and hook (self-healing)
6. During sessions: periodically checks `git status` and reminds you (the agent) to commit/push if dirty
If any of these steps fail, you can replicate them manually using the sections below.
## Authentication
The harness configures a per-repo credential helper during clone. To verify or reconfigure:
```bash
export LETTA_API_KEY="your-api-key"
cd ~/.letta/agents/<agent-id>/memory
git config --global credential.https://api.letta.com.helper '!f() {
echo "username=letta";
echo "password=$LETTA_API_KEY";
}; f'
# Check if configured
git config --get credential.$LETTA_BASE_URL.helper
# Reconfigure (e.g. after API key rotation)
git config credential.$LETTA_BASE_URL.helper \
'!f() { echo "username=letta"; echo "password=$LETTA_API_KEY"; }; f'
```
After setup, git operations will automatically use your API key for authentication.
For cloning a *different* agent's repo (e.g. during memory migration), set up a global helper:
```bash
git config --global credential.$LETTA_BASE_URL.helper \
'!f() { echo "username=letta"; echo "password=$LETTA_API_KEY"; }; f'
```
## Pre-Commit Hook (Frontmatter Validation)
The harness installs a git pre-commit hook that validates `.md` files under `memory/` before each commit. This prevents pushes that the server would reject.
**Rules:**
- Every `.md` file must have YAML frontmatter (`---` header and closing `---`)
- Required fields: `description` (non-empty string), `limit` (positive integer)
- `read_only` is a **protected field**: you (the agent) cannot add, remove, or change it. Files with `read_only: true` cannot be modified at all. Only the server/user sets this field.
- Unknown frontmatter keys are rejected
**Valid file format:**
```markdown
---
description: What this block contains
limit: 20000
---
Block content goes here.
```
If the hook rejects a commit, read the error message — it tells you exactly which file and which rule was violated. Fix the file and retry.
## Clone Agent Memory
```bash
# Clone agent's memory repo
git clone "https://api.letta.com/v1/git/<agent-id>/state.git" ~/my-agent-memory
git clone "$LETTA_BASE_URL/v1/git/<agent-id>/state.git" ~/my-agent-memory
# View memory blocks
ls ~/my-agent-memory/memory/system/
cat ~/my-agent-memory/memory/system/human.md
```
## Enabling Git Memory (Manual)
If the harness `/memfs enable` failed, you can replicate it:
```bash
AGENT_ID="<your-agent-id>"
AGENT_DIR=~/.letta/agents/$AGENT_ID
MEMORY_REPO_DIR="$AGENT_DIR/memory"
# 1. Add git-memory-enabled tag (IMPORTANT: preserve existing tags!)
# First GET the agent to read current tags, then PATCH with the new tag appended.
# The harness code does: tags = [...existingTags, "git-memory-enabled"]
curl -X PATCH "$LETTA_BASE_URL/v1/agents/$AGENT_ID" \
-H "Authorization: Bearer $LETTA_API_KEY" \
-H "Content-Type: application/json" \
-d '{"tags": ["origin:letta-code", "git-memory-enabled"]}'
# 2. Clone the repo into memory/
mkdir -p "$MEMORY_REPO_DIR"
git clone "$LETTA_BASE_URL/v1/git/$AGENT_ID/state.git" "$MEMORY_REPO_DIR"
# 3. Configure local credential helper
cd "$MEMORY_REPO_DIR"
git config credential.$LETTA_BASE_URL.helper \
'!f() { echo "username=letta"; echo "password=$LETTA_API_KEY"; }; f'
```
## Bidirectional Sync
### API Edit Git Pull
### API Edit -> Git Pull
```bash
# 1. Edit block via API (or use memory tools)
# 2. Pull to get changes (webhook creates commit automatically)
cd ~/my-agent-memory
git pull --ff-only
cd ~/.letta/agents/<agent-id>/memory
git pull
```
Changes made via the API are automatically committed to git within 2-3 seconds.
### Git Push API Update
### Git Push -> API Update
```bash
cd ~/.letta/agents/<agent-id>/memory
# 1. Edit files locally
echo "Updated info" > memory/system/human.md
echo "Updated info" > system/human.md
# 2. Commit and push
git add memory/system/human.md
git commit -m "update human block"
git add system/human.md
git commit -m "fix: update human block"
git push
# 3. API automatically reflects changes (webhook-triggered, ~2-3s delay)
```
Changes pushed to git are automatically synced to the API within 2-3 seconds.
## Conflict Resolution
When both API and git have diverged:
```bash
cd ~/my-agent-memory
cd ~/.letta/agents/<agent-id>/memory
# 1. Try to push (will be rejected)
git push # "fetch first"
git push # -> "fetch first"
# 2. Pull to create merge conflict
git pull --no-rebase
# CONFLICT in memory/system/human.md
# -> CONFLICT in system/human.md
# 3. View conflict markers
cat memory/system/human.md
cat system/human.md
# <<<<<<< HEAD
# your local changes
# =======
@@ -91,13 +160,13 @@ cat memory/system/human.md
# >>>>>>> <commit>
# 4. Resolve
echo "final resolved content" > memory/system/human.md
git add memory/system/human.md
git commit -m "resolved conflict"
echo "final resolved content" > system/human.md
git add system/human.md
git commit -m "fix: resolved conflict in human block"
# 5. Push resolution
git push
# API automatically updates with resolved content
# -> API automatically updates with resolved content
```
## Block Management
@@ -106,36 +175,37 @@ git push
```bash
# Create file in system/ directory (automatically attached to agent)
echo "My new block content" > memory/system/new-block.md
git add memory/system/new-block.md
git commit -m "add new block"
echo "My new block content" > system/new-block.md
git add system/new-block.md
git commit -m "feat: add new block"
git push
# Block automatically created and attached to agent
# -> Block automatically created and attached to agent
```
### Delete/Detach Block
```bash
# Remove file from system/ directory
git rm memory/system/persona.md
git commit -m "remove persona block"
git rm system/persona.md
git commit -m "chore: remove persona block"
git push
# Block automatically detached from agent
# -> Block automatically detached from agent
```
## Directory Structure
```
repo/
~/.letta/agents/<agent-id>/
├── .letta/
│ └── config.json # Repo metadata
└── memory/
└── system/ # System blocks (attached to agent)
│ └── config.json # Agent metadata
└── memory/ # Git repo root
├── .git/ # Git repo data
└── system/ # System blocks (attached to agent)
├── human.md
└── persona.md
```
**System blocks** (`memory/system/`) are attached to the agent and appear in the agent's memory.
**System blocks** (`memory/system/`) are attached to the agent and appear in the agent's system prompt.
## Requirements
@@ -146,15 +216,19 @@ repo/
## Troubleshooting
**Clone fails with "Authentication failed":**
- Verify credential helper is set: `git config --global --get credential.https://api.letta.com.helper`
- Verify API key is exported: `echo $LETTA_API_KEY`
- Reconfigure: Run setup command again with your API key
- Check credential helper: `git config --get credential.$LETTA_BASE_URL.helper`
- Reconfigure: see Authentication section above
- Verify the endpoint is reachable: `curl -u "letta:$LETTA_API_KEY" "$LETTA_BASE_URL/v1/git/<agent-id>/state.git/info/refs?service=git-upload-pack"` (the URL must be quoted — `?`, `<`, and `>` are special to the shell)
**Push/pull doesn't update API:**
- Wait 2-3 seconds for webhook processing
- Verify agent has `git-memory-enabled` tag
- Check if you have write access to the agent
**Harness setup failed (no .git/ after /memfs enable):**
- Check debug logs (`LETTA_DEBUG=1`)
- Follow "Enabling Git Memory (Manual)" steps above
**Can't see changes immediately:**
- Bidirectional sync has a 2-3 second delay for webhook processing
- Use `git pull` to get latest API changes

View File

@@ -1,655 +1,25 @@
/**
* Integration tests for memory filesystem sync behavior.
* These tests hit the real Letta API and require LETTA_API_KEY to be set.
*
* Tests cover:
* - Bug 1: File move from system/ to root/ (should detach, not duplicate)
* - Bug 2: File deletion (should remove owner tag, not resurrect)
* - FS wins all policy (when both changed, file wins)
* - Location mismatch auto-sync
*
* Run with: bun test src/tests/agent/memoryFilesystem.sync.integration.test.ts
* NOTE: The old hash-based sync tests (syncMemoryFilesystem,
* checkMemoryFilesystemStatus) have been removed. Memory is now
* git-backed. New integration tests for the git model should be
* added when needed.
*/
import {
afterAll,
afterEach,
beforeAll,
beforeEach,
describe,
expect,
test,
} from "bun:test";
import {
existsSync,
mkdirSync,
readFileSync,
rmSync,
writeFileSync,
} from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import Letta from "@letta-ai/letta-client";
import { describe, expect, test } from "bun:test";
import {
checkMemoryFilesystemStatus,
ensureMemoryFilesystemDirs,
getMemoryDetachedDir,
getMemorySystemDir,
syncMemoryFilesystem,
} from "../../agent/memoryFilesystem";
import { settingsManager } from "../../settings-manager";
// Skip all tests if no API key is available
const LETTA_API_KEY = process.env.LETTA_API_KEY;
const LETTA_BASE_URL = process.env.LETTA_BASE_URL || "https://api.letta.com";
const API_KEY = LETTA_API_KEY ?? "";
const describeIntegration = LETTA_API_KEY ? describe : describe.skip;
describeIntegration("memfs sync integration", () => {
let client: Letta;
let testAgentId: string;
let tempHomeDir: string;
let originalHome: string | undefined;
const createdBlockIds: string[] = [];
beforeAll(async () => {
client = new Letta({
baseURL: LETTA_BASE_URL,
apiKey: API_KEY,
});
// Create a test agent
const agent = await client.agents.create({
name: `memfs-sync-test-${Date.now()}`,
model: "letta/letta-free",
embedding: "letta/letta-free",
});
testAgentId = agent.id;
describe("memfs git integration", () => {
test.skip("clone memory repo on first run", () => {
expect(true).toBe(true);
});
afterAll(async () => {
// Clean up: delete created blocks
for (const blockId of createdBlockIds) {
try {
await client.blocks.delete(blockId);
} catch {
// Ignore errors during cleanup
}
}
// Delete test agent
if (testAgentId) {
try {
await client.agents.delete(testAgentId);
} catch {
// Ignore errors during cleanup
}
}
test.skip("pull memory on startup", () => {
expect(true).toBe(true);
});
beforeEach(async () => {
// Reset settings manager before changing HOME
await settingsManager.reset();
// Create temp directory and override HOME
tempHomeDir = join(tmpdir(), `memfs-sync-test-${Date.now()}`);
mkdirSync(tempHomeDir, { recursive: true });
originalHome = process.env.HOME;
process.env.HOME = tempHomeDir;
// Create settings with API base URL
// API key is read from process.env.LETTA_API_KEY by getClient()
const settingsDir = join(tempHomeDir, ".letta");
mkdirSync(settingsDir, { recursive: true });
writeFileSync(
join(settingsDir, "settings.json"),
JSON.stringify({
env: {
LETTA_BASE_URL: LETTA_BASE_URL,
},
}),
);
// Initialize settings manager with new HOME
await settingsManager.initialize();
// Set up memfs directories
ensureMemoryFilesystemDirs(testAgentId, tempHomeDir);
test.skip("git status detects uncommitted changes", () => {
expect(true).toBe(true);
});
afterEach(async () => {
// Reset settings manager
await settingsManager.reset();
// Restore HOME
process.env.HOME = originalHome;
// Clean up temp directory
if (tempHomeDir && existsSync(tempHomeDir)) {
rmSync(tempHomeDir, { recursive: true, force: true });
}
});
function getSystemDir(): string {
return getMemorySystemDir(testAgentId, tempHomeDir);
}
function getDetachedDir(): string {
return getMemoryDetachedDir(testAgentId, tempHomeDir);
}
function writeSystemFile(label: string, content: string): void {
const systemDir = getSystemDir();
const filePath = join(systemDir, `${label}.md`);
const dir = join(systemDir, ...label.split("/").slice(0, -1));
if (label.includes("/")) {
mkdirSync(dir, { recursive: true });
}
writeFileSync(filePath, content);
}
function writeDetachedFile(label: string, content: string): void {
const detachedDir = getDetachedDir();
const filePath = join(detachedDir, `${label}.md`);
const dir = join(detachedDir, ...label.split("/").slice(0, -1));
if (label.includes("/")) {
mkdirSync(dir, { recursive: true });
}
writeFileSync(filePath, content);
}
function deleteFile(dir: string, label: string): void {
const filePath = join(dir, `${label}.md`);
if (existsSync(filePath)) {
rmSync(filePath);
}
}
function readFile(dir: string, label: string): string | null {
const filePath = join(dir, `${label}.md`);
if (existsSync(filePath)) {
return readFileSync(filePath, "utf-8");
}
return null;
}
async function getAttachedBlocks(): Promise<
Array<{ id: string; label?: string; value?: string }>
> {
const blocks = await client.agents.blocks.list(testAgentId);
return Array.isArray(blocks)
? blocks
: (
blocks as {
items?: Array<{ id: string; label?: string; value?: string }>;
}
).items || [];
}
async function getOwnedBlocks(): Promise<
Array<{ id: string; label?: string; value?: string; tags?: string[] }>
> {
const ownerTag = `owner:${testAgentId}`;
const blocks = await client.blocks.list({ tags: [ownerTag] });
return Array.isArray(blocks)
? blocks
: (
blocks as {
items?: Array<{
id: string;
label?: string;
value?: string;
tags?: string[];
}>;
}
).items || [];
}
test("new file in system/ creates attached block", async () => {
const label = `test-new-file-${Date.now()}`;
const content = "New file content";
// Create file in system/
writeSystemFile(label, content);
// Sync
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// Verify block was created
expect(result.createdBlocks).toContain(label);
// Verify block is attached
const attachedBlocks = await getAttachedBlocks();
const block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeDefined();
expect(block?.value).toBe(content);
// Track for cleanup
if (block?.id) {
createdBlockIds.push(block.id);
}
});
test("new file at root creates detached block (not attached)", async () => {
const label = `test-detached-${Date.now()}`;
const content = "Detached file content";
// Create file at root (detached)
writeDetachedFile(label, content);
// Sync
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// Verify block was created
expect(result.createdBlocks).toContain(label);
// Verify block is NOT attached
const attachedBlocks = await getAttachedBlocks();
const attachedBlock = attachedBlocks.find((b) => b.label === label);
expect(attachedBlock).toBeUndefined();
// Verify block exists via owner tag (detached)
const ownedBlocks = await getOwnedBlocks();
const ownedBlock = ownedBlocks.find((b) => b.label === label);
expect(ownedBlock).toBeDefined();
expect(ownedBlock?.value).toBe(content);
// Track for cleanup
if (ownedBlock?.id) {
createdBlockIds.push(ownedBlock.id);
}
});
test("file move from system/ to root/ detaches block (no duplication)", async () => {
const label = `test-move-${Date.now()}`;
const content = "Content that will be moved";
// Create file in system/
writeSystemFile(label, content);
// First sync - creates attached block
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
// Verify block is attached
let attachedBlocks = await getAttachedBlocks();
let block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeDefined();
if (block?.id) {
createdBlockIds.push(block.id);
}
// Move file: delete from system/, create at root
deleteFile(getSystemDir(), label);
writeDetachedFile(label, content);
// Second sync - should detach (location mismatch with same content)
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
// Verify block is no longer attached
attachedBlocks = await getAttachedBlocks();
block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeUndefined();
// Verify only ONE block exists with this label (no duplication)
const ownedBlocks = await getOwnedBlocks();
const matchingBlocks = ownedBlocks.filter((b) => b.label === label);
expect(matchingBlocks.length).toBe(1);
// Verify the block still exists (just detached)
expect(matchingBlocks[0]?.value).toBe(content);
});
test("file deletion removes owner tag (no resurrection)", async () => {
const label = `test-delete-${Date.now()}`;
const content = "Content that will be deleted";
// Create file at root (detached)
writeDetachedFile(label, content);
// First sync - creates detached block with owner tag
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
// Verify block exists via owner tag
let ownedBlocks = await getOwnedBlocks();
let block = ownedBlocks.find((b) => b.label === label);
expect(block).toBeDefined();
const blockId = block?.id;
if (blockId) {
createdBlockIds.push(blockId);
}
// Delete the file
deleteFile(getDetachedDir(), label);
// Second sync - should remove owner tag
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
expect(result.deletedBlocks).toContain(label);
// Verify block no longer has owner tag (not discoverable)
ownedBlocks = await getOwnedBlocks();
block = ownedBlocks.find((b) => b.label === label);
expect(block).toBeUndefined();
// Third sync - file should NOT resurrect
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
const fileContent = readFile(getDetachedDir(), label);
expect(fileContent).toBeNull();
});
test("FS wins all: when both file and block changed, file wins", async () => {
const label = `test-fs-wins-${Date.now()}`;
const originalContent = "Original content";
const fileContent = "File changed content";
const blockContent = "Block changed content";
// Create file in system/
writeSystemFile(label, originalContent);
// First sync - creates block
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
let attachedBlocks = await getAttachedBlocks();
let block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeDefined();
if (!block?.id) {
throw new Error("Expected block to exist after first sync.");
}
const blockId = block.id;
createdBlockIds.push(blockId);
// Change both file AND block
writeSystemFile(label, fileContent);
await client.blocks.update(blockId, { value: blockContent });
// Second sync - file should win (no conflict)
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// Verify no conflicts
expect(result.conflicts.length).toBe(0);
expect(result.updatedBlocks).toContain(label);
// Verify block has FILE content (not block content)
attachedBlocks = await getAttachedBlocks();
block = attachedBlocks.find((b) => b.label === label);
expect(block?.value).toBe(fileContent);
});
test("location mismatch auto-sync: content matches but location differs", async () => {
const label = `test-location-${Date.now()}`;
const content = "Same content";
// Create file in system/
writeSystemFile(label, content);
// First sync - creates attached block
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
let attachedBlocks = await getAttachedBlocks();
let block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeDefined();
const blockId = block?.id;
if (blockId) {
createdBlockIds.push(blockId);
}
// Move file to root (content unchanged)
deleteFile(getSystemDir(), label);
writeDetachedFile(label, content);
// Second sync - should detach block (location mismatch with same content)
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
// Verify block is no longer attached
attachedBlocks = await getAttachedBlocks();
block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeUndefined();
// Verify block still exists (detached)
const ownedBlocks = await getOwnedBlocks();
const detachedBlock = ownedBlocks.find((b) => b.label === label);
expect(detachedBlock).toBeDefined();
});
test("location mismatch with content diff: sync both in one pass", async () => {
const label = `test-location-content-${Date.now()}`;
const originalContent = "Original content";
const newContent = "New content at root";
// Create file in system/
writeSystemFile(label, originalContent);
// First sync - creates attached block
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
let attachedBlocks = await getAttachedBlocks();
let block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeDefined();
const blockId = block?.id;
if (blockId) {
createdBlockIds.push(blockId);
}
// Move file to root AND change content
deleteFile(getSystemDir(), label);
writeDetachedFile(label, newContent);
// Second sync - should update content AND detach in one pass
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// Verify block content was updated
expect(result.updatedBlocks).toContain(label);
// Verify block is detached
attachedBlocks = await getAttachedBlocks();
block = attachedBlocks.find((b) => b.label === label);
expect(block).toBeUndefined();
// Verify detached block has new content
const ownedBlocks = await getOwnedBlocks();
const detachedBlock = ownedBlocks.find((b) => b.label === label);
expect(detachedBlock).toBeDefined();
expect(detachedBlock?.value).toBe(newContent);
});
test("checkMemoryFilesystemStatus reports location mismatches", async () => {
const label = `test-status-${Date.now()}`;
const content = "Status test content";
// Create file in system/
writeSystemFile(label, content);
// First sync - creates attached block
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
const attachedBlocks = await getAttachedBlocks();
const block = attachedBlocks.find((b) => b.label === label);
if (block?.id) {
createdBlockIds.push(block.id);
}
// Move file to root (content unchanged)
deleteFile(getSystemDir(), label);
writeDetachedFile(label, content);
// Check status - should report location mismatch
const status = await checkMemoryFilesystemStatus(testAgentId, {
homeDir: tempHomeDir,
});
expect(status.locationMismatches).toContain(label);
expect(status.isClean).toBe(false);
});
// =========================================================================
// Read-only block tests
// =========================================================================
test("read_only block: file edit is overwritten by API content", async () => {
const label = `test-readonly-${Date.now()}`;
const originalContent = "Original read-only content";
const editedContent = "User tried to edit this";
// Create a read_only block via API
const block = await client.blocks.create({
label,
value: originalContent,
description: "Test read-only block",
read_only: true,
tags: [`owner:${testAgentId}`],
});
createdBlockIds.push(block.id);
// Attach to agent
await client.agents.blocks.attach(block.id, { agent_id: testAgentId });
// First sync - creates file
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
// Verify file was created
const filePath = join(getSystemDir(), `${label}.md`);
expect(existsSync(filePath)).toBe(true);
// Edit the file locally
writeFileSync(filePath, editedContent);
// Second sync - should overwrite with API content
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// File should be in updatedFiles (overwritten)
expect(result.updatedFiles).toContain(label);
// Verify file content is back to original (API wins)
const fileContent = readFileSync(filePath, "utf-8");
expect(fileContent).toContain(originalContent);
// Verify block was NOT updated (still has original content)
const updatedBlock = await client.blocks.retrieve(block.id);
expect(updatedBlock.value).toBe(originalContent);
});
test("read_only block: deleted file is recreated", async () => {
const label = `test-readonly-delete-${Date.now()}`;
const content = "Content that should persist";
// Create a read_only block via API
const block = await client.blocks.create({
label,
value: content,
description: "Test read-only block for deletion",
read_only: true,
tags: [`owner:${testAgentId}`],
});
createdBlockIds.push(block.id);
// Attach to agent
await client.agents.blocks.attach(block.id, { agent_id: testAgentId });
// First sync - creates file
await syncMemoryFilesystem(testAgentId, { homeDir: tempHomeDir });
// Verify file was created
const filePath = join(getSystemDir(), `${label}.md`);
expect(existsSync(filePath)).toBe(true);
// Delete the file locally
rmSync(filePath);
expect(existsSync(filePath)).toBe(false);
// Second sync - should recreate file (not remove owner tag)
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// File should be recreated
expect(result.createdFiles).toContain(label);
expect(existsSync(filePath)).toBe(true);
// Verify block still has owner tag and is attached
const attachedBlocks = await client.agents.blocks.list(testAgentId);
const attachedArray = Array.isArray(attachedBlocks)
? attachedBlocks
: (attachedBlocks as { items?: Array<{ id: string }> }).items || [];
expect(attachedArray.some((b) => b.id === block.id)).toBe(true);
});
test("read_only label: file-only (no block) is deleted", async () => {
// This tests the case where someone creates a file for a read_only label
// but no corresponding block exists - the file should be deleted
const label = "skills";
// Helper to ensure no block exists for this label
async function ensureNoBlock(labelToDelete: string) {
// Remove attached blocks with this label
const attachedBlocks = await getAttachedBlocks();
for (const b of attachedBlocks.filter((x) => x.label === labelToDelete)) {
if (b.id) {
try {
await client.agents.blocks.detach(b.id, { agent_id: testAgentId });
await client.blocks.delete(b.id);
} catch {
// Ignore errors (block may not be deletable)
}
}
}
// Remove detached owned blocks with this label
const ownedBlocks = await getOwnedBlocks();
for (const b of ownedBlocks.filter((x) => x.label === labelToDelete)) {
if (b.id) {
try {
await client.blocks.delete(b.id);
} catch {
// Ignore errors
}
}
}
}
// Ensure API has no block for this label
await ensureNoBlock(label);
// Verify no block exists
const attachedBefore = await getAttachedBlocks();
const ownedBefore = await getOwnedBlocks();
const blockExists =
attachedBefore.some((b) => b.label === label) ||
ownedBefore.some((b) => b.label === label);
// For fresh test agents, there should be no skills block
// If one exists and can't be deleted, we can't run this test
expect(blockExists).toBe(false);
if (blockExists) {
// This assertion above will fail, but just in case:
return;
}
// Create local file in system/
writeSystemFile(label, "local skills content that should be deleted");
// Verify file was created
const filePath = join(getSystemDir(), `${label}.md`);
expect(existsSync(filePath)).toBe(true);
// Sync - should delete the file (API is authoritative for read_only labels)
const result = await syncMemoryFilesystem(testAgentId, {
homeDir: tempHomeDir,
});
// File should be deleted
expect(existsSync(filePath)).toBe(false);
expect(result.deletedFiles).toContain(label);
test.skip("git status detects local ahead of remote", () => {
expect(true).toBe(true);
});
});

View File

@@ -11,7 +11,6 @@ import {
getMemoryFilesystemRoot,
getMemorySystemDir,
labelFromRelativePath,
parseBlockFromFileContent,
renderMemoryFilesystemTree,
} from "../../agent/memoryFilesystem";
@@ -91,111 +90,8 @@ function createMockClient(options: {
};
}
describe("parseBlockFromFileContent", () => {
test("parses frontmatter with label, description, and limit", () => {
const content = `---
label: persona/soul
description: Who I am and what I value
limit: 30000
---
My persona content here.`;
const result = parseBlockFromFileContent(content, "default-label");
expect(result.label).toBe("persona/soul");
expect(result.description).toBe("Who I am and what I value");
expect(result.limit).toBe(30000);
expect(result.value).toBe("My persona content here.");
});
test("uses default label when frontmatter label is missing", () => {
const content = `---
description: Some description
---
Content here.`;
const result = parseBlockFromFileContent(content, "my-default-label");
expect(result.label).toBe("my-default-label");
expect(result.description).toBe("Some description");
});
test("generates description from label when frontmatter description is missing", () => {
const content = `---
label: test/block
---
Content here.`;
const result = parseBlockFromFileContent(content, "default");
expect(result.label).toBe("test/block");
expect(result.description).toBe("Memory block: test/block");
});
test("uses default limit when frontmatter limit is missing or invalid", () => {
const content = `---
label: test
limit: invalid
---
Content.`;
const result = parseBlockFromFileContent(content, "default");
expect(result.limit).toBe(20000);
});
test("handles content without frontmatter", () => {
const content = "Just plain content without frontmatter.";
const result = parseBlockFromFileContent(content, "fallback-label");
expect(result.label).toBe("fallback-label");
expect(result.description).toBe("Memory block: fallback-label");
expect(result.limit).toBe(20000);
expect(result.value).toBe("Just plain content without frontmatter.");
});
test("sets read_only from frontmatter", () => {
const content = `---
label: test/block
read_only: true
---
Read-only content.`;
const result = parseBlockFromFileContent(content, "default");
expect(result.read_only).toBe(true);
});
test("sets read_only for known read-only labels", () => {
const content = `---
label: memory_filesystem
---
Filesystem content.`;
const result = parseBlockFromFileContent(content, "memory_filesystem");
expect(result.read_only).toBe(true);
});
test("does not set read_only for regular blocks", () => {
const content = `---
label: persona/soul
---
Regular content.`;
const result = parseBlockFromFileContent(content, "persona/soul");
expect(result.read_only).toBeUndefined();
});
});
// parseBlockFromFileContent tests removed - YAML frontmatter no longer
// used with git-backed memory (files contain raw block content).
describe("labelFromRelativePath", () => {
test("converts simple filename to label", () => {

View File

@@ -0,0 +1,305 @@
/**
* Tests for the git pre-commit hook that validates frontmatter
* in memory .md files.
*
* Each test creates a temp git repo, installs the hook, stages
* a file, and verifies the commit succeeds or fails as expected.
*/
import { afterEach, beforeEach, describe, expect, test } from "bun:test";
import { execSync } from "node:child_process";
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { PRE_COMMIT_HOOK_SCRIPT } from "../../agent/memoryGit";
let tempDir: string;
const GIT_ENV = {
...process.env,
GIT_AUTHOR_NAME: "test",
GIT_AUTHOR_EMAIL: "test@test.com",
GIT_COMMITTER_NAME: "test",
GIT_COMMITTER_EMAIL: "test@test.com",
};
function git(args: string): string {
return execSync(`git ${args}`, {
cwd: tempDir,
encoding: "utf-8",
env: GIT_ENV,
});
}
function writeAndStage(relativePath: string, content: string): void {
const fullPath = join(tempDir, relativePath);
mkdirSync(join(fullPath, ".."), { recursive: true });
writeFileSync(fullPath, content, "utf-8");
git(`add ${relativePath}`);
}
function tryCommit(): { success: boolean; output: string } {
try {
const output = git('commit -m "test"');
return { success: true, output };
} catch (err) {
const output =
err instanceof Error
? (err as { stderr?: string }).stderr || err.message
: String(err);
return { success: false, output };
}
}
/** Valid frontmatter for convenience */
const VALID_FM = "---\ndescription: Test block\nlimit: 20000\n---\n\n";
beforeEach(() => {
tempDir = mkdtempSync(join(tmpdir(), "memgit-test-"));
git("init");
const hookPath = join(tempDir, ".git", "hooks", "pre-commit");
writeFileSync(hookPath, PRE_COMMIT_HOOK_SCRIPT, { mode: 0o755 });
writeFileSync(join(tempDir, ".gitkeep"), "");
git("add .gitkeep");
git('commit -m "init"');
});
afterEach(() => {
rmSync(tempDir, { recursive: true, force: true });
});
describe("pre-commit hook: frontmatter required", () => {
test("allows files with valid frontmatter", () => {
writeAndStage(
"memory/system/human/prefs.md",
`${VALID_FM}Block content here.\n`,
);
const result = tryCommit();
expect(result.success).toBe(true);
});
test("rejects files without frontmatter", () => {
writeAndStage(
"memory/system/human/prefs.md",
"Just plain content\nno frontmatter here\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("missing frontmatter");
});
test("rejects unclosed frontmatter", () => {
writeAndStage(
"memory/system/broken.md",
"---\ndescription: oops\nlimit: 20000\n\nContent without closing ---\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("never closed");
});
});
describe("pre-commit hook: required fields", () => {
test("rejects missing description", () => {
writeAndStage(
"memory/system/bad.md",
"---\nlimit: 20000\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("missing required field 'description'");
});
test("rejects missing limit", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription: A block\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("missing required field 'limit'");
});
test("rejects empty description", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription:\nlimit: 20000\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("must not be empty");
});
});
describe("pre-commit hook: field validation", () => {
test("rejects non-integer limit", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription: valid\nlimit: abc\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("positive integer");
});
test("rejects zero limit", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription: valid\nlimit: 0\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("positive integer");
});
test("rejects negative limit", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription: valid\nlimit: -5\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("positive integer");
});
test("rejects float limit", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription: valid\nlimit: 20.5\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("positive integer");
});
test("allows limit with trailing whitespace", () => {
writeAndStage(
"memory/system/ok.md",
"---\ndescription: test\nlimit: 20000 \n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(true);
});
test("rejects unknown frontmatter key", () => {
writeAndStage(
"memory/system/bad.md",
"---\ndescription: valid\nlimit: 20000\ntypo_key: oops\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("unknown frontmatter key");
});
});
describe("pre-commit hook: read_only protection", () => {
test("rejects modifying a read_only file", () => {
// First commit: create a read_only file (bypass hook for setup)
const hookPath = join(tempDir, ".git", "hooks", "pre-commit");
rmSync(hookPath);
writeAndStage(
"memory/system/skills.md",
"---\ndescription: Skills\nlimit: 20000\nread_only: true\n---\n\nOriginal.\n",
);
tryCommit();
writeFileSync(hookPath, PRE_COMMIT_HOOK_SCRIPT, { mode: 0o755 });
// Second commit: try to modify it
writeAndStage(
"memory/system/skills.md",
"---\ndescription: Skills\nlimit: 20000\nread_only: true\n---\n\nModified.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("read_only and cannot be modified");
});
test("rejects agent adding read_only to new file", () => {
writeAndStage(
"memory/system/new.md",
"---\ndescription: New block\nlimit: 20000\nread_only: false\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("protected field");
});
test("rejects agent changing read_only value", () => {
// First commit: create with read_only: false (from server pull)
// Bypass the hook for initial setup
const hookPath = join(tempDir, ".git", "hooks", "pre-commit");
rmSync(hookPath);
writeAndStage(
"memory/system/block.md",
"---\ndescription: A block\nlimit: 20000\nread_only: false\n---\n\nContent.\n",
);
tryCommit();
// Re-install hook
writeFileSync(hookPath, PRE_COMMIT_HOOK_SCRIPT, { mode: 0o755 });
// Now try to change read_only
writeAndStage(
"memory/system/block.md",
"---\ndescription: A block\nlimit: 20000\nread_only: true\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("protected field");
});
test("allows modifying content of non-read_only file (with read_only preserved)", () => {
// First commit: file with read_only: false (from server)
const hookPath = join(tempDir, ".git", "hooks", "pre-commit");
rmSync(hookPath);
writeAndStage(
"memory/system/block.md",
"---\ndescription: A block\nlimit: 20000\nread_only: false\n---\n\nOriginal.\n",
);
tryCommit();
writeFileSync(hookPath, PRE_COMMIT_HOOK_SCRIPT, { mode: 0o755 });
// Modify content but keep read_only the same
writeAndStage(
"memory/system/block.md",
"---\ndescription: A block\nlimit: 20000\nread_only: false\n---\n\nUpdated.\n",
);
const result = tryCommit();
expect(result.success).toBe(true);
});
test("rejects agent removing read_only field", () => {
// First commit: file with read_only (from server)
const hookPath = join(tempDir, ".git", "hooks", "pre-commit");
rmSync(hookPath);
writeAndStage(
"memory/system/block.md",
"---\ndescription: A block\nlimit: 20000\nread_only: false\n---\n\nContent.\n",
);
tryCommit();
writeFileSync(hookPath, PRE_COMMIT_HOOK_SCRIPT, { mode: 0o755 });
// Remove read_only from frontmatter
writeAndStage(
"memory/system/block.md",
"---\ndescription: A block\nlimit: 20000\n---\n\nContent.\n",
);
const result = tryCommit();
expect(result.success).toBe(false);
expect(result.output).toContain("cannot be removed");
});
});
describe("pre-commit hook: non-memory files", () => {
test("ignores non-memory files", () => {
writeAndStage("README.md", "---\nbogus: true\n---\n\nThis is fine.\n");
const result = tryCommit();
expect(result.success).toBe(true);
});
test("ignores non-md files in memory dir", () => {
writeAndStage("memory/system/.sync-state.json", '{"bad": "frontmatter"}');
const result = tryCommit();
expect(result.success).toBe(true);
});
});

View File

@@ -7,6 +7,7 @@
import { createRequire } from "node:module";
import * as path from "node:path";
import { fileURLToPath } from "node:url";
import { getServerUrl } from "../../agent/client";
import { getCurrentAgentId } from "../../agent/context";
import { settingsManager } from "../../settings-manager";
@@ -68,13 +69,16 @@ export function getShellEnv(): NodeJS.ProcessEnv {
// Context not set yet (e.g., during startup), skip
}
// Inject API key from settings if not already in env
if (!env.LETTA_API_KEY) {
// Inject API key and base URL from settings if not already in env
if (!env.LETTA_API_KEY || !env.LETTA_BASE_URL) {
try {
const settings = settingsManager.getSettings();
if (settings.env?.LETTA_API_KEY) {
if (!env.LETTA_API_KEY && settings.env?.LETTA_API_KEY) {
env.LETTA_API_KEY = settings.env.LETTA_API_KEY;
}
if (!env.LETTA_BASE_URL) {
env.LETTA_BASE_URL = getServerUrl();
}
} catch {
// Settings not initialized yet, skip
}