chore: Create new memory blocks with --new (#58)

This commit is contained in:
Devansh Jain
2025-11-03 19:20:03 -08:00
committed by GitHub
parent c5b9f50767
commit 5c3b303eac
6 changed files with 47 additions and 31 deletions

View File

@@ -52,7 +52,7 @@ When you run `letta` in a project, it resumes where you left off with the same a
```bash
letta # Auto-resumes project agent (or creates new if first time)
letta --new # Force create new agent
letta --new # Create new agent with new memory blocks
letta --agent <id> # Use specific agent ID
```
@@ -81,7 +81,7 @@ Join our [Discord](https://discord.gg/letta) to share feedback on persistence pa
### Interactive Mode
```bash
letta # Auto-resume project agent (or create new if first time)
letta --new # Force create new agent
letta --new # Create new agent with new memory blocks
letta --agent <id> # Use specific agent ID
letta --model <model> # Specify model (e.g., claude-sonnet-4.5, gpt-4o)
letta -m <model> # Short form of --model
@@ -94,7 +94,7 @@ letta --continue # Resume global last agent (deprecated, use project-bas
```bash
letta -p "Run bun lint and correct errors" # Auto-resumes project agent
letta -p "Pick up where you left off" # Same - auto-resumes by default
letta -p "Start fresh" --new # Force new agent
letta -p "Start fresh" --new # Create new agent with new memory blocks
letta -p "Run all the tests" --allowedTools "Bash" # Control tool permissions
letta -p "Just read the code" --disallowedTools "Bash" # Control tool permissions
letta -p "Explain this code" -m gpt-4o # Use specific model

View File

@@ -70,7 +70,7 @@ export async function getResumeData(
(msg) => msg.message_type === "approval_request_message",
);
const inContextMessage =
approvalMessage || matchingMessages[matchingMessages.length - 1]!;
approvalMessage ?? matchingMessages[matchingMessages.length - 1];
messageToCheck = inContextMessage;
} else {

View File

@@ -20,6 +20,7 @@ export async function createAgent(
model?: string,
embeddingModel = "openai/text-embedding-3-small",
updateArgs?: Record<string, unknown>,
forceNewBlocks = false,
) {
// Resolve model identifier to handle
let modelHandle: string;
@@ -63,29 +64,32 @@ export async function createAgent(
// Retrieve existing blocks (both global and local) and match them with defaults
const existingBlocks = new Map<string, BlockResponse>();
// Load global blocks (persona, human)
for (const [label, blockId] of Object.entries(globalSharedBlockIds)) {
try {
const block = await client.blocks.retrieve(blockId);
existingBlocks.set(label, block);
} catch {
// Block no longer exists, will create new one
console.warn(
`Global block ${label} (${blockId}) not found, will create new one`,
);
// Only load existing blocks if we're not forcing new blocks
if (!forceNewBlocks) {
// Load global blocks (persona, human)
for (const [label, blockId] of Object.entries(globalSharedBlockIds)) {
try {
const block = await client.blocks.retrieve(blockId);
existingBlocks.set(label, block);
} catch {
// Block no longer exists, will create new one
console.warn(
`Global block ${label} (${blockId}) not found, will create new one`,
);
}
}
}
// Load local blocks (style)
for (const [label, blockId] of Object.entries(localSharedBlockIds)) {
try {
const block = await client.blocks.retrieve(blockId);
existingBlocks.set(label, block);
} catch {
// Block no longer exists, will create new one
console.warn(
`Local block ${label} (${blockId}) not found, will create new one`,
);
// Load local blocks (style)
for (const [label, blockId] of Object.entries(localSharedBlockIds)) {
try {
const block = await client.blocks.retrieve(blockId);
existingBlocks.set(label, block);
} catch {
// Block no longer exists, will create new one
console.warn(
`Local block ${label} (${blockId}) not found, will create new one`,
);
}
}
}

View File

@@ -544,7 +544,7 @@ export default function App({
const errorDetail = error.detail ? `\n${error.detail}` : "";
errorDetails = `${errorType}${errorMessage}${errorDetail}`;
}
} catch (e) {
} catch (_e) {
// If we can't fetch error details, let user know
appendError(
`${errorDetails}\n(Unable to fetch additional error details from server)`,
@@ -595,7 +595,7 @@ export default function App({
const client = await getClient();
// Send cancel request to backend
const cancelResult = await client.agents.messages.cancel(agentId);
const _cancelResult = await client.agents.messages.cancel(agentId);
// console.error("cancelResult", JSON.stringify(cancelResult, null, 2));
// WORKAROUND: Also abort the stream immediately since backend cancellation is buggy

View File

@@ -75,7 +75,13 @@ export async function handleHeadlessCommand(argv: string[], model?: string) {
// Priority 2: Check if --new flag was passed (skip all resume logic)
if (!agent && forceNew) {
const updateArgs = getModelUpdateArgs(model);
agent = await createAgent(undefined, model, undefined, updateArgs);
agent = await createAgent(
undefined,
model,
undefined,
updateArgs,
forceNew,
);
}
// Priority 3: Try to resume from project settings (.letta/settings.local.json)
@@ -582,7 +588,7 @@ export async function handleHeadlessCommand(argv: string[], model?: string) {
const errorDetail = error.detail ? `: ${error.detail}` : "";
errorMessage = `${errorType}${errorMsg}${errorDetail}`;
}
} catch (e) {
} catch (_e) {
// If we can't fetch error details, append note to error message
errorMessage = `${errorMessage}\n(Unable to fetch additional error details from server)`;
}

View File

@@ -299,9 +299,15 @@ async function main() {
// Priority 2: Check if --new flag was passed (skip all resume logic)
if (!agent && forceNew) {
// Create new agent, don't check any lastAgent fields
// Create new agent with new memory blocks
const updateArgs = getModelUpdateArgs(model);
agent = await createAgent(undefined, model, undefined, updateArgs);
agent = await createAgent(
undefined,
model,
undefined,
updateArgs,
forceNew,
);
}
// Priority 3: Try to resume from project settings (.letta/settings.local.json)