fix(cli): preserve model across new conversations (#1228)

Co-authored-by: Letta Code <noreply@letta.com>
This commit is contained in:
jnjpng
2026-03-02 13:24:59 -08:00
committed by GitHub
parent 3f189ed0c8
commit 60aca976de
2 changed files with 124 additions and 0 deletions

View File

@@ -3343,6 +3343,72 @@ export default function App({
};
}, [agentId, agentState, conversationId, loadingState]);
// Re-applies the user's per-conversation model override to a freshly created
// conversation. No-op unless an override is active and a model handle can be
// derived from the current LLM config; failures are logged, never thrown.
const maybeCarryOverActiveConversationModel = useCallback(
  async (targetConversationId: string) => {
    // Only carry the model over when the user explicitly overrode it.
    if (!hasConversationModelOverrideRef.current) {
      return;
    }
    const activeLlmConfig = llmConfigRef.current;
    const rawHandle = buildModelHandleFromLlmConfig(activeLlmConfig);
    if (!rawHandle) {
      return;
    }
    // Keep provider naming aligned with model handles used by /model.
    const [providerName, ...handleRest] = rawHandle.split("/");
    let modelHandle = rawHandle;
    if (providerName === "chatgpt_oauth" && handleRest.length > 0) {
      modelHandle = `${OPENAI_CODEX_PROVIDER_NAME}/${handleRest.join("/")}`;
    }
    // enable_reasoner is not on the declared config type — read it defensively.
    const enableReasonerValue = (
      activeLlmConfig as { enable_reasoner?: boolean | null } | null
    )?.enable_reasoner;
    const modelInfo = getModelInfoForLlmConfig(modelHandle, {
      reasoning_effort: activeLlmConfig?.reasoning_effort ?? null,
      enable_reasoner: enableReasonerValue ?? null,
    });
    // Start from the preset's update args, then fill in reasoning settings
    // from the live config only where the preset left them unset.
    const updateArgs: Record<string, unknown> = {
      ...((modelInfo?.updateArgs as Record<string, unknown> | undefined) ?? {}),
    };
    const effort = activeLlmConfig?.reasoning_effort;
    if (updateArgs.reasoning_effort === undefined && typeof effort === "string") {
      updateArgs.reasoning_effort = effort;
    }
    if (
      updateArgs.enable_reasoner === undefined &&
      typeof enableReasonerValue === "boolean"
    ) {
      updateArgs.enable_reasoner = enableReasonerValue;
    }
    try {
      const { updateConversationLLMConfig } = await import("../agent/modify");
      await updateConversationLLMConfig(
        targetConversationId,
        modelHandle,
        Object.keys(updateArgs).length > 0 ? updateArgs : undefined,
      );
    } catch (error) {
      debugWarn(
        "conversation-model",
        `Failed to carry over active model to new conversation: ${
          error instanceof Error ? error.message : String(error)
        }`,
      );
    }
  },
  [],
);
// Helper to append an error to the transcript
// Also tracks the error in telemetry so we know an error was shown.
// Pass `true` or `{ skip: true }` to suppress telemetry (e.g. hint
@@ -7709,6 +7775,8 @@ export default function App({
isolated_block_labels: [...ISOLATED_BLOCK_LABELS],
});
await maybeCarryOverActiveConversationModel(conversation.id);
// Update conversationId state
setConversationId(conversation.id);
@@ -7795,6 +7863,8 @@ export default function App({
agent_id: agentId,
isolated_block_labels: [...ISOLATED_BLOCK_LABELS],
});
await maybeCarryOverActiveConversationModel(conversation.id);
setConversationId(conversation.id);
pendingConversationSwitchRef.current = {
@@ -10089,6 +10159,7 @@ ${SYSTEM_REMINDER_CLOSE}
resetTrajectoryBases,
sessionContextReminderEnabled,
appendTaskNotificationEvents,
maybeCarryOverActiveConversationModel,
],
);
@@ -13347,6 +13418,10 @@ If using apply_patch, use this exact relative patch path: ${applyPatchRelativePa
agent_id: agentId,
isolated_block_labels: [...ISOLATED_BLOCK_LABELS],
});
await maybeCarryOverActiveConversationModel(
conversation.id,
);
setConversationId(conversation.id);
settingsManager.setLocalLastSession(
{ agentId, conversationId: conversation.id },

View File

@@ -89,6 +89,55 @@ describe("model preset refresh wiring", () => {
expect(segment).not.toContain("updateAgentLLMConfig(");
});
test("App defines helper to carry over active conversation model", () => {
  const appPath = fileURLToPath(new URL("../../cli/App.tsx", import.meta.url));
  const appSource = readFileSync(appPath, "utf-8");
  // Slice out just the helper definition so assertions can't match elsewhere
  // in the (very large) App.tsx source.
  const helperStart = appSource.indexOf(
    "const maybeCarryOverActiveConversationModel = useCallback(",
  );
  const helperEnd = appSource.indexOf(
    "// Helper to append an error to the transcript",
    helperStart,
  );
  expect(helperStart).toBeGreaterThanOrEqual(0);
  expect(helperEnd).toBeGreaterThan(helperStart);
  const helperSegment = appSource.slice(helperStart, helperEnd);
  // The helper must gate on the override flag, derive a handle, and push the
  // config to the new conversation with a logged (not thrown) failure path.
  for (const marker of [
    "hasConversationModelOverrideRef.current",
    "buildModelHandleFromLlmConfig",
    "getModelInfoForLlmConfig(",
    "updateConversationLLMConfig(",
    "Failed to carry over active model to new conversation",
  ]) {
    expect(helperSegment).toContain(marker);
  }
});
test("new conversation flows reapply active conversation model before switching", () => {
  const appPath = fileURLToPath(new URL("../../cli/App.tsx", import.meta.url));
  const appSource = readFileSync(appPath, "utf-8");
  // Every place a fresh conversation is created must await the carry-over
  // helper; three call sites are expected at minimum.
  const callMatches =
    appSource.match(
      /await maybeCarryOverActiveConversationModel\(\s*conversation\.id,?\s*\);/g,
    ) ?? [];
  expect(callMatches.length).toBeGreaterThanOrEqual(3);
  const expectedCall =
    "await maybeCarryOverActiveConversationModel(conversation.id);";
  // /new and /clear both spin up a conversation; the helper call must appear
  // within a short window after each command is recognized.
  for (const [anchorText, windowSize] of [
    ['if (msg.trim() === "/new")', 1800],
    ['if (msg.trim() === "/clear")', 2000],
  ] as const) {
    const anchorIndex = appSource.indexOf(anchorText);
    expect(anchorIndex).toBeGreaterThanOrEqual(0);
    expect(appSource.slice(anchorIndex, anchorIndex + windowSize)).toContain(
      expectedCall,
    );
  }
});
test("interactive resume flow refreshes model preset without explicit --model", () => {
const path = fileURLToPath(new URL("../../index.ts", import.meta.url));
const source = readFileSync(path, "utf-8");