fix: patch default model (#670)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
Charles Packer
2026-01-24 20:10:40 -08:00
committed by GitHub
parent 07992a7746
commit fd7ca18066
3 changed files with 55 additions and 10 deletions

View File

@@ -40,6 +40,22 @@ export function getDefaultModel(): string {
return firstModel.handle;
}
/**
 * Resolve the default model handle for a given billing tier.
 * Free-tier users are routed to glm-4.7 (a free model); every other
 * tier (pro, enterprise, or unknown) falls back to the standard default.
 * @param billingTier - The user's billing tier (e.g., "free", "pro", "enterprise")
 * @returns The model handle to use as default
 */
export function getDefaultModelForTier(billingTier?: string | null): string {
  const isFreeTier = billingTier?.toLowerCase() === "free";
  if (isFreeTier) {
    // Look up the free model in the catalog; only use it if it exists.
    for (const candidate of models) {
      if (candidate.id === "glm-4.7") return candidate.handle;
    }
  }
  // Not free tier (or free model missing from catalog): standard default.
  return getDefaultModel();
}
/**
* Format available models for error messages
*/

View File

@@ -68,12 +68,12 @@ export const commands: Record<string, Command> = {
return "Opening message search...";
},
},
"/plan": {
desc: "Enter plan mode",
"/connect": {
desc: "Connect your LLM API keys (OpenAI, Anthropic, etc.)",
order: 17,
handler: () => {
// Handled specially in App.tsx
return "Entering plan mode...";
// Handled specially in App.tsx - opens ProviderSelector
return "Opening provider connection...";
},
},
"/clear": {
@@ -263,12 +263,12 @@ export const commands: Record<string, Command> = {
},
// === Session management (order 40-49) ===
"/connect": {
desc: "Connect your LLM API keys (OpenAI, Anthropic, etc.)",
"/plan": {
desc: "Enter plan mode",
order: 40,
handler: () => {
// Handled specially in App.tsx - opens ProviderSelector
return "Opening provider connection...";
// Handled specially in App.tsx
return "Entering plan mode...";
},
},
"/disconnect": {

View File

@@ -1514,8 +1514,37 @@ async function main(): Promise<void> {
return;
}
// Use selected server model (from self-hosted model picker) if available
const effectiveModel = selectedServerModel || model;
// Determine effective model:
// 1. Use selectedServerModel if user picked from self-hosted picker
// 2. Use model if --model flag was passed
// 3. Otherwise, use billing-tier-aware default (free tier gets glm-4.7)
let effectiveModel = selectedServerModel || model;
if (!effectiveModel && !selfHostedBaseUrl) {
// On Letta API without explicit model - check billing tier for appropriate default
const { getDefaultModelForTier } = await import("./agent/model");
let billingTier: string | null = null;
try {
const baseURL =
process.env.LETTA_BASE_URL ||
settings.env?.LETTA_BASE_URL ||
LETTA_CLOUD_API_URL;
const apiKey =
process.env.LETTA_API_KEY || settings.env?.LETTA_API_KEY;
const response = await fetch(`${baseURL}/v1/metadata/balance`, {
headers: apiKey ? { Authorization: `Bearer ${apiKey}` } : {},
});
if (response.ok) {
const data = (await response.json()) as {
billing_tier?: string;
};
billingTier = data.billing_tier ?? null;
}
} catch {
// Ignore - will use standard default
}
effectiveModel = getDefaultModelForTier(billingTier);
}
const updateArgs = getModelUpdateArgs(effectiveModel);
const result = await createAgent(
undefined,