feat: ChatGPT subscription connect flow (#487)

This commit is contained in:
Cameron
2026-03-05 10:16:35 -08:00
committed by GitHub
parent bb0ccd65e1
commit 9bf19ebab4
7 changed files with 360 additions and 55 deletions

View File

@@ -31,6 +31,7 @@ Your personal AI assistant that remembers everything across **Telegram, Slack, D
- Node.js 20+ - Node.js 20+
- A Letta API key from [app.letta.com](https://app.letta.com) (or a running [Letta Docker server](https://docs.letta.com/guides/docker/)) - A Letta API key from [app.letta.com](https://app.letta.com) (or a running [Letta Docker server](https://docs.letta.com/guides/docker/))
- A Telegram bot token from [@BotFather](https://t.me/BotFather) - A Telegram bot token from [@BotFather](https://t.me/BotFather)
- Optional: a ChatGPT subscription account you want to use for model credits
### Install ### Install
@@ -97,6 +98,14 @@ npm install && npm run build && npm link
lettabot onboard lettabot onboard
``` ```
Prefer to use your ChatGPT subscription instead of another API key? After onboarding (or anytime), run:
```bash
lettabot connect chatgpt
```
This opens a browser-based OAuth flow and makes your subscription's model handles available in the model picker.
### Run ### Run
```bash ```bash
@@ -167,13 +176,13 @@ Then ask your bot things like:
| Command | Description | | Command | Description |
|---------|-------------| |---------|-------------|
| `lettabot onboard` | Interactive setup wizard | | `lettabot onboard` | Interactive setup wizard |
| `lettabot connect` | Connect model providers (for example, `chatgpt`) |
| `lettabot server` | Start the bot server | | `lettabot server` | Start the bot server |
| `lettabot configure` | View and edit configuration | | `lettabot configure` | View and edit configuration |
| `lettabot skills status` | Show enabled and available skills | | `lettabot skills status` | Show enabled and available skills |
| `lettabot destroy` | Delete all local data and start fresh | | `lettabot destroy` | Delete all local data and start fresh |
| `lettabot help` | Show help | | `lettabot help` | Show help |
## Channel Setup ## Channel Setup
By default, LettaBot uses a **single agent with a single shared conversation** across all channels: By default, LettaBot uses a **single agent with a single shared conversation** across all channels:

View File

@@ -34,6 +34,16 @@ npm ci
3. Go to Settings > API Keys 3. Go to Settings > API Keys
4. Create a new API key and copy it 4. Create a new API key and copy it
### 3b. Connect your ChatGPT subscription (optional)
If you want connected provider models from your ChatGPT/ChatGPT Plus subscription, run:
```bash
lettabot connect chatgpt
```
The command opens a browser-based flow for OAuth and then makes those handles available in `lettabot model` and onboarding.
### 4. Configure LettaBot ### 4. Configure LettaBot
**Option A: Interactive Setup (Recommended)** **Option A: Interactive Setup (Recommended)**

View File

@@ -4,6 +4,7 @@
# #
# Excluded: # Excluded:
# - CLI commands (src/cli*, src/cron/cli.ts, onboard, setup) -- user-facing terminal output # - CLI commands (src/cli*, src/cron/cli.ts, onboard, setup) -- user-facing terminal output
# - src/commands/letta-connect.ts -- interactive OAuth URL/terminal guidance output
# - Test files (*.test.ts, mock-*) -- test output # - Test files (*.test.ts, mock-*) -- test output
# - banner.ts -- ASCII art display # - banner.ts -- ASCII art display
# - JSDoc examples (lines starting with ' *') # - JSDoc examples (lines starting with ' *')
@@ -18,6 +19,7 @@ hits=$(grep -rEn 'console\.(log|error|warn|info|debug|trace)[[:space:]]*\(' src/
--exclude='setup.ts' \ --exclude='setup.ts' \
--exclude='onboard.ts' \ --exclude='onboard.ts' \
--exclude='slack-wizard.ts' \ --exclude='slack-wizard.ts' \
--exclude='letta-connect.ts' \
--exclude='cli.ts' \ --exclude='cli.ts' \
--exclude-dir='cli' \ --exclude-dir='cli' \
| grep -Ev ':[0-9]+:[[:space:]]*\* ' \ | grep -Ev ':[0-9]+:[[:space:]]*\* ' \

View File

@@ -229,6 +229,7 @@ Commands:
configure View and edit configuration configure View and edit configuration
config encode Encode config file as base64 for LETTABOT_CONFIG_YAML config encode Encode config file as base64 for LETTABOT_CONFIG_YAML
config decode Decode and print LETTABOT_CONFIG_YAML env var config decode Decode and print LETTABOT_CONFIG_YAML env var
connect <provider> Connect model providers (e.g., chatgpt/codex)
model Interactive model selector model Interactive model selector
model show Show current agent model model show Show current agent model
model set <handle> Set model by handle (e.g., anthropic/claude-sonnet-4-5-20250929) model set <handle> Set model by handle (e.g., anthropic/claude-sonnet-4-5-20250929)
@@ -262,6 +263,7 @@ Examples:
lettabot todo list --actionable lettabot todo list --actionable
lettabot pairing list telegram # Show pending Telegram pairings lettabot pairing list telegram # Show pending Telegram pairings
lettabot pairing approve telegram ABCD1234 # Approve a pairing code lettabot pairing approve telegram ABCD1234 # Approve a pairing code
lettabot connect chatgpt # Connect ChatGPT subscription (via OAuth)
Environment: Environment:
LETTABOT_CONFIG_YAML Inline YAML or base64-encoded config (for cloud deploys) LETTABOT_CONFIG_YAML Inline YAML or base64-encoded config (for cloud deploys)
@@ -371,6 +373,18 @@ async function main() {
await modelCommand(subCommand, args[2]); await modelCommand(subCommand, args[2]);
break; break;
} }
case 'connect': {
const { runLettaConnect } = await import('./commands/letta-connect.js');
const requestedProvider = subCommand || 'chatgpt';
const providers = requestedProvider === 'chatgpt' ? ['chatgpt', 'codex'] : [requestedProvider];
const connected = await runLettaConnect(providers);
if (!connected) {
console.error(`Failed to run letta connect for provider: ${requestedProvider}`);
process.exit(1);
}
break;
}
case 'channels': case 'channels':
case 'channel': { case 'channel': {
@@ -617,7 +631,7 @@ async function main() {
case undefined: case undefined:
console.log('Usage: lettabot <command>\n'); console.log('Usage: lettabot <command>\n');
console.log('Commands: onboard, server, configure, model, channels, skills, reset-conversation, destroy, help\n'); console.log('Commands: onboard, server, configure, connect, model, channels, skills, reset-conversation, destroy, help\n');
console.log('Run "lettabot help" for more information.'); console.log('Run "lettabot help" for more information.');
break; break;

View File

@@ -0,0 +1,176 @@
/**
* Use Letta Code's provider connection flow from Lettabot.
*/
import { existsSync } from 'node:fs';
import { spawn, spawnSync } from 'node:child_process';
import { resolve } from 'node:path';
import { createRequire } from 'node:module';
/** A runnable executable plus its leading arguments for invoking Letta Code. */
interface CommandCandidate {
  command: string;
  args: string[];
}

// CommonJS-style resolver anchored to THIS module's location, so the bundled
// @letta-ai/letta-code dependency can be found from lettabot's install path
// rather than only from the current working directory.
const require = createRequire(import.meta.url);

/** Lines that add noise without helping the user. */
const SUPPRESSED_PATTERNS = [
  /^Checking account/i,
  /^Starting OAuth/i,
  /^Starting local OAuth/i,
  /^A browser window will open/i,
  /^Opening browser/i,
  /^Waiting for authorization/i,
  /^Please complete the sign-in/i,
  /^The page will redirect/i,
  /^Authorization received/i,
  /^Exchanging code/i,
  /^Extracting account/i,
  /^Creating ChatGPT/i,
];

/** Lines we rewrite to something shorter. An empty replacement suppresses the line. */
const REWRITE_RULES: Array<{ pattern: RegExp; replacement: string }> = [
  { pattern: /^If the browser doesn't open automatically,? visit:$/i, replacement: 'If the browser doesn\'t open, visit:' },
  { pattern: /^If needed,? visit:$/i, replacement: '' }, // suppress the duplicate URL header
];
/**
 * Reduce one line of Letta Code OAuth output to what the user needs to see.
 *
 * @param line - Raw line of child-process stdout.
 * @param state - Shared flag tracking whether a URL was already printed;
 *                subsequent URLs are swallowed as duplicates.
 * @returns The text to print, or null when the line should be dropped.
 */
function filterOAuthLine(line: string, state: { urlPrinted: boolean }): string | null {
  const text = line.trim();
  if (text.length === 0) return null;

  // Drop lines matching any known-noise pattern.
  for (const pattern of SUPPRESSED_PATTERNS) {
    if (pattern.test(text)) return null;
  }

  // Apply the first matching rewrite rule; an empty replacement suppresses.
  const rule = REWRITE_RULES.find(r => r.pattern.test(text));
  if (rule) {
    return rule.replacement || null;
  }

  // Print the first URL only; skip duplicates.
  if (text.startsWith('http://') || text.startsWith('https://')) {
    if (state.urlPrinted) return null;
    state.urlPrinted = true;
    return ` ${text}`;
  }

  // Pass through everything else (e.g. success messages).
  return text;
}
/**
 * Spawn one Letta Code candidate command for a single provider alias and
 * stream its filtered stdout to the user.
 *
 * @param candidate - Executable plus leading args (e.g. node + script path).
 * @param providerAlias - Provider name appended as the final argument.
 * @param env - Environment passed to the child process.
 * @returns true when the child exits with code 0; false on spawn failure
 *          or a non-zero exit.
 */
async function runLettaCodeCommand(candidate: CommandCandidate, providerAlias: string, env: NodeJS.ProcessEnv): Promise<boolean> {
  // `settle` (not `resolve`) to avoid shadowing the node:path import.
  return new Promise((settle) => {
    const child = spawn(candidate.command, [...candidate.args, providerAlias], {
      stdio: ['inherit', 'pipe', 'pipe'],
      cwd: process.cwd(),
      env,
    });

    const filterState = { urlPrinted: false };
    let headerPrinted = false;
    // A logical line can arrive split across multiple 'data' chunks; buffer
    // the unterminated tail so filters only ever see complete lines.
    let pending = '';

    const emit = (raw: string): void => {
      const line = filterOAuthLine(raw, filterState);
      if (line === null) return;
      if (!headerPrinted) {
        console.log('Connecting ChatGPT subscription...\n');
        headerPrinted = true;
      }
      console.log(line);
    };

    child.stdout?.on('data', (chunk: Buffer) => {
      pending += chunk.toString();
      const lines = pending.split('\n');
      pending = lines.pop() ?? ''; // keep the partial last line for the next chunk
      for (const raw of lines) emit(raw);
    });

    // Suppress stderr entirely (hides "Unknown command" from old versions).
    child.stderr?.resume();

    child.on('error', () => settle(false));
    child.on('close', (code) => {
      if (pending) emit(pending); // flush a final line that lacked a trailing newline
      settle(code === 0);
    });
  });
}
/**
 * Discover ways to invoke the Letta Code CLI, in preference order:
 * 1. the bundled dependency resolved from lettabot's own install path,
 * 2. the package entrypoint under cwd's node_modules,
 * 3. the local node_modules/.bin shim,
 * 4. npx against the npm registry (always present as the final fallback).
 * Duplicate command+args combinations are collapsed.
 */
function getCandidateCommands(): CommandCandidate[] {
  const candidates: CommandCandidate[] = [];
  const known = new Set<string>();

  const push = (candidate: CommandCandidate): void => {
    const signature = `${candidate.command} ${candidate.args.join(' ')}`;
    if (known.has(signature)) return;
    known.add(signature);
    candidates.push(candidate);
  };

  // Resolve the bundled dependency from lettabot's install path, not only cwd.
  try {
    const bundledScript = require.resolve('@letta-ai/letta-code/letta.js');
    if (existsSync(bundledScript)) {
      push({ command: process.execPath, args: [bundledScript, 'connect'] });
    }
  } catch {
    // Fall through to other discovery paths.
  }

  const isWindows = process.platform === 'win32';

  // Direct package entrypoint when available.
  const cwdScript = resolve(process.cwd(), 'node_modules', '@letta-ai', 'letta-code', 'letta.js');
  if (existsSync(cwdScript)) {
    push({ command: process.execPath, args: [cwdScript, 'connect'] });
  }

  // npm-style binary from local node_modules/.bin
  const binName = isWindows ? 'letta.cmd' : 'letta';
  const binPath = resolve(process.cwd(), 'node_modules', '.bin', binName);
  if (existsSync(binPath)) {
    push({ command: binPath, args: ['connect'] });
  }

  // Fallback to npx from npm registry.
  push({
    command: isWindows ? 'npx.cmd' : 'npx',
    args: ['-y', '@letta-ai/letta-code@latest', 'connect'],
  });

  return candidates;
}
/**
 * Run Letta Code's `connect` flow, trying each provider alias against every
 * discovered command until one succeeds.
 *
 * @param providers - Provider aliases to attempt, in order (duplicates skipped).
 * @param env - Extra environment variables layered over process.env.
 * @returns true once any provider/command combination exits successfully;
 *          false when every attempt fails.
 */
export async function runLettaConnect(providers: string[], env: NodeJS.ProcessEnv = process.env): Promise<boolean> {
  const candidates = getCandidateCommands();
  const commandEnv: NodeJS.ProcessEnv = { ...process.env, ...env };
  const tried = new Set<string>();

  for (const alias of providers) {
    if (tried.has(alias)) continue;
    tried.add(alias);

    for (const candidate of candidates) {
      if (await runLettaCodeCommand(candidate, alias, commandEnv)) {
        return true;
      }
    }
  }
  return false;
}
/**
 * Connect a ChatGPT subscription via Letta Code.
 * Newer Letta Code versions expose the provider as `chatgpt`; older versions
 * use `codex`, so both aliases are attempted in order.
 */
export async function runChatgptConnect(env: NodeJS.ProcessEnv = process.env): Promise<boolean> {
  const aliasesNewestFirst = ['chatgpt', 'codex'];
  return runLettaConnect(aliasesNewestFirst, env);
}

View File

@@ -7,9 +7,10 @@ import { resolve } from 'node:path';
import { spawnSync } from 'node:child_process'; import { spawnSync } from 'node:child_process';
import * as p from '@clack/prompts'; import * as p from '@clack/prompts';
import { saveConfig, syncProviders, isApiServerMode } from './config/index.js'; import { saveConfig, syncProviders, isApiServerMode } from './config/index.js';
import type { AgentConfig, LettaBotConfig, ProviderConfig } from './config/types.js'; import type { AgentConfig, LettaBotConfig } from './config/types.js';
import { isLettaApiUrl } from './utils/server.js'; import { isLettaApiUrl } from './utils/server.js';
import { parseCsvList, parseOptionalInt } from './utils/parse.js'; import { parseCsvList, parseOptionalInt } from './utils/parse.js';
import { runChatgptConnect } from './commands/letta-connect.js';
import { CHANNELS, getChannelHint, isSignalCliInstalled, setupTelegram, setupSlack, setupDiscord, setupWhatsApp, setupSignal } from './channels/setup.js'; import { CHANNELS, getChannelHint, isSignalCliInstalled, setupTelegram, setupSlack, setupDiscord, setupWhatsApp, setupSignal } from './channels/setup.js';
// ============================================================================ // ============================================================================
@@ -221,8 +222,9 @@ interface OnboardConfig {
// Model (only for new agents) // Model (only for new agents)
model?: string; model?: string;
// BYOK Providers (for free tier) // BYOK/connected providers
providers?: Array<{ id: string; name: string; apiKey: string }>; providers?: Array<{ id: string; name: string; apiKey: string }>;
chatgptConnected?: boolean;
// Channels (with access control) // Channels (with access control)
telegram: { telegram: {
@@ -552,8 +554,17 @@ async function stepAgent(config: OnboardConfig, env: Record<string, string>): Pr
} }
} }
type ByokProvider = {
id: string;
name: string;
displayName: string;
providerType: string;
isOAuth?: boolean;
};
// BYOK Provider definitions (same as letta-code) // BYOK Provider definitions (same as letta-code)
const BYOK_PROVIDERS = [ const BYOK_PROVIDERS: ByokProvider[] = [
{ id: 'codex', name: 'chatgpt-plus-pro', displayName: 'ChatGPT / Codex', providerType: 'chatgpt_oauth', isOAuth: true },
{ id: 'anthropic', name: 'lc-anthropic', displayName: 'Anthropic (Claude)', providerType: 'anthropic' }, { id: 'anthropic', name: 'lc-anthropic', displayName: 'Anthropic (Claude)', providerType: 'anthropic' },
{ id: 'openai', name: 'lc-openai', displayName: 'OpenAI', providerType: 'openai' }, { id: 'openai', name: 'lc-openai', displayName: 'OpenAI', providerType: 'openai' },
{ id: 'gemini', name: 'lc-gemini', displayName: 'Google Gemini', providerType: 'google_ai' }, { id: 'gemini', name: 'lc-gemini', displayName: 'Google Gemini', providerType: 'google_ai' },
@@ -563,34 +574,55 @@ const BYOK_PROVIDERS = [
]; ];
async function stepProviders(config: OnboardConfig, env: Record<string, string>): Promise<void> { async function stepProviders(config: OnboardConfig, env: Record<string, string>): Promise<void> {
// Only for free tier users on Letta API (not Docker/custom servers, not paid)
if (isDockerAuthMethod(config.authMethod)) return; if (isDockerAuthMethod(config.authMethod)) return;
if (config.billingTier !== 'free') return; const isFreeTier = config.billingTier === 'free';
const providerDefs = BYOK_PROVIDERS.filter(provider => isFreeTier || provider.id === 'codex');
if (providerDefs.length === 0) return;
const selectedProviders = await p.multiselect({ // Paid users only see the ChatGPT OAuth option -- use a confirm instead of multiselect.
message: 'Add LLM provider keys (optional - for BYOK models)', const oauthOnly = providerDefs.length === 1 && providerDefs[0].isOAuth;
options: BYOK_PROVIDERS.map(provider => ({
value: provider.id,
label: provider.displayName,
hint: `Connect your ${provider.displayName} API key`,
})),
required: false,
});
if (p.isCancel(selectedProviders)) { p.cancel('Setup cancelled'); process.exit(0); } let selectedProviders: string[];
if (oauthOnly) {
const connect = await p.confirm({
message: 'Connect your ChatGPT subscription? (via OAuth)',
initialValue: false,
});
if (p.isCancel(connect)) { p.cancel('Setup cancelled'); process.exit(0); }
selectedProviders = connect ? [providerDefs[0].id] : [];
} else {
const result = await p.multiselect({
message: 'Add connected providers (optional)',
options: providerDefs.map(provider => ({
value: provider.id,
label: provider.displayName,
hint: provider.isOAuth ? 'Connect your ChatGPT subscription via OAuth' : `Connect your ${provider.displayName} API key`,
})),
required: false,
});
if (p.isCancel(result)) { p.cancel('Setup cancelled'); process.exit(0); }
selectedProviders = (result as string[]) || [];
}
// If no providers selected, skip // If no providers selected, skip
if (!selectedProviders || selectedProviders.length === 0) { if (selectedProviders.length === 0) {
return; return;
} }
config.providers = []; const providersById = new Map((config.providers ?? []).map(provider => [provider.id, provider]));
const apiKey = config.apiKey || env.LETTA_API_KEY || process.env.LETTA_API_KEY; const apiKey = config.apiKey || env.LETTA_API_KEY || process.env.LETTA_API_KEY;
// Collect API keys for each selected provider // Collect API keys for each selected provider
for (const providerId of selectedProviders as string[]) { for (const providerId of selectedProviders) {
const provider = BYOK_PROVIDERS.find(p => p.id === providerId); const provider = BYOK_PROVIDERS.find(p => p.id === providerId);
if (!provider) continue; if (!provider) continue;
if (provider.isOAuth) {
const connected = await runChatgptConnect({ LETTA_BASE_URL: config.baseUrl || 'https://api.letta.com' });
if (connected) {
config.chatgptConnected = true;
}
continue;
}
const providerKey = await p.text({ const providerKey = await p.text({
message: `${provider.displayName} API Key`, message: `${provider.displayName} API Key`,
@@ -650,7 +682,7 @@ async function stepProviders(config: OnboardConfig, env: Record<string, string>)
if (response.ok) { if (response.ok) {
spinner.stop(`Connected ${provider.displayName}`); spinner.stop(`Connected ${provider.displayName}`);
config.providers.push({ id: provider.id, name: provider.name, apiKey: providerKey }); providersById.set(provider.id, { id: provider.id, name: provider.name, apiKey: providerKey });
// If OpenAI was just connected, offer to enable voice transcription // If OpenAI was just connected, offer to enable voice transcription
if (provider.id === 'openai') { if (provider.id === 'openai') {
@@ -673,6 +705,13 @@ async function stepProviders(config: OnboardConfig, env: Record<string, string>)
} }
} }
} }
const mergedProviders = Array.from(providersById.values());
if (mergedProviders.length > 0) {
config.providers = mergedProviders;
} else {
delete config.providers;
}
} }
async function stepModel(config: OnboardConfig, env: Record<string, string>): Promise<void> { async function stepModel(config: OnboardConfig, env: Record<string, string>): Promise<void> {
@@ -1198,6 +1237,19 @@ function showSummary(config: OnboardConfig): void {
if (config.model) { if (config.model) {
lines.push(`Model: ${config.model}`); lines.push(`Model: ${config.model}`);
} }
// Providers
const providerNames: string[] = [];
if (config.chatgptConnected) providerNames.push('ChatGPT subscription');
if (config.providers?.length) {
for (const prov of config.providers) {
const def = BYOK_PROVIDERS.find(b => b.id === prov.id);
if (def && !def.isOAuth) providerNames.push(def.displayName);
}
}
if (providerNames.length > 0) {
lines.push(`Providers: ${providerNames.join(', ')}`);
}
// Channels // Channels
const channels: string[] = []; const channels: string[] = [];

View File

@@ -105,12 +105,25 @@ async function fetchByokModels(apiKey?: string): Promise<ByokModel[]> {
} }
} }
function addModelOption(
options: Array<{ value: string; label: string; hint: string }>,
seen: Set<string>,
option: { value: string; label: string; hint: string },
): void {
if (seen.has(option.value)) {
return;
}
options.push(option);
seen.add(option.value);
}
/** /**
* Build model selection options based on billing tier * Build model selection options based on billing tier
* Returns array ready for @clack/prompts select() * Returns array ready for @clack/prompts select()
* *
* For free users: Show free models first, then BYOK models from API * For free users: Show free static models first.
* For paid users: Show featured models first, then all models * For paid users: Show featured models first, then full static list.
* For all users: show connected provider models (OAuth/API key providers).
* For Docker/custom servers: fetch models from server * For Docker/custom servers: fetch models from server
*/ */
export async function buildModelOptions(options?: { export async function buildModelOptions(options?: {
@@ -128,51 +141,80 @@ export async function buildModelOptions(options?: {
} }
const result: Array<{ value: string; label: string; hint: string }> = []; const result: Array<{ value: string; label: string; hint: string }> = [];
const seenHandles = new Set<string>();
if (isFreeTier) { if (isFreeTier) {
// Free tier: Show free models first // Free tier: Show free models first
const freeModels = models.filter(m => m.free); const freeModels = models.filter(m => m.free);
result.push(...freeModels.map(m => ({ freeModels.forEach(model => {
value: m.handle, addModelOption(result, seenHandles, {
label: m.label, value: model.handle,
hint: `🆓 Free - ${m.description}`, label: model.label,
}))); hint: `🆓 Free - ${model.description}`,
// Fetch BYOK models from API
const byokModels = await fetchByokModels(options?.apiKey);
if (byokModels.length > 0) {
result.push({
value: '__byok_header__',
label: '── Your Connected Providers ──',
hint: 'Models from your API keys',
}); });
});
result.push(...byokModels.map(m => ({
value: m.handle,
label: m.display_name || m.name,
hint: `🔑 ${m.provider_name}`,
})));
}
} else { } else {
// Paid tier: Show featured models first // Paid tier: Show featured models first
const featured = models.filter(m => m.isFeatured); const featured = models.filter(m => m.isFeatured);
const nonFeatured = models.filter(m => !m.isFeatured); const nonFeatured = models.filter(m => !m.isFeatured);
result.push(...featured.map(m => ({ featured.forEach(model => {
value: m.handle, addModelOption(result, seenHandles, {
label: m.label, value: model.handle,
hint: m.free ? `🆓 Free - ${m.description}` : `${m.description}`, label: model.label,
}))); hint: model.free ? `🆓 Free - ${model.description}` : `${model.description}`,
});
});
result.push(...nonFeatured.map(m => ({ nonFeatured.forEach(model => {
value: m.handle, addModelOption(result, seenHandles, {
label: m.label, value: model.handle,
hint: m.description, label: model.label,
}))); hint: model.description,
});
});
}
// Include connected provider models for both free and paid users.
const byokModels = await fetchByokModels(options?.apiKey);
if (byokModels.length > 0) {
// ChatGPT subscription models get their own section at the top.
const chatgptModels = byokModels.filter(m => m.provider_type === 'chatgpt_oauth');
const otherModels = byokModels.filter(m => m.provider_type !== 'chatgpt_oauth');
if (chatgptModels.length > 0) {
addModelOption(result, seenHandles, {
value: '__chatgpt_header__',
label: '── ChatGPT Subscription ──',
hint: 'Included with your ChatGPT plan',
});
chatgptModels.forEach(model => {
addModelOption(result, seenHandles, {
value: model.handle,
label: model.display_name || model.name,
hint: 'ChatGPT',
});
});
}
if (otherModels.length > 0) {
addModelOption(result, seenHandles, {
value: '__byok_header__',
label: '── Your API Keys ──',
hint: 'Models from your API keys',
});
otherModels.forEach(model => {
addModelOption(result, seenHandles, {
value: model.handle,
label: model.display_name || model.name,
hint: `🔑 ${model.provider_name}`,
});
});
}
} }
// Add custom option // Add custom option
result.push({ addModelOption(result, seenHandles, {
value: '__custom__', value: '__custom__',
label: 'Other (specify handle)', label: 'Other (specify handle)',
hint: 'e.g. anthropic/claude-sonnet-4-5-20250929' hint: 'e.g. anthropic/claude-sonnet-4-5-20250929'
@@ -226,7 +268,7 @@ export async function handleModelSelection(
if (p.isCancel(selection)) return null; if (p.isCancel(selection)) return null;
// Skip header selections // Skip header selections
if (selection === '__byok_header__') return null; if (selection === '__byok_header__' || selection === '__chatgpt_header__') return null;
// Handle custom model input // Handle custom model input
if (selection === '__custom__') { if (selection === '__custom__') {