feat: add skills extraction to --from-af import (#823)

Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
cthomas
2026-02-05 12:46:42 -08:00
committed by GitHub
parent 22243c9296
commit d786ad470a
8 changed files with 761 additions and 9 deletions

159
src/agent/export.ts Normal file
View File

@@ -0,0 +1,159 @@
import { readdir, readFile } from "node:fs/promises";
import { homedir } from "node:os";
import { relative, resolve } from "node:path";

import { getAgentSkillsDir } from "./skills";
/** Shape of a single skill entry in an exported .af file. */
export interface SkillSchema {
  name: string;
  // Map of skill-relative file path -> file contents (embedded skill).
  files?: Record<string, string>;
  // "owner/repo/branch/path" reference for skills hosted in a known repo,
  // used instead of embedding files.
  source_url?: string;
}
/**
 * Package skills from .skills/ and ~/.letta/skills directories.
 * Returns skills ready for .af export.
 * Automatically uses source_url for skills found in known repos.
 *
 * @param agentId - Optional agent id; its per-agent skills dir is checked first.
 * @param skillsDir - When provided, ONLY this directory is scanned.
 * @returns Skills in priority order; the first directory containing a given
 *   skill name wins.
 */
export async function packageSkills(
  agentId?: string,
  skillsDir?: string,
): Promise<SkillSchema[]> {
  const skills: SkillSchema[] = [];
  const skillNames = new Set<string>();

  // Directories to check (in priority order)
  // If explicit skillsDir provided, only check that directory
  const dirsToCheck = skillsDir
    ? [skillsDir]
    : [
        agentId && getAgentSkillsDir(agentId),
        resolve(process.cwd(), ".skills"), // Project-local
        // Fall back to os.homedir() when $HOME is unset. A literal "~" is
        // never expanded by the fs layer, so it must not be used as a path
        // (resolve("~", ...) would point at a cwd-relative "~" directory).
        resolve(process.env.HOME || homedir(), ".letta", "skills"), // Global
      ].filter((dir): dir is string => Boolean(dir));

  for (const baseDir of dirsToCheck) {
    try {
      const entries = await readdir(baseDir, { withFileTypes: true });
      for (const entry of entries) {
        if (!entry.isDirectory()) continue;
        // Skip if already processed (project-local takes priority)
        if (skillNames.has(entry.name)) continue;

        const skillDir = resolve(baseDir, entry.name);

        // Validate SKILL.md exists — a directory without it is not a skill.
        const skillMdPath = resolve(skillDir, "SKILL.md");
        try {
          await readFile(skillMdPath, "utf-8");
        } catch {
          console.warn(
            `Skipping invalid skill ${entry.name}: missing SKILL.md`,
          );
          continue;
        }

        // Check if skill exists in known repos (prefer source_url over embedding)
        const sourceUrl = await findSkillSourceUrl(entry.name);
        const skill: SkillSchema = { name: entry.name };
        if (sourceUrl) {
          skill.source_url = sourceUrl;
        } else {
          skill.files = await readSkillFiles(skillDir);
        }
        skills.push(skill);
        skillNames.add(entry.name);
      }
    } catch (error) {
      // Directory doesn't exist - continue to next
      if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
        throw error;
      }
    }
  }

  return skills;
}
/**
 * Recursively collect every file under a skill directory.
 * Returns a map of POSIX-style paths (relative to skillDir) to file contents.
 */
async function readSkillFiles(
  skillDir: string,
): Promise<Record<string, string>> {
  const collected: Record<string, string> = {};
  // Iterative depth-first traversal using an explicit work list.
  const pending: string[] = [skillDir];
  while (pending.length > 0) {
    const current = pending.pop() as string;
    for (const dirent of await readdir(current, { withFileTypes: true })) {
      const entryPath = resolve(current, dirent.name);
      if (dirent.isDirectory()) {
        pending.push(entryPath);
      } else {
        // Normalize Windows separators so keys are portable across platforms.
        const key = relative(skillDir, entryPath).replace(/\\/g, "/");
        collected[key] = await readFile(entryPath, "utf-8");
      }
    }
  }
  return collected;
}
// Known skill repositories to check, each encoded as "owner/repo/branch/path".
// Checked in order; the first repo containing the skill name wins.
const SKILL_REPOS = [
  "letta-ai/skills/main/tools",
  "letta-ai/skills/main/letta",
  "anthropics/skills/main/skills",
] as const;

// Cache of repo path -> set of directory (skill) names, so each repo's
// listing is fetched at most once per process.
const dirCache = new Map<string, Set<string>>();
/**
 * Look up a skill by name in the known repos.
 * Listings are memoized in dirCache for the life of the process.
 *
 * @returns a "owner/repo/branch/path/<skill>" source_url, or null when the
 *   skill is not found in any known repo.
 */
async function findSkillSourceUrl(skillName: string): Promise<string | null> {
  for (const repoPath of SKILL_REPOS) {
    let dirs = dirCache.get(repoPath);
    if (dirs === undefined) {
      dirs = await fetchGitHubDirs(repoPath);
      dirCache.set(repoPath, dirs);
    }
    if (dirs.has(skillName)) {
      return `${repoPath}/${skillName}`;
    }
  }
  return null;
}
/**
 * List the directory names under a GitHub path ("owner/repo/branch/sub/dir").
 * Returns an empty set when the path is malformed or the fetch fails.
 */
async function fetchGitHubDirs(path: string): Promise<Set<string>> {
  const segments = path.split("/");
  const [owner, repo, branch] = segments;
  if (!owner || !repo || !branch) {
    return new Set();
  }
  try {
    const { fetchGitHubContents, parseDirNames } = await import(
      "./github-utils"
    );
    const listing = await fetchGitHubContents(
      owner,
      repo,
      branch,
      segments.slice(3).join("/"),
    );
    return parseDirNames(listing);
  } catch {
    // Network errors / rate limits are treated as "not found remotely".
    return new Set();
  }
}

61
src/agent/github-utils.ts Normal file
View File

@@ -0,0 +1,61 @@
/**
 * Shared GitHub API utilities for skills import/export
 */

/** One entry from the GitHub "contents" API (a file or a directory). */
export interface GitHubEntry {
  type: "file" | "dir";
  name: string;
  // Repo-relative path, e.g. "tools/my-skill/SKILL.md".
  path: string;
  // Raw-content URL; present for files, absent for directories.
  download_url?: string;
}
/**
 * Fetch GitHub contents using gh CLI (authenticated) or direct API.
 * Returns array of directory/file entries.
 *
 * @throws Error when the gh CLI is unavailable AND the direct API request
 *   fails (non-OK response).
 */
export async function fetchGitHubContents(
  owner: string,
  repo: string,
  branch: string,
  path: string,
): Promise<GitHubEntry[]> {
  const apiPath = path
    ? `repos/${owner}/${repo}/contents/${path}?ref=${branch}`
    : `repos/${owner}/${repo}/contents?ref=${branch}`;

  // Try gh CLI (authenticated, 5000 req/hr). Use execFileSync with an
  // argument vector instead of a shell string so path segments can never be
  // interpreted as shell metacharacters (command injection hardening).
  try {
    const { execFileSync } = await import("node:child_process");
    const result = execFileSync("gh", ["api", apiPath], {
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "ignore"],
    });
    return JSON.parse(result) as GitHubEntry[];
  } catch {
    // Fall back to unauthenticated API (60 req/hr)
  }

  // Direct API request. Reuse apiPath so both code paths hit the same URL
  // (the previous hand-built URL produced ".../contents/?ref=..." when path
  // was empty, diverging from the gh branch).
  const response = await fetch(`https://api.github.com/${apiPath}`, {
    headers: {
      Accept: "application/vnd.github.v3+json",
      "User-Agent": "letta-code",
    },
  });
  if (!response.ok) {
    throw new Error(
      `Failed to fetch from ${owner}/${repo}/${branch}/${path}: ${response.statusText}`,
    );
  }
  return (await response.json()) as GitHubEntry[];
}
/**
 * Collect the names of all directory entries from a GitHub listing.
 */
export function parseDirNames(entries: GitHubEntry[]): Set<string> {
  const names = new Set<string>();
  for (const entry of entries) {
    if (entry.type === "dir") {
      names.add(entry.name);
    }
  }
  return names;
}

View File

@@ -2,7 +2,8 @@
* Import an agent from an AgentFile (.af) template
*/
import { createReadStream } from "node:fs";
import { resolve } from "node:path";
import { chmod, mkdir, readFile, writeFile } from "node:fs/promises";
import { dirname, resolve } from "node:path";
import type { AgentState } from "@letta-ai/letta-client/resources/agents/agents";
import { getClient } from "./client";
import { getModelUpdateArgs } from "./model";
@@ -12,10 +13,12 @@ export interface ImportAgentOptions {
filePath: string;
modelOverride?: string;
stripMessages?: boolean;
stripSkills?: boolean;
}
/** Result of importing an agent from a .af file. */
export interface ImportAgentResult {
  agent: AgentState;
  // Names of skills extracted alongside the agent; undefined when skill
  // extraction was skipped (stripSkills).
  skills?: string[];
}
export async function importAgentFromFile(
@@ -51,5 +54,169 @@ export async function importAgentFromFile(
agent = await client.agents.retrieve(agentId);
}
return { agent };
// Extract skills from .af file if present (unless stripSkills=true)
let skills: string[] | undefined;
if (!options.stripSkills) {
const { getAgentSkillsDir } = await import("./skills");
const skillsDir = getAgentSkillsDir(agentId);
skills = await extractSkillsFromAf(resolvedPath, skillsDir);
}
return { agent, skills };
}
/**
 * Extract skills from an AgentFile and write to destination directory.
 * Always overwrites existing skills.
 * Supports both embedded files and remote source_url.
 *
 * @param afPath - Path to the .af (JSON) file.
 * @param destDir - Directory under which one subdirectory per skill is written.
 * @returns names of the skills that were written.
 */
export async function extractSkillsFromAf(
  afPath: string,
  destDir: string,
): Promise<string[]> {
  const extracted: string[] = [];

  // Read and parse .af file
  const content = await readFile(afPath, "utf-8");
  const afData = JSON.parse(content);
  if (!afData.skills || !Array.isArray(afData.skills)) {
    return [];
  }

  for (const skill of afData.skills) {
    // The .af file is untrusted input: reject names that could escape
    // destDir via path separators or ".." segments (path traversal).
    if (
      typeof skill.name !== "string" ||
      skill.name.length === 0 ||
      skill.name.includes("/") ||
      skill.name.includes("\\") ||
      skill.name.includes("..")
    ) {
      console.warn(`Skipping skill with invalid name: ${String(skill.name)}`);
      continue;
    }

    const skillDir = resolve(destDir, skill.name);
    await mkdir(skillDir, { recursive: true });

    // Case 1: Files are embedded in .af
    if (skill.files) {
      await writeSkillFiles(skillDir, skill.files);
      extracted.push(skill.name);
    }
    // Case 2: Skill should be fetched from source_url
    else if (skill.source_url) {
      await fetchSkillFromUrl(skillDir, skill.source_url);
      extracted.push(skill.name);
    } else {
      console.warn(`Skipping skill ${skill.name}: no files or source_url`);
    }
  }

  return extracted;
}
/**
 * Write embedded skill files to disk, one entry at a time.
 * Keys are skill-relative paths; values are the file contents.
 */
async function writeSkillFiles(
  skillDir: string,
  files: Record<string, string>,
): Promise<void> {
  const entries = Object.entries(files);
  for (const [relPath, data] of entries) {
    await writeSkillFile(skillDir, relPath, data);
  }
}
/**
 * Write a single skill file, creating parent directories as needed.
 * Files under scripts/ or beginning with a shebang are marked executable.
 */
async function writeSkillFile(
  skillDir: string,
  filePath: string,
  content: string,
): Promise<void> {
  const destination = resolve(skillDir, filePath);
  await mkdir(dirname(destination), { recursive: true });
  await writeFile(destination, content, "utf-8");

  const looksExecutable =
    filePath.startsWith("scripts/") || content.trimStart().startsWith("#!");
  if (!looksExecutable) {
    return;
  }
  try {
    await chmod(destination, 0o755);
  } catch {
    // chmod not supported on Windows - skip silently
  }
}
/**
 * Fetch skill from remote source_url and write to disk.
 * Supports formats:
 * - "owner/repo/branch/path" (standard - what export generates)
 * - "github.com/owner/repo/tree/branch/path" (normalized from GitHub URLs)
 */
async function fetchSkillFromUrl(
  skillDir: string,
  sourceUrl: string,
): Promise<void> {
  // Normalize GitHub URLs (github.com/... → owner/repo/branch/path)
  const githubPath = sourceUrl
    .replace(/^github\.com\//, "")
    .replace(/\/tree\//, "/");

  // Split into owner/repo/branch plus the in-repo path; all four parts are
  // required for a valid reference.
  const [owner, repo, branch, ...rest] = githubPath.split("/");
  if (rest.length === 0 || !owner || !repo || !branch) {
    throw new Error(`Invalid GitHub path: ${githubPath}`);
  }
  const path = rest.join("/");

  // Fetch the directory listing using the shared GitHub util.
  const { fetchGitHubContents } = await import("./github-utils");
  const entries = await fetchGitHubContents(owner, repo, branch, path);
  if (!Array.isArray(entries)) {
    throw new Error(`Expected directory at ${sourceUrl}, got file`);
  }

  // Download all files recursively.
  await downloadGitHubDirectory(entries, skillDir, owner, repo, branch, path);
}
/**
 * Recursively download files from a GitHub directory listing into destDir.
 *
 * @param entries - Listing of the directory currently being processed.
 * @param destDir - Root destination directory for the skill.
 * @param basePath - Repo-relative root of the skill; entry paths are made
 *   relative to it (it stays fixed across recursive calls).
 * @throws Error when a file entry lacks a download_url or a download
 *   returns a non-OK response.
 */
async function downloadGitHubDirectory(
  entries: Array<{ type: "file" | "dir"; path: string; download_url?: string }>,
  destDir: string,
  owner: string,
  repo: string,
  branch: string,
  basePath: string,
): Promise<void> {
  const { fetchGitHubContents } = await import("./github-utils");
  for (const entry of entries) {
    if (entry.type === "file") {
      if (!entry.download_url) {
        throw new Error(`Missing download_url for file: ${entry.path}`);
      }
      const fileResponse = await fetch(entry.download_url);
      // Fail loudly instead of silently writing an HTML error page (404,
      // rate-limit, ...) into the skill directory.
      if (!fileResponse.ok) {
        throw new Error(
          `Failed to download ${entry.path}: ${fileResponse.status} ${fileResponse.statusText}`,
        );
      }
      const fileContent = await fileResponse.text();
      // Strip basePath only when it is actually a prefix (the old unanchored
      // replace() could corrupt paths containing basePath mid-string).
      const relativePath = entry.path.startsWith(`${basePath}/`)
        ? entry.path.slice(basePath.length + 1)
        : entry.path;
      await writeSkillFile(destDir, relativePath, fileContent);
    } else if (entry.type === "dir") {
      // Recursively fetch the subdirectory using the shared util; basePath
      // stays the skill root so nested paths remain relative to it.
      const subEntries = await fetchGitHubContents(
        owner,
        repo,
        branch,
        entry.path,
      );
      await downloadGitHubDirectory(
        subEntries,
        destDir,
        owner,
        repo,
        branch,
        basePath,
      );
    }
  }
}