feat: add skills extraction to --from-af import (#823)
Co-authored-by: Letta <noreply@letta.com>
This commit is contained in:
159
src/agent/export.ts
Normal file
159
src/agent/export.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import { readdir, readFile } from "node:fs/promises";
|
||||
import { relative, resolve } from "node:path";
|
||||
import { getAgentSkillsDir } from "./skills";
|
||||
|
||||
/** Shape of one skill entry in an AgentFile (.af) export. */
export interface SkillSchema {
  // Skill directory name (e.g. "slack"); doubles as the .af identifier.
  name: string;
  // Embedded file contents keyed by path relative to the skill directory.
  // Only populated when the skill is not available from a known repo.
  files?: Record<string, string>;
  // "owner/repo/branch/path" location in a known repo; preferred over
  // embedding files when the skill can be fetched remotely.
  source_url?: string;
}
|
||||
|
||||
/**
|
||||
* Package skills from .skills/ and ~/.letta/skills directories
|
||||
* Returns skills ready for .af export
|
||||
* Automatically uses source_url for skills found in known repos
|
||||
*/
|
||||
export async function packageSkills(
|
||||
agentId?: string,
|
||||
skillsDir?: string,
|
||||
): Promise<SkillSchema[]> {
|
||||
const skills: SkillSchema[] = [];
|
||||
const skillNames = new Set<string>();
|
||||
|
||||
// Directories to check (in priority order)
|
||||
// If explicit skillsDir provided, only check that directory
|
||||
const dirsToCheck = skillsDir
|
||||
? [skillsDir]
|
||||
: [
|
||||
agentId && getAgentSkillsDir(agentId),
|
||||
resolve(process.cwd(), ".skills"), // Project-local
|
||||
resolve(process.env.HOME || "~", ".letta", "skills"), // Global
|
||||
].filter((dir): dir is string => Boolean(dir));
|
||||
|
||||
for (const baseDir of dirsToCheck) {
|
||||
try {
|
||||
const entries = await readdir(baseDir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
|
||||
// Skip if already processed (project-local takes priority)
|
||||
if (skillNames.has(entry.name)) continue;
|
||||
|
||||
const skillDir = resolve(baseDir, entry.name);
|
||||
|
||||
// Validate SKILL.md exists
|
||||
const skillMdPath = resolve(skillDir, "SKILL.md");
|
||||
try {
|
||||
await readFile(skillMdPath, "utf-8");
|
||||
} catch {
|
||||
console.warn(
|
||||
`Skipping invalid skill ${entry.name}: missing SKILL.md`,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if skill exists in known repos (prefer source_url over embedding)
|
||||
const sourceUrl = await findSkillSourceUrl(entry.name);
|
||||
|
||||
const skill: SkillSchema = { name: entry.name };
|
||||
|
||||
if (sourceUrl) {
|
||||
skill.source_url = sourceUrl;
|
||||
} else {
|
||||
skill.files = await readSkillFiles(skillDir);
|
||||
}
|
||||
|
||||
skills.push(skill);
|
||||
skillNames.add(entry.name);
|
||||
}
|
||||
} catch (error) {
|
||||
// Directory doesn't exist - continue to next
|
||||
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return skills;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively read all files from a skill directory
|
||||
* Returns map of relative paths to file contents
|
||||
*/
|
||||
async function readSkillFiles(
|
||||
skillDir: string,
|
||||
): Promise<Record<string, string>> {
|
||||
const files: Record<string, string> = {};
|
||||
|
||||
async function walk(dir: string): Promise<void> {
|
||||
const entries = await readdir(dir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = resolve(dir, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await walk(fullPath);
|
||||
} else {
|
||||
const content = await readFile(fullPath, "utf-8");
|
||||
const relativePath = relative(skillDir, fullPath).replace(/\\/g, "/");
|
||||
files[relativePath] = content;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await walk(skillDir);
|
||||
return files;
|
||||
}
|
||||
|
||||
// Known skill repositories to check, each as "owner/repo/branch/path".
const SKILL_REPOS = [
  "letta-ai/skills/main/tools",
  "letta-ai/skills/main/letta",
  "anthropics/skills/main/skills",
] as const;

// Cache for skill directory listings (process-lifetime; never invalidated,
// so a repo added mid-run won't be seen until restart).
const dirCache = new Map<string, Set<string>>();
|
||||
|
||||
/**
|
||||
* Check if skill exists in known repos
|
||||
* Returns source_url if found, null otherwise
|
||||
*/
|
||||
async function findSkillSourceUrl(skillName: string): Promise<string | null> {
|
||||
for (const repoPath of SKILL_REPOS) {
|
||||
if (!dirCache.has(repoPath)) {
|
||||
dirCache.set(repoPath, await fetchGitHubDirs(repoPath));
|
||||
}
|
||||
|
||||
if (dirCache.get(repoPath)?.has(skillName)) {
|
||||
return `${repoPath}/${skillName}`;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch directory names from GitHub path
|
||||
*/
|
||||
async function fetchGitHubDirs(path: string): Promise<Set<string>> {
|
||||
const [owner, repo, branch, ...pathParts] = path.split("/");
|
||||
if (!owner || !repo || !branch) return new Set();
|
||||
|
||||
try {
|
||||
const { fetchGitHubContents, parseDirNames } = await import(
|
||||
"./github-utils"
|
||||
);
|
||||
const entries = await fetchGitHubContents(
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
pathParts.join("/"),
|
||||
);
|
||||
return parseDirNames(entries);
|
||||
} catch {
|
||||
return new Set();
|
||||
}
|
||||
}
|
||||
61
src/agent/github-utils.ts
Normal file
61
src/agent/github-utils.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
 * Shared GitHub API utilities for skills import/export
 */

/** One entry from the GitHub "contents" API listing. */
export interface GitHubEntry {
  type: "file" | "dir";
  name: string;
  // Path relative to the repository root.
  path: string;
  // Raw-download URL; the GitHub API only provides it for files.
  download_url?: string;
}
|
||||
|
||||
/**
|
||||
* Fetch GitHub contents using gh CLI (authenticated) or direct API
|
||||
* Returns array of directory/file entries
|
||||
*/
|
||||
export async function fetchGitHubContents(
|
||||
owner: string,
|
||||
repo: string,
|
||||
branch: string,
|
||||
path: string,
|
||||
): Promise<GitHubEntry[]> {
|
||||
const apiPath = path
|
||||
? `repos/${owner}/${repo}/contents/${path}?ref=${branch}`
|
||||
: `repos/${owner}/${repo}/contents?ref=${branch}`;
|
||||
|
||||
// Try gh CLI (authenticated, 5000 req/hr)
|
||||
try {
|
||||
const { execSync } = await import("node:child_process");
|
||||
const result = execSync(`gh api ${apiPath}`, {
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "ignore"],
|
||||
});
|
||||
return JSON.parse(result) as GitHubEntry[];
|
||||
} catch {
|
||||
// Fall back to unauthenticated API (60 req/hr)
|
||||
}
|
||||
|
||||
// Try direct API
|
||||
const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${branch}`;
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
Accept: "application/vnd.github.v3+json",
|
||||
"User-Agent": "letta-code",
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(
|
||||
`Failed to fetch from ${owner}/${repo}/${branch}/${path}: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
return (await response.json()) as GitHubEntry[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract directory names from GitHub entries
|
||||
*/
|
||||
export function parseDirNames(entries: GitHubEntry[]): Set<string> {
|
||||
return new Set(entries.filter((e) => e.type === "dir").map((e) => e.name));
|
||||
}
|
||||
@@ -2,7 +2,8 @@
|
||||
* Import an agent from an AgentFile (.af) template
|
||||
*/
|
||||
import { createReadStream } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
import { chmod, mkdir, readFile, writeFile } from "node:fs/promises";
|
||||
import { dirname, resolve } from "node:path";
|
||||
import type { AgentState } from "@letta-ai/letta-client/resources/agents/agents";
|
||||
import { getClient } from "./client";
|
||||
import { getModelUpdateArgs } from "./model";
|
||||
@@ -12,10 +13,12 @@ export interface ImportAgentOptions {
|
||||
filePath: string;
|
||||
modelOverride?: string;
|
||||
stripMessages?: boolean;
|
||||
stripSkills?: boolean;
|
||||
}
|
||||
|
||||
/** Result of importing an agent from an AgentFile (.af). */
export interface ImportAgentResult {
  agent: AgentState;
  // Names of skills extracted to disk; absent when stripSkills was set.
  skills?: string[];
}
|
||||
|
||||
export async function importAgentFromFile(
|
||||
@@ -51,5 +54,169 @@ export async function importAgentFromFile(
|
||||
agent = await client.agents.retrieve(agentId);
|
||||
}
|
||||
|
||||
return { agent };
|
||||
// Extract skills from .af file if present (unless stripSkills=true)
|
||||
let skills: string[] | undefined;
|
||||
|
||||
if (!options.stripSkills) {
|
||||
const { getAgentSkillsDir } = await import("./skills");
|
||||
const skillsDir = getAgentSkillsDir(agentId);
|
||||
skills = await extractSkillsFromAf(resolvedPath, skillsDir);
|
||||
}
|
||||
|
||||
return { agent, skills };
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract skills from an AgentFile and write to destination directory
|
||||
* Always overwrites existing skills
|
||||
* Supports both embedded files and remote source_url
|
||||
*/
|
||||
export async function extractSkillsFromAf(
|
||||
afPath: string,
|
||||
destDir: string,
|
||||
): Promise<string[]> {
|
||||
const extracted: string[] = [];
|
||||
|
||||
// Read and parse .af file
|
||||
const content = await readFile(afPath, "utf-8");
|
||||
const afData = JSON.parse(content);
|
||||
|
||||
if (!afData.skills || !Array.isArray(afData.skills)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
for (const skill of afData.skills) {
|
||||
const skillDir = resolve(destDir, skill.name);
|
||||
await mkdir(skillDir, { recursive: true });
|
||||
|
||||
// Case 1: Files are embedded in .af
|
||||
if (skill.files) {
|
||||
await writeSkillFiles(skillDir, skill.files);
|
||||
extracted.push(skill.name);
|
||||
}
|
||||
// Case 2: Skill should be fetched from source_url
|
||||
else if (skill.source_url) {
|
||||
await fetchSkillFromUrl(skillDir, skill.source_url);
|
||||
extracted.push(skill.name);
|
||||
} else {
|
||||
console.warn(`Skipping skill ${skill.name}: no files or source_url`);
|
||||
}
|
||||
}
|
||||
|
||||
return extracted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write skill files to disk from embedded content
|
||||
*/
|
||||
async function writeSkillFiles(
|
||||
skillDir: string,
|
||||
files: Record<string, string>,
|
||||
): Promise<void> {
|
||||
for (const [filePath, fileContent] of Object.entries(files)) {
|
||||
await writeSkillFile(skillDir, filePath, fileContent);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a single skill file with appropriate permissions
|
||||
*/
|
||||
async function writeSkillFile(
|
||||
skillDir: string,
|
||||
filePath: string,
|
||||
content: string,
|
||||
): Promise<void> {
|
||||
const fullPath = resolve(skillDir, filePath);
|
||||
await mkdir(dirname(fullPath), { recursive: true });
|
||||
await writeFile(fullPath, content, "utf-8");
|
||||
|
||||
const isScript =
|
||||
filePath.startsWith("scripts/") || content.trimStart().startsWith("#!");
|
||||
if (isScript) {
|
||||
try {
|
||||
await chmod(fullPath, 0o755);
|
||||
} catch {
|
||||
// chmod not supported on Windows - skip silently
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch skill from remote source_url and write to disk
|
||||
* Supports formats:
|
||||
* - "owner/repo/branch/path" (standard - what export generates)
|
||||
* - "github.com/owner/repo/tree/branch/path" (normalized from GitHub URLs)
|
||||
*/
|
||||
async function fetchSkillFromUrl(
|
||||
skillDir: string,
|
||||
sourceUrl: string,
|
||||
): Promise<void> {
|
||||
// Normalize GitHub URLs (github.com/... → owner/repo/branch/path)
|
||||
const githubPath = sourceUrl
|
||||
.replace(/^github\.com\//, "")
|
||||
.replace(/\/tree\//, "/");
|
||||
|
||||
// Fetch directory listing from GitHub API
|
||||
const parts = githubPath.split("/");
|
||||
if (parts.length < 4 || !parts[0] || !parts[1] || !parts[2]) {
|
||||
throw new Error(`Invalid GitHub path: ${githubPath}`);
|
||||
}
|
||||
|
||||
const owner = parts[0];
|
||||
const repo = parts[1];
|
||||
const branch = parts[2];
|
||||
const path = parts.slice(3).join("/");
|
||||
|
||||
// Fetch contents using shared GitHub util
|
||||
const { fetchGitHubContents } = await import("./github-utils");
|
||||
const entries = await fetchGitHubContents(owner, repo, branch, path);
|
||||
|
||||
if (!Array.isArray(entries)) {
|
||||
throw new Error(`Expected directory at ${sourceUrl}, got file`);
|
||||
}
|
||||
|
||||
// Download all files recursively
|
||||
await downloadGitHubDirectory(entries, skillDir, owner, repo, branch, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively download files from GitHub directory
|
||||
*/
|
||||
async function downloadGitHubDirectory(
|
||||
entries: Array<{ type: "file" | "dir"; path: string; download_url?: string }>,
|
||||
destDir: string,
|
||||
owner: string,
|
||||
repo: string,
|
||||
branch: string,
|
||||
basePath: string,
|
||||
): Promise<void> {
|
||||
const { fetchGitHubContents } = await import("./github-utils");
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.type === "file") {
|
||||
if (!entry.download_url) {
|
||||
throw new Error(`Missing download_url for file: ${entry.path}`);
|
||||
}
|
||||
const fileResponse = await fetch(entry.download_url);
|
||||
const fileContent = await fileResponse.text();
|
||||
const relativePath = entry.path.replace(`${basePath}/`, "");
|
||||
await writeSkillFile(destDir, relativePath, fileContent);
|
||||
} else if (entry.type === "dir") {
|
||||
// Recursively fetch subdirectory using shared util
|
||||
const subEntries = await fetchGitHubContents(
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
entry.path,
|
||||
);
|
||||
await downloadGitHubDirectory(
|
||||
subEntries,
|
||||
destDir,
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
basePath,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6925,24 +6925,76 @@ export default function App({
|
||||
try {
|
||||
const client = await getClient();
|
||||
|
||||
// Pass conversation_id if we're in a specific conversation (not default)
|
||||
// Build export parameters (include conversation_id if in specific conversation)
|
||||
const exportParams: { conversation_id?: string } = {};
|
||||
if (conversationId !== "default") {
|
||||
exportParams.conversation_id = conversationId;
|
||||
}
|
||||
|
||||
const fileContent = await client.agents.exportFile(
|
||||
agentId,
|
||||
exportParams,
|
||||
);
|
||||
const fileName = `${agentId}.af`;
|
||||
// Package skills from agent/project/global directories
|
||||
const { packageSkills } = await import("../agent/export");
|
||||
const skills = await packageSkills(agentId);
|
||||
|
||||
// Export agent with skills
|
||||
let fileContent: unknown;
|
||||
if (skills.length > 0) {
|
||||
// Use raw fetch with auth from settings
|
||||
const { settingsManager } = await import("../settings-manager");
|
||||
const { getServerUrl } = await import("../agent/client");
|
||||
const settings =
|
||||
await settingsManager.getSettingsWithSecureTokens();
|
||||
const apiKey =
|
||||
process.env.LETTA_API_KEY || settings.env?.LETTA_API_KEY;
|
||||
const baseUrl = getServerUrl();
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
...exportParams,
|
||||
skills,
|
||||
};
|
||||
|
||||
const response = await fetch(
|
||||
`${baseUrl}/v1/agents/${agentId}/export`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Export failed: ${response.statusText}`);
|
||||
}
|
||||
|
||||
fileContent = await response.json();
|
||||
} else {
|
||||
// No skills to include, use SDK
|
||||
fileContent = await client.agents.exportFile(
|
||||
agentId,
|
||||
exportParams,
|
||||
);
|
||||
}
|
||||
|
||||
// Generate filename
|
||||
const fileName = exportParams.conversation_id
|
||||
? `${exportParams.conversation_id}.af`
|
||||
: `${agentId}.af`;
|
||||
|
||||
writeFileSync(fileName, JSON.stringify(fileContent, null, 2));
|
||||
|
||||
// Build success message
|
||||
let summary = `AgentFile downloaded to ${fileName}`;
|
||||
if (skills.length > 0) {
|
||||
summary += `\n📦 Included ${skills.length} skill(s): ${skills.map((s) => s.name).join(", ")}`;
|
||||
}
|
||||
|
||||
buffersRef.current.byId.set(cmdId, {
|
||||
kind: "command",
|
||||
id: cmdId,
|
||||
input: msg,
|
||||
output: `AgentFile downloaded to ${fileName}`,
|
||||
output: summary,
|
||||
phase: "finished",
|
||||
success: true,
|
||||
});
|
||||
|
||||
@@ -505,9 +505,17 @@ export async function handleHeadlessCommand(
|
||||
filePath: fromAfFile,
|
||||
modelOverride: model,
|
||||
stripMessages: true,
|
||||
stripSkills: false,
|
||||
});
|
||||
agent = result.agent;
|
||||
isNewlyCreatedAgent = true;
|
||||
|
||||
// Display extracted skills summary
|
||||
if (result.skills && result.skills.length > 0) {
|
||||
console.log(
|
||||
`📦 Extracted ${result.skills.length} skill${result.skills.length === 1 ? "" : "s"} to .skills/: ${result.skills.join(", ")}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Priority 2: Try to use --agent specified ID
|
||||
|
||||
@@ -1465,6 +1465,7 @@ async function main(): Promise<void> {
|
||||
filePath: fromAfFile,
|
||||
modelOverride: model,
|
||||
stripMessages: true,
|
||||
stripSkills: false,
|
||||
});
|
||||
agent = result.agent;
|
||||
isNewlyCreatedAgent = true;
|
||||
@@ -1472,6 +1473,13 @@ async function main(): Promise<void> {
|
||||
isNew: true,
|
||||
blocks: [],
|
||||
});
|
||||
|
||||
// Display extracted skills summary
|
||||
if (result.skills && result.skills.length > 0) {
|
||||
console.log(
|
||||
`\n📦 Extracted ${result.skills.length} skill${result.skills.length === 1 ? "" : "s"} to .skills/: ${result.skills.join(", ")}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Priority 2: Try to use --agent specified ID
|
||||
|
||||
82
src/tests/agent/export-skills.test.ts
Normal file
82
src/tests/agent/export-skills.test.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { afterEach, beforeEach, describe, expect, test } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { packageSkills } from "../../agent/export";

// Tests for packaging skills out of a .skills/ directory into .af form.
// Each test builds fixtures under a scratch dir and cleans up afterwards.
describe("packageSkills from .skills/ directory", () => {
  const testDir = join(process.cwd(), ".test-skills-export");
  const skillsDir = join(testDir, ".skills");
  const originalCwd = process.cwd();

  beforeEach(() => {
    mkdirSync(testDir, { recursive: true });
    // Run inside the scratch dir so any cwd-relative lookups are isolated.
    process.chdir(testDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    if (existsSync(testDir)) {
      rmSync(testDir, { recursive: true, force: true });
    }
  });

  test("packages single skill", async () => {
    mkdirSync(join(skillsDir, "test-skill"), { recursive: true });
    writeFileSync(
      join(skillsDir, "test-skill", "SKILL.md"),
      "---\nname: test-skill\ndescription: Test\n---\n\n# Test Skill",
    );
    writeFileSync(join(skillsDir, "test-skill", "config.yaml"), "version: 1.0");

    const skills = await packageSkills(undefined, skillsDir);

    // Both files should be embedded, keyed by skill-relative path.
    expect(skills).toHaveLength(1);
    expect(skills[0]?.name).toBe("test-skill");
    expect(skills[0]?.files?.["SKILL.md"]).toContain("Test Skill");
    expect(skills[0]?.files?.["config.yaml"]).toBe("version: 1.0");
  });

  test("packages multiple skills", async () => {
    for (const name of ["skill-one", "skill-two"]) {
      mkdirSync(join(skillsDir, name), { recursive: true });
      writeFileSync(join(skillsDir, name, "SKILL.md"), `# ${name}`);
    }

    const skills = await packageSkills(undefined, skillsDir);

    expect(skills).toHaveLength(2);
    // Sort before comparing: directory listing order is not guaranteed.
    expect(skills.map((s) => s.name).sort()).toEqual([
      "skill-one",
      "skill-two",
    ]);
  });

  test("includes nested files", async () => {
    mkdirSync(join(skillsDir, "nested-skill", "scripts"), { recursive: true });
    writeFileSync(join(skillsDir, "nested-skill", "SKILL.md"), "# Nested");
    writeFileSync(
      join(skillsDir, "nested-skill", "scripts", "run.sh"),
      "#!/bin/bash\necho hello",
    );

    const skills = await packageSkills(undefined, skillsDir);

    expect(skills).toHaveLength(1);
    expect(skills[0]?.files?.["SKILL.md"]).toBeDefined();
    // Nested files use forward-slash keys regardless of platform.
    expect(skills[0]?.files?.["scripts/run.sh"]).toBeDefined();
  });

  test("skips skills without SKILL.md", async () => {
    mkdirSync(join(skillsDir, "invalid-skill"), { recursive: true });
    writeFileSync(join(skillsDir, "invalid-skill", "README.md"), "No SKILL.md");

    const skills = await packageSkills(undefined, skillsDir);

    expect(skills).toHaveLength(0);
  });

  test("returns empty array when .skills/ missing", async () => {
    // skillsDir was never created in this test - ENOENT must be swallowed.
    const skills = await packageSkills(undefined, skillsDir);
    expect(skills).toEqual([]);
  });
});
|
||||
215
src/tests/agent/import-skills.test.ts
Normal file
215
src/tests/agent/import-skills.test.ts
Normal file
@@ -0,0 +1,215 @@
|
||||
import { afterEach, beforeEach, describe, expect, test } from "bun:test";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { readFile, stat } from "node:fs/promises";
import { join } from "node:path";
import { extractSkillsFromAf } from "../../agent/import";

// Tests for extracting skills from .af files onto disk.
// Each test writes a synthetic .af into a scratch dir, then extracts it.
describe("skills extraction from .af files", () => {
  const testDir = join(process.cwd(), ".test-skills-import");
  const skillsDir = join(testDir, ".skills");
  const afPath = join(testDir, "test-agent.af");
  const originalCwd = process.cwd();

  beforeEach(() => {
    mkdirSync(testDir, { recursive: true });
    process.chdir(testDir);
  });

  afterEach(() => {
    process.chdir(originalCwd);
    if (existsSync(testDir)) {
      rmSync(testDir, { recursive: true, force: true });
    }
  });

  test("extracts single skill with multiple files", async () => {
    const afContent = {
      agents: [],
      blocks: [],
      sources: [],
      tools: [],
      mcp_servers: [],
      skills: [
        {
          name: "test-skill",
          files: {
            "SKILL.md":
              "---\nname: test-skill\ndescription: A test skill\n---\n\n# Test Skill\n\nThis is a test.",
            // scripts/* entries should be written with the executable bit set.
            "scripts/hello": "#!/bin/bash\necho 'Hello from test skill'",
            "config.yaml": "version: 1.0\nfeatures:\n  - testing",
          },
        },
      ],
    };

    writeFileSync(afPath, JSON.stringify(afContent, null, 2));

    const extracted = await extractSkillsFromAf(afPath, skillsDir);

    expect(extracted).toEqual(["test-skill"]);
    expect(existsSync(join(skillsDir, "test-skill", "SKILL.md"))).toBe(true);
    expect(existsSync(join(skillsDir, "test-skill", "scripts", "hello"))).toBe(
      true,
    );
    expect(existsSync(join(skillsDir, "test-skill", "config.yaml"))).toBe(true);

    const skillContent = await readFile(
      join(skillsDir, "test-skill", "SKILL.md"),
      "utf-8",
    );
    expect(skillContent).toContain("Test Skill");

    // Check executable permissions (skip on Windows - chmod not supported)
    if (process.platform !== "win32") {
      const scriptStats = await stat(
        join(skillsDir, "test-skill", "scripts", "hello"),
      );
      // Any of the three execute bits is enough.
      expect(scriptStats.mode & 0o111).not.toBe(0);
    }
  });

  test("extracts skill with source_url metadata", async () => {
    const afContent = {
      agents: [],
      blocks: [],
      sources: [],
      tools: [],
      mcp_servers: [],
      skills: [
        {
          name: "slack",
          files: {
            "SKILL.md":
              "---\nname: slack\ndescription: Slack integration\n---\n\n# Slack Skill",
            "scripts/slack": "#!/bin/bash\necho 'Slack CLI'",
          },
          // Embedded files take precedence; source_url is metadata only here.
          source_url: "letta-ai/skills/tools/slack",
        },
      ],
    };

    writeFileSync(afPath, JSON.stringify(afContent, null, 2));

    const extracted = await extractSkillsFromAf(afPath, skillsDir);

    expect(extracted).toEqual(["slack"]);
    expect(existsSync(join(skillsDir, "slack", "SKILL.md"))).toBe(true);
    expect(existsSync(join(skillsDir, "slack", "scripts", "slack"))).toBe(true);
  });

  test("overwrites existing skills", async () => {
    // Pre-seed an old version on disk; extraction must replace it.
    mkdirSync(join(skillsDir, "existing-skill"), { recursive: true });
    writeFileSync(
      join(skillsDir, "existing-skill", "SKILL.md"),
      "# Old Version\n\nThis will be overwritten.",
    );

    const afContent = {
      agents: [],
      blocks: [],
      sources: [],
      tools: [],
      mcp_servers: [],
      skills: [
        {
          name: "existing-skill",
          files: {
            "SKILL.md": "# New Version\n\nThis is the updated version.",
          },
        },
      ],
    };

    writeFileSync(afPath, JSON.stringify(afContent, null, 2));

    const extracted = await extractSkillsFromAf(afPath, skillsDir);

    expect(extracted).toEqual(["existing-skill"]);

    const newContent = await readFile(
      join(skillsDir, "existing-skill", "SKILL.md"),
      "utf-8",
    );
    expect(newContent).toContain("New Version");
    expect(newContent).not.toContain("Old Version");
  });

  test("handles multiple skills", async () => {
    const afContent = {
      agents: [],
      blocks: [],
      sources: [],
      tools: [],
      mcp_servers: [],
      skills: [
        {
          name: "skill-one",
          files: {
            "SKILL.md": "# Skill One",
          },
        },
        {
          name: "skill-two",
          files: {
            "SKILL.md": "# Skill Two",
          },
        },
        {
          name: "skill-three",
          files: {
            "SKILL.md": "# Skill Three",
          },
        },
      ],
    };

    writeFileSync(afPath, JSON.stringify(afContent, null, 2));

    const extracted = await extractSkillsFromAf(afPath, skillsDir);

    // Extraction order follows the .af skills array order.
    expect(extracted).toEqual(["skill-one", "skill-two", "skill-three"]);
    expect(existsSync(join(skillsDir, "skill-one", "SKILL.md"))).toBe(true);
    expect(existsSync(join(skillsDir, "skill-two", "SKILL.md"))).toBe(true);
    expect(existsSync(join(skillsDir, "skill-three", "SKILL.md"))).toBe(true);
  });

  test("handles .af without skills", async () => {
    const afContent = {
      agents: [],
      blocks: [],
      sources: [],
      tools: [],
      mcp_servers: [],
      skills: [],
    };

    writeFileSync(afPath, JSON.stringify(afContent, null, 2));

    const extracted = await extractSkillsFromAf(afPath, skillsDir);

    expect(extracted).toEqual([]);
  });

  // NOTE(review): hits the live GitHub API - flaky offline/rate-limited.
  test("fetches skill from remote source_url (integration)", async () => {
    const afContent = {
      agents: [],
      blocks: [],
      sources: [],
      tools: [],
      mcp_servers: [],
      skills: [
        {
          name: "imessage",
          source_url: "letta-ai/skills/main/tools/imessage",
        },
      ],
    };

    writeFileSync(afPath, JSON.stringify(afContent, null, 2));

    const extracted = await extractSkillsFromAf(afPath, skillsDir);

    expect(extracted).toEqual(["imessage"]);
    expect(existsSync(join(skillsDir, "imessage", "SKILL.md"))).toBe(true);
  });
});
|
||||
Reference in New Issue
Block a user