feat: Model based toolset switching (#111)
Co-authored-by: cpacker <packercharles@gmail.com>
This commit is contained in:
269
src/tools/impl/ApplyPatch.ts
Normal file
269
src/tools/impl/ApplyPatch.ts
Normal file
@@ -0,0 +1,269 @@
|
||||
import { promises as fs } from "node:fs";
|
||||
import * as path from "node:path";
|
||||
import { validateRequiredParams } from "./validation.js";
|
||||
|
||||
// Arguments for the apply_patch tool.
interface ApplyPatchArgs {
  // Full patch text, delimited by "*** Begin Patch" / "*** End Patch".
  input: string;
}

// Result returned to the caller on success.
interface ApplyPatchResult {
  message: string;
}

// One parsed file-level operation from the patch.
type FileOperation =
  | {
      kind: "add";
      path: string;
      // New file body, one entry per line (leading "+" already stripped).
      contentLines: string[];
    }
  | {
      kind: "update";
      fromPath: string;
      // Present when the patch contains a "*** Move to:" directive.
      toPath?: string;
      hunks: Hunk[];
    }
  | {
      kind: "delete";
      path: string;
    };

interface Hunk {
  lines: string[]; // raw hunk lines (excluding the @@ header)
}
|
||||
|
||||
/**
|
||||
* Simple ApplyPatch implementation compatible with the Letta/Codex apply_patch tool format.
|
||||
*
|
||||
* Supports:
|
||||
* - *** Add File: path
|
||||
* - *** Update File: path
|
||||
* - optional *** Move to: new_path
|
||||
* - one or more @@ hunks with space/-/+ lines
|
||||
* - *** Delete File: path
|
||||
*/
|
||||
export async function apply_patch(
|
||||
args: ApplyPatchArgs,
|
||||
): Promise<ApplyPatchResult> {
|
||||
validateRequiredParams(args, ["input"], "apply_patch");
|
||||
const { input } = args;
|
||||
|
||||
const lines = input.split(/\r?\n/);
|
||||
if (lines[0]?.trim() !== "*** Begin Patch") {
|
||||
throw new Error('Patch must start with "*** Begin Patch"');
|
||||
}
|
||||
const endIndex = lines.lastIndexOf("*** End Patch");
|
||||
if (endIndex === -1) {
|
||||
throw new Error('Patch must end with "*** End Patch"');
|
||||
}
|
||||
|
||||
const ops: FileOperation[] = [];
|
||||
let i = 1;
|
||||
|
||||
while (i < endIndex) {
|
||||
const line = lines[i]?.trim();
|
||||
if (!line) {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (line.startsWith("*** Add File:")) {
|
||||
const filePath = line.replace("*** Add File:", "").trim();
|
||||
i += 1;
|
||||
const contentLines: string[] = [];
|
||||
while (i < endIndex) {
|
||||
const raw = lines[i];
|
||||
if (raw === undefined || raw.startsWith("*** ")) break;
|
||||
if (raw.startsWith("+")) {
|
||||
contentLines.push(raw.slice(1));
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
ops.push({ kind: "add", path: filePath, contentLines });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (line.startsWith("*** Update File:")) {
|
||||
const fromPath = line.replace("*** Update File:", "").trim();
|
||||
i += 1;
|
||||
|
||||
let toPath: string | undefined;
|
||||
if (i < endIndex) {
|
||||
const moveLine = lines[i];
|
||||
if (moveLine?.startsWith("*** Move to:")) {
|
||||
toPath = moveLine.replace("*** Move to:", "").trim();
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
|
||||
const hunks: Hunk[] = [];
|
||||
while (i < endIndex) {
|
||||
const hLine = lines[i];
|
||||
if (hLine === undefined || hLine.startsWith("*** ")) break;
|
||||
if (hLine.startsWith("@@")) {
|
||||
// Start of a new hunk
|
||||
i += 1;
|
||||
const hunkLines: string[] = [];
|
||||
while (i < endIndex) {
|
||||
const l = lines[i];
|
||||
if (l === undefined || l.startsWith("@@") || l.startsWith("*** ")) {
|
||||
break;
|
||||
}
|
||||
if (
|
||||
l.startsWith(" ") ||
|
||||
l.startsWith("+") ||
|
||||
l.startsWith("-") ||
|
||||
l === ""
|
||||
) {
|
||||
hunkLines.push(l);
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
hunks.push({ lines: hunkLines });
|
||||
continue;
|
||||
}
|
||||
// Skip stray lines until next header/hunk
|
||||
i += 1;
|
||||
}
|
||||
|
||||
if (hunks.length === 0) {
|
||||
throw new Error(`Update for file ${fromPath} has no hunks`);
|
||||
}
|
||||
|
||||
ops.push({ kind: "update", fromPath, toPath, hunks });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (line.startsWith("*** Delete File:")) {
|
||||
const filePath = line.replace("*** Delete File:", "").trim();
|
||||
ops.push({ kind: "delete", path: filePath });
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Unknown directive; skip
|
||||
i += 1;
|
||||
}
|
||||
|
||||
const cwd = process.cwd();
|
||||
const pendingWrites = new Map<string, string>();
|
||||
|
||||
// Helper to get current content (including prior ops in this patch)
|
||||
const loadFile = async (relativePath: string): Promise<string> => {
|
||||
const abs = path.resolve(cwd, relativePath);
|
||||
const cached = pendingWrites.get(abs);
|
||||
if (cached !== undefined) return cached;
|
||||
|
||||
try {
|
||||
const buf = await fs.readFile(abs, "utf8");
|
||||
return buf;
|
||||
} catch (error) {
|
||||
const err = error as NodeJS.ErrnoException;
|
||||
if (err.code === "ENOENT") {
|
||||
throw new Error(`File not found for update: ${relativePath}`);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
const saveFile = (relativePath: string, content: string) => {
|
||||
const abs = path.resolve(cwd, relativePath);
|
||||
pendingWrites.set(abs, content);
|
||||
};
|
||||
|
||||
// Apply all operations in memory first
|
||||
for (const op of ops) {
|
||||
if (op.kind === "add") {
|
||||
const abs = path.resolve(cwd, op.path);
|
||||
const content = op.contentLines.join("\n");
|
||||
pendingWrites.set(abs, content);
|
||||
} else if (op.kind === "update") {
|
||||
const currentPath = op.fromPath;
|
||||
let content = await loadFile(currentPath);
|
||||
|
||||
for (const hunk of op.hunks) {
|
||||
const { oldChunk, newChunk } = buildOldNewChunks(hunk.lines);
|
||||
if (!oldChunk) {
|
||||
continue;
|
||||
}
|
||||
const idx = content.indexOf(oldChunk);
|
||||
if (idx === -1) {
|
||||
throw new Error(
|
||||
`Failed to apply hunk to ${currentPath}: context not found`,
|
||||
);
|
||||
}
|
||||
content =
|
||||
content.slice(0, idx) +
|
||||
newChunk +
|
||||
content.slice(idx + oldChunk.length);
|
||||
}
|
||||
|
||||
const targetPath = op.toPath ?? op.fromPath;
|
||||
saveFile(targetPath, content);
|
||||
// If file was renamed, also clear the old path so we don't write both
|
||||
if (op.toPath && op.toPath !== op.fromPath) {
|
||||
const oldAbs = path.resolve(cwd, op.fromPath);
|
||||
if (pendingWrites.has(oldAbs)) {
|
||||
pendingWrites.delete(oldAbs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply deletes on disk
|
||||
for (const op of ops) {
|
||||
if (op.kind === "delete") {
|
||||
const abs = path.resolve(cwd, op.path);
|
||||
try {
|
||||
await fs.unlink(abs);
|
||||
} catch (error) {
|
||||
const err = error as NodeJS.ErrnoException;
|
||||
if (err.code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flush writes to disk
|
||||
for (const [absPath, content] of pendingWrites.entries()) {
|
||||
const dir = path.dirname(absPath);
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
await fs.writeFile(absPath, content, "utf8");
|
||||
}
|
||||
|
||||
return {
|
||||
message: "Patch applied successfully",
|
||||
};
|
||||
}
|
||||
|
||||
function buildOldNewChunks(lines: string[]): {
|
||||
oldChunk: string;
|
||||
newChunk: string;
|
||||
} {
|
||||
const oldParts: string[] = [];
|
||||
const newParts: string[] = [];
|
||||
|
||||
for (const raw of lines) {
|
||||
if (raw === "") {
|
||||
oldParts.push("\n");
|
||||
newParts.push("\n");
|
||||
continue;
|
||||
}
|
||||
const prefix = raw[0];
|
||||
const text = raw.slice(1);
|
||||
|
||||
if (prefix === " ") {
|
||||
oldParts.push(`${text}\n`);
|
||||
newParts.push(`${text}\n`);
|
||||
} else if (prefix === "-") {
|
||||
oldParts.push(`${text}\n`);
|
||||
} else if (prefix === "+") {
|
||||
newParts.push(`${text}\n`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
oldChunk: oldParts.join(""),
|
||||
newChunk: newParts.join(""),
|
||||
};
|
||||
}
|
||||
32
src/tools/impl/GrepFiles.ts
Normal file
32
src/tools/impl/GrepFiles.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { type GrepArgs, grep } from "./Grep.js";
|
||||
import { validateRequiredParams } from "./validation.js";
|
||||
|
||||
// Arguments for the grep_files tool.
interface GrepFilesArgs {
  // Pattern to search for (forwarded to Grep unchanged).
  pattern: string;
  // Optional glob restricting which files are searched.
  include?: string;
  // Optional directory or file to search in.
  path?: string;
  // Accepted in the schema but not currently forwarded to Grep.
  limit?: number;
}

// Mirrors whatever the underlying Grep implementation returns.
type GrepFilesResult = Awaited<ReturnType<typeof grep>>;
|
||||
|
||||
/**
|
||||
* Codex-style grep_files tool.
|
||||
* Uses the existing Grep implementation and returns a list of files with matches.
|
||||
*/
|
||||
export async function grep_files(
|
||||
args: GrepFilesArgs,
|
||||
): Promise<GrepFilesResult> {
|
||||
validateRequiredParams(args, ["pattern"], "grep_files");
|
||||
|
||||
const { pattern, include, path } = args;
|
||||
|
||||
const grepArgs: GrepArgs = {
|
||||
pattern,
|
||||
path,
|
||||
glob: include,
|
||||
output_mode: "files_with_matches",
|
||||
};
|
||||
|
||||
return grep(grepArgs);
|
||||
}
|
||||
26
src/tools/impl/ListDirCodex.ts
Normal file
26
src/tools/impl/ListDirCodex.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { ls } from "./LS.js";
|
||||
import { validateRequiredParams } from "./validation.js";
|
||||
|
||||
// Arguments for the list_dir tool.
interface ListDirCodexArgs {
  // Directory to list.
  dir_path: string;
  // Pagination/depth controls; accepted but currently ignored.
  offset?: number;
  limit?: number;
  depth?: number;
}

// Mirrors whatever the underlying LS implementation returns.
type ListDirCodexResult = Awaited<ReturnType<typeof ls>>;
|
||||
|
||||
/**
|
||||
* Codex-style list_dir tool.
|
||||
* Delegates to the existing LS implementation; offset/limit/depth are accepted but currently ignored.
|
||||
*/
|
||||
export async function list_dir(
|
||||
args: ListDirCodexArgs,
|
||||
): Promise<ListDirCodexResult> {
|
||||
validateRequiredParams(args, ["dir_path"], "list_dir");
|
||||
|
||||
const { dir_path } = args;
|
||||
|
||||
// LS handles path resolution and formatting.
|
||||
return ls({ path: dir_path, ignore: [] });
|
||||
}
|
||||
42
src/tools/impl/ReadFileCodex.ts
Normal file
42
src/tools/impl/ReadFileCodex.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { read } from "./Read.js";
|
||||
import { validateRequiredParams } from "./validation.js";
|
||||
|
||||
// Options for indentation-aware reads; accepted but currently ignored.
interface IndentationOptions {
  anchor_line?: number;
  max_levels?: number;
  include_siblings?: boolean;
  include_header?: boolean;
  max_lines?: number;
}

// Arguments for the read_file tool.
interface ReadFileCodexArgs {
  file_path: string;
  // Optional line offset, passed through to Read.
  offset?: number;
  // Optional line count limit, passed through to Read.
  limit?: number;
  // Read mode; only slice-style behavior is implemented (see read_file).
  mode?: "slice" | "indentation" | string;
  indentation?: IndentationOptions;
}

// Result: the file content as rendered by the Read tool.
interface ReadFileCodexResult {
  content: string;
}
|
||||
|
||||
/**
|
||||
* Codex-style read_file tool.
|
||||
* Currently supports slice-style reading; indentation mode is ignored but accepted.
|
||||
*/
|
||||
export async function read_file(
|
||||
args: ReadFileCodexArgs,
|
||||
): Promise<ReadFileCodexResult> {
|
||||
validateRequiredParams(args, ["file_path"], "read_file");
|
||||
|
||||
const { file_path, offset, limit } = args;
|
||||
|
||||
const result = await read({
|
||||
file_path,
|
||||
offset,
|
||||
limit,
|
||||
});
|
||||
|
||||
return { content: result.content };
|
||||
}
|
||||
72
src/tools/impl/Shell.ts
Normal file
72
src/tools/impl/Shell.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { bash } from "./Bash.js";
|
||||
import { validateRequiredParams } from "./validation.js";
|
||||
|
||||
// Arguments for the shell tool.
interface ShellArgs {
  // argv-style command, typically ["bash", "-lc", "<script>"].
  command: string[];
  // Optional working directory for the command.
  workdir?: string;
  // Optional timeout in milliseconds (defaults to 120000 in shell()).
  timeout_ms?: number;
  // Accepted for schema compatibility; not used by this implementation.
  with_escalated_permissions?: boolean;
  // Human-readable reason; forwarded to Bash as the command description.
  justification?: string;
}

// Result of running the command.
interface ShellResult {
  // Combined textual output of the command.
  output: string;
  // Output split into lines (Bash does not separate streams here).
  stdout: string[];
  // Synthetic marker line when the command reported an error; else empty.
  stderr: string[];
}
|
||||
|
||||
/**
|
||||
* Codex-style shell tool.
|
||||
* Runs an array of shell arguments, typically ["bash", "-lc", "..."].
|
||||
*/
|
||||
export async function shell(args: ShellArgs): Promise<ShellResult> {
|
||||
validateRequiredParams(args, ["command"], "shell");
|
||||
|
||||
const { command, workdir, timeout_ms, justification: description } = args;
|
||||
if (!Array.isArray(command) || command.length === 0) {
|
||||
throw new Error("command must be a non-empty array of strings");
|
||||
}
|
||||
|
||||
const commandString = command.join(" ");
|
||||
|
||||
const previousUserCwd = process.env.USER_CWD;
|
||||
if (workdir) {
|
||||
process.env.USER_CWD = workdir;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await bash({
|
||||
command: commandString,
|
||||
timeout: timeout_ms ?? 120000,
|
||||
description,
|
||||
run_in_background: false,
|
||||
});
|
||||
|
||||
const text = (result.content ?? [])
|
||||
.map((item) =>
|
||||
"text" in item && typeof item.text === "string" ? item.text : "",
|
||||
)
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
|
||||
const stdout = text ? text.split("\n") : [];
|
||||
const stderr =
|
||||
result.status === "error"
|
||||
? ["Command reported an error. See output for details."]
|
||||
: [];
|
||||
|
||||
return {
|
||||
output: text,
|
||||
stdout,
|
||||
stderr,
|
||||
};
|
||||
} finally {
|
||||
if (workdir) {
|
||||
if (previousUserCwd === undefined) {
|
||||
delete process.env.USER_CWD;
|
||||
} else {
|
||||
process.env.USER_CWD = previousUserCwd;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
70
src/tools/impl/ShellCommand.ts
Normal file
70
src/tools/impl/ShellCommand.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { bash } from "./Bash.js";
|
||||
import { validateRequiredParams } from "./validation.js";
|
||||
|
||||
// Arguments for the shell_command tool.
interface ShellCommandArgs {
  // Shell script string to run in the user's default shell.
  command: string;
  // Optional working directory for the command.
  workdir?: string;
  // Optional timeout in milliseconds (defaults to 120000 in shell_command()).
  timeout_ms?: number;
  // Accepted for schema compatibility; not used by this implementation.
  with_escalated_permissions?: boolean;
  // Human-readable reason; forwarded to Bash as the command description.
  justification?: string;
}

// Result of running the command.
interface ShellCommandResult {
  // Combined textual output of the command.
  output: string;
  // Output split into lines (Bash does not separate streams here).
  stdout: string[];
  // Synthetic marker line when the command reported an error; else empty.
  stderr: string[];
}
|
||||
|
||||
/**
|
||||
* Codex-style shell_command tool.
|
||||
* Runs a shell script string in the user's default shell.
|
||||
*/
|
||||
export async function shell_command(
|
||||
args: ShellCommandArgs,
|
||||
): Promise<ShellCommandResult> {
|
||||
validateRequiredParams(args, ["command"], "shell_command");
|
||||
|
||||
const { command, workdir, timeout_ms, justification: description } = args;
|
||||
|
||||
// Reuse Bash implementation for execution, but honor the requested workdir
|
||||
const previousUserCwd = process.env.USER_CWD;
|
||||
if (workdir) {
|
||||
process.env.USER_CWD = workdir;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await bash({
|
||||
command,
|
||||
timeout: timeout_ms ?? 120000,
|
||||
description,
|
||||
run_in_background: false,
|
||||
});
|
||||
|
||||
const text = (result.content ?? [])
|
||||
.map((item) =>
|
||||
"text" in item && typeof item.text === "string" ? item.text : "",
|
||||
)
|
||||
.filter(Boolean)
|
||||
.join("\n");
|
||||
|
||||
const stdout = text ? text.split("\n") : [];
|
||||
const stderr =
|
||||
result.status === "error"
|
||||
? ["Command reported an error. See output for details."]
|
||||
: [];
|
||||
|
||||
return {
|
||||
output: text,
|
||||
stdout,
|
||||
stderr,
|
||||
};
|
||||
} finally {
|
||||
if (workdir) {
|
||||
if (previousUserCwd === undefined) {
|
||||
delete process.env.USER_CWD;
|
||||
} else {
|
||||
process.env.USER_CWD = previousUserCwd;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user