refactor: tighten codex toolset parity without freeform (#1014)

This commit is contained in:
Charles Packer
2026-02-18 12:55:09 -08:00
committed by GitHub
parent a1637b313a
commit 4e1827ebc1
17 changed files with 336 additions and 136 deletions

View File

@@ -41,6 +41,7 @@ const PARALLEL_SAFE_TOOLS = new Set([
// === Anthropic toolset (default) ===
"Read",
"view_image",
"ViewImage",
"Grep",
"Glob",

View File

@@ -96,7 +96,13 @@ const DynamicPreview: React.FC<DynamicPreviewProps> = ({
const cmd =
typeof cmdVal === "string" ? cmdVal : toolArgs || "(no arguments)";
const descVal = parsedArgs?.description;
const desc = typeof descVal === "string" ? descVal : "";
const justificationVal = parsedArgs?.justification;
const desc =
typeof descVal === "string"
? descVal
: typeof justificationVal === "string"
? justificationVal
: "";
return (
<Box flexDirection="column" paddingLeft={2}>

View File

@@ -109,7 +109,11 @@ function getBashInfo(approval: ApprovalRequest): BashInfo | null {
command =
typeof args.command === "string" ? args.command : "(no command)";
description =
typeof args.description === "string" ? args.description : "";
typeof args.description === "string"
? args.description
: typeof args.justification === "string"
? args.justification
: "";
}
return {

View File

@@ -48,13 +48,15 @@ const toolsets: ToolsetOption[] = [
label: "Codex Tools",
description: "Toolset optimized for GPT/Codex models",
tools: [
"AskUserQuestion",
"EnterPlanMode",
"ExitPlanMode",
"Task",
"Skill",
"ShellCommand",
"Shell",
"ReadFile",
"ListDir",
"GrepFiles",
"ApplyPatch",
"UpdatePlan",
"ViewImage",
],
isFeatured: true,
},
@@ -62,15 +64,7 @@ const toolsets: ToolsetOption[] = [
id: "codex_snake",
label: "Codex Tools (snake_case)",
description: "Toolset optimized for GPT/Codex models (snake_case)",
tools: [
"shell_command",
"shell",
"read_file",
"list_dir",
"grep_files",
"apply_patch",
"update_plan",
],
tools: ["shell_command", "apply_patch", "update_plan", "view_image"],
},
{
id: "gemini",

View File

@@ -0,0 +1,76 @@
import { afterEach, describe, expect, test } from "bun:test";
import { existsSync, readFileSync } from "node:fs";
import { join } from "node:path";
import { apply_patch } from "../../tools/impl/ApplyPatch";
import { TestDirectory } from "../helpers/testFs";
describe("apply_patch tool", () => {
  let testDir: TestDirectory | undefined;
  let originalUserCwd: string | undefined;

  // Point USER_CWD at a fresh temp directory, remembering the prior value
  // so afterEach can restore it after each test.
  const useTempCwd = (): TestDirectory => {
    testDir = new TestDirectory();
    originalUserCwd = process.env.USER_CWD;
    process.env.USER_CWD = testDir.path;
    return testDir;
  };

  afterEach(() => {
    if (originalUserCwd === undefined) {
      delete process.env.USER_CWD;
    } else {
      process.env.USER_CWD = originalUserCwd;
    }
    testDir?.cleanup();
    testDir = undefined;
  });

  test("moves file and removes source path", async () => {
    const dir = useTempCwd();
    dir.createFile("old/name.txt", "old content\n");
    await apply_patch({
      input: `*** Begin Patch
*** Update File: old/name.txt
*** Move to: renamed/name.txt
@@
-old content
+new content
*** End Patch`,
    });
    const sourcePath = join(dir.path, "old/name.txt");
    const targetPath = join(dir.path, "renamed/name.txt");
    expect(existsSync(sourcePath)).toBe(false);
    expect(existsSync(targetPath)).toBe(true);
    expect(readFileSync(targetPath, "utf-8")).toBe("new content\n");
  });

  test("rejects absolute paths", async () => {
    const dir = useTempCwd();
    const absolutePath = join(dir.path, "abs.txt");
    await expect(
      apply_patch({
        input: `*** Begin Patch
*** Add File: ${absolutePath}
+hello
*** End Patch`,
      }),
    ).rejects.toThrow(/must be relative/);
  });

  test("fails when adding an existing file", async () => {
    const dir = useTempCwd();
    dir.createFile("exists.txt", "original");
    await expect(
      apply_patch({
        input: `*** Begin Patch
*** Add File: exists.txt
+new
*** End Patch`,
      }),
    ).rejects.toThrow(/already exists/);
  });
});

View File

@@ -63,6 +63,15 @@ describe("Shell Launchers", () => {
expect(bashLauncher).toBeDefined();
});
test("uses login shell flag when login=true", () => {
const launchers = buildShellLaunchers("echo test", { login: true });
const loginLauncher = launchers.find(
(l) =>
(l[0]?.includes("bash") || l[0]?.includes("zsh")) && l[1] === "-lc",
);
expect(loginLauncher).toBeDefined();
});
test("prefers user SHELL environment", () => {
const originalShell = process.env.SHELL;
process.env.SHELL = "/bin/zsh";

View File

@@ -14,11 +14,11 @@ Each operation starts with one of three headers:
*** Update File: <path> - patch an existing file in place (optionally with a rename).
May be immediately followed by *** Move to: <new path> if you want to rename the file.
Then one or more "hunks", each introduced by @@ (optionally followed by a hunk header).
Then one or more hunks, each introduced by @@ (optionally followed by a hunk header).
Within a hunk each line starts with:
For instructions on [context_before] and [context_after]:
- By default, show 3 lines of code immediately above and 3 lines immediately below each change. If a change is within 3 lines of a previous change, do NOT duplicate the first change's [context_after] lines in the second change's [context_before] lines.
- By default, show 3 lines of code immediately above and 3 lines immediately below each change. If a change is within 3 lines of a previous change, do NOT duplicate the first changes [context_after] lines in the second changes [context_before] lines.
- If 3 lines of context is insufficient to uniquely identify the snippet of code within the file, use the @@ operator to indicate the class or function to which the snippet belongs. For instance, we might have:
@@ class BaseClass
[3 lines of pre-context]
@@ -65,16 +65,3 @@ It is important to remember:
- You must include a header with your intended action (Add/Delete/Update)
- You must prefix new lines with `+` even when creating a new file
- File references can only be relative, NEVER ABSOLUTE.

View File

@@ -1,8 +1 @@
# ViewImage
Attach a local image file to the conversation context for this turn.
Usage:
- The `path` parameter must be an absolute path to a local image file
- Supported formats: PNG, JPG, JPEG, GIF, WEBP, BMP
- Large images are automatically resized to fit API limits
View a local image from the filesystem (only use if given a full filepath by the user, and the image isn't already attached to the thread context within <image ...> tags).

View File

@@ -32,7 +32,7 @@ interface Hunk {
}
/**
* Simple ApplyPatch implementation compatible with the Letta/Codex apply_patch tool format.
* ApplyPatch implementation compatible with the Letta/Codex apply_patch JSON tool format.
*
* Supports:
* - *** Add File: path
@@ -48,13 +48,21 @@ export async function apply_patch(
const { input } = args;
const lines = input.split(/\r?\n/);
if (lines[0]?.trim() !== "*** Begin Patch") {
const beginIndex = lines.findIndex(
(line) => line.trim() === "*** Begin Patch",
);
if (beginIndex !== 0) {
throw new Error('Patch must start with "*** Begin Patch"');
}
const endIndex = lines.lastIndexOf("*** End Patch");
const endIndex = lines.findIndex((line) => line.trim() === "*** End Patch");
if (endIndex === -1) {
throw new Error('Patch must end with "*** End Patch"');
}
for (let tail = endIndex + 1; tail < lines.length; tail += 1) {
if ((lines[tail] ?? "").trim().length > 0) {
throw new Error("Unexpected content after *** End Patch");
}
}
const ops: FileOperation[] = [];
let i = 1;
@@ -68,22 +76,34 @@ export async function apply_patch(
if (line.startsWith("*** Add File:")) {
const filePath = line.replace("*** Add File:", "").trim();
assertRelativePatchPath(filePath, "Add File");
i += 1;
const contentLines: string[] = [];
while (i < endIndex) {
const raw = lines[i];
if (raw === undefined || raw.startsWith("*** ")) break;
if (raw.startsWith("+")) {
contentLines.push(raw.slice(1));
if (raw === undefined || raw.startsWith("*** ")) {
break;
}
if (!raw.startsWith("+")) {
throw new Error(
`Invalid Add File line at ${i + 1}: expected '+' prefix`,
);
}
contentLines.push(raw.slice(1));
i += 1;
}
if (contentLines.length === 0) {
throw new Error(
`Add File for ${filePath} must include at least one + line`,
);
}
ops.push({ kind: "add", path: filePath, contentLines });
continue;
}
if (line.startsWith("*** Update File:")) {
const fromPath = line.replace("*** Update File:", "").trim();
assertRelativePatchPath(fromPath, "Update File");
i += 1;
let toPath: string | undefined;
@@ -91,6 +111,7 @@ export async function apply_patch(
const moveLine = lines[i];
if (moveLine?.startsWith("*** Move to:")) {
toPath = moveLine.replace("*** Move to:", "").trim();
assertRelativePatchPath(toPath, "Move to");
i += 1;
}
}
@@ -98,7 +119,9 @@ export async function apply_patch(
const hunks: Hunk[] = [];
while (i < endIndex) {
const hLine = lines[i];
if (hLine === undefined || hLine.startsWith("*** ")) break;
if (hLine === undefined || hLine.startsWith("*** ")) {
break;
}
if (hLine.startsWith("@@")) {
// Start of a new hunk
i += 1;
@@ -108,6 +131,10 @@ export async function apply_patch(
if (l === undefined || l.startsWith("@@") || l.startsWith("*** ")) {
break;
}
if (l === "*** End of File") {
i += 1;
break;
}
if (
l.startsWith(" ") ||
l.startsWith("+") ||
@@ -115,14 +142,19 @@ export async function apply_patch(
l === ""
) {
hunkLines.push(l);
} else {
throw new Error(
`Invalid hunk line at ${i + 1}: expected one of ' ', '+', '-'`,
);
}
i += 1;
}
hunks.push({ lines: hunkLines });
continue;
}
// Skip stray lines until next header/hunk
i += 1;
throw new Error(
`Invalid Update File body at ${i + 1}: expected '@@' hunk header`,
);
}
if (hunks.length === 0) {
@@ -135,21 +167,25 @@ export async function apply_patch(
if (line.startsWith("*** Delete File:")) {
const filePath = line.replace("*** Delete File:", "").trim();
assertRelativePatchPath(filePath, "Delete File");
ops.push({ kind: "delete", path: filePath });
i += 1;
continue;
}
// Unknown directive; skip
i += 1;
throw new Error(`Unknown patch directive at line ${i + 1}: ${line}`);
}
const cwd = process.cwd();
const cwd = process.env.USER_CWD || process.cwd();
const pendingWrites = new Map<string, string>();
const pendingDeletes = new Set<string>();
// Helper to get current content (including prior ops in this patch)
const loadFile = async (relativePath: string): Promise<string> => {
const abs = path.resolve(cwd, relativePath);
if (pendingDeletes.has(abs)) {
throw new Error(`File not found for update: ${relativePath}`);
}
const cached = pendingWrites.get(abs);
if (cached !== undefined) return cached;
@@ -175,6 +211,12 @@ export async function apply_patch(
for (const op of ops) {
if (op.kind === "add") {
const abs = path.resolve(cwd, op.path);
if (pendingWrites.has(abs)) {
throw new Error(`File already added/updated in patch: ${op.path}`);
}
if (!(await isMissing(abs))) {
throw new Error(`Cannot Add File that already exists: ${op.path}`);
}
const content = op.contentLines.join("\n");
pendingWrites.set(abs, content);
} else if (op.kind === "update") {
@@ -182,46 +224,20 @@ export async function apply_patch(
let content = await loadFile(currentPath);
for (const hunk of op.hunks) {
const { oldChunk, newChunk } = buildOldNewChunks(hunk.lines);
if (!oldChunk) {
continue;
}
const idx = content.indexOf(oldChunk);
if (idx === -1) {
throw new Error(
`Failed to apply hunk to ${currentPath}: context not found`,
);
}
content =
content.slice(0, idx) +
newChunk +
content.slice(idx + oldChunk.length);
content = applyHunk(content, hunk.lines, currentPath);
}
const targetPath = op.toPath ?? op.fromPath;
saveFile(targetPath, content);
// If file was renamed, also clear the old path so we don't write both
if (op.toPath && op.toPath !== op.fromPath) {
const oldAbs = path.resolve(cwd, op.fromPath);
if (pendingWrites.has(oldAbs)) {
pendingWrites.delete(oldAbs);
}
pendingWrites.delete(oldAbs);
pendingDeletes.add(oldAbs);
}
}
}
// Apply deletes on disk
for (const op of ops) {
if (op.kind === "delete") {
} else if (op.kind === "delete") {
const abs = path.resolve(cwd, op.path);
try {
await fs.unlink(abs);
} catch (error) {
const err = error as NodeJS.ErrnoException;
if (err.code !== "ENOENT") {
throw err;
}
}
pendingWrites.delete(abs);
pendingDeletes.add(abs);
}
}
@@ -232,11 +248,82 @@ export async function apply_patch(
await fs.writeFile(absPath, content, "utf8");
}
// Flush deletes after writes (for move semantics)
for (const absPath of pendingDeletes) {
if (pendingWrites.has(absPath)) continue;
if (await isMissing(absPath)) continue;
await fs.unlink(absPath);
}
return {
message: "Patch applied successfully",
};
}
/**
 * Validate a path taken from a patch directive.
 *
 * Patch operations may only reference files relative to the working
 * directory, so both empty and absolute paths are rejected.
 *
 * @param patchPath path extracted from an Add/Update/Delete/Move directive
 * @param operation directive name, used to build the error message
 * @throws Error when the path is empty or absolute
 */
function assertRelativePatchPath(patchPath: string, operation: string): void {
  if (patchPath.length === 0) {
    throw new Error(`${operation} path cannot be empty`);
  }
  if (path.isAbsolute(patchPath)) {
    throw new Error(
      `${operation} path must be relative (absolute paths are not allowed): ${patchPath}`,
    );
  }
}
/**
 * Report whether a file is absent on disk.
 *
 * Resolves true when fs.access rejects for any reason (not just ENOENT),
 * matching the original broad catch — an inaccessible file also counts
 * as missing.
 */
async function isMissing(filePath: string): Promise<boolean> {
  return fs.access(filePath).then(
    () => false,
    () => true,
  );
}
/**
 * Apply a single patch hunk to `content` by exact context matching.
 *
 * The hunk's context+removed lines (oldChunk) are located in the file text
 * and replaced with the context+added lines (newChunk). If the exact chunk
 * is not found and it ends with a newline, a second attempt is made without
 * that trailing newline to tolerate files lacking a final newline.
 *
 * @param content   current file text
 * @param hunkLines raw hunk body lines (' ', '+', '-' prefixed)
 * @param filePath  path used only for error messages
 * @throws Error when the hunk has no anchor text or its context is absent
 */
function applyHunk(
  content: string,
  hunkLines: string[],
  filePath: string,
): string {
  const { oldChunk, newChunk } = buildOldNewChunks(hunkLines);
  if (oldChunk.length === 0) {
    throw new Error(
      `Failed to apply hunk to ${filePath}: hunk has no anchor/context`,
    );
  }

  // Replace `target` at offset `at` with `substitute`.
  const spliceAt = (at: number, target: string, substitute: string): string =>
    content.slice(0, at) + substitute + content.slice(at + target.length);

  const exactAt = content.indexOf(oldChunk);
  if (exactAt !== -1) {
    return spliceAt(exactAt, oldChunk, newChunk);
  }

  // Fallback for files that omit the trailing newline: retry the match
  // without it, and trim the replacement's trailing newline to match.
  if (oldChunk.endsWith("\n")) {
    const trimmedOld = oldChunk.slice(0, -1);
    const trimmedAt = content.indexOf(trimmedOld);
    if (trimmedAt !== -1) {
      const trimmedNew = newChunk.endsWith("\n")
        ? newChunk.slice(0, -1)
        : newChunk;
      return spliceAt(trimmedAt, trimmedOld, trimmedNew);
    }
  }

  throw new Error(`Failed to apply hunk to ${filePath}: context not found`);
}
function buildOldNewChunks(lines: string[]): {
oldChunk: string;
newChunk: string;

View File

@@ -6,9 +6,11 @@ import { validateRequiredParams } from "./validation.js";
// Arguments accepted by the shell_command tool implementation.
interface ShellCommandArgs {
  // Shell command line to execute.
  command: string;
  // Working directory for the command — presumably falls back to the
  // process cwd when omitted; confirm against the executor.
  workdir?: string;
  // Run with login-shell semantics (e.g. bash/zsh `-lc`). shell_command
  // defaults this to true when destructuring.
  login?: boolean;
  // Command timeout in milliseconds.
  timeout_ms?: number;
  // Legacy escalation flag — appears superseded by sandbox_permissions;
  // TODO confirm whether callers still send it.
  with_escalated_permissions?: boolean;
  // Sandbox mode: "require_escalated" requests running without sandbox
  // restrictions; "use_default" is the default per the tool schema.
  sandbox_permissions?: "use_default" | "require_escalated";
  // Only meaningful with escalated permissions: user-facing question
  // summarizing why the command should run outside the sandbox.
  justification?: string;
  // Only with escalated permissions: suggested allow-list command prefix
  // (e.g. ["git", "pull"]) for similar future requests.
  prefix_rule?: string[];
  // Abort signal to cancel a running command.
  signal?: AbortSignal;
  // Streaming callback invoked with each output chunk and its stream.
  onOutput?: (chunk: string, stream: "stdout" | "stderr") => void;
}
@@ -31,13 +33,13 @@ export async function shell_command(
const {
command,
workdir,
login = true,
timeout_ms,
with_escalated_permissions,
justification,
signal,
onOutput,
} = args;
const launchers = buildShellLaunchers(command);
const launchers = buildShellLaunchers(command, { login });
if (launchers.length === 0) {
throw new Error("Command must be a non-empty string");
}
@@ -51,7 +53,6 @@ export async function shell_command(
command: launcher,
workdir,
timeout_ms,
with_escalated_permissions,
justification,
signal,
onOutput,

View File

@@ -1,4 +1,7 @@
const SEP = "\u0000";
type ShellLaunchOptions = {
login?: boolean;
};
function pushUnique(
list: string[][],
@@ -53,7 +56,16 @@ function windowsLaunchers(command: string): string[][] {
return launchers;
}
function unixLaunchers(command: string): string[][] {
/**
 * Choose the command flag for a shell invocation.
 *
 * Returns "-lc" (login + command) only when login semantics are requested
 * AND the shell looks like bash or zsh; every other case uses plain "-c".
 * Backslashes are normalized so Windows-style paths still match.
 */
function shellCommandFlag(shellName: string, login: boolean): string {
  if (login) {
    const normalizedName = shellName.replace(/\\/g, "/").toLowerCase();
    if (["bash", "zsh"].some((shell) => normalizedName.includes(shell))) {
      return "-lc";
    }
  }
  return "-c";
}
function unixLaunchers(command: string, login: boolean): string[][] {
const trimmed = command.trim();
if (!trimmed) return [];
const launchers: string[][] = [];
@@ -62,42 +74,50 @@ function unixLaunchers(command: string): string[][] {
// On macOS, ALWAYS prefer zsh first due to bash 3.2's HEREDOC parsing bug
// with odd numbers of apostrophes. This takes precedence over $SHELL.
if (process.platform === "darwin") {
pushUnique(launchers, seen, ["/bin/zsh", "-c", trimmed]);
pushUnique(launchers, seen, [
"/bin/zsh",
shellCommandFlag("/bin/zsh", login),
trimmed,
]);
}
// Try user's preferred shell from $SHELL environment variable
// Use -c (non-login) to avoid profile sourcing that can hang on CI
// Use login semantics only when explicitly requested.
const envShell = process.env.SHELL?.trim();
if (envShell) {
pushUnique(launchers, seen, [envShell, "-c", trimmed]);
pushUnique(launchers, seen, [
envShell,
shellCommandFlag(envShell, login),
trimmed,
]);
}
// Fallback defaults - prefer simple "bash" PATH lookup first (like original code)
// then absolute paths. Use -c (non-login shell) to avoid profile sourcing.
// Fallback defaults - prefer simple "bash" PATH lookup first (like original code),
// then absolute paths.
const defaults: string[][] =
process.platform === "darwin"
? [
["/bin/zsh", "-c", trimmed],
["bash", "-c", trimmed], // PATH lookup, like original
["/bin/bash", "-c", trimmed],
["/usr/bin/bash", "-c", trimmed],
["/bin/sh", "-c", trimmed],
["/bin/ash", "-c", trimmed],
["/usr/bin/env", "zsh", "-c", trimmed],
["/usr/bin/env", "bash", "-c", trimmed],
["/usr/bin/env", "sh", "-c", trimmed],
["/usr/bin/env", "ash", "-c", trimmed],
["/bin/zsh", shellCommandFlag("/bin/zsh", login), trimmed],
["bash", shellCommandFlag("bash", login), trimmed], // PATH lookup, like original
["/bin/bash", shellCommandFlag("/bin/bash", login), trimmed],
["/usr/bin/bash", shellCommandFlag("/usr/bin/bash", login), trimmed],
["/bin/sh", shellCommandFlag("/bin/sh", login), trimmed],
["/bin/ash", shellCommandFlag("/bin/ash", login), trimmed],
["/usr/bin/env", "zsh", shellCommandFlag("zsh", login), trimmed],
["/usr/bin/env", "bash", shellCommandFlag("bash", login), trimmed],
["/usr/bin/env", "sh", shellCommandFlag("sh", login), trimmed],
["/usr/bin/env", "ash", shellCommandFlag("ash", login), trimmed],
]
: [
["/bin/bash", "-c", trimmed],
["/usr/bin/bash", "-c", trimmed],
["/bin/zsh", "-c", trimmed],
["/bin/sh", "-c", trimmed],
["/bin/ash", "-c", trimmed],
["/usr/bin/env", "bash", "-c", trimmed],
["/usr/bin/env", "zsh", "-c", trimmed],
["/usr/bin/env", "sh", "-c", trimmed],
["/usr/bin/env", "ash", "-c", trimmed],
["/bin/bash", shellCommandFlag("/bin/bash", login), trimmed],
["/usr/bin/bash", shellCommandFlag("/usr/bin/bash", login), trimmed],
["/bin/zsh", shellCommandFlag("/bin/zsh", login), trimmed],
["/bin/sh", shellCommandFlag("/bin/sh", login), trimmed],
["/bin/ash", shellCommandFlag("/bin/ash", login), trimmed],
["/usr/bin/env", "bash", shellCommandFlag("bash", login), trimmed],
["/usr/bin/env", "zsh", shellCommandFlag("zsh", login), trimmed],
["/usr/bin/env", "sh", shellCommandFlag("sh", login), trimmed],
["/usr/bin/env", "ash", shellCommandFlag("ash", login), trimmed],
];
for (const entry of defaults) {
pushUnique(launchers, seen, entry);
@@ -105,8 +125,12 @@ function unixLaunchers(command: string): string[][] {
return launchers;
}
export function buildShellLaunchers(command: string): string[][] {
export function buildShellLaunchers(
command: string,
options?: ShellLaunchOptions,
): string[][] {
const login = options?.login ?? false;
return process.platform === "win32"
? windowsLaunchers(command)
: unixLaunchers(command);
: unixLaunchers(command, login);
}

View File

@@ -82,15 +82,11 @@ export const ANTHROPIC_DEFAULT_TOOLS: ToolName[] = [
export const OPENAI_DEFAULT_TOOLS: ToolName[] = [
"shell_command",
"shell",
"read_file",
"list_dir",
"grep_files",
// TODO(codex-parity): add once request_user_input tool exists in raw codex path.
// "request_user_input",
"apply_patch",
"update_plan",
"view_image",
"Skill",
"Task",
];
export const GEMINI_DEFAULT_TOOLS: ToolName[] = [
@@ -117,11 +113,7 @@ export const OPENAI_PASCAL_TOOLS: ToolName[] = [
"Skill",
// Standard Codex tools
"ShellCommand",
"Shell",
"ReadFile",
"view_image",
"ListDir",
"GrepFiles",
"ViewImage",
"ApplyPatch",
"UpdatePlan",
];
@@ -162,6 +154,7 @@ const TOOL_PERMISSIONS: Record<ToolName, { requiresApproval: boolean }> = {
MultiEdit: { requiresApproval: true },
Read: { requiresApproval: false },
view_image: { requiresApproval: false },
ViewImage: { requiresApproval: false },
ReadLSP: { requiresApproval: false },
Skill: { requiresApproval: false },
Task: { requiresApproval: true },

View File

@@ -17,13 +17,20 @@
"type": "number",
"description": "The timeout for the command in milliseconds"
},
"with_escalated_permissions": {
"type": "boolean",
"description": "Whether to request escalated permissions. Set to true if command needs to be run without sandbox restrictions"
"sandbox_permissions": {
"type": "string",
"description": "Sandbox permissions for the command. Set to \"require_escalated\" to request running without sandbox restrictions; defaults to \"use_default\"."
},
"justification": {
"type": "string",
"description": "Only set if with_escalated_permissions is true. 1-sentence explanation of why we want to run this command."
"description": "Only set if sandbox_permissions is \"require_escalated\". Request approval from the user to run this command outside the sandbox. Phrased as a simple question that summarizes the purpose of the command as it relates to the task at hand - e.g. 'Do you want to fetch and pull the latest version of this git branch?'"
},
"prefix_rule": {
"type": "array",
"items": {
"type": "string"
},
"description": "Only specify when sandbox_permissions is `require_escalated`. Suggest a prefix command pattern that will allow you to fulfill similar requests from the user in the future. Should be a short but reasonable prefix, e.g. [\"git\", \"pull\"] or [\"uv\", \"run\"] or [\"pytest\"]."
}
},
"required": ["command"],

View File

@@ -10,17 +10,28 @@
"type": "string",
"description": "The working directory to execute the command in"
},
"login": {
"type": "boolean",
"description": "Whether to run the shell with login shell semantics. Defaults to true."
},
"timeout_ms": {
"type": "number",
"description": "The timeout for the command in milliseconds"
},
"with_escalated_permissions": {
"type": "boolean",
"description": "Whether to request escalated permissions. Set to true if command needs to be run without sandbox restrictions"
"sandbox_permissions": {
"type": "string",
"description": "Sandbox permissions for the command. Set to \"require_escalated\" to request running without sandbox restrictions; defaults to \"use_default\"."
},
"justification": {
"type": "string",
"description": "Only set if with_escalated_permissions is true. 1-sentence explanation of why we want to run this command."
"description": "Only set if sandbox_permissions is \"require_escalated\". Request approval from the user to run this command outside the sandbox. Phrased as a simple question that summarizes the purpose of the command as it relates to the task at hand - e.g. 'Do you want to fetch and pull the latest version of this git branch?'"
},
"prefix_rule": {
"type": "array",
"items": {
"type": "string"
},
"description": "Only specify when sandbox_permissions is `require_escalated`. Suggest a prefix command pattern that will allow you to fulfill similar requests from the user in the future. Should be a short but reasonable prefix, e.g. [\"git\", \"pull\"] or [\"uv\", \"run\"] or [\"pytest\"]."
}
},
"required": ["command"],

View File

@@ -1,12 +1,12 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"path": {
"type": "string",
"description": "The absolute path to the image file"
"description": "Local filesystem path to an image file"
}
},
"required": ["path"],
"additionalProperties": false,
"$schema": "http://json-schema.org/draft-07/schema#"
"additionalProperties": false
}

View File

@@ -194,6 +194,11 @@ const toolDefinitions = {
description: ViewImageDescription.trim(),
impl: view_image as unknown as ToolImplementation,
},
ViewImage: {
schema: ViewImageSchema,
description: ViewImageDescription.trim(),
impl: view_image as unknown as ToolImplementation,
},
// LSP-enhanced Read - used when LETTA_ENABLE_LSP is set
ReadLSP: {
schema: ReadLSPSchema,

View File

@@ -8,11 +8,13 @@ import {
isOpenAIModel,
loadSpecificTools,
loadTools,
OPENAI_DEFAULT_TOOLS,
OPENAI_PASCAL_TOOLS,
} from "./manager";
// Toolset definitions from manager.ts (single source of truth)
const CODEX_TOOLS = OPENAI_PASCAL_TOOLS;
const CODEX_SNAKE_TOOLS = OPENAI_DEFAULT_TOOLS;
const GEMINI_TOOLS = GEMINI_PASCAL_TOOLS;
// Server-side memory tool names that can mutate memory blocks.
@@ -228,7 +230,7 @@ export async function forceToolsetSwitch(
await loadSpecificTools([...CODEX_TOOLS]);
modelForLoading = "openai/gpt-4";
} else if (toolsetName === "codex_snake") {
await loadTools("openai/gpt-4");
await loadSpecificTools([...CODEX_SNAKE_TOOLS]);
modelForLoading = "openai/gpt-4";
} else if (toolsetName === "gemini") {
await loadSpecificTools([...GEMINI_TOOLS]);