feat: USE_MAGICK build env var to use magick variant of imageResize (#742)

This commit is contained in:
Kainoa Kanter
2026-01-29 14:46:31 -08:00
committed by GitHub
parent fe1070fb21
commit 66eeac5b59
6 changed files with 479 additions and 169 deletions

View File

@@ -15,8 +15,16 @@ const __dirname = dirname(__filename);
// Read version from package.json
const pkg = JSON.parse(readFileSync(join(__dirname, "package.json"), "utf-8"));
const version = pkg.version;
// Optional build flag: when USE_MAGICK is set, bundle the ImageMagick-based
// imageResize variant and mark `sharp` external so it is not bundled.
const useMagick = Bun.env.USE_MAGICK;
const features = [];
const externalDeps = [];
console.log(`📦 Building Letta Code v${version}...`);
if (useMagick) {
  console.log(`🪄 Using magick variant of imageResize...`);
  features.push("USE_MAGICK");
  externalDeps.push("sharp");
}
await Bun.build({
entrypoints: ["./src/index.ts"],
@@ -38,6 +46,8 @@ await Bun.build({
".mdx": "text",
".txt": "text",
},
features: features,
external: externalDeps,
});
// Add shebang to output file

View File

@@ -12,7 +12,7 @@
"sharp": "^0.34.5",
},
"devDependencies": {
"@types/bun": "latest",
"@types/bun": "^1.3.7",
"@types/diff": "^8.0.0",
"@types/picomatch": "^4.0.2",
"@types/react": "^19.2.9",
@@ -93,7 +93,7 @@
"@letta-ai/letta-client": ["@letta-ai/letta-client@1.7.6", "", {}, "sha512-C/f03uE3TJdgfHk/8rRBxzWvY0YHCYAlrePHcTd0CRHMo++0TA1OTcgiCF+EFVDVYGzfPSeMpqgAZTNvD9r9GQ=="],
"@types/bun": ["@types/bun@1.3.1", "", { "dependencies": { "bun-types": "1.3.1" } }, "sha512-4jNMk2/K9YJtfqwoAa28c8wK+T7nvJFOjxI4h/7sORWcypRNxBpr+TPNaCfVWq70tLCJsqoFwcf0oI0JU/fvMQ=="],
"@types/bun": ["@types/bun@1.3.7", "", { "dependencies": { "bun-types": "1.3.7" } }, "sha512-lmNuMda+Z9b7tmhA0tohwy8ZWFSnmQm1UDWXtH5r9F7wZCfkeO3Jx7wKQ1EOiKq43yHts7ky6r8SDJQWRNupkA=="],
"@types/diff": ["@types/diff@8.0.0", "", { "dependencies": { "diff": "*" } }, "sha512-o7jqJM04gfaYrdCecCVMbZhNdG6T1MHg/oQoRFdERLV+4d+V7FijhiEAbFu0Usww84Yijk9yH58U4Jk4HbtzZw=="],
@@ -119,7 +119,7 @@
"buffer-crc32": ["buffer-crc32@0.2.13", "", {}, "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ=="],
"bun-types": ["bun-types@1.3.1", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-NMrcy7smratanWJ2mMXdpatalovtxVggkj11bScuWuiOoXTiKIu2eVS1/7qbyI/4yHedtsn175n4Sm4JcdHLXw=="],
"bun-types": ["bun-types@1.3.7", "", { "dependencies": { "@types/node": "*" } }, "sha512-qyschsA03Qz+gou+apt6HNl6HnI+sJJLL4wLDke4iugsE6584CMupOtTY1n+2YC9nGVrEKUlTs99jjRLKgWnjQ=="],
"bundle-name": ["bundle-name@4.1.0", "", { "dependencies": { "run-applescript": "^7.0.0" } }, "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q=="],

View File

@@ -40,7 +40,7 @@
"@vscode/ripgrep": "^1.17.0"
},
"devDependencies": {
"@types/bun": "latest",
"@types/bun": "^1.3.7",
"@types/diff": "^8.0.0",
"@types/picomatch": "^4.0.2",
"@types/react": "^19.2.9",

View File

@@ -0,0 +1,280 @@
// Image resizing utilities for clipboard paste
// Follows Codex CLI's approach (codex-rs/utils/image/src/lib.rs)
import { execSync } from "node:child_process";
import { readFileSync, unlinkSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
// Anthropic limits: 8000x8000 for single images, but 2000x2000 for many-image requests
// We use 2000 to stay safe when conversation history accumulates multiple images
export const MAX_IMAGE_WIDTH = 2000;
export const MAX_IMAGE_HEIGHT = 2000;
// Anthropic's API enforces a 5MB limit on image bytes (not base64 string)
// We enforce this in the client to avoid API errors
export const MAX_IMAGE_BYTES = 5 * 1024 * 1024; // 5MB = 5,242,880 bytes
export interface ResizeResult {
data: string; // base64 encoded
mediaType: string;
width: number;
height: number;
resized: boolean;
}
/**
 * Get image dimensions and format by shelling out to ImageMagick's
 * `magick identify`. The buffer is written to a temp file first because
 * `identify` reads from disk.
 *
 * @param buffer - encoded image bytes
 * @returns pixel width/height and the lowercased format name (e.g. "png")
 * @throws if `magick` is missing, exits non-zero, or its output cannot be
 *         parsed into two integers and a format token
 */
async function getImageDimensions(
  buffer: Buffer,
): Promise<{ width: number; height: number; format: string }> {
  const tempInput = join(
    tmpdir(),
    `image-${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`,
  );
  writeFileSync(tempInput, buffer);
  try {
    const output = execSync(
      `magick identify -format "%w %h %m" "${tempInput}"`,
      {
        encoding: "utf-8",
      },
    );
    const [width, height, format] = output.trim().split(" ");
    if (!width || !height || !format) {
      throw new Error("Failed to get image dimensions");
    }
    const parsedWidth = parseInt(width, 10);
    const parsedHeight = parseInt(height, 10);
    // FIX: guard against NaN from unexpected identify output instead of
    // silently propagating bad numbers to the resize/compress math.
    if (!Number.isFinite(parsedWidth) || !Number.isFinite(parsedHeight)) {
      throw new Error(`Unparseable image dimensions: ${output.trim()}`);
    }
    return {
      width: parsedWidth,
      height: parsedHeight,
      format: format.toLowerCase(),
    };
  } finally {
    // Always remove the temp file, even when identify fails.
    unlinkSync(tempInput);
  }
}
/**
 * Compress an image to fit within MAX_IMAGE_BYTES.
 *
 * Strategy (mirrors the sharp variant): first walk down a ladder of JPEG
 * qualities at full resolution; if none fits, shrink dimensions as well.
 * Returns null when the input is already under the limit.
 *
 * @throws when even 25% scale at quality 70 still exceeds the limit, or
 *         when the `magick` CLI itself fails
 */
async function compressToFitByteLimit(
  buffer: Buffer,
  currentWidth: number,
  currentHeight: number,
): Promise<ResizeResult | null> {
  if (buffer.length <= MAX_IMAGE_BYTES) {
    // Already small enough — caller keeps the original bytes.
    return null;
  }
  const srcPath = join(
    tmpdir(),
    `compress-input-${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`,
  );
  writeFileSync(srcPath, buffer);
  // One re-encode attempt: run magick with the given middle arguments,
  // return a ResizeResult when the output fits, null when still too big.
  const attempt = async (magickArgs: string): Promise<ResizeResult | null> => {
    const dstPath = join(
      tmpdir(),
      `compress-output-${Date.now()}-${Math.random().toString(36).slice(2)}.jpg`,
    );
    try {
      execSync(`magick "${srcPath}" ${magickArgs} "${dstPath}"`, {
        stdio: "ignore",
      });
      const candidate = readFileSync(dstPath);
      if (candidate.length > MAX_IMAGE_BYTES) {
        return null;
      }
      // Re-measure the encoded bytes so the reported size is the real one.
      const { width, height } = await getImageDimensions(candidate);
      return {
        data: candidate.toString("base64"),
        mediaType: "image/jpeg",
        width,
        height,
        resized: true,
      };
    } finally {
      try {
        unlinkSync(dstPath);
      } catch {}
    }
  };
  try {
    // Phase 1: progressively lower JPEG quality at full resolution.
    for (const quality of [85, 70, 55, 40]) {
      const result = await attempt(`-quality ${quality}`);
      if (result) {
        return result;
      }
    }
    // Phase 2: quality alone was not enough — shrink dimensions too.
    for (const scale of [0.75, 0.5, 0.25]) {
      const scaledWidth = Math.floor(currentWidth * scale);
      const scaledHeight = Math.floor(currentHeight * scale);
      const result = await attempt(
        `-resize ${scaledWidth}x${scaledHeight} -quality 70`,
      );
      if (result) {
        return result;
      }
    }
    // Extremely rare: even 25% scale at q70 doesn't fit.
    throw new Error(
      `Image too large: ${(buffer.length / 1024 / 1024).toFixed(1)}MB exceeds 5MB limit even after compression`,
    );
  } finally {
    unlinkSync(srcPath);
  }
}
/**
 * Resize image if it exceeds MAX_IMAGE_WIDTH or MAX_IMAGE_HEIGHT.
 * Uses 'inside' fit to preserve aspect ratio (like Codex's resize behavior).
 * Returns original if already within limits and format is supported.
 * Every return path also enforces MAX_IMAGE_BYTES via compressToFitByteLimit.
 */
export async function resizeImageIfNeeded(
  buffer: Buffer,
  inputMediaType: string,
): Promise<ResizeResult> {
  const { width, height, format } = await getImageDimensions(buffer);
  const needsResize = width > MAX_IMAGE_WIDTH || height > MAX_IMAGE_HEIGHT;
  // Determine if we can pass through the original format
  const isPassthroughFormat =
    format === "png" || format === "jpeg" || format === "jpg";
  if (!needsResize && isPassthroughFormat) {
    // No resize needed and format is supported - but check byte limit
    const compressed = await compressToFitByteLimit(buffer, width, height);
    if (compressed) {
      return compressed;
    }
    return {
      data: buffer.toString("base64"),
      mediaType: inputMediaType,
      width,
      height,
      resized: false,
    };
  }
  const tempInput = join(
    tmpdir(),
    `resize-input-${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`,
  );
  writeFileSync(tempInput, buffer);
  try {
    if (needsResize) {
      // ImageMagick geometry "WxH>" preserves aspect ratio and only shrinks
      // (never enlarges) - equivalent to sharp's 'inside' fit.
      const tempOutput = join(
        tmpdir(),
        `resize-output-${Date.now()}-${Math.random().toString(36).slice(2)}`,
      );
      let outputBuffer: Buffer;
      let outputMediaType: string;
      if (format === "jpeg" || format === "jpg") {
        // Preserve JPEG format with good quality (Codex uses 85)
        const outPath = `${tempOutput}.jpg`;
        try {
          // FIX: the ">" geometry flag must be quoted. Unquoted, the shell
          // treats it as output redirection (here it redirected stdout to a
          // file literally named "-quality" and passed a stray "85" to
          // magick), breaking the command entirely.
          execSync(
            `magick "${tempInput}" -resize "${MAX_IMAGE_WIDTH}x${MAX_IMAGE_HEIGHT}>" -quality 85 "${outPath}"`,
            {
              stdio: "ignore",
            },
          );
          outputBuffer = readFileSync(outPath);
        } finally {
          // FIX: clean up even when magick/readFileSync throws (the original
          // leaked the temp output on failure).
          try {
            unlinkSync(outPath);
          } catch {}
        }
        outputMediaType = "image/jpeg";
      } else {
        // Default to PNG for everything else
        const outPath = `${tempOutput}.png`;
        try {
          // FIX: same ">" quoting bug — unquoted it redirected stdout to the
          // intended output path and left magick with no output argument.
          execSync(
            `magick "${tempInput}" -resize "${MAX_IMAGE_WIDTH}x${MAX_IMAGE_HEIGHT}>" "${outPath}"`,
            {
              stdio: "ignore",
            },
          );
          outputBuffer = readFileSync(outPath);
        } finally {
          try {
            unlinkSync(outPath);
          } catch {}
        }
        outputMediaType = "image/png";
      }
      const { width: resizedWidth, height: resizedHeight } =
        await getImageDimensions(outputBuffer);
      // Check byte limit after dimension resize
      const compressed = await compressToFitByteLimit(
        outputBuffer,
        resizedWidth,
        resizedHeight,
      );
      if (compressed) {
        return compressed;
      }
      return {
        data: outputBuffer.toString("base64"),
        mediaType: outputMediaType,
        width: resizedWidth,
        height: resizedHeight,
        resized: true,
      };
    }
    // No resize needed but format needs conversion (e.g., HEIC, TIFF, etc.)
    const tempOutput = join(
      tmpdir(),
      `convert-output-${Date.now()}-${Math.random().toString(36).slice(2)}.png`,
    );
    let outputBuffer: Buffer;
    try {
      execSync(`magick "${tempInput}" "${tempOutput}"`, {
        stdio: "ignore",
      });
      outputBuffer = readFileSync(tempOutput);
    } finally {
      // FIX: don't leak the conversion output if magick or the read fails.
      try {
        unlinkSync(tempOutput);
      } catch {}
    }
    // Check byte limit after format conversion
    const compressed = await compressToFitByteLimit(outputBuffer, width, height);
    if (compressed) {
      return compressed;
    }
    return {
      data: outputBuffer.toString("base64"),
      mediaType: "image/png",
      width,
      height,
      resized: false,
    };
  } finally {
    unlinkSync(tempInput);
  }
}

View File

@@ -0,0 +1,179 @@
// Image resizing utilities for clipboard paste
// Follows Codex CLI's approach (codex-rs/utils/image/src/lib.rs)
import sharp from "sharp";
// Anthropic limits: 8000x8000 for single images, but 2000x2000 for many-image requests
// We use 2000 to stay safe when conversation history accumulates multiple images
export const MAX_IMAGE_WIDTH = 2000;
export const MAX_IMAGE_HEIGHT = 2000;
// Anthropic's API enforces a 5MB limit on image bytes (not base64 string)
// We enforce this in the client to avoid API errors
export const MAX_IMAGE_BYTES = 5 * 1024 * 1024; // 5MB = 5,242,880 bytes
export interface ResizeResult {
data: string; // base64 encoded
mediaType: string;
width: number;
height: number;
resized: boolean;
}
/**
 * Compress an image to fit within MAX_IMAGE_BYTES.
 *
 * Two phases: first walk down a ladder of JPEG qualities at the original
 * dimensions; if none fits, shrink dimensions as well. Returns null when
 * the buffer is already under the limit.
 *
 * @throws when even 25% scale at quality 70 still exceeds the limit
 */
async function compressToFitByteLimit(
  buffer: Buffer,
  currentWidth: number,
  currentHeight: number,
): Promise<ResizeResult | null> {
  if (buffer.length <= MAX_IMAGE_BYTES) {
    // Already small enough — caller keeps the original bytes.
    return null;
  }
  // Wrap an encoded candidate into a ResizeResult, reading the actual
  // dimensions back from the encoded bytes (fallbacks if metadata is absent).
  const toResult = async (
    encoded: Buffer,
    fallbackWidth: number,
    fallbackHeight: number,
  ): Promise<ResizeResult> => {
    const meta = await sharp(encoded).metadata();
    return {
      data: encoded.toString("base64"),
      mediaType: "image/jpeg",
      width: meta.width ?? fallbackWidth,
      height: meta.height ?? fallbackHeight,
      resized: true,
    };
  };
  // Phase 1: progressively lower JPEG quality at full resolution.
  for (const quality of [85, 70, 55, 40]) {
    const encoded = await sharp(buffer).jpeg({ quality }).toBuffer();
    if (encoded.length <= MAX_IMAGE_BYTES) {
      return toResult(encoded, currentWidth, currentHeight);
    }
  }
  // Phase 2: quality alone was not enough — scale dimensions down too.
  for (const scale of [0.75, 0.5, 0.25]) {
    const targetWidth = Math.floor(currentWidth * scale);
    const targetHeight = Math.floor(currentHeight * scale);
    const encoded = await sharp(buffer)
      .resize(targetWidth, targetHeight, {
        fit: "inside",
        withoutEnlargement: true,
      })
      .jpeg({ quality: 70 })
      .toBuffer();
    if (encoded.length <= MAX_IMAGE_BYTES) {
      return toResult(encoded, targetWidth, targetHeight);
    }
  }
  // Extremely rare: even 25% scale at q70 doesn't fit.
  throw new Error(
    `Image too large: ${(buffer.length / 1024 / 1024).toFixed(1)}MB exceeds 5MB limit even after compression`,
  );
}
/**
 * Resize image if it exceeds MAX_IMAGE_WIDTH or MAX_IMAGE_HEIGHT.
 * Uses 'inside' fit to preserve aspect ratio (like Codex's resize behavior).
 * Returns original if already within limits and format is supported;
 * other in-limit formats are converted to PNG. Every return path also
 * enforces MAX_IMAGE_BYTES via compressToFitByteLimit.
 */
export async function resizeImageIfNeeded(
  buffer: Buffer,
  inputMediaType: string,
): Promise<ResizeResult> {
  const image = sharp(buffer);
  const meta = await image.metadata();
  // Missing metadata falls back to 0, which routes through "no resize".
  const width = meta.width ?? 0;
  const height = meta.height ?? 0;
  const withinLimits = width <= MAX_IMAGE_WIDTH && height <= MAX_IMAGE_HEIGHT;
  if (withinLimits && (meta.format === "png" || meta.format === "jpeg")) {
    // Dimensions and format are acceptable — pass the original bytes
    // through unless they exceed the byte cap.
    return (
      (await compressToFitByteLimit(buffer, width, height)) ?? {
        data: buffer.toString("base64"),
        mediaType: inputMediaType,
        width,
        height,
        resized: false,
      }
    );
  }
  if (!withinLimits) {
    // Shrink-only, aspect-ratio-preserving resize ('inside' fit).
    const shrunk = image.resize(MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT, {
      fit: "inside",
      withoutEnlargement: true,
    });
    // Keep JPEG as JPEG at quality 85 (matching Codex); all other
    // formats are re-encoded as PNG.
    const keepJpeg = meta.format === "jpeg";
    const outputBuffer = keepJpeg
      ? await shrunk.jpeg({ quality: 85 }).toBuffer()
      : await shrunk.png().toBuffer();
    const outputMediaType = keepJpeg ? "image/jpeg" : "image/png";
    // Re-read the output to report the dimensions sharp actually produced.
    const outMeta = await sharp(outputBuffer).metadata();
    const outWidth = outMeta.width ?? 0;
    const outHeight = outMeta.height ?? 0;
    return (
      (await compressToFitByteLimit(outputBuffer, outWidth, outHeight)) ?? {
        data: outputBuffer.toString("base64"),
        mediaType: outputMediaType,
        width: outWidth,
        height: outHeight,
        resized: true,
      }
    );
  }
  // Within limits but unsupported format (e.g., HEIC, TIFF) — convert to PNG.
  const converted = await image.png().toBuffer();
  return (
    (await compressToFitByteLimit(converted, width, height)) ?? {
      data: converted.toString("base64"),
      mediaType: "image/png",
      width,
      height,
      resized: false,
    }
  );
}

View File

@@ -1,15 +1,10 @@
// Image resizing utilities for clipboard paste
// Follows Codex CLI's approach (codex-rs/utils/image/src/lib.rs)
import sharp from "sharp";
import { feature } from "bun:bundle";
// Anthropic limits: 8000x8000 for single images, but 2000x2000 for many-image requests
// We use 2000 to stay safe when conversation history accumulates multiple images
export const MAX_IMAGE_WIDTH = 2000;
export const MAX_IMAGE_HEIGHT = 2000;
// Anthropic's API enforces a 5MB limit on image bytes (not base64 string)
// We enforce this in the client to avoid API errors
export const MAX_IMAGE_BYTES = 5 * 1024 * 1024; // 5MB = 5,242,880 bytes
export const MAX_IMAGE_BYTES = 5 * 1024 * 1024;
export interface ResizeResult {
data: string; // base64 encoded
@@ -19,161 +14,7 @@ export interface ResizeResult {
resized: boolean;
}
/**
 * Compress an image to fit within MAX_IMAGE_BYTES using progressive JPEG quality reduction.
 * If quality reduction alone isn't enough, also reduces dimensions.
 * Returns null if compression is not needed (image already under limit).
 *
 * @param buffer - encoded image bytes to shrink
 * @param currentWidth - known pixel width of `buffer` (metadata fallback)
 * @param currentHeight - known pixel height of `buffer` (metadata fallback)
 * @returns a JPEG-encoded ResizeResult, or null when no compression was needed
 * @throws when even 25% scale at quality 70 still exceeds MAX_IMAGE_BYTES
 */
async function compressToFitByteLimit(
buffer: Buffer,
currentWidth: number,
currentHeight: number,
): Promise<ResizeResult | null> {
// Check if compression is needed
if (buffer.length <= MAX_IMAGE_BYTES) {
return null; // No compression needed
}
// Try progressive JPEG quality reduction
const qualities = [85, 70, 55, 40];
for (const quality of qualities) {
const compressed = await sharp(buffer).jpeg({ quality }).toBuffer();
if (compressed.length <= MAX_IMAGE_BYTES) {
// Re-read metadata from the encoded bytes to report the true output size
const meta = await sharp(compressed).metadata();
return {
data: compressed.toString("base64"),
mediaType: "image/jpeg",
width: meta.width ?? currentWidth,
height: meta.height ?? currentHeight,
resized: true,
};
}
}
// Quality reduction wasn't enough - also reduce dimensions
const scales = [0.75, 0.5, 0.25];
for (const scale of scales) {
const scaledWidth = Math.floor(currentWidth * scale);
const scaledHeight = Math.floor(currentHeight * scale);
// 'inside' fit keeps aspect ratio; withoutEnlargement never upscales
const reduced = await sharp(buffer)
.resize(scaledWidth, scaledHeight, {
fit: "inside",
withoutEnlargement: true,
})
.jpeg({ quality: 70 })
.toBuffer();
if (reduced.length <= MAX_IMAGE_BYTES) {
const meta = await sharp(reduced).metadata();
return {
data: reduced.toString("base64"),
mediaType: "image/jpeg",
width: meta.width ?? scaledWidth,
height: meta.height ?? scaledHeight,
resized: true,
};
}
}
// Extremely rare: even 25% scale at q70 doesn't fit
throw new Error(
`Image too large: ${(buffer.length / 1024 / 1024).toFixed(1)}MB exceeds 5MB limit even after compression`,
);
}
/**
 * Resize image if it exceeds MAX_IMAGE_WIDTH or MAX_IMAGE_HEIGHT.
 * Uses 'inside' fit to preserve aspect ratio (like Codex's resize behavior).
 * Returns original if already within limits and format is supported.
 * Every return path also enforces MAX_IMAGE_BYTES via compressToFitByteLimit.
 *
 * @param buffer - encoded image bytes
 * @param inputMediaType - MIME type reported for `buffer`, echoed back on passthrough
 */
export async function resizeImageIfNeeded(
buffer: Buffer,
inputMediaType: string,
): Promise<ResizeResult> {
const image = sharp(buffer);
const metadata = await image.metadata();
// Missing metadata falls back to 0, which routes through "no resize".
const width = metadata.width ?? 0;
const height = metadata.height ?? 0;
const format = metadata.format;
const needsResize = width > MAX_IMAGE_WIDTH || height > MAX_IMAGE_HEIGHT;
// Determine if we can pass through the original format
const isPassthroughFormat = format === "png" || format === "jpeg";
if (!needsResize && isPassthroughFormat) {
// No resize needed and format is supported - but check byte limit
const compressed = await compressToFitByteLimit(buffer, width, height);
if (compressed) {
return compressed;
}
return {
data: buffer.toString("base64"),
mediaType: inputMediaType,
width,
height,
resized: false,
};
}
if (needsResize) {
// Resize preserving aspect ratio
// Use 'inside' fit which is equivalent to Codex's resize behavior
const resized = image.resize(MAX_IMAGE_WIDTH, MAX_IMAGE_HEIGHT, {
fit: "inside",
withoutEnlargement: true,
});
// Output as PNG for lossless quality (or JPEG if input was JPEG)
let outputBuffer: Buffer;
let outputMediaType: string;
if (format === "jpeg") {
// Preserve JPEG format with good quality (Codex uses 85)
outputBuffer = await resized.jpeg({ quality: 85 }).toBuffer();
outputMediaType = "image/jpeg";
} else {
// Default to PNG for everything else
outputBuffer = await resized.png().toBuffer();
outputMediaType = "image/png";
}
// Re-read the output to report the dimensions sharp actually produced
const resizedMeta = await sharp(outputBuffer).metadata();
const resizedWidth = resizedMeta.width ?? 0;
const resizedHeight = resizedMeta.height ?? 0;
// Check byte limit after dimension resize
const compressed = await compressToFitByteLimit(
outputBuffer,
resizedWidth,
resizedHeight,
);
if (compressed) {
return compressed;
}
return {
data: outputBuffer.toString("base64"),
mediaType: outputMediaType,
width: resizedWidth,
height: resizedHeight,
resized: true,
};
}
// No resize needed but format needs conversion (e.g., HEIC, TIFF, etc.)
const outputBuffer = await image.png().toBuffer();
// Check byte limit after format conversion
const compressed = await compressToFitByteLimit(outputBuffer, width, height);
if (compressed) {
return compressed;
}
return {
data: outputBuffer.toString("base64"),
mediaType: "image/png",
width,
height,
resized: false,
};
}
// Import the correct implementation based on feature flag.
// NOTE(review): `feature()` from "bun:bundle" is presumably folded to a
// constant at build time (the build script passes `features` to Bun.build),
// so the untaken branch — and its import of sharp — should be eliminated
// from the bundle. Confirm behavior when this file is run unbundled.
export const resizeImageIfNeeded = feature("USE_MAGICK")
? (await import("./imageResize.magick.js")).resizeImageIfNeeded
: (await import("./imageResize.sharp.js")).resizeImageIfNeeded;