Skip to content

Instantly share code, notes, and snippets.

@kenlane33
Last active February 28, 2026 20:48
Show Gist options
  • Select an option

  • Save kenlane33/39edd452fbd59787e0868fd37d1aaaae to your computer and use it in GitHub Desktop.

Select an option

Save kenlane33/39edd452fbd59787e0868fd37d1aaaae to your computer and use it in GitHub Desktop.
import sharp from 'sharp';
/** Options controlling how images are glued into a horizontal strip. */
interface GluedImageOptions {
  /** Upper bound on the output canvas height in pixels (default 640). */
  maxOutputHeight?: number;
  /** Horizontal spacing in pixels between adjacent images (default 10). */
  gap?: number;
  /** RGBA fill for the canvas and any letterboxing (default fully transparent). */
  backgroundColor?: { r: number; g: number; b: number; alpha: number };
}
/**
 * Glues an array of images side-by-side into a single horizontal PNG strip.
 *
 * The tallest image in the set (capped at `maxOutputHeight`) determines the
 * canvas height; every image is scaled to that height with its aspect ratio
 * preserved, then composited left-to-right with `gap` pixels between images.
 *
 * @param imageBuffers - Encoded image buffers (any format sharp can decode).
 * @param options - Optional sizing / spacing / background settings.
 * @returns A PNG-encoded buffer containing the combined strip.
 * @throws If `imageBuffers` is empty, `maxOutputHeight` is not positive, or an
 *         image's dimensions cannot be read from its metadata.
 */
export async function glueImagesSideBySide(
  imageBuffers: Buffer[],
  options: GluedImageOptions = {}
): Promise<Buffer> {
  const {
    maxOutputHeight = 640,
    gap = 10,
    backgroundColor = { r: 0, g: 0, b: 0, alpha: 0 }, // transparent by default
  } = options;

  if (imageBuffers.length === 0) {
    throw new Error("No images provided to glue.");
  }
  if (maxOutputHeight <= 0) {
    throw new Error(`maxOutputHeight must be positive, got ${maxOutputHeight}.`);
  }

  // 1. Read metadata for all images up front (in parallel) to size the canvas.
  const metadatas = await Promise.all(
    imageBuffers.map((buf) => sharp(buf).metadata())
  );

  // Fail loudly on unreadable dimensions instead of letting 0/undefined
  // propagate into the resize math below (division by 0 would yield NaN
  // widths, and a 0 tallest-height would make the resize call invalid).
  metadatas.forEach((meta, i) => {
    if (!meta.width || !meta.height) {
      throw new Error(`Image at index ${i} has no readable dimensions.`);
    }
  });

  // 2. Target height: tallest image in the set, clamped to the caller's limit.
  const tallestInSet = Math.max(...metadatas.map((m) => m.height as number));
  const finalHeight = Math.min(tallestInSet, maxOutputHeight);

  // 3. Scale each image to finalHeight, preserving its aspect ratio.
  //    The width is precomputed so the composite X-offsets below are exact.
  const processedImages = await Promise.all(
    imageBuffers.map(async (buf, i) => {
      const originalWidth = metadatas[i].width as number;
      const originalHeight = metadatas[i].height as number;
      // Guard against rounding extremely narrow images down to a 0 width,
      // which sharp rejects.
      const scaledWidth = Math.max(
        1,
        Math.round(originalWidth * (finalHeight / originalHeight))
      );
      const resizedBuffer = await sharp(buf)
        .resize({
          height: finalHeight,
          width: scaledWidth,
          fit: 'contain', // centers (letterboxes) if the box doesn't match exactly
          background: backgroundColor,
        })
        .toBuffer();
      return {
        input: resizedBuffer,
        width: scaledWidth,
      };
    })
  );

  // 4. Total strip width = sum of scaled widths + gaps between neighbors.
  const totalWidth =
    processedImages.reduce((acc, img) => acc + img.width, 0) +
    gap * (processedImages.length - 1);

  // 5. Build the composite layer list with running X-offsets.
  let currentLeft = 0;
  const composition = processedImages.map((img) => {
    const layer = {
      input: img.input,
      left: currentLeft,
      top: 0,
    };
    currentLeft += img.width + gap;
    return layer;
  });

  // 6. Paint everything onto a fresh RGBA canvas.
  return await sharp({
    create: {
      width: totalWidth,
      height: finalHeight,
      channels: 4,
      background: backgroundColor,
    },
  })
    .composite(composition)
    .png() // You can change this to .webp() or .jpeg() for smaller Vercel responses
    .toBuffer();
}
// app/api/stitch/[id]/route.ts
import { glueImagesSideBySide } from 'lib/glueImagesSideBySide.ts';
import { NextRequest, NextResponse } from 'next/server';
/**
 * GET /api/stitch/[id]?h=<maxHeight>
 *
 * Responds with a PNG strip of the fetched images stitched side-by-side.
 * The optional `h` query parameter caps the output height in pixels.
 */
export async function GET(req: NextRequest) {
  const { searchParams } = new URL(req.url);

  // Parse the optional height override with an explicit radix; fall back to
  // 640 on missing/garbage/non-positive input so a bad query string (e.g.
  // ?h=abc) can't push NaN into the resize pipeline.
  const parsedHeight = Number.parseInt(searchParams.get('h') ?? '', 10);
  const maxHeight =
    Number.isFinite(parsedHeight) && parsedHeight > 0 ? parsedHeight : 640;

  // Logic to fetch your images based on ID...
  const buffers: Buffer[] = await fetchYourImages();

  const result = await glueImagesSideBySide(buffers, { maxOutputHeight: maxHeight });

  return new NextResponse(result, {
    headers: {
      'Content-Type': 'image/png',
      // Edge-cache for one day; tune s-maxage to your invalidation needs.
      'Cache-Control': 'public, s-maxage=86400',
    },
  });
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment