Skip to content

Instantly share code, notes, and snippets.

@nickyonge
Last active March 13, 2026 00:35
Show Gist options
  • Select an option

  • Save nickyonge/1988b42e7a30b5019c47125a0009bd81 to your computer and use it in GitHub Desktop.

Select an option

Save nickyonge/1988b42e7a30b5019c47125a0009bd81 to your computer and use it in GitHub Desktop.
JS script that fuzzy recolors images based on target color. WIP
// @ts-check
// gist: https://gist.github.com/nickyonge/1988b42e7a30b5019c47125a0009bd81
// current major issue is that color replacement doesn't work, see RecolorImage func
import {
clampGamut,
converter,
differenceCiede2000
} from 'culori';
// npm i culori
// for info re: color spaces, see https://culorijs.org/color-spaces/
/** @typedef {import('culori').Color} Color */
/** @typedef {import('culori').Rgb} Rgb */
/** @typedef {import('culori').Lab} Lab */
/** @typedef {{r:number, g:number, b:number}} rgbOject */
/** @typedef {'png'|'jpeg'|'webp'} ImageType */
/** Helper math functions, cuz I want to keep this script fully self-contained */
class ColorMath {
/** Convert a {@linkcode Color} to culori type `Rgb` */
static toRgb = converter('rgb');
/** Convert a {@linkcode Color} to culori type `Lab` */
static toLab = converter('lab');
/** Clamp any color space to RGB, see {@linkcode clampGamut} */
static clampToRgb = clampGamut('rgb');
/** CIEDE2000 color difference algo, see {@linkcode differenceCiede2000} */
static deltaE00 = differenceCiede2000();
/**
*
* @param {number} value
* @returns {number}
*/
static Clamp01(value) {
return Math.max(0, Math.min(1, value));
}
/**
*
* @param {number} from
* @param {number} to
* @param {number} delta
* @returns
*/
static Lerp(from, to, delta) {
return from + ((to - from) * delta);
}
/**
* Converts a value between `0` and `1` to between `0` and `255`
* @param {number} value01
* @returns {number}
*/
static ToByte(value01) {
return Math.max(0, Math.min(255, Math.round(value01 * 255)));
}
/**
*
* @param {string|Color} color
* @returns {Rgb}
*/
static ParseToRGB(color) {
const rgb = ColorMath.toRgb(color);
if (!rgb) throw new Error(`Invalid color: ${color}`);
return rgb;
}
/**
*
* @param {string|Color} color
* @returns {Lab}
*/
static ParseToLAB(color) {
const lab = ColorMath.toLab(color);
if (!lab) throw new Error(`Invalid color: ${color}`);
return lab;
}
/**
* convert fuzziness to format used for {@linkcode deltaE00}
* @param {number} fuzziness
* @returns {number}
*/
static FuzzinessToDeltaEThreshold(fuzziness) {
return ColorMath.Clamp01(fuzziness) * 100;
}
/**
* Determines how much a pixel should be blended.
*
* Returned value of `1` means the pixel should be fully recolored, while
* `0` means it should be totally ignored.
* @param {number} distance Distance between two pixels, see {@linkcode deltaE00}
* @param {number} threshold Threshold for color replacement, see {@linkcode FuzzinessToDeltaEThreshold}
* @param {number} cutoff Sharpness of color cutoff, `0` is full blur, `1` is instant cutoff
* @returns {number}
*/
static MatchStrength(distance, threshold, cutoff) {
if (threshold === 0) {
return distance === 0 ? 1 : 0;
}
// get bounds from match (inner) to unmatched (outer), with cutoff blurring
const innerDistance = threshold * cutoff;
const outerDistance = threshold;
if (distance <= innerDistance) { return 1; } // fully color matched
if (distance >= outerDistance) { return 0; } // no color match at all
// failsafe, no div by zeros here
if (innerDistance === outerDistance) { return distance >= 0.5 ? 0 : 1; }
// normalize distance between 0 and 1
const normDist = (outerDistance - distance) / (outerDistance - innerDistance);
// smooth, and return
return useSmootherStep ?
Math.pow(normDist, 3) * ((normDist * ((6 * normDist) - 15)) + 10) : // smootherstep
Math.pow(normDist, 2) * (3 - (2 * normDist)); // smoothstep
}
/**
* Recolors an RGB object to the given recolorLAB, preserving lightness.
* The returned RGB object is blended with the orig image.
* @param {number} r
* @param {number} g
* @param {number} b
* @param {number} alpha
* @param {Lab} recolorLAB
* @returns {rgbOject}
*/
static RecolorPreservingLightness(r, g, b, alpha, recolorLAB) {
// convert RGB data to LAB
const lab = ColorMath.toLab({ mode: 'rgb', r, g, b, alpha });
if (!lab) return { r, g, b };
// recolor the pixel using culori magic I don't understand
const recolored = ColorMath.clampToRgb({
mode: 'lab',
l: lab.l,
a: recolorLAB.a,
b: recolorLAB.b,
alpha,
});
if (recolored == null) return { r, g, b };
// return new RGB object
return {
r: recolored.r ?? r,
g: recolored.g ?? g,
b: recolored.b ?? b,
};
}
}
/**
 * If `true`, uses `smootherstep` interpolation (nicer, more expensive).
 * If `false`, uses plain `smoothstep`.
 * (See the ternary in `ColorMath.MatchStrength` — `true` selects the
 * higher-order smootherstep polynomial.)
 * @see {@link https://en.wikipedia.org/wiki/Smoothstep Smoothstep} on Wikipedia
 */
const useSmootherStep = false;
/**
 * Create an `HTMLCanvasElement` of the given dimensions
 * @param {number} width Canvas width, in pixels
 * @param {number} height Canvas height, in pixels
 * @returns {HTMLCanvasElement}
 */
function CreateCanvas(width, height) {
  // create the element and size it in one go
  return Object.assign(document.createElement('canvas'), { width, height });
}
/**
 * Load an image from the given source element or path.
 * Resolves once the image has finished loading (and, where supported,
 * decoding — decode failures are ignored, best-effort).
 * @param {string|HTMLImageElement} imageSource URL string or existing element
 * @returns {Promise<HTMLImageElement>}
 */
async function LoadImageElement(imageSource) {
  if (imageSource == null) {
    throw new Error('imageSource is null, can\'t load image element');
  }
  // already an HTMLImageElement: just wait for it to finish loading/decoding
  if (imageSource instanceof HTMLImageElement) {
    if (!imageSource.complete) {
      await new Promise((resolve, reject) => {
        imageSource.onload = () => resolve();
        imageSource.onerror = reject;
      });
    } else if (imageSource.decode) {
      // ignore decode failures and continue
      await imageSource.decode().catch(() => { });
    }
    return imageSource;
  }
  // string path: build a fresh Image and load it
  if (typeof imageSource === 'string') {
    if (imageSource.trim() === '') {
      throw new Error('ImageSource is empty string, can\'t load image from empty path');
    }
    const image = new Image();
    // Needed only if loading from another origin with CORS enabled (Cross-Origin Resource Sharing)
    // see: https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement/crossOrigin
    image.crossOrigin = 'anonymous';
    image.src = imageSource;
    await new Promise((resolve, reject) => {
      image.onload = () => resolve();
      image.onerror = () => reject(new Error(`Failed to load image: ${imageSource}`));
    });
    if (image.decode) {
      // ignore decode failures and continue
      await image.decode().catch(() => { });
    }
    return image;
  }
  throw new Error(`imageSource must be a URL string or an HTMLImageElement, type: ${typeof imageSource}`);
}
/**
 * After processing `n` pixels, await one `requestAnimationFrame`.
 *
 * Not *truly* async (not using webworkers), but prevents the website from
 * locking up during recoloring. If `<= 0`, no animation delay is used.
 *
 * For smooth UI; use {@linkcode CALC_INTERVAL_EXECUTION} for code execution. */
const CALC_INTERVAL_ANIMATION = 8000;
/**
 * After processing `n` pixels, await one `setTimeout(fn, 0)`, ie yield one task/tick.
 *
 * Not *truly* async (not using webworkers), but prevents the website from
 * locking up during recoloring. If `<= 0`, no code execution delay is used.
 *
 * For smooth code execution; use {@linkcode CALC_INTERVAL_ANIMATION} for UI. */
const CALC_INTERVAL_EXECUTION = 15000;
/**
 * If `true`, every other fully-alpha pixel does *not* impact
 * {@link CALC_INTERVAL_ANIMATION animation} or {@link CALC_INTERVAL_EXECUTION code execution}
 * intervals. This is because no color processing is performed on invisible pixels,
 * so they are inherently faster to iterate over. Skipping them entirely would lead
 * to pauses in the case of long stretches of alpha pixels (eg the edges of a transparent
 * image, or gaps in a spritesheet) - this way, processing is doubled, but interval
 * pauses still occur.
 *
 * Tested on a 1500x1500px image with approx 40% pixels fully transparent,
 * processing times were reduced ~24%.
 */
const INTERVAL_FASTER_ON_ALPHA_PIXELS = true;
// `performance.now()` timestamp set when RecolorImage starts; read at the end
// of RecolorImageToBlobURLs to log total processing time
let startTime;
/**
 * Recolors an image, returning multiple copies of that image
 * with each one recoloured to a given target color.
 * @param {string|HTMLImageElement} imageSource Source image path or element
 * @param {string|Color} targetColor Target color to be replaced
 * @param {string|Color|(string|Color)[]} replaceColor Color or colors to
 * recolor. For each given color, another copy of `imageSource` will be generated.
 * @param {number} [fuzziness=0.2] Replace colors of the approx similarity range
 * @param {number} [cutoff=0.7] How sharply colors above/below fuzziness are blended.
 * Must be between `0` and `1`. `0.0` fades everything, blurry. `1.0` is instant cutoff.
 * `0.9` has a very small fade zone, and `0.3` is a wide blurry transition. Default `0.7`
 * @returns {Promise<(HTMLCanvasElement|null)[]>} One canvas per replace color
 * (`null` entries where a 2D context could not be created)
 */
export async function RecolorImage(
  imageSource,
  targetColor,
  replaceColor = [],
  fuzziness = 0.2,
  cutoff = 0.7,
) {
  console.warn("RECOLORER IS WORK IN PROGRESS, results are currently wack (colors are v weird)");
  // TODO: verify color replacement now that RecolorPreservingLightness
  // converts the clamped LAB color back to RGB explicitly
  startTime = performance.now();
  console.log('STARTING RECOLOR');
  // normalize replaceColor into a non-empty array, or bail out early
  if (replaceColor == null) { return []; }
  if (!Array.isArray(replaceColor)) {
    if (typeof replaceColor === 'string' && replaceColor.trim() === '') { return []; }
    replaceColor = [replaceColor];
  } else if (replaceColor.length === 0) {
    return [];
  }
  // load the image, define size
  const img = await LoadImageElement(imageSource);
  const width = img.naturalWidth || img.width;
  const height = img.naturalHeight || img.height;
  // create canvas, get context, draw image to it
  const baseCanvas = CreateCanvas(width, height);
  const baseContext = baseCanvas.getContext('2d', { willReadFrequently: true });
  if (baseContext == null) {
    throw new Error('Could not get 2D canvas context');
  }
  baseContext.drawImage(img, 0, 0, width, height);
  // collect image data from context
  const imageData = baseContext.getImageData(0, 0, width, height);
  const pixels = imageData.data;
  /** target RGB color to be replaced */
  const targetRGB = ColorMath.ParseToRGB(targetColor);
  /**
   * all recolors in LAB format
   * @see {@link https://culorijs.org/color-spaces/ culoriJS color spaces docs}
   */
  const recolorLABs = replaceColor.map(ColorMath.ParseToLAB);
  /**
   * source-match mask, basically big array of all the img pixels,
   * where each element is a number representing how much that pixel
   * should be adjusted to the recolor
   */
  const matchMask = new Float32Array(width * height);
  // calculate fuzziness threshold
  const threshold = ColorMath.FuzzinessToDeltaEThreshold(fuzziness);
  // clamp cutoff (non-numeric input falls back to the documented default)
  cutoff = typeof cutoff === 'number' ? ColorMath.Clamp01(cutoff) : 0.7;
  const USE_INTERVAL_ANIM = CALC_INTERVAL_ANIMATION > 0;
  const USE_INTERVAL_EXEC = CALC_INTERVAL_EXECUTION > 0;
  // toggles per skipped pixel so only every OTHER alpha pixel counts toward
  // the yield intervals, see INTERVAL_FASTER_ON_ALPHA_PIXELS
  let alphaSkip = false;
  // PASS 1: iterate through all pixel data, build the match mask
  for (let i = 0, pixel = 0, intervalAnim = 0, intervalExec = 0; i < pixels.length; i += 4, pixel++) {
    // skip fully alpha pixels
    const alpha = pixels[i + 3] / 255;
    const fullyAlpha = alpha === 0;
    // check for timing intervals
    if (USE_INTERVAL_ANIM) {
      if (!fullyAlpha || !INTERVAL_FASTER_ON_ALPHA_PIXELS || !alphaSkip) {
        intervalAnim++;
      }
      if (intervalAnim > CALC_INTERVAL_ANIMATION) {
        await new Promise(requestAnimationFrame);
        intervalAnim = 0;
      }
    }
    if (USE_INTERVAL_EXEC) {
      if (!fullyAlpha || !INTERVAL_FASTER_ON_ALPHA_PIXELS || !alphaSkip) {
        intervalExec++;
      }
      if (intervalExec > CALC_INTERVAL_EXECUTION) {
        await new Promise(resolve => setTimeout(resolve, 0));
        intervalExec = 0;
      }
    }
    if (fullyAlpha) {
      if (INTERVAL_FASTER_ON_ALPHA_PIXELS && (USE_INTERVAL_ANIM || USE_INTERVAL_EXEC)) {
        alphaSkip = !alphaSkip;
      }
      matchMask[pixel] = 0;
      continue;
    }
    /** @type {Rgb} */
    const sourceRGB = {
      mode: 'rgb',
      r: pixels[i] / 255,
      g: pixels[i + 1] / 255,
      b: pixels[i + 2] / 255,
      alpha,
    };
    // get distance from src pixel color to target pixel color
    const distance = ColorMath.deltaE00(sourceRGB, targetRGB);
    // update match mask to reflect how much pixel should change
    matchMask[pixel] = ColorMath.MatchStrength(distance, threshold, cutoff);
  }
  /** Output recoloured canvases @type {HTMLCanvasElement[]} */
  const outputs = [];
  // PASS 2: iterate through all recolors, generating one canvas each
  for (const recolorLAB of recolorLABs) {
    // create new output canvas/context
    const outputCanvas = CreateCanvas(width, height);
    const outputContext = outputCanvas.getContext('2d');
    if (outputContext == null) {
      console.error('Could not get 2D canvas context', recolorLAB);
      outputs.push(null);
      continue;
    }
    const outImageData = new ImageData(width, height);
    const outImgDataRaw = outImageData.data;
    // iterate through all pixels in the image
    for (let i = 0, px = 0, intervalAnim = 0, intervalExec = 0; i < pixels.length; i += 4, px++) {
      // get alpha and amount values
      const alphaByte = pixels[i + 3];
      const alpha01 = alphaByte / 255;
      const amount = matchMask[px];
      // determine if we're skipping this sweet fella
      const skipPixel = (alphaByte === 0 || amount <= 0);
      // check for timing intervals
      if (USE_INTERVAL_ANIM) {
        if (!skipPixel || !INTERVAL_FASTER_ON_ALPHA_PIXELS || !alphaSkip) {
          intervalAnim++;
        }
        if (intervalAnim > CALC_INTERVAL_ANIMATION) {
          await new Promise(requestAnimationFrame);
          intervalAnim = 0;
        }
      }
      if (USE_INTERVAL_EXEC) {
        if (!skipPixel || !INTERVAL_FASTER_ON_ALPHA_PIXELS || !alphaSkip) {
          intervalExec++;
        }
        if (intervalExec > CALC_INTERVAL_EXECUTION) {
          await new Promise(resolve => setTimeout(resolve, 0));
          intervalExec = 0;
        }
      }
      if (skipPixel) {
        // full alpha or totally ignored, simply apply default pixel data and continue
        outImgDataRaw[i] = pixels[i];
        outImgDataRaw[i + 1] = pixels[i + 1];
        outImgDataRaw[i + 2] = pixels[i + 2];
        outImgDataRaw[i + 3] = pixels[i + 3];
        // update whether we're skipping the next alpha pixel
        if (INTERVAL_FASTER_ON_ALPHA_PIXELS && (USE_INTERVAL_ANIM || USE_INTERVAL_EXEC)) {
          alphaSkip = !alphaSkip;
        }
        continue;
      }
      // get initial rgba values
      const r0 = pixels[i] / 255;
      const g0 = pixels[i + 1] / 255;
      const b0 = pixels[i + 2] / 255;
      // get RGB object, recoloured! this is where the magic happens
      const recolored = ColorMath.RecolorPreservingLightness(r0, g0, b0, alpha01, recolorLAB);
      // lerp orig color to target color based on amount
      const r = ColorMath.Lerp(r0, recolored.r, amount);
      const g = ColorMath.Lerp(g0, recolored.g, amount);
      const b = ColorMath.Lerp(b0, recolored.b, amount);
      // assign pixel data to output raw image
      outImgDataRaw[i] = ColorMath.ToByte(r);
      outImgDataRaw[i + 1] = ColorMath.ToByte(g);
      outImgDataRaw[i + 2] = ColorMath.ToByte(b);
      outImgDataRaw[i + 3] = alphaByte;
    }
    // done! apply image data, and push to canvas output
    outputContext.putImageData(outImageData, 0, 0);
    outputs.push(outputCanvas);
  }
  return outputs;
}
/**
 * Recolors an image, returning URLs to locally-encoded Blobs
 * for each given recolor.
 * @param {string|HTMLImageElement} imageSource Source image path or element
 * @param {string|Color} targetColor Target color to be replaced
 * @param {string|Color|(string|Color)[]} replaceColor Color or colors to
 * recolor. For each given color, another copy of `imageSource` will be generated.
 * @param {number} [fuzziness=0.2] Replace colors of the approx similarity range
 * @param {number} [cutoff=0.7] How sharply colors above/below fuzziness are
 * blended. `0.0` fades everything, blurry. `1.0` is an extremely sharp cutoff.
 * `0.9` has a very small fade zone, and `0.3` is a wide blurry transition. Default `0.7`
 * @param {`auto_${ImageType}`|ImageType} [imageType='auto_png']
 * Type of image to create. Default `auto_png`, which attempts to detect type
 * based on `imageSource`, and falls back to `png` if detection fails.
 * @param {number} [imageQuality=0.9] Image quality to use. Undefined uses browser defaults, per
 * {@linkcode https://developer.mozilla.org/en-US/docs/Web/API/HTMLCanvasElement/toBlob toBlob}
 * MDN spec. Only used for `jpeg` and `webp` encoding.
 * @returns {Promise<(string|null)[]>} One object URL per replace color
 * (`null` entries where canvas/blob/URL creation failed)
 */
export async function RecolorImageToBlobURLs(
  imageSource,
  targetColor,
  replaceColor,
  fuzziness = 0.2,
  cutoff = 0.7,
  imageType = 'auto_png', // BUGFIX: default was 'png', contradicting the documented 'auto_png'
  imageQuality = 0.9,
) {
  // get recolored images
  const canvases = await RecolorImage(imageSource, targetColor, replaceColor, fuzziness, cutoff);
  if (canvases == null || canvases.length === 0) { return []; }
  // ensure image type and quality are prepared
  if (imageType == null || imageType.trim() === '') {
    imageType = 'png';
  } else if (imageType.startsWith('auto')) {
    /**
     * Autodetect the image type from a source path's file extension.
     * @param {string} src @returns {ImageType|null}
     */
    const getSrcType = (src) => {
      const dot = src.lastIndexOf('.');
      if (dot === -1) { return null; }
      const file = src.slice(dot + 1);
      if (file == null || file.trim() === '') { return null; }
      // trim anything trailing the extension (query, hash, etc).
      // BUGFIX: indexOf returns -1 for absent delimiters, so the previous
      // Math.min(...) was -1 unless EVERY delimiter was present — meaning
      // "img.png?v=2" never had its "?v=2" stripped. Filter out the -1s first.
      const cuts = ['#', '?', '&', '$', '/', '\\']
        .map((c) => file.indexOf(c))
        .filter((idx) => idx >= 0);
      const ext = cuts.length > 0 ? file.slice(0, Math.min(...cuts)) : file;
      switch (ext.toLowerCase().trim()) {
        case 'png': return 'png';
        case 'bmp': return 'png';// sure why not lol, idk if this'd even work
        case 'jpg': return 'jpeg';
        case 'jpeg': return 'jpeg';
        case 'webp': return 'webp';
        default:
          console.warn(`Failed to autodetect image type from src: ${src}`);
          return null;
      }
    };
    const type = getSrcType(imageSource instanceof HTMLImageElement ? imageSource.src : imageSource);
    if (type == null) {
      // detection failed: strip the 'auto_' prefix and use what remains
      imageType = /** @type {ImageType} */(imageType.slice(5));
    } else {
      imageType = type;
    }
  }
  // done formatting, confirm imagetype and update quality as needed
  switch (imageType) {
    case 'auto_png':
      // failsafe
      imageType = 'png';
    // falls through
    case 'png':
      // png encoding ignores quality
      imageQuality = undefined;
      break;
    default:
      switch (imageType) {
        // failsafe
        case 'auto_jpeg': imageType = 'jpeg'; break;
        case 'auto_webp': imageType = 'webp'; break;
      }
      imageType = /** @type {ImageType} */(imageType);
    // falls through
    case 'jpeg':
    case 'webp':
      // BUGFIX: Clamp01(undefined) is NaN; keep undefined so the browser
      // default quality is used, per the documented contract
      imageQuality = imageQuality == null ? undefined : ColorMath.Clamp01(imageQuality);
      break;
  }
  // iterate through all canvases and convert to blob URLs
  const urls = await Promise.all(
    canvases.map(
      (canvas) => {
        return new Promise((
          /** @type {function(string|null):void} */resolve) => {
          if (canvas == null) {
            // canvas is null
            console.error('Can\'t create blob from null canvas');
            resolve(null);
            return;
          }
          canvas.toBlob((blob) => {
            if (blob == null) {
              // blob is null
              console.error('Failed to create blob from canvas', canvas);
              resolve(null);
              return;
            }
            const url = URL.createObjectURL(blob);
            if (url == null || url.trim() === '') {
              console.error('Failed creating URL from blob', blob);
              resolve(null);
              return;
            }
            // success!
            resolve(url);
          }, `image/${imageType}`, imageQuality);
        });
      }
    )
  );
  // log total duration in seconds, rounded to 2 decimal places
  const duration = Math.round((performance.now() - startTime) * 0.1) * 0.01;
  console.log('TOTAL TIME: ' + duration + 's');
  return urls;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment