You can edit the JavaScript code below to customize the image tool.
Apply Changes
/**
 * Applies a "glitch art" treatment to an image and optionally sonifies the
 * result into a short audio clip.
 *
 * Visual pipeline: slice displacement -> RGB split + noise -> CRT scanlines.
 * Audio pipeline: scans the final image column by column, maps each column's
 * average luminance to an oscillator frequency, renders the sweep offline,
 * and encodes it as a WAV blob attached to an <audio> element.
 *
 * @param {HTMLImageElement} originalImg - Loaded source image.
 * @param {number} [rgbSplitAmount=10] - Horizontal channel offset in pixels
 *   (rounded to a whole pixel so channel indices stay aligned).
 * @param {number} [scanlineOpacity=0.1] - Opacity of the dark scanlines; 0 disables.
 * @param {number} [noiseAmount=0.08] - Per-pixel noise intensity (0..1).
 * @param {number} [glitchSlices=15] - Number of displaced horizontal slices.
 * @param {boolean|string} [generateAudio='true'] - Enable sonification. Accepts
 *   boolean true as well as the legacy string 'true' (kept for compatibility).
 * @param {number} [audioDuration=2] - Length of the generated audio in seconds.
 * @returns {Promise<HTMLDivElement>} Container holding the processed canvas
 *   and, when audio was generated, an <audio> element below it.
 */
async function processImage(originalImg, rgbSplitAmount = 10, scanlineOpacity = 0.1, noiseAmount = 0.08, glitchSlices = 15, generateAudio = 'true', audioDuration = 2) {
  // --- SETUP ---
  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d', { willReadFrequently: true });
  canvas.width = originalImg.width;
  canvas.height = originalImg.height;
  // Turn off image smoothing for a crisp, "digital" look.
  ctx.imageSmoothingEnabled = false;
  ctx.drawImage(originalImg, 0, 0);

  // 1. Glitch slices: copy random horizontal bands and paste them back with a
  // small horizontal offset, simulating data corruption.
  const applyGlitchSlices = () => {
    for (let i = 0; i < glitchSlices; i++) {
      // Floor to whole pixels up front instead of relying on the canvas API's
      // implicit coercion, and skip degenerate slices explicitly.
      const y = Math.floor(Math.random() * canvas.height);
      const h = Math.max(1, Math.floor(Math.random() * (canvas.height / 10)));
      const x = Math.floor(Math.random() * canvas.width);
      const w = canvas.width - x;
      const dx = Math.round((Math.random() - 0.5) * (canvas.width / 8));
      if (w < 1 || y + h > canvas.height) continue;
      try {
        const slice = ctx.getImageData(x, y, w, h);
        ctx.putImageData(slice, x + dx, y);
      } catch (e) {
        // A displaced slice can still fall outside the canvas; skipping it is
        // part of the effect, not an error worth surfacing.
      }
    }
  };

  // 2. RGB split (chromatic aberration) plus per-pixel noise, applied in one
  // pass over a frozen copy of the pixel data.
  const applyRgbSplitAndNoise = () => {
    const frame = ctx.getImageData(0, 0, canvas.width, canvas.height);
    const src = new Uint8ClampedArray(frame.data); // unmodified snapshot to read from
    // Round the split so the byte offset stays on a 4-byte pixel boundary even
    // if a fractional amount is passed in.
    const splitBytes = Math.round(rgbSplitAmount) * 4;
    for (let i = 0; i < src.length; i += 4) {
      const rIndex = i - splitBytes;
      const bIndex = i + splitBytes;
      const r = rIndex >= 0 ? src[rIndex] : src[i];
      const g = src[i + 1];
      // Fix: bound-check the byte actually read (bIndex + 2), not bIndex.
      const b = bIndex + 2 < src.length ? src[bIndex + 2] : src[i + 2];
      const noise = (0.5 - Math.random()) * 255 * noiseAmount;
      // Uint8ClampedArray clamps to 0..255 on write, so no manual min/max.
      frame.data[i] = r + noise;
      frame.data[i + 1] = g + noise;
      frame.data[i + 2] = b + noise;
    }
    ctx.putImageData(frame, 0, 0);
  };

  // 3. Scanlines: darken every third row to mimic a CRT monitor.
  const applyScanlines = () => {
    if (scanlineOpacity <= 0) return;
    ctx.fillStyle = `rgba(0, 0, 0, ${scanlineOpacity})`;
    for (let y = 0; y < canvas.height; y += 3) {
      ctx.fillRect(0, y, canvas.width, 1);
    }
  };

  // Encodes an AudioBuffer as a 16-bit PCM WAV Blob (RIFF container).
  const audioBufferToWav = (buffer) => {
    const numOfChan = buffer.numberOfChannels;
    const byteLength = buffer.length * numOfChan * 2 + 44; // samples + 44-byte header
    const view = new DataView(new ArrayBuffer(byteLength));
    let pos = 0;
    const setUint16 = (data) => {
      view.setUint16(pos, data, true);
      pos += 2;
    };
    const setUint32 = (data) => {
      view.setUint32(pos, data, true);
      pos += 4;
    };
    // RIFF chunk descriptor
    setUint32(0x46464952); // "RIFF"
    setUint32(byteLength - 8);
    setUint32(0x45564157); // "WAVE"
    // fmt sub-chunk
    setUint32(0x20746d66); // "fmt "
    setUint32(16); // sub-chunk size for PCM
    setUint16(1); // audio format: PCM
    setUint16(numOfChan);
    setUint32(buffer.sampleRate);
    setUint32(buffer.sampleRate * 2 * numOfChan); // byte rate
    setUint16(numOfChan * 2); // block align
    setUint16(16); // bits per sample
    // data sub-chunk
    setUint32(0x61746164); // "data"
    setUint32(byteLength - pos - 4);
    const channels = [];
    for (let ch = 0; ch < numOfChan; ch++) {
      channels.push(buffer.getChannelData(ch));
    }
    // Interleave channels as signed 16-bit little-endian samples.
    for (let i = 0; i < buffer.length; i++) {
      for (let ch = 0; ch < numOfChan; ch++) {
        const s = Math.max(-1, Math.min(1, channels[ch][i]));
        view.setInt16(pos, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        pos += 2;
      }
    }
    return new Blob([view], { type: 'audio/wav' });
  };

  // Sonification: sweep a sawtooth oscillator across the image, one frequency
  // step per pixel column, and return a ready-to-use <audio> element.
  const sonifyImage = async () => {
    const sampleRate = 44100;
    const offlineCtx = new OfflineAudioContext(1, Math.ceil(sampleRate * audioDuration), sampleRate);
    const oscillator = offlineCtx.createOscillator();
    const gainNode = offlineCtx.createGain();
    gainNode.gain.setValueAtTime(0.5, 0); // headroom to avoid clipping
    oscillator.connect(gainNode);
    gainNode.connect(offlineCtx.destination);
    oscillator.type = 'sawtooth'; // harsh, digital-sounding wave

    const pixels = ctx.getImageData(0, 0, canvas.width, canvas.height).data;
    const timeStep = audioDuration / canvas.width;
    for (let x = 0; x < canvas.width; x++) {
      let columnLuminance = 0;
      for (let y = 0; y < canvas.height; y++) {
        const i = (y * canvas.width + x) * 4;
        // Rec. 601 luminance weights.
        columnLuminance += 0.299 * pixels[i] + 0.587 * pixels[i + 1] + 0.114 * pixels[i + 2];
      }
      const avgLuminance = columnLuminance / canvas.height;
      // Map luminance (0-255) onto 100 Hz .. 1200 Hz.
      oscillator.frequency.setValueAtTime(100 + (avgLuminance / 255) * 1100, x * timeStep);
    }
    oscillator.start(0);

    const renderedBuffer = await offlineCtx.startRendering();
    const wavBlob = audioBufferToWav(renderedBuffer);
    const el = document.createElement('audio');
    el.controls = true;
    // NOTE: the object URL is intentionally left alive for the element's
    // lifetime; callers discarding the element should revoke el.src.
    el.src = URL.createObjectURL(wavBlob);
    el.style.width = '100%';
    el.style.marginTop = '10px';
    return el;
  };

  // --- A. VISUAL EFFECTS ---
  if (glitchSlices > 0) applyGlitchSlices();
  applyRgbSplitAndNoise();
  applyScanlines();

  // --- B. AUDIO EFFECTS (SONIFICATION) ---
  // Accept boolean true in addition to the legacy string flag. Only
  // OfflineAudioContext is actually used, so don't also require AudioContext.
  // Guard against zero-size images and non-positive durations, which would
  // make the OfflineAudioContext constructor throw.
  const audioWanted = generateAudio === true || generateAudio === 'true';
  let audioEl = null;
  if (audioWanted && audioDuration > 0 && canvas.width > 0 && canvas.height > 0 && window.OfflineAudioContext) {
    audioEl = await sonifyImage();
  }

  // --- C. ASSEMBLE AND RETURN THE FINAL ELEMENT ---
  const container = document.createElement('div');
  container.style.display = 'inline-block';
  container.style.maxWidth = '100%';
  canvas.style.display = 'block';
  canvas.style.maxWidth = '100%';
  canvas.style.height = 'auto'; // maintain aspect ratio
  container.appendChild(canvas);
  if (audioEl) {
    container.appendChild(audioEl);
  }
  return container;
}
Apply Changes