You can edit the JavaScript code below to customize the image tool.
Apply Changes
/**
 * Builds a processed copy of an image — and optionally a processed audio
 * clip — inside a container element.
 *
 * Image effects: mirror/stretch/shake transforms, hue/saturation/contrast
 * filters, pixelation, glow, color inversion, and a text overlay are
 * composited onto a <canvas>.
 *
 * Audio effects: the clip at `audioSrc` is fetched, optionally reversed and
 * stuttered, pitch-shifted and reverberated through an OfflineAudioContext,
 * re-encoded as a 16-bit PCM WAV, and exposed through an <audio> element.
 * Audio failures are best-effort: the image result is returned regardless.
 *
 * @param {HTMLImageElement} originalImg - Source image (must already be loaded).
 * @returns {Promise<HTMLDivElement>} Container holding the canvas and, when
 *   audio was processed successfully, an <audio> player.
 */
async function processImage(
  originalImg,
  // --- Audio Parameters ---
  audioSrc = '', // URL to the audio file (e.g., mp3, wav).
  pitchShift = 0, // Semitones to shift pitch. Can be positive or negative.
  reverb = 0, // Reverb amount from 0 (dry) to 1 (fully wet).
  stutterCount = 0, // Number of times to repeat the first small segment of audio.
  stutterDuration = 0.1, // Duration of the stutter segment in seconds.
  reverseAudio = 'false', // Set 'true' to reverse the entire audio clip.
  // --- Image Parameters ---
  invertColors = 'false', // Set 'true' to invert image colors.
  hueRotate = 0, // Degrees to rotate hue (0-360).
  saturate = 100, // Saturation in percent (0 is grayscale, 100 is original).
  contrast = 100, // Contrast in percent (0 is gray, 100 is original).
  pixelate = 1, // Pixelation factor. 1 is no pixelation, > 1 increases effect.
  mirror = 'none', // 'none', 'horizontal', or 'vertical'.
  stretchX = 1, // Horizontal stretch multiplier (e.g., 2 is twice as wide).
  stretchY = 1, // Vertical stretch multiplier (e.g., 0.5 is half the height).
  shake = 0, // Maximum pixel offset for a random shake effect.
  glowColor = 'rgba(255,255,255,0)', // Color of the glow effect. Default is transparent (no glow).
  glowAmount = 0, // Blur radius for the glow effect.
  overlayText = '', // Text to display over the image.
  textColor = 'white', // Color for the overlay text.
  textStrokeColor = 'black', // Outline color for the overlay text.
  fontSize = 48 // Font size in pixels for the overlay text.
) {
  // Coerce to a finite number, falling back only when the input is not
  // numeric. (The previous `Number(x) || fallback` pattern silently turned a
  // legitimate 0 — e.g. saturate=0 for grayscale — into the fallback.)
  const toFinite = (value, fallback) => {
    const n = Number(value);
    return Number.isFinite(n) ? n : fallback;
  };

  // --- Helper: encode an AudioBuffer as a 16-bit PCM WAV Blob ---
  const bufferToWave = (abuffer) => {
    const numOfChan = abuffer.numberOfChannels;
    const byteLength = abuffer.length * numOfChan * 2 + 44; // samples + RIFF header
    const buffer = new ArrayBuffer(byteLength);
    const view = new DataView(buffer);
    let pos = 0;

    const setUint16 = (data) => { view.setUint16(pos, data, true); pos += 2; };
    const setUint32 = (data) => { view.setUint32(pos, data, true); pos += 4; };

    // RIFF/WAVE container header.
    setUint32(0x46464952); // "RIFF"
    setUint32(byteLength - 8); // file length - 8
    setUint32(0x45564157); // "WAVE"
    // "fmt " chunk.
    setUint32(0x20746d66); // "fmt "
    setUint32(16); // chunk length
    setUint16(1); // PCM (uncompressed)
    setUint16(numOfChan);
    setUint32(abuffer.sampleRate);
    setUint32(abuffer.sampleRate * 2 * numOfChan); // avg. bytes/sec
    setUint16(numOfChan * 2); // block align
    setUint16(16); // bits per sample
    // "data" chunk.
    setUint32(0x61746164); // "data"
    setUint32(byteLength - pos - 4); // chunk length

    const channels = [];
    for (let i = 0; i < numOfChan; i++) {
      channels.push(abuffer.getChannelData(i));
    }

    // Interleave samples, clamped and scaled to signed 16-bit.
    let offset = 0;
    while (pos < byteLength) {
      for (let i = 0; i < numOfChan; i++) {
        const clamped = Math.max(-1, Math.min(1, channels[i][offset]));
        // Negatives span -32768..0, positives 0..32767. (The old expression
        // `0.5 + sample < 0 ? ...` parsed as `(0.5 + sample) < 0`, which
        // mis-scaled samples in the [-0.5, 0) range.)
        const int16 = (clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff) | 0;
        view.setInt16(pos, int16, true);
        pos += 2;
      }
      offset++;
    }
    return new Blob([buffer], { type: "audio/wav" });
  };

  // --- Parameter Parsing ---
  const pAudioSrc = String(audioSrc);
  const pPitchShift = toFinite(pitchShift, 0);
  const pReverb = Math.max(0, Math.min(1, toFinite(reverb, 0)));
  const pStutterCount = Math.max(0, parseInt(stutterCount, 10) || 0);
  const pStutterDuration = Math.max(0.01, toFinite(stutterDuration, 0.1));
  const bReverseAudio = String(reverseAudio).toLowerCase() === 'true';
  const bInvertColors = String(invertColors).toLowerCase() === 'true';
  const pHueRotate = toFinite(hueRotate, 0);
  const pSaturate = Math.max(0, toFinite(saturate, 100)); // 0 is valid (grayscale)
  const pContrast = Math.max(0, toFinite(contrast, 100)); // 0 is valid (solid gray)
  const pPixelate = Math.max(1, toFinite(pixelate, 1));
  const pMirror = String(mirror).toLowerCase();
  const pStretchX = toFinite(stretchX, 1) || 1; // 0 would collapse the canvas
  const pStretchY = toFinite(stretchY, 1) || 1;
  const pShake = toFinite(shake, 0);
  const pGlowColor = String(glowColor);
  const pGlowAmount = toFinite(glowAmount, 0);
  const pOverlayText = String(overlayText);
  const pTextColor = String(textColor);
  const pTextStrokeColor = String(textStrokeColor);
  const pFontSize = toFinite(fontSize, 48) || 48; // size 0 text is useless

  // --- Canvas Setup ---
  const canvas = document.createElement('canvas');
  // Guard against 0/negative dimensions, which make the canvas undrawable.
  canvas.width = Math.max(1, Math.round(originalImg.width * pStretchX));
  canvas.height = Math.max(1, Math.round(originalImg.height * pStretchY));
  const ctx = canvas.getContext('2d');

  // Pre-render the pixelation source once: downscale the full image to a
  // small offscreen canvas, then upscale it with smoothing disabled. (The
  // previous 9-argument drawImage used pW/pH as a *source crop*, which
  // zoomed into the top-left corner instead of pixelating; dividing the
  // destination size by scaleX/scaleY also pushed mirrored output off-canvas.)
  let pixelSource = null;
  if (pPixelate > 1) {
    pixelSource = document.createElement('canvas');
    pixelSource.width = Math.max(1, Math.floor(originalImg.width / pPixelate));
    pixelSource.height = Math.max(1, Math.floor(originalImg.height / pPixelate));
    pixelSource.getContext('2d')
      .drawImage(originalImg, 0, 0, pixelSource.width, pixelSource.height);
  }
  // Draws the (possibly pixelated) source over the whole canvas. The mirror
  // flip is handled entirely by the ctx.scale transform below.
  const drawSourceImage = () => {
    if (pixelSource) {
      ctx.imageSmoothingEnabled = false;
      ctx.drawImage(pixelSource, 0, 0, canvas.width, canvas.height);
    } else {
      ctx.imageSmoothingEnabled = true;
      ctx.drawImage(originalImg, 0, 0, canvas.width, canvas.height);
    }
  };

  // --- Image Effects ---
  ctx.save();
  // 1. Shake, mirror, and stretch transform. Mirroring flips via a negative
  // scale; the matching translate keeps the flipped image inside the canvas.
  const shakeX = (Math.random() - 0.5) * 2 * pShake;
  const shakeY = (Math.random() - 0.5) * 2 * pShake;
  const scaleX = pMirror === 'horizontal' ? -1 : 1;
  const scaleY = pMirror === 'vertical' ? -1 : 1;
  const translateX = pMirror === 'horizontal' ? canvas.width : 0;
  const translateY = pMirror === 'vertical' ? canvas.height : 0;
  ctx.translate(translateX + shakeX, translateY + shakeY);
  ctx.scale(scaleX, scaleY);
  // 2. Color filters.
  ctx.filter = `hue-rotate(${pHueRotate}deg) saturate(${pSaturate}%) contrast(${pContrast}%)`;
  // 3. Draw image (with pixelation if requested).
  drawSourceImage();
  ctx.filter = 'none'; // Reset so the glow pass is not double-filtered.
  // 4. Glow: redraw with a shadow so the blurred silhouette shows through.
  if (pGlowAmount > 0) {
    ctx.shadowColor = pGlowColor;
    ctx.shadowBlur = pGlowAmount;
    drawSourceImage();
  }
  ctx.restore(); // Drop shake/mirror transforms and shadow state.
  // 5. Invert colors: 'difference' against white flips every channel.
  if (bInvertColors) {
    ctx.globalCompositeOperation = 'difference';
    ctx.fillStyle = 'white';
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    ctx.globalCompositeOperation = 'source-over'; // Reset
  }
  // 6. Text overlay, centered; stroked first, then filled, for legibility.
  if (pOverlayText) {
    ctx.font = `${pFontSize}px Impact, sans-serif`;
    ctx.textAlign = 'center';
    ctx.textBaseline = 'middle';
    ctx.lineWidth = Math.ceil(pFontSize / 8);
    ctx.strokeStyle = pTextStrokeColor;
    ctx.fillStyle = pTextColor;
    const x = canvas.width / 2;
    const y = canvas.height / 2;
    ctx.strokeText(pOverlayText, x, y);
    ctx.fillText(pOverlayText, x, y);
  }

  // --- Audio Processing ---
  let audioPlayer = null;
  if (pAudioSrc) {
    try {
      const audioContext = new (window.AudioContext || window.webkitAudioContext)();
      const response = await fetch(pAudioSrc);
      if (!response.ok) {
        // Fail early with a clear message instead of letting decodeAudioData
        // choke on an HTML error page.
        throw new Error(`Fetching audio failed: ${response.status} ${response.statusText}`);
      }
      const arrayBuffer = await response.arrayBuffer();
      let sourceBuffer = await audioContext.decodeAudioData(arrayBuffer);

      // Effect 1: Reverse (mutates the buffer's channel data in place).
      if (bReverseAudio) {
        for (let i = 0; i < sourceBuffer.numberOfChannels; i++) {
          sourceBuffer.getChannelData(i).reverse();
        }
      }

      // Effect 2: Stutter — repeat the leading segment pStutterCount extra times.
      if (pStutterCount > 0) {
        // Clamp so a stutter longer than the clip cannot write out of bounds
        // (an unclamped offset made typed-array .set throw a RangeError).
        const stutterSamples = Math.min(
          Math.floor(pStutterDuration * sourceBuffer.sampleRate),
          sourceBuffer.length
        );
        const newLength = sourceBuffer.length + stutterSamples * pStutterCount;
        const stutteredBuffer = audioContext.createBuffer(
          sourceBuffer.numberOfChannels, newLength, sourceBuffer.sampleRate
        );
        for (let i = 0; i < stutteredBuffer.numberOfChannels; i++) {
          const originalData = sourceBuffer.getChannelData(i);
          const newData = stutteredBuffer.getChannelData(i);
          const stutterSegment = originalData.slice(0, stutterSamples);
          // The leading segment appears pStutterCount + 1 times in total...
          for (let j = 0; j <= pStutterCount; j++) {
            newData.set(stutterSegment, j * stutterSamples);
          }
          // ...followed by the remainder of the original audio.
          newData.set(originalData.slice(stutterSamples), (pStutterCount + 1) * stutterSamples);
        }
        sourceBuffer = stutteredBuffer;
      }

      // Render the remaining effects offline. playbackRate changes duration,
      // so size the render for the shifted length (the old code used
      // sourceBuffer.length, truncating pitch-lowered audio) and leave room
      // for the reverb tail so it is not cut off.
      const playbackRate = Math.pow(2, pPitchShift / 12);
      const impulseSeconds = 2; // Reverb tail duration.
      const tailSamples = pReverb > 0 ? Math.ceil(impulseSeconds * sourceBuffer.sampleRate) : 0;
      const renderLength = Math.ceil(sourceBuffer.length / playbackRate) + tailSamples;
      const offlineCtx = new OfflineAudioContext(
        sourceBuffer.numberOfChannels, renderLength, sourceBuffer.sampleRate
      );
      const source = offlineCtx.createBufferSource();
      source.buffer = sourceBuffer;
      // Effect 3: Pitch shift — 2^(semitones/12) playback-rate change.
      source.playbackRate.value = playbackRate;
      // Effect 4: Reverb via convolution with a decaying white-noise impulse,
      // mixed as a parallel wet/dry pair.
      if (pReverb > 0) {
        const convolver = offlineCtx.createConvolver();
        const impulseBuffer = offlineCtx.createBuffer(
          2, offlineCtx.sampleRate * impulseSeconds, offlineCtx.sampleRate
        );
        for (let i = 0; i < impulseBuffer.numberOfChannels; i++) {
          const channel = impulseBuffer.getChannelData(i);
          for (let j = 0; j < channel.length; j++) {
            channel[j] = (Math.random() * 2 - 1) * Math.pow(1 - j / channel.length, 2.5);
          }
        }
        convolver.buffer = impulseBuffer;
        const wetGain = offlineCtx.createGain();
        wetGain.gain.value = pReverb;
        const dryGain = offlineCtx.createGain();
        dryGain.gain.value = 1 - pReverb;
        source.connect(convolver).connect(wetGain).connect(offlineCtx.destination);
        source.connect(dryGain).connect(offlineCtx.destination);
      } else {
        source.connect(offlineCtx.destination);
      }
      source.start(0);
      const renderedBuffer = await offlineCtx.startRendering();
      // The decoding context is no longer needed; release its resources.
      audioContext.close().catch(() => {});

      const wavBlob = bufferToWave(renderedBuffer);
      audioPlayer = document.createElement('audio');
      audioPlayer.controls = true;
      audioPlayer.src = URL.createObjectURL(wavBlob);
    } catch (e) {
      // Best-effort: the image result is still returned without audio.
      console.error("Audio processing failed:", e);
    }
  }

  // --- Final Assembly ---
  const container = document.createElement('div');
  container.style.display = "inline-flex";
  container.style.flexDirection = "column";
  container.style.alignItems = "center";
  container.style.gap = "10px";
  container.appendChild(canvas);
  if (audioPlayer) {
    container.appendChild(audioPlayer);
  }
  return container;
}
Apply Changes