Please bookmark this page to avoid losing your image tool!

Image Gradient Map Vocoder For GIF And Video Audio

(Free & Supports Bulk Upload)

Drag & drop your images here or

The result will appear here...
You can edit the JavaScript code below to customize the image tool.
/**
 * Renders an image as an audio-reactive "gradient map vocoder" visualization.
 *
 * Each pixel's luminance selects both an FFT frequency bin (whose live
 * amplitude modulates that pixel's brightness) and a position in a color
 * gradient. The result is a canvas that pulses with the audio. If no audio
 * URL is supplied, a looping sawtooth frequency sweep is generated instead.
 *
 * @param {HTMLImageElement} originalImg - A loaded, same-origin (or CORS-enabled) image.
 * @param {string} [audioUrl=''] - Audio source URL; empty means "use generated audio".
 * @param {string} [gradientColors] - Comma-separated hex colors (#RGB or #RRGGBB).
 * @param {number} [fftSize=1024] - Desired FFT size; snapped to the nearest valid power of two.
 * @returns {Promise<HTMLElement>} A container holding the canvas (plus audio controls
 *   once started) with a `stop()` teardown method attached, or a <p> element
 *   describing the error when the image cannot be processed.
 */
async function processImage(originalImg, audioUrl = '', gradientColors = '#fe0000,#ff8000,#ffff00,#80ff00,#00ff00,#00ff80,#00ffff,#0080ff,#0000ff,#8000ff,#ff00ff,#ff0080', fftSize = 1024) {

    // --- Helper Functions ---

    /**
     * Converts a hex color string to an RGB object.
     * Accepts both "#RRGGBB" and shorthand "#RGB" (leading '#' optional).
     * @param {string} hex - The hex color string.
     * @returns {{r: number, g: number, b: number}|null} null when unparseable.
     */
    const hexToRgb = (hex) => {
        let h = hex.replace(/^#/, '');
        // Expand shorthand "abc" to "aabbcc" so one regex handles both forms.
        if (/^[a-f\d]{3}$/i.test(h)) {
            h = [...h].map((c) => c + c).join('');
        }
        const result = /^([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(h);
        return result ? {
            r: parseInt(result[1], 16),
            g: parseInt(result[2], 16),
            b: parseInt(result[3], 16)
        } : null;
    };

    /**
     * Parses a comma-separated string of hex colors into an array of RGB objects.
     * Unparseable entries are silently dropped.
     * @param {string} colorsString - The color string.
     * @returns {Array<{r: number, g: number, b: number}>}
     */
    const parseGradient = (colorsString) => {
        return colorsString.split(',')
            .map(color => hexToRgb(color.trim()))
            .filter(color => color !== null);
    };

    /**
     * Gets a color from a gradient at a specific position using linear interpolation.
     * @param {number} position - A value from 0 to 1 (clamped).
     * @param {Array<{r: number, g: number, b: number}>} gradient - The gradient color array.
     * @returns {{r: number, g: number, b: number}} Black when the gradient is empty.
     */
    const getColorFromGradient = (position, gradient) => {
        if (!gradient || gradient.length === 0) return { r: 0, g: 0, b: 0 };
        if (gradient.length === 1) return gradient[0];

        position = Math.max(0, Math.min(1, position));
        const scaledPosition = position * (gradient.length - 1);
        const index1 = Math.floor(scaledPosition);
        const index2 = Math.min(Math.ceil(scaledPosition), gradient.length - 1);
        const localPosition = scaledPosition - index1;

        if (index1 === index2) return gradient[index1];

        const color1 = gradient[index1];
        const color2 = gradient[index2];

        return {
            r: Math.round(color1.r + (color2.r - color1.r) * localPosition),
            g: Math.round(color1.g + (color2.g - color1.g) * localPosition),
            b: Math.round(color1.b + (color2.b - color1.b) * localPosition)
        };
    };

    // --- Main Logic ---

    const container = document.createElement('div');
    container.style.position = 'relative';
    container.style.display = 'inline-block';

    const canvas = document.createElement('canvas');
    const width = originalImg.naturalWidth || originalImg.width;
    const height = originalImg.naturalHeight || originalImg.height;

    // A not-yet-loaded <img> reports 0x0; bail out with a readable message
    // instead of creating a zero-sized canvas.
    if (width === 0 || height === 0) {
        const errorMsg = document.createElement('p');
        errorMsg.textContent = 'Image has no dimensions. Please wait for the image to load before processing.';
        return errorMsg;
    }
    canvas.width = width;
    canvas.height = height;
    const ctx = canvas.getContext('2d', { willReadFrequently: true });
    container.appendChild(canvas);

    // --- Pre-process image to get grayscale data ---
    // One luminance byte per pixel; computed once so the per-frame loop only
    // reads this array instead of re-sampling the image.
    let grayscaleData;
    try {
        const offscreenCanvas = document.createElement('canvas');
        offscreenCanvas.width = width;
        offscreenCanvas.height = height;
        const offscreenCtx = offscreenCanvas.getContext('2d');
        offscreenCtx.drawImage(originalImg, 0, 0, width, height);
        const imageData = offscreenCtx.getImageData(0, 0, width, height);
        const pixels = imageData.data;
        grayscaleData = new Uint8Array(width * height);
        for (let i = 0; i < pixels.length; i += 4) {
            const r = pixels[i];
            const g = pixels[i + 1];
            const b = pixels[i + 2];
            // Rec. 601 luma weights.
            const luminance = 0.299 * r + 0.587 * g + 0.114 * b;
            grayscaleData[i / 4] = luminance;
        }
    } catch (e) {
        // getImageData throws on a tainted (cross-origin) canvas.
        const errorMsg = document.createElement('p');
        errorMsg.textContent = 'Could not process image due to CORS policy. Use an image from the same origin.';
        errorMsg.style.color = 'red';
        console.error(e);
        return errorMsg;
    }


    // --- Audio Setup ---
    const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    const analyser = audioCtx.createAnalyser();

    // AnalyserNode.fftSize only accepts powers of two in [32, 32768];
    // snap any other requested value to the nearest valid size.
    const validFFTSizes = [32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768];
    let finalFftSize = fftSize;
    if (!validFFTSizes.includes(fftSize)) {
        finalFftSize = validFFTSizes.reduce((prev, curr) => (Math.abs(curr - fftSize) < Math.abs(prev - fftSize) ? curr : prev));
    }
    analyser.fftSize = finalFftSize;
    analyser.smoothingTimeConstant = 0.8;

    const frequencyData = new Uint8Array(analyser.frequencyBinCount);
    let sourceNode, audioElement, oscillatorInterval;
    const useGeneratedAudio = !(audioUrl && audioUrl.trim() !== '');

    if (useGeneratedAudio) {
        const oscillator = audioCtx.createOscillator();
        const gainNode = audioCtx.createGain();
        oscillator.type = 'sawtooth';
        gainNode.gain.setValueAtTime(0.5, audioCtx.currentTime);
        sourceNode = oscillator;
        sourceNode.connect(gainNode).connect(analyser);
    } else {
        audioElement = document.createElement('audio');
        audioElement.src = audioUrl;
        audioElement.crossOrigin = 'anonymous';
        audioElement.controls = true;
        audioElement.loop = true;
        audioElement.style.width = '100%';
        audioElement.style.display = 'block';
        sourceNode = audioCtx.createMediaElementSource(audioElement);
        sourceNode.connect(analyser);
    }
    analyser.connect(audioCtx.destination);

    // --- UI and Animation Control ---
    const parsedGradient = parseGradient(gradientColors);
    let animationFrameId;

    // Allocate the output buffer once and reuse it: every pixel (all four
    // channels) is overwritten each frame, so reuse is safe and avoids a
    // width*height*4 allocation per frame.
    const outputImageData = ctx.createImageData(width, height);
    const outputPixels = outputImageData.data;

    const drawFrame = () => {
        animationFrameId = requestAnimationFrame(drawFrame);
        analyser.getByteFrequencyData(frequencyData);

        for (let i = 0; i < grayscaleData.length; i++) {
            const luminance = grayscaleData[i];
            // Darker pixels track low bins, brighter pixels track high bins.
            const binIndex = Math.floor((luminance / 255) * (analyser.frequencyBinCount - 1));
            const amplitude = frequencyData[binIndex];
            const intensity = amplitude / 255;
            const gradientColor = getColorFromGradient(luminance / 255, parsedGradient);

            outputPixels[i * 4] = gradientColor.r * intensity;
            outputPixels[i * 4 + 1] = gradientColor.g * intensity;
            outputPixels[i * 4 + 2] = gradientColor.b * intensity;
            outputPixels[i * 4 + 3] = 255;
        }
        ctx.putImageData(outputImageData, 0, 0);
    };

    const startButton = document.createElement('button');
    startButton.textContent = 'Click to Start';
    Object.assign(startButton.style, {
        position: 'absolute', top: '50%', left: '50%', transform: 'translate(-50%, -50%)',
        padding: '1em 2em', fontSize: '1.2em', cursor: 'pointer', border: '2px solid white',
        borderRadius: '8px', backgroundColor: 'rgba(0, 0, 0, 0.7)', color: 'white', zIndex: '10'
    });
    container.appendChild(startButton);

    const start = async () => {
        // AudioContexts start suspended until a user gesture; this runs from
        // the button's click handler, so resume() is allowed here.
        if (audioCtx.state === 'suspended') await audioCtx.resume();

        if (useGeneratedAudio) {
            sourceNode.start(0);
            // 8-second triangle sweep 110 Hz -> 880 Hz -> 110 Hz, re-scheduled
            // on an interval so it loops indefinitely.
            const scheduleSweep = () => {
                const now = audioCtx.currentTime;
                sourceNode.frequency.cancelScheduledValues(now);
                sourceNode.frequency.setValueAtTime(110, now);
                sourceNode.frequency.linearRampToValueAtTime(880, now + 4);
                sourceNode.frequency.linearRampToValueAtTime(110, now + 8);
            };
            scheduleSweep();
            oscillatorInterval = setInterval(scheduleSweep, 8000);
        } else {
             try {
                await audioElement.play();
            } catch (e) {
                console.error("Audio playback failed:", e);
                // Show the error and return WITHOUT starting the render loop:
                // drawFrame() repaints the whole canvas every frame and would
                // wipe this message immediately.
                container.removeChild(startButton);
                ctx.fillStyle = "rgba(0,0,0,0.7)";
                ctx.fillRect(0, 0, width, height);
                ctx.fillStyle = "white";
                ctx.font = "16px sans-serif";
                ctx.textAlign = "center";
                ctx.fillText("Error: Could not play audio.", width / 2, height / 2);
                return;
            }
        }

        container.removeChild(startButton);
        if (audioElement) container.appendChild(audioElement);
        drawFrame();
    };

    startButton.addEventListener('click', start, { once: true });

    /**
     * Tears down the visualization: stops the render loop, the sweep
     * scheduler, any playing audio, and closes the AudioContext.
     * Backward-compatible addition for callers that remove the container.
     */
    container.stop = () => {
        if (animationFrameId !== undefined) cancelAnimationFrame(animationFrameId);
        if (oscillatorInterval !== undefined) clearInterval(oscillatorInterval);
        if (audioElement) audioElement.pause();
        if (audioCtx.state !== 'closed') audioCtx.close();
    };

    // Draw initial image so something is visible before starting.
    ctx.drawImage(originalImg, 0, 0, width, height);
    return container;
}

Free Image Tool Creator

Can't find the image tool you're looking for?
Create one based on your own needs now!

Description

The ‘Image Gradient Map Vocoder for GIF and Video Audio’ tool allows users to transform images into dynamic visual representations that react to audio input. By mapping the luminance of an image to a selected gradient of colors, the tool creates an animated effect that corresponds to the audio’s frequency data. This can be particularly useful for creating eye-catching visualizations for music videos, art projects, or presentations, where the visuals change in real-time with the sound. Users can upload images, provide an audio URL, and customize the color gradients to produce unique multimedia experiences.

Leave a Reply

Your email address will not be published. Required fields are marked *