// You can edit the JavaScript code below to customize the image tool.
/**
 * Pitches audio from a URL based on the average brightness of a provided image,
 * without changing the audio's speed. Returns a canvas element with an interactive
 * play/stop button overlaid on the image.
 *
 * @param {HTMLImageElement} originalImg The input image. Used for display and to
 *     calculate pitch in 'auto' mode.
 * @param {string} [audioUrl='https://mdn.github.io/webaudio-examples/samples/audio/viper.ogg']
 *     The URL of the audio file to process. Must be CORS-enabled.
 * @param {string|number} [pitch='auto'] The desired pitch shift in cents.
 *     100 cents = 1 semitone, 1200 cents = 1 octave.
 *     'auto': pitch is derived from the image's average brightness
 *     (darker = lower pitch, brighter = higher pitch).
 *     A number (e.g. -500) is used directly.
 * @returns {Promise<HTMLCanvasElement>} Resolves to a canvas element that displays
 *     the image and can be clicked to play/stop the pitched audio.
 */
async function processImage(originalImg, audioUrl = 'https://mdn.github.io/webaudio-examples/samples/audio/viper.ogg', pitch = 'auto') {
    // --- 1. State Management & Setup ---
    let audioContext = null;  // Created lazily on first click (browser autoplay policy).
    let audioBuffer = null;   // Decoded audio, shared across all plays.
    let sourceNode = null;    // Currently-playing AudioBufferSourceNode, if any.
    let isPlaying = false;
    let detuneValue = 0;      // Pitch shift in cents, applied to every playback.
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d');
    // Set canvas size to match the original image (fall back to the default
    // canvas dimensions if the image has not loaded / has no intrinsic size).
    canvas.width = originalImg.naturalWidth || 300;
    canvas.height = originalImg.naturalHeight || 150;
    // --- 2. Audio Pre-loading and Decoding ---
    // A temporary AudioContext is used solely for decoding so the main playback
    // context is not created until the user interacts.
    let decodingContext = null;
    try {
        const response = await fetch(audioUrl);
        if (!response.ok) {
            throw new Error(`Audio fetch failed: ${response.status}`);
        }
        const arrayBuffer = await response.arrayBuffer();
        decodingContext = new (window.AudioContext || window.webkitAudioContext)();
        audioBuffer = await decodingContext.decodeAudioData(arrayBuffer);
    } catch (e) {
        console.error('Error loading or decoding audio:', e);
        // Render a static error message in place of the interactive player.
        ctx.fillStyle = '#FAFAFA';
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        ctx.font = '16px sans-serif';
        ctx.fillStyle = 'red';
        ctx.textAlign = 'center';
        ctx.textBaseline = 'middle';
        ctx.fillText('Error: Could not load audio.', canvas.width / 2, canvas.height / 2);
        return canvas;
    } finally {
        // FIX: close the temporary context on BOTH paths — the original leaked
        // it when fetch/decode threw. close() returns a promise; swallow its
        // rejection since cleanup failure is non-actionable here.
        if (decodingContext) {
            decodingContext.close().catch(() => {});
        }
    }
    // --- 3. Pitch Calculation ---
    if (String(pitch).toLowerCase() === 'auto') {
        try {
            const tempCanvas = document.createElement('canvas');
            // Hint the browser we will read pixels back immediately.
            const tempCtx = tempCanvas.getContext('2d', { willReadFrequently: true });
            tempCanvas.width = canvas.width;
            tempCanvas.height = canvas.height;
            tempCtx.drawImage(originalImg, 0, 0, tempCanvas.width, tempCanvas.height);
            const imageData = tempCtx.getImageData(0, 0, tempCanvas.width, tempCanvas.height);
            const data = imageData.data;
            let totalBrightness = 0;
            const pixelCount = data.length / 4;
            for (let i = 0; i < data.length; i += 4) {
                // Rec. 601 luma formula for perceived brightness.
                totalBrightness += 0.299 * data[i] + 0.587 * data[i + 1] + 0.114 * data[i + 2];
            }
            const avgBrightness = totalBrightness / pixelCount; // 0..255
            // Map brightness (0-255) to detune (-1200..+1200 cents, i.e. +/- 1 octave).
            detuneValue = ((avgBrightness / 255) * 2400) - 1200;
        } catch (e) {
            // FIX: getImageData throws a SecurityError when a cross-origin image
            // taints the canvas; the original let this reject the whole promise.
            // Fall back to no pitch shift instead.
            console.error('Could not read image data for auto pitch; using 0 cents:', e);
            detuneValue = 0;
        }
    } else {
        const parsedPitch = parseFloat(pitch);
        // Number.isNaN avoids the coercing global isNaN.
        detuneValue = Number.isNaN(parsedPitch) ? 0 : parsedPitch;
    }
    // --- 4. Canvas Drawing Logic ---
    // Redraws the image, a dimming overlay, the play/pause icon matching the
    // current state, and the pitch readout.
    const redrawCanvas = () => {
        ctx.clearRect(0, 0, canvas.width, canvas.height);
        ctx.drawImage(originalImg, 0, 0, canvas.width, canvas.height);
        // Draw overlay
        ctx.fillStyle = 'rgba(0, 0, 0, 0.3)';
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        // Draw play/pause icon
        const iconSize = Math.min(canvas.width, canvas.height) * 0.25;
        const centerX = canvas.width / 2;
        const centerY = canvas.height / 2;
        ctx.fillStyle = 'rgba(255, 255, 255, 0.8)';
        ctx.beginPath();
        if (isPlaying) { // Draw Pause Icon
            const barWidth = iconSize / 3.5;
            ctx.rect(centerX - iconSize / 2, centerY - iconSize / 2, barWidth, iconSize);
            ctx.rect(centerX + iconSize / 2 - barWidth, centerY - iconSize / 2, barWidth, iconSize);
        } else { // Draw Play Icon
            ctx.moveTo(centerX - iconSize / 2.5, centerY - iconSize / 2);
            ctx.lineTo(centerX + iconSize / 2, centerY);
            ctx.lineTo(centerX - iconSize / 2.5, centerY + iconSize / 2);
            ctx.closePath();
        }
        ctx.fill();
        // Draw pitch info text (stroked underneath for contrast on any image).
        ctx.font = 'bold 16px sans-serif';
        ctx.textAlign = 'left';
        ctx.textBaseline = 'top';
        ctx.fillStyle = 'rgba(255, 255, 255, 0.9)';
        ctx.strokeStyle = 'rgba(0, 0, 0, 0.7)';
        ctx.lineWidth = 3;
        const text = `Pitch: ${detuneValue.toFixed(0)} cents`;
        ctx.strokeText(text, 10, 10);
        ctx.fillText(text, 10, 10);
    };
    // --- 5. Audio Playback Logic ---
    // AudioBufferSourceNodes are single-use, so each play creates a fresh node.
    const play = () => {
        if (!audioBuffer) return;
        if (!audioContext) {
            audioContext = new (window.AudioContext || window.webkitAudioContext)();
        }
        if (audioContext.state === 'suspended') {
            // Best-effort: if resume fails, start() is a no-op until the
            // context is running, so there is nothing useful to do on error.
            audioContext.resume().catch(() => {});
        }
        const node = audioContext.createBufferSource();
        node.buffer = audioBuffer;
        node.detune.value = detuneValue; // Apply the calculated pitch shift
        node.connect(audioContext.destination);
        node.onended = () => {
            // FIX: only reset state if THIS node is still the active one.
            // The original handler unconditionally cleared isPlaying/sourceNode,
            // so a stale onended from a stopped node could clobber the state of
            // a playback started immediately afterwards.
            if (sourceNode === node) {
                isPlaying = false;
                sourceNode = null;
                redrawCanvas();
            }
        };
        node.start(0);
        sourceNode = node;
        isPlaying = true;
        redrawCanvas();
    };
    const stop = () => {
        if (sourceNode) {
            sourceNode.stop(); // onended will fire to handle state changes
        }
    };
    // --- 6. Event Handling & Initialization ---
    canvas.addEventListener('click', () => {
        if (isPlaying) {
            stop();
        } else {
            play();
        }
    });
    // Set cursor to pointer to indicate it's clickable
    canvas.style.cursor = 'pointer';
    redrawCanvas(); // Initial draw
    return canvas;
}
// End of editable code.