You can edit the JavaScript code below to customize the image tool.
Apply Changes
async function processImage(originalImg, audioUrl = '', lineColor = 'rgba(255, 255, 0, 0.8)', lineWidth = 2, maxFrequency = 2000, minFrequency = 80) {
/**
 * Creates a default audio buffer with a sine wave sweep if no audio URL is provided.
 * @param {AudioContext} context - The audio context to create the buffer in.
 * @param {number} duration - The duration of the wave in seconds.
 * @param {number} baseFrequency - The base frequency of the sine wave.
 * @returns {AudioBuffer} The generated single-channel audio buffer.
 */
const createDefaultAudioBuffer = (context, duration, baseFrequency) => {
  const sampleRate = context.sampleRate;
  // createBuffer requires an integer frame count; floor guards against
  // fractional durations (e.g. 0.1 s at a 8001 Hz sample rate), which
  // would otherwise throw at runtime.
  const frameCount = Math.floor(duration * sampleRate);
  const buffer = context.createBuffer(1, frameCount, sampleRate);
  const data = buffer.getChannelData(0);
  // A simple frequency modulation (FM) sweep to make the default sound more
  // interesting: carrier at baseFrequency, modulated +/-100 Hz at a 2 Hz rate.
  for (let i = 0; i < frameCount; i++) {
    const time = i / sampleRate;
    const instantFreq = baseFrequency + 100 * Math.sin(2 * Math.PI * 2 * time);
    data[i] = Math.sin(2 * Math.PI * instantFreq * time) * 0.5; // amplitude 0.5 leaves headroom
  }
  return buffer;
};
/**
 * Estimates pitch over time with a windowed autocorrelation search.
 * @param {AudioBuffer} audioBuffer - The audio buffer to analyze.
 * @param {number} numPoints - The number of time slices to analyze.
 * @param {number} minFreq - The lowest frequency (Hz) considered.
 * @param {number} maxFreq - The highest frequency (Hz) considered.
 * @returns {Array<number>} Detected frequency (Hz) per slice; 0 means silence
 *   or no clear pitch in range. May contain fewer than numPoints entries if
 *   the signal runs out before a full analysis window can be filled.
 */
const analyzePitch = (audioBuffer, numPoints, minFreq, maxFreq) => {
  const samples = audioBuffer.getChannelData(0); // mono analysis: first channel only
  const rate = audioBuffer.sampleRate;
  const windowSize = 2048; // typical window for time-domain pitch tracking
  const hop = Math.floor(samples.length / numPoints);
  // Translate the frequency search range into a lag (period) range in samples.
  const shortestLag = Math.floor(rate / maxFreq);
  const longestLag = Math.ceil(rate / minFreq);
  const detected = [];
  for (let point = 0; point < numPoints; point++) {
    const frame = samples.slice(point * hop, point * hop + windowSize);
    // Stop once there is not enough signal left for a full window.
    if (frame.length < windowSize) break;
    // Gate on RMS energy so silence yields 0 Hz instead of a noise-driven guess.
    let energy = 0;
    for (const s of frame) {
      energy += s * s;
    }
    if (Math.sqrt(energy / frame.length) < 0.01) {
      detected.push(0);
      continue;
    }
    // Autocorrelation: the lag with the strongest self-similarity is the period.
    let bestLag = -1;
    let bestScore = 0;
    for (let lag = shortestLag; lag <= longestLag; lag++) {
      let score = 0;
      const limit = windowSize - lag;
      for (let j = 0; j < limit; j++) {
        score += frame[j] * frame[j + lag];
      }
      if (score > bestScore) {
        bestScore = score;
        bestLag = lag;
      }
    }
    detected.push(bestLag === -1 ? 0 : rate / bestLag);
  }
  return detected;
};
/**
 * Draws the pitch contour onto the canvas as a broken polyline.
 * @param {CanvasRenderingContext2D} ctx - The canvas context to draw on.
 * @param {Array<number>} pitchData - Array of frequencies (Hz); 0 marks silence.
 * @param {string} color - Stroke color of the line.
 * @param {number} width - Stroke width of the line.
 * @param {number} maxFreq - Maximum frequency for the Y-axis scale.
 * @param {number} [minFreq=minFrequency] - Frequencies at or below this value
 *   break the line (silence / sub-threshold pitch). Defaults to the enclosing
 *   minFrequency so existing callers keep their exact behavior.
 */
const visualizePitch = (ctx, pitchData, color, width, maxFreq, minFreq = minFrequency) => {
  const { width: canvasWidth, height: canvasHeight } = ctx.canvas;
  ctx.strokeStyle = color;
  ctx.lineWidth = width;
  ctx.beginPath();
  let isLineStarted = false;
  pitchData.forEach((freq, index) => {
    if (freq <= minFreq) {
      // Silence (freq=0) or a very low frequency: break the line.
      isLineStarted = false;
      return;
    }
    const x = index * (canvasWidth / pitchData.length);
    // Map frequency to y position: higher frequency = closer to the top.
    const y = canvasHeight - (freq / maxFreq) * canvasHeight;
    if (y < 0 || y > canvasHeight) {
      // Off-canvas point: break the line rather than drawing a clipped segment.
      isLineStarted = false;
      return;
    }
    if (isLineStarted) {
      ctx.lineTo(x, y);
    } else {
      ctx.moveTo(x, y);
      isLineStarted = true;
    }
  });
  ctx.stroke();
};
// --- Main function logic ---
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
canvas.width = originalImg.width;
canvas.height = originalImg.height;
ctx.drawImage(originalImg, 0, 0);
// Add a semi-transparent overlay to make the pitch line more visible on any background
ctx.fillStyle = 'rgba(0, 0, 0, 0.5)';
ctx.fillRect(0, 0, canvas.width, canvas.height);
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
try {
let audioBuffer;
if (!audioUrl) {
audioBuffer = createDefaultAudioBuffer(audioContext, 3, 440); // 3-second, 440Hz sweep
} else {
const response = await fetch(audioUrl);
if (!response.ok) throw new Error(`HTTP error! Status: ${response.status}`);
const arrayBuffer = await response.arrayBuffer();
audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
}
// Analyze pitch across the canvas width
const pitchData = analyzePitch(audioBuffer, canvas.width, minFrequency, maxFrequency);
// Visualize the resulting pitch data
visualizePitch(ctx, pitchData, lineColor, lineWidth, maxFrequency);
} catch (error) {
console.error('Image Audio URL Pitch Analyzer Error:', error);
// Display a user-friendly error on the canvas
const fontSize1 = Math.min(24, canvas.width / 20);
const fontSize2 = Math.min(16, canvas.width / 30);
ctx.fillStyle = 'rgba(0, 0, 0, 0.7)';
ctx.fillRect(0, canvas.height / 2 - 50, canvas.width, 100);
ctx.fillStyle = 'white';
ctx.textAlign = 'center';
ctx.textBaseline = 'middle';
ctx.font = `bold ${fontSize1}px sans-serif`;
ctx.fillText('Error Processing Audio', canvas.width / 2, canvas.height / 2 - 20);
ctx.font = `${fontSize2}px sans-serif`;
ctx.fillText(error.message, canvas.width / 2, canvas.height / 2 + 10);
if (error instanceof TypeError) { // Often indicates a CORS issue
ctx.fillText('Please use a CORS-enabled audio URL.', canvas.width / 2, canvas.height / 2 + 35);
}
} finally {
if (audioContext.state !== 'closed') {
audioContext.close();
}
}
return canvas;
}
Apply Changes