You can edit the JavaScript code below to customize the image tool.
Apply Changes
async function processImage(originalImg, scanColumns = 16, noteDurationMs = 250, scale = 'major') {
  /**
   * Analyzes an image and generates a melody based on its colors.
   * The image is scanned vertically in columns. For each column:
   *  - Hue determines the musical note pitch within the selected scale.
   *  - Lightness determines the octave of the note.
   *  - Saturation determines the volume of the note.
   *
   * @param {HTMLImageElement} originalImg - Loaded image element to analyze.
   * @param {number} [scanColumns=16] - Number of vertical strips to sample
   *   (clamped to the range [1, image width]).
   * @param {number} [noteDurationMs=250] - Duration of each note in milliseconds.
   * @param {string} [scale='major'] - One of 'major', 'minor', 'pentatonic',
   *   'chromatic', 'whole'. Unknown values fall back to 'major'.
   * @returns {Promise<HTMLElement>} A div element containing a "Play" button
   *   to trigger the audio.
   */

  // Convert an RGB color (each channel in [0, 255]) to HSL.
  // Returns [h, s, l] where h is [0, 360] and s, l are [0, 1].
  const rgbToHsl = (r, g, b) => {
    r /= 255;
    g /= 255;
    b /= 255;
    const max = Math.max(r, g, b);
    const min = Math.min(r, g, b);
    let h = 0;
    let s = 0;
    const l = (max + min) / 2;
    if (max !== min) {
      const d = max - min;
      s = l > 0.5 ? d / (2 - max - min) : d / (max + min);
      switch (max) {
        case r: h = (g - b) / d + (g < b ? 6 : 0); break;
        case g: h = (b - r) / d + 2; break;
        case b: h = (r - g) / d + 4; break;
      }
      h /= 6;
    }
    return [h * 360, s, l];
  };

  // Convert a MIDI note number to frequency in Hz (A4 = MIDI 69 = 440 Hz).
  const midiToFreq = (midi) => (midi <= 0 ? 0 : 440 * Math.pow(2, (midi - 69) / 12));

  // 1. Set up an offscreen canvas to read pixel data.
  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d', { willReadFrequently: true });
  // FIX: use the intrinsic (natural) size so a CSS-scaled <img> is analyzed
  // at full resolution instead of its displayed size. Fall back to
  // width/height for sources that lack natural dimensions.
  canvas.width = originalImg.naturalWidth || originalImg.width;
  canvas.height = originalImg.naturalHeight || originalImg.height;
  ctx.drawImage(originalImg, 0, 0);

  // 2. Musical scales as semitone intervals from the root note.
  const scales = {
    major: [0, 2, 4, 5, 7, 9, 11],
    minor: [0, 2, 3, 5, 7, 8, 10],
    pentatonic: [0, 2, 4, 7, 9],
    chromatic: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
    whole: [0, 2, 4, 6, 8, 10],
  };
  // FIX: String(scale) avoids a TypeError when a non-string slips through;
  // anything unrecognized still falls back to the major scale.
  const currentScaleIntervals = scales[String(scale).toLowerCase()] || scales.major;
  const noteDurationSec = noteDurationMs / 1000.0;

  // 3. Analyze the image and build the melody sequence.
  // FIX: clamp scanColumns so a zero/negative/fractional/oversized value
  // cannot produce an empty melody or sample columns past the right edge.
  const columns = Math.min(Math.max(1, Math.floor(scanColumns)), Math.max(1, canvas.width));
  const melodyNotes = [];
  const colWidth = Math.max(1, Math.floor(canvas.width / columns));
  for (let i = 0; i < columns; i++) {
    const x = i * colWidth;
    // NOTE: getImageData throws a SecurityError if the canvas is tainted by a
    // cross-origin image; callers should load images with CORS enabled.
    const imageData = ctx.getImageData(x, 0, colWidth, canvas.height);
    const data = imageData.data;
    const pixelCount = data.length / 4; // RGBA: 4 bytes per pixel
    if (pixelCount === 0) continue;
    let totalR = 0;
    let totalG = 0;
    let totalB = 0;
    for (let j = 0; j < data.length; j += 4) {
      totalR += data[j];
      totalG += data[j + 1];
      totalB += data[j + 2];
    }
    const [h, s, l] = rgbToHsl(totalR / pixelCount, totalG / pixelCount, totalB / pixelCount);

    // Hue (0-360) -> scale degree. The % guards the h === 360 edge case.
    const scaleDegreeIndex = Math.floor((h / 360) * currentScaleIntervals.length) % currentScaleIntervals.length;
    const noteInterval = currentScaleIntervals[scaleDegreeIndex];
    // Lightness (0-1) -> octave shift of -1, 0, or +1 around Middle C.
    // FIX: Math.min(..., 2) keeps the l === 1.0 boundary inside that range
    // (previously pure white mapped to +2 octaves).
    const octaveShift = Math.min(Math.floor(l * 3), 2) - 1;
    const baseMidiNote = 60; // Middle C (C4)
    melodyNotes.push({
      midi: baseMidiNote + noteInterval + 12 * octaveShift,
      // Saturation (0-1) -> volume, capped at 0.7 to avoid clipping.
      volume: s * 0.7,
      duration: noteDurationSec,
    });
  }

  // 4. Build the UI: a container with a single Play button.
  const container = document.createElement('div');
  container.style.textAlign = 'center';
  const button = document.createElement('button');
  button.textContent = '▶ Play Image Melody';
  button.style.padding = '10px 15px';
  button.style.fontSize = '16px';
  button.style.cursor = 'pointer';
  button.style.border = '1px solid #ccc';
  button.style.borderRadius = '5px';
  container.appendChild(button);

  let audioCtx = null;
  let isPlaying = false;

  // 5. Click handler: lazily create the AudioContext (browsers require a
  // user gesture before audio can start), then schedule every note up front.
  button.onclick = async () => {
    if (isPlaying) return;
    isPlaying = true;
    button.disabled = true;
    button.textContent = 'Playing...';
    if (!audioCtx) {
      audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    }
    // Resume the context if the browser auto-suspended it.
    if (audioCtx.state === 'suspended') {
      await audioCtx.resume();
    }
    let startTime = audioCtx.currentTime;
    melodyNotes.forEach((note) => {
      if (note.volume > 0.01) { // skip inaudible notes, but keep them as rests
        const osc = audioCtx.createOscillator();
        const gainNode = audioCtx.createGain();
        osc.connect(gainNode);
        gainNode.connect(audioCtx.destination);
        osc.frequency.setValueAtTime(midiToFreq(note.midi), startTime);
        osc.type = 'triangle'; // 'sine', 'square', 'sawtooth', 'triangle'
        // Simple attack/decay envelope to prevent clicking sounds.
        // FIX: cap the attack at half the note length so the decay ramp is
        // never scheduled to end before the attack does on notes < 100 ms.
        const attack = Math.min(0.05, note.duration / 2);
        gainNode.gain.setValueAtTime(0, startTime);
        gainNode.gain.linearRampToValueAtTime(note.volume, startTime + attack);
        gainNode.gain.exponentialRampToValueAtTime(0.0001, startTime + note.duration);
        osc.start(startTime);
        osc.stop(startTime + note.duration);
      }
      startTime += note.duration; // silent notes still advance time (rests)
    });
    // Re-enable the button after the full melody has played.
    const totalDurationMs = melodyNotes.length * noteDurationMs;
    setTimeout(() => {
      isPlaying = false;
      button.disabled = false;
      button.textContent = '▶ Play Image Melody';
    }, totalDurationMs);
  };

  return container;
}
Apply Changes