// You can edit the below JavaScript code to customize the image tool.
// Apply Changes
async function processImage(originalImg, audioUrl = '', enabledEffects = 'reverb,delay,lowpass') {
/**
 * Builds a dry/wet mixing wrapper around an arbitrary effect node.
 *
 * Signal flow:  input ─┬─ dry ──────────────► output
 *                      └─ wet ─► effectNode ─► output
 *
 * @param {AudioContext} audioContext - Context used to create the gain nodes.
 * @param {AudioNode} effectNode - Node that processes the 'wet' branch.
 * @returns {{input: GainNode, output: GainNode, setMix: function(number): void}}
 *   Connection points plus a mix control.
 */
const createEffectBlock = (audioContext, effectNode) => {
const input = audioContext.createGain();
const output = audioContext.createGain();
const dry = audioContext.createGain();
const wet = audioContext.createGain();
// Parallel dry and wet branches, summed at the output node.
input.connect(dry);
dry.connect(output);
input.connect(wet);
wet.connect(effectNode);
effectNode.connect(output);
// 0 = fully dry, 1 = fully wet; out-of-range values are clamped.
const setMix = (value) => {
const mix = Math.min(1, Math.max(0, value));
wet.gain.setValueAtTime(mix, audioContext.currentTime);
dry.gain.setValueAtTime(1 - mix, audioContext.currentTime);
};
setMix(0.5); // Start at an even blend.
return { input, output, setMix };
};
/**
 * Generates a sine-wave AudioBuffer to use when no audio URL is supplied.
 * Defaults reproduce the original behavior: 1 second of 440 Hz at half
 * amplitude (headroom to avoid clipping), now parameterized for reuse.
 *
 * @param {AudioContext} audioContext - Context whose sample rate is used.
 * @param {number} [frequency=440] - Tone frequency in Hz.
 * @param {number} [duration=1] - Buffer length in seconds.
 * @param {number} [amplitude=0.5] - Peak amplitude in [0, 1].
 * @returns {AudioBuffer} A mono buffer containing the generated tone.
 */
const createDefaultBuffer = (audioContext, frequency = 440, duration = 1, amplitude = 0.5) => {
const sampleRate = audioContext.sampleRate;
// Round so fractional durations still yield a valid integer frame count.
const frameCount = Math.round(sampleRate * duration);
const buffer = audioContext.createBuffer(1, frameCount, sampleRate);
const data = buffer.getChannelData(0);
const angularFreq = 2 * Math.PI * frequency; // Loop-invariant, hoisted.
for (let i = 0; i < frameCount; i++) {
data[i] = Math.sin(angularFreq * (i / sampleRate)) * amplitude;
}
return buffer;
};
/**
 * Builds a stereo impulse response of decaying white noise, suitable for
 * feeding a ConvolverNode to approximate a reverb tail.
 *
 * @param {AudioContext} audioContext - Context used to allocate the buffer.
 * @param {number} [duration=2] - Tail length in seconds.
 * @param {number} [decay=2] - Exponent shaping how quickly the tail fades.
 * @returns {AudioBuffer} A two-channel buffer of enveloped noise.
 */
const createReverbImpulse = (audioContext, duration = 2, decay = 2) => {
const sampleRate = audioContext.sampleRate;
const length = sampleRate * duration;
const impulse = audioContext.createBuffer(2, length, sampleRate);
const left = impulse.getChannelData(0);
const right = impulse.getChannelData(1);
for (let i = 0; i < length; i++) {
// Envelope falls from 1 toward 0; a larger decay makes a steeper fade.
const envelope = Math.pow((length - i) / length, decay);
// Independent noise per channel gives the tail a stereo spread.
left[i] = (Math.random() * 2 - 1) * envelope;
right[i] = (Math.random() * 2 - 1) * envelope;
}
return impulse;
};
// --- Main Function Logic ---
// 1. Build the wrapper element that overlays the controls on the image.
const container = document.createElement('div');
Object.assign(container.style, {
position: 'relative',
width: `${originalImg.width}px`,
height: `${originalImg.height}px`,
display: 'inline-block',
overflow: 'hidden',
borderRadius: '8px',
boxShadow: '0 4px 12px rgba(0,0,0,0.3)',
});
// Make the image fill the wrapper exactly.
Object.assign(originalImg.style, {
display: 'block',
width: '100%',
height: '100%',
});
container.appendChild(originalImg);
// Semi-transparent control strip pinned to the bottom edge of the image.
const controlsContainer = document.createElement('div');
Object.assign(controlsContainer.style, {
position: 'absolute',
bottom: '0',
left: '0',
width: '100%',
backgroundColor: 'rgba(0, 0, 0, 0.7)',
color: 'white',
fontFamily: 'sans-serif',
padding: '15px',
boxSizing: 'border-box',
display: 'flex',
flexDirection: 'column',
gap: '10px',
});
container.appendChild(controlsContainer);
// 2. Initialize Audio
// NOTE(review): the AudioContext is created before any user gesture, so most
// browsers will start it 'suspended'; the play button below resumes it.
const audioContext = new AudioContext();
let audioBuffer; // Decoded (or generated) audio data for the source node.
let sourceNode; // Recreated on every play; buffer sources are one-shot.
let isPlaying = false;
// Load or generate audio data
try {
if (audioUrl) {
const response = await fetch(audioUrl);
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
const arrayBuffer = await response.arrayBuffer();
audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
} else {
// No URL supplied: fall back to the generated sine tone.
audioBuffer = createDefaultBuffer(audioContext);
}
} catch (e) {
// Fetch/decode failures degrade gracefully to the generated tone; the
// message is written into the control strip (the sliders are appended
// afterwards, so they still appear alongside it).
console.error("Audio loading failed:", e);
controlsContainer.textContent = 'Error: Could not load audio. Using default sound.';
audioBuffer = createDefaultBuffer(audioContext);
}
// 3. Create Audio Graph and UI Controls
// effectChainHead is the stable entry point sources connect to; currentNode
// tracks the current tail of the chain while effects are appended below.
const effectChainHead = audioContext.createGain();
let currentNode = effectChainHead;
// Normalize the comma-separated effect list (case/whitespace insensitive).
const effects = enabledEffects.toLowerCase().split(',').map(e => e.trim());
// --- Create a UI control helper ---
/**
 * Creates a labelled range slider with a live numeric readout.
 *
 * @param {string} label - Text shown to the left of the slider.
 * @param {number} min - Minimum slider value.
 * @param {number} max - Maximum slider value.
 * @param {number} step - Slider step size.
 * @param {number} value - Initial slider value.
 * @param {function(string): void} callback - Called with the raw slider
 *   string value on every input event.
 * @returns {HTMLDivElement} A flex row containing label, slider, and readout.
 */
const createSliderControl = (label, min, max, step, value, callback) => {
const row = document.createElement('div');
Object.assign(row.style, {
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
});
const labelEl = document.createElement('label');
labelEl.textContent = label;
labelEl.style.marginRight = '10px';
const slider = document.createElement('input');
slider.type = 'range';
Object.assign(slider, { min, max, step, value });
slider.style.flexGrow = '1';
const readout = document.createElement('span');
readout.textContent = value;
readout.style.minWidth = '30px';
readout.style.textAlign = 'right';
slider.oninput = () => {
callback(slider.value);
readout.textContent = slider.value;
};
row.append(labelEl, slider, readout);
return row;
};
// --- Build chain based on effects string ---
// Enabled effects are appended to the chain tail (currentNode) in a fixed
// order: reverb -> delay -> filter. The defaults set after each slider
// mirror the slider's initial position so UI and audio state start in sync.
if (effects.includes('reverb')) {
const convolverNode = audioContext.createConvolver();
convolverNode.buffer = createReverbImpulse(audioContext);
const reverbBlock = createEffectBlock(audioContext, convolverNode);
currentNode.connect(reverbBlock.input);
currentNode = reverbBlock.output;
// Slider values arrive as strings, hence the parseFloat.
controlsContainer.appendChild(createSliderControl('Reverb Mix', 0, 1, 0.01, 0.3, (val) => {
reverbBlock.setMix(parseFloat(val));
}));
reverbBlock.setMix(0.3);
}
if (effects.includes('delay')) {
const delayNode = audioContext.createDelay(5.0); // Max delay
const feedbackNode = audioContext.createGain();
delayNode.connect(feedbackNode);
feedbackNode.connect(delayNode); // Feedback loop
const delayBlock = createEffectBlock(audioContext, delayNode);
currentNode.connect(delayBlock.input);
currentNode = delayBlock.output;
controlsContainer.appendChild(createSliderControl('Delay Mix', 0, 1, 0.01, 0.4, val => delayBlock.setMix(parseFloat(val))));
controlsContainer.appendChild(createSliderControl('Delay Time', 0, 2, 0.01, 0.5, val => delayNode.delayTime.setValueAtTime(parseFloat(val), audioContext.currentTime)));
// Feedback slider is capped at 0.95 to keep the loop from growing unbounded.
controlsContainer.appendChild(createSliderControl('Feedback', 0, 0.95, 0.01, 0.4, val => feedbackNode.gain.setValueAtTime(parseFloat(val), audioContext.currentTime)));
delayBlock.setMix(0.4);
delayNode.delayTime.value = 0.5;
feedbackNode.gain.value = 0.4;
}
// Only the first filter-type keyword found in the effects list is used.
const filterTypes = ['lowpass', 'highpass', 'bandpass', 'notch'];
const activeFilterType = effects.find(e => filterTypes.includes(e));
if (activeFilterType) {
const filterNode = audioContext.createBiquadFilter();
filterNode.type = activeFilterType;
currentNode.connect(filterNode);
currentNode = filterNode;
// Nyquist frequency: the highest frequency this context can represent.
const maxFreq = audioContext.sampleRate / 2;
controlsContainer.appendChild(createSliderControl(
`${activeFilterType.charAt(0).toUpperCase() + activeFilterType.slice(1)} Freq`,
20, maxFreq, 1, 1000,
val => filterNode.frequency.setValueAtTime(parseFloat(val), audioContext.currentTime)
));
filterNode.frequency.value = 1000;
}
// Terminate the chain at the speakers.
currentNode.connect(audioContext.destination);
// 4. Play control
const playButton = document.createElement('button');
playButton.textContent = '▶ Play';
playButton.style.width = '100%';
playButton.style.padding = '10px';
playButton.style.border = 'none';
playButton.style.borderRadius = '5px';
playButton.style.backgroundColor = '#4CAF50';
playButton.style.color = 'white';
playButton.style.cursor = 'pointer';
playButton.style.fontSize = '16px';
playButton.onclick = async () => {
if (audioContext.state === 'suspended') {
await audioContext.resume();
}
if (!isPlaying) {
sourceNode = audioContext.createBufferSource();
sourceNode.buffer = audioBuffer;
sourceNode.loop = true;
sourceNode.connect(effectChainHead);
sourceNode.start(0);
isPlaying = true;
playButton.textContent = '■ Stop';
playButton.style.backgroundColor = '#f44336';
} else {
sourceNode.stop(0);
sourceNode.disconnect();
isPlaying = false;
playButton.textContent = '▶ Play';
playButton.style.backgroundColor = '#4CAF50';
}
};
controlsContainer.prepend(playButton);
return container;
}
// Apply Changes