import { mapValue } from './utils.js';

// expose for debugging in the console
window.mapValue = mapValue;

// NOTE: `config` (config.audio.defaultSmoothing, config.audio.ignoreProps)
// and `getLayers()` are assumed to be provided as globals by the host app;
// they are not imported here.
const Audio = function(tp, record) {
  const audioDom = document.querySelector('.audioWrapper');
  const heading = audioDom.querySelector('h1');
  heading.textContent = 'CLICK HERE TO START';
  //document.body.addEventListener("click", init);

  let started = false;

  // mapping[layerId][propTitle] = { min_out, max_out, sync, smoothing,
  //                                 freq_min, freq_max, value }
  const mapping = {};

  const addAudioOptions = (layer, propTitle) => {
    const panelPropTitle = tp.getPanelPropTitle(propTitle);
    if (panelPropTitle === null) {
      console.log('Audio::addAudioOptions::error',
        `cannot find panelPropTitle "${propTitle}"`);
      return;
    }
    const container = tp.getPanelPropContainer(panelPropTitle);
    const mappingOptions = mapping[layer.id()][propTitle];
    const panel = tp.getPanel();

    const audioOptions = document.createElement('div');
    audioOptions.classList.add('audioOptions');
    audioOptions.classList.add('audioOptionsTypeDefault');
    audioOptions.classList.add(`audioOptions${propTitle}`);
    audioOptions.style.position = 'relative';
    audioOptions.style.width = '100%';
    audioOptions.style.background = 'rgba(0,255,255,0.2)';
    audioOptions.style.order = window.getComputedStyle(container).order;

    // matches the analyser's frequencyBinCount (fftSize 2048 / 2)
    mappingOptions.freq_min = 0;
    mappingOptions.freq_max = 256 * 8 / 2;

    const updateMappingOptions = () => {
      mappingOptions.min_out =
        parseFloat(panel.querySelector(`#audio_min${propTitle}`).value);
      mappingOptions.max_out =
        parseFloat(panel.querySelector(`#audio_max${propTitle}`).value);
      mappingOptions.sync =
        panel.querySelector(`input[name="audio_sync${propTitle}"]:checked`).value;
      const s = panel.querySelector(`#audio_smoothing${propTitle}`).value;
      mappingOptions.smoothing = parseFloat(s);
    };
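    // `smoothing` is used in the visualizer loop as an exponential moving
    // average coefficient:
    //
    //   value = value * smoothing + (1 - smoothing) * incoming
    //
    // e.g. with smoothing = 0.9 and a previous value of 100, an incoming
    // value of 0 only drops the mapped value to 90 on the next frame, so
    // higher smoothing means slower, steadier parameter changes.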
    const min_max_Dom = document.createElement('div');
    min_max_Dom.classList.add('audio_min_max');

    const min_inputDom_label = document.createElement('label');
    // htmlFor (not .for) is the DOM property behind the "for" attribute,
    // and it must match the input's id
    min_inputDom_label.htmlFor = `audio_min${propTitle}`;
    min_inputDom_label.innerHTML = 'audio_min';
    const min_inputDom = document.createElement('input');
    min_inputDom.type = 'number';
    min_inputDom.name = `audio_min${propTitle}`;
    min_inputDom.id = `audio_min${propTitle}`;
    min_inputDom.value = '0';

    const max_inputDom_label = document.createElement('label');
    max_inputDom_label.htmlFor = `audio_max${propTitle}`;
    max_inputDom_label.innerHTML = 'audio_max';
    const max_inputDom = document.createElement('input');
    max_inputDom.type = 'number';
    max_inputDom.name = `audio_max${propTitle}`;
    max_inputDom.id = `audio_max${propTitle}`;
    max_inputDom.value = '255';

    const smoothing_inputDom_label = document.createElement('label');
    smoothing_inputDom_label.htmlFor = `audio_smoothing${propTitle}`;
    smoothing_inputDom_label.innerHTML = 'audio_smoothing';
    const smoothing_inputDom = document.createElement('input');
    smoothing_inputDom.type = 'number';
    smoothing_inputDom.name = `audio_smoothing${propTitle}`;
    smoothing_inputDom.id = `audio_smoothing${propTitle}`;
    smoothing_inputDom.value = config.audio.defaultSmoothing;
    smoothing_inputDom.min = 0;
    smoothing_inputDom.max = 1;
    smoothing_inputDom.step = 0.01;

    min_max_Dom.append(smoothing_inputDom_label);
    min_max_Dom.append(smoothing_inputDom);
    min_max_Dom.append(min_inputDom_label);
    min_max_Dom.append(min_inputDom);
    min_max_Dom.append(max_inputDom_label);
    min_max_Dom.append(max_inputDom);
    audioOptions.append(min_max_Dom);

    const sync_Dom = document.createElement('div');
    const sync_titleDom = document.createElement('p');
    sync_titleDom.innerHTML = 'sync with:';
    sync_Dom.append(sync_titleDom);

    const sync_options = ['volume', 'pitch', 'frequency'];
    sync_options.forEach((o, oi) => {
      const sync_inputDom_label = document.createElement('label');
      sync_inputDom_label.htmlFor = `audio_sync${propTitle}${o}`;
      sync_inputDom_label.innerHTML = o;
      const sync_inputDom = document.createElement('input');
      sync_inputDom.type = 'radio';
      sync_inputDom.name = `audio_sync${propTitle}`;
      sync_inputDom.id = `audio_sync${propTitle}${o}`;
      sync_inputDom.value = o;
      // default-select the first option ('volume')
      if (oi === 0) {
        sync_inputDom.checked = true;
      }
      sync_Dom.append(sync_inputDom_label);
      sync_Dom.append(sync_inputDom);
      sync_inputDom.addEventListener('change', updateMappingOptions);
    });
    audioOptions.append(sync_Dom);

    const fft_Dom = document.createElement('div');
    const fft_imgDom = document.createElement('img');
    const fft_selectDom = document.createElement('div');
    fft_Dom.style.position = 'relative';
    fft_Dom.style.top = '0px';
    fft_Dom.style.left = '0px';
    fft_imgDom.classList.add('audio_fft');
    fft_imgDom.style.width = '100%';
    // -webkit-user-drag is non-standard; harmless where unsupported
    fft_imgDom.style.webkitUserDrag = 'none';
    fft_imgDom.style.userSelect = 'none';
    fft_imgDom.style.pointerEvents = 'none';
    fft_selectDom.style.position = 'absolute';
    fft_selectDom.style.top = '0px';
    fft_selectDom.style.left = '0px';
    fft_selectDom.style.width = '100%';
    fft_selectDom.style.height = '100%';
    fft_selectDom.style.pointerEvents = 'none';
    fft_selectDom.style.backgroundColor = 'rgba(0,255,0,0.2)';
    fft_selectDom.style.border = '1px solid rgba(0,255,0,1.0)';
    fft_Dom.append(fft_imgDom);
    fft_Dom.append(fft_selectDom);
    audioOptions.append(fft_Dom);

    min_inputDom.addEventListener('change', updateMappingOptions);
    max_inputDom.addEventListener('change', updateMappingOptions);
    smoothing_inputDom.addEventListener('change', updateMappingOptions);

    let setFrequency = false;
    let freq_down = 0;
    let freq_up = 0;
    fft_Dom.addEventListener('mousedown', (e) => {
      setFrequency = true;
      const bb = fft_Dom.getBoundingClientRect();
      freq_down = mapValue(e.clientX, bb.x, bb.x + bb.width, 0, 256 * 8 / 2, true);
    });
    fft_Dom.addEventListener('mouseup', (e) => {
      setFrequency = false;
      const bb = fft_Dom.getBoundingClientRect();
      freq_up = mapValue(e.clientX, bb.x, bb.x + bb.width, 0, 256 * 8 / 2, true);
    });
    // TODO: setFrequency / freq_down / freq_up are captured but not yet
    // used to band-limit the analysis.

    //removeAudioOptions();
    container.after(audioOptions);
    updateMappingOptions();
    mappingOptions.value = mappingOptions.min_out;
  };
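  // mapValue() comes from './utils.js'. Judging from the call sites it
  // linearly remaps a value from one range to another, with a final flag
  // that clamps the result to the output range. An inferred sketch, not
  // the actual implementation:
  //
  //   const mapValue = (v, inMin, inMax, outMin, outMax, clamp) => {
  //     const t = (v - inMin) / (inMax - inMin);
  //     const out = outMin + t * (outMax - outMin);
  //     if (!clamp) return out;
  //     const lo = Math.min(outMin, outMax);
  //     const hi = Math.max(outMin, outMax);
  //     return Math.min(Math.max(out, lo), hi);
  //   };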
  const removeAudioOptions = (propTitle = '') => {
    const panel = tp.getPanel();
    if (propTitle === '') {
      const otherAudioOptions = panel.querySelectorAll('.audioOptions');
      for (let i = 0; i < otherAudioOptions.length; i++) {
        otherAudioOptions[i].remove();
      }
    } else {
      const audioOptions = panel.querySelector(`.audioOptions${propTitle}`);
      if (audioOptions !== null) {
        audioOptions.remove();
      }
    }
  };

  const addAudioButton = (layer, propTitle, isActive) => {
    const panel = tp.getPanel();
    const panelPropTitle = tp.getPanelPropTitle(propTitle);
    if (panelPropTitle !== null) {
      const container = tp.getPanelPropContainer(panelPropTitle);
      if (container === null) {
        console.log('Audio::addAudioButton',
          `impossible! cannot find panelPropContainer for ${propTitle}`);
      } else if (container.querySelector('.audioButton') !== null) {
        // this is super verbose, let's not log by default
        //console.log("Audio::addAudioButton",
        //`already added an audio button for ${propTitle}`);
      } else {
        const button = document.createElement('div');
        button.classList.add('audioButton');
        button.classList.add(`audioButton${propTitle}`);
        button.innerHTML = 'audio';
        container.append(button);
        button.addEventListener('click', () => {
          if (!started) {
            init();
          }
          if (!mapping.hasOwnProperty(layer.id())) {
            mapping[layer.id()] = {};
          }
          if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
            mapping[layer.id()][propTitle] = {};
            button.classList.add('active');
            addAudioOptions(layer, propTitle);
          } else {
            delete mapping[layer.id()][propTitle];
            if (Object.keys(mapping[layer.id()]).length === 0) {
              delete mapping[layer.id()];
            }
            button.classList.remove('active');
            removeAudioOptions(propTitle);
          }
        });
        if (isActive) {
          button.classList.add('active');
          addAudioOptions(layer, propTitle);
        }
      }
    } else {
      console.log('Audio::addAudioButton',
        `cannot find panelPropTitle for ${propTitle}`);
    }
  };

  const injectPanel = (layer) => {
    const props = Object.keys(layer.theatreObject.value);
    props.forEach((propTitle) => {
      if (config.audio.ignoreProps.indexOf(propTitle) < 0) {
        let isActive = false;
        if (mapping.hasOwnProperty(layer.id())) {
          if (mapping[layer.id()].hasOwnProperty(propTitle)) {
            isActive = true;
          }
        }
        addAudioButton(layer, propTitle, isActive);
      }
    });
  };
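  // The `tp` helper is assumed to wrap a Theatre.js studio panel; only the
  // following parts of its interface are used here:
  //
  //   tp.getPanel()                 -> root panel DOM element
  //   tp.getPanelPropTitle(title)   -> prop title element, or null
  //   tp.getPanelPropContainer(x)   -> prop container element (takes the
  //                                    title element or the title string)
  //   tp.studio.transaction(fn)     -> batch-set prop values
  //
  // Layers (from the global getLayers()) expose layer.id(),
  // layer.theatreObject.value and layer.theatreObject.props, and the
  // second constructor argument exposes record.isRecording().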
  function init() {
    started = true;
    heading.textContent = "Voice-change-O-matic";
    //document.body.removeEventListener("click", init);

    // Older browsers might not implement mediaDevices at all,
    // so we set an empty object first
    if (navigator.mediaDevices === undefined) {
      navigator.mediaDevices = {};
    }

    // Some browsers partially implement mediaDevices. We can't assign an
    // object with getUserMedia as it would overwrite existing properties.
    // Add the getUserMedia property if it's missing.
    if (navigator.mediaDevices.getUserMedia === undefined) {
      navigator.mediaDevices.getUserMedia = function(constraints) {
        // First get ahold of the legacy getUserMedia, if present
        const getUserMedia =
          navigator.webkitGetUserMedia ||
          navigator.mozGetUserMedia ||
          navigator.msGetUserMedia;

        // Some browsers just don't implement it - return a rejected promise
        // with an error to keep a consistent interface
        if (!getUserMedia) {
          return Promise.reject(
            new Error("getUserMedia is not implemented in this browser")
          );
        }

        // Otherwise, wrap the call to the old navigator.getUserMedia
        // with a Promise
        return new Promise(function(resolve, reject) {
          getUserMedia.call(navigator, constraints, resolve, reject);
        });
      };
    }

    // Set up forked web audio context, for multiple browsers
    // window. is needed otherwise Safari explodes
    const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    const voiceSelect = audioDom.querySelector("#voice");
    let source;

    // Grab the mute button to use below
    const mute = audioDom.querySelector(".mute");

    // Set up the different audio nodes we will use for the app
    const analyser = audioCtx.createAnalyser();
    analyser.minDecibels = -90;
    analyser.maxDecibels = -10;
    analyser.smoothingTimeConstant = 0.85;
    window.analyser = analyser; // exposed for debugging

    const distortion = audioCtx.createWaveShaper();
    const gainNode = audioCtx.createGain();
    const biquadFilter = audioCtx.createBiquadFilter();
    const convolver = audioCtx.createConvolver();
    const echoDelay = createEchoDelayEffect(audioCtx);

    // Distortion curve for the waveshaper, thanks to Kevin Ennis
    // http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
    function makeDistortionCurve(amount) {
      const k = typeof amount === "number" ? amount : 50;
      const n_samples = 44100;
      const curve = new Float32Array(n_samples);
      const deg = Math.PI / 180;
      for (let i = 0; i < n_samples; ++i) {
        const x = (i * 2) / n_samples - 1;
        curve[i] = ((3 + k) * x * 20 * deg) / (Math.PI + k * Math.abs(x));
      }
      return curve;
    }

    // Grab audio track via XHR for convolver node
    let soundSource;
    const ajaxRequest = new XMLHttpRequest();
    ajaxRequest.open(
      "GET",
      "https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg",
      true
    );
    ajaxRequest.responseType = "arraybuffer";
    ajaxRequest.onload = function() {
      const audioData = ajaxRequest.response;
      audioCtx.decodeAudioData(
        audioData,
        function(buffer) {
          soundSource = audioCtx.createBufferSource();
          convolver.buffer = buffer;
        },
        function(e) {
          console.log("Error with decoding audio data: " + e.err);
        }
      );
    };
    ajaxRequest.send();

    // Set up canvas context for visualizer; the width matches the number
    // of frequency bins drawn below (256 * 8 / 2 = 1024)
    const canvas = audioDom.querySelector(".visualizer");
    const canvasCtx = canvas.getContext("2d");
    canvas.setAttribute("width", 256 * 8 / 2);
    const visualSelect = audioDom.querySelector("#visual");
    let drawVisual;

    // Main block for doing the audio recording
    if (navigator.mediaDevices.getUserMedia) {
      console.log("getUserMedia supported.");
      const constraints = { audio: true };
      navigator.mediaDevices
        .getUserMedia(constraints)
        .then(function(stream) {
          source = audioCtx.createMediaStreamSource(stream);
          source.connect(distortion);
          distortion.connect(biquadFilter);
          biquadFilter.connect(gainNode);
          convolver.connect(gainNode);
          echoDelay.placeBetween(gainNode, analyser);
          analyser.connect(audioCtx.destination);
          visualize();
          voiceChange();
        })
        .catch(function(err) {
          console.log("The following gUM error occured: " + err);
        });
    } else {
      console.log("getUserMedia not supported on your browser!");
    }
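    // Resulting audio graph (the convolver is only patched in by
    // voiceChange() when the "convolver" voice is selected):
    //
    //   mic source -> distortion -> biquadFilter -> gainNode
    //     -> [echo delay wet/dry mix] -> analyser -> destination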
    function visualize() {
      const WIDTH = canvas.width;
      const HEIGHT = canvas.height;
      const visualSetting = visualSelect.value;

      if (visualSetting === "sinewave") {
        analyser.fftSize = 2048;
        const bufferLength = analyser.fftSize;
        // We can use Float32Array instead of Uint8Array if we want
        // higher precision
        // const dataArray = new Float32Array(bufferLength);
        const dataArray = new Uint8Array(bufferLength);
        canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

        const draw = function() {
          drawVisual = requestAnimationFrame(draw);
          analyser.getByteTimeDomainData(dataArray);
          canvasCtx.fillStyle = "rgb(200, 200, 200)";
          canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
          canvasCtx.lineWidth = 2;
          canvasCtx.strokeStyle = "rgb(0, 0, 0)";
          canvasCtx.beginPath();
          const sliceWidth = (WIDTH * 1.0) / bufferLength;
          let x = 0;
          for (let i = 0; i < bufferLength; i++) {
            const v = dataArray[i] / 128.0;
            const y = (v * HEIGHT) / 2;
            if (i === 0) {
              canvasCtx.moveTo(x, y);
            } else {
              canvasCtx.lineTo(x, y);
            }
            x += sliceWidth;
          }
          canvasCtx.lineTo(canvas.width, canvas.height / 2);
          canvasCtx.stroke();
        };
        draw();
      } else if (visualSetting == "frequencybars") {
        analyser.fftSize = 256 * 8;
        // only draw (and analyse) the lower half of the spectrum
        const bufferLengthAlt = analyser.frequencyBinCount / 2;
        // See comment above for Float32Array()
        const dataArrayAlt = new Uint8Array(bufferLengthAlt);
        canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

        let frameCount = 0;
        const drawAlt = function() {
          drawVisual = requestAnimationFrame(drawAlt);
          analyser.getByteFrequencyData(dataArrayAlt);
          canvasCtx.fillStyle = "rgb(0, 0, 0)";
          canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

          const barWidth = (WIDTH / bufferLengthAlt) * 2.5;
          let barHeight;
          let x = 0;
          // track the loudest bin: max_v is its magnitude (0..255),
          // max_i its index; both are used for the sync modes below
          let max_i = 0;
          let max_v = 0;
          for (let i = 0; i < bufferLengthAlt; i++) {
            barHeight = dataArrayAlt[i];
            if (barHeight > max_v) {
              max_v = barHeight;
              max_i = i;
            }
            canvasCtx.fillStyle = "rgb(" + (barHeight + 100) + ",50,50)";
            canvasCtx.fillRect(x, HEIGHT - barHeight / 2, barWidth, barHeight / 2);
            x += barWidth + 1;
          }

          const propsToSet = [];
          getLayers().forEach((layer) => {
            if (mapping.hasOwnProperty(layer.id())) {
              Object.keys(mapping[layer.id()]).forEach((propTitle) => {
                const m = mapping[layer.id()][propTitle];
                switch (m.sync) {
                  case 'volume': {
                    const a = mapValue(max_v, 0, 255, m.min_out, m.max_out, true);
                    m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
                    propsToSet.push({
                      title: propTitle,
                      prop: layer.theatreObject.props[propTitle],
                      value: m.value,
                    });
                    break;
                  }
                  case 'pitch': {
                    const a = mapValue(max_i, 0, bufferLengthAlt - 1,
                      m.min_out, m.max_out, true);
                    m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
                    propsToSet.push({
                      title: propTitle,
                      prop: layer.theatreObject.props[propTitle],
                      value: m.value,
                    });
                    break;
                  }
                  default:
                    // 'frequency' is offered in the UI but not handled yet
                    break;
                }
              });
            }
          });

          // throttle updates to every other frame
          if (propsToSet.length > 0 && frameCount % 2 === 0) {
            if (!record.isRecording()) {
              tp.studio.transaction(({ set }) => {
                propsToSet.forEach((p) => {
                  set(p.prop, p.value, true);
                });
              });
            } else {
              propsToSet.forEach((p) => {
                // TODO: this does not have to be queried but we could
                // store it in a map/set/dictionary/array/object
                const inputElement = tp
                  .getPanelPropContainer(p.title)
                  .querySelector('input.recording');
                if (inputElement !== null) {
                  inputElement.value = p.value;
                  inputElement.dispatchEvent(new Event('change'));
                }
              });
            }
          }

          // mirror the spectrum into every open audioOptions panel
          const panel = tp.getPanel();
          const fft_images = panel.querySelectorAll('.audio_fft');
          if (fft_images.length > 0) {
            const src = canvas.toDataURL();
            fft_images.forEach((e) => {
              e.src = src;
            });
          }
          frameCount++;
        };
        drawAlt();
      } else if (visualSetting == "off") {
        canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
        canvasCtx.fillStyle = "red";
        canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
      }
    }
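    // Both sync modes reduce the spectrum to one scalar before smoothing:
    // 'volume' remaps the loudest bin's magnitude (max_v, 0..255) into
    // [min_out, max_out], while 'pitch' remaps the loudest bin's index
    // (max_i, 0..bufferLengthAlt - 1). For example, with min_out = 0 and
    // max_out = 10, a dominant bin at index 256 of 512 maps a
    // 'pitch'-synced prop to roughly 5.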
    function voiceChange() {
      distortion.oversample = "4x";
      biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0);

      const voiceSetting = voiceSelect.value;
      if (echoDelay.isApplied()) {
        echoDelay.discard();
      }

      // When convolver is selected it is connected back into the audio path
      if (voiceSetting == "convolver") {
        biquadFilter.disconnect(0);
        biquadFilter.connect(convolver);
      } else {
        biquadFilter.disconnect(0);
        biquadFilter.connect(gainNode);
        if (voiceSetting == "distortion") {
          distortion.curve = makeDistortionCurve(400);
        } else if (voiceSetting == "biquad") {
          biquadFilter.type = "lowshelf";
          biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0);
          biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0);
        } else if (voiceSetting == "delay") {
          echoDelay.apply();
        } else if (voiceSetting == "off") {
          console.log("Voice settings turned off");
        }
      }
    }

    function createEchoDelayEffect(audioContext) {
      const delay = audioContext.createDelay(1);
      const dryNode = audioContext.createGain();
      const wetNode = audioContext.createGain();
      const mixer = audioContext.createGain();
      const filter = audioContext.createBiquadFilter();
      delay.delayTime.value = 0.75;
      dryNode.gain.value = 1;
      wetNode.gain.value = 0;
      filter.frequency.value = 1100;
      filter.type = "highpass";

      return {
        apply: function() {
          wetNode.gain.setValueAtTime(0.75, audioContext.currentTime);
        },
        discard: function() {
          wetNode.gain.setValueAtTime(0, audioContext.currentTime);
        },
        isApplied: function() {
          return wetNode.gain.value > 0;
        },
        placeBetween: function(inputNode, outputNode) {
          // feedback loop: delay -> wetNode -> filter -> back into delay
          inputNode.connect(delay);
          delay.connect(wetNode);
          wetNode.connect(filter);
          filter.connect(delay);
          inputNode.connect(dryNode);
          dryNode.connect(mixer);
          wetNode.connect(mixer);
          mixer.connect(outputNode);
        },
      };
    }

    // Event listeners to change visualize and voice settings
    visualSelect.onchange = function() {
      window.cancelAnimationFrame(drawVisual);
      visualize();
    };

    voiceSelect.onchange = function() {
      voiceChange();
    };

    mute.onclick = voiceMute;

    let previousGain;

    function voiceMute() {
      if (mute.id === "") {
        previousGain = gainNode.gain.value;
        gainNode.gain.value = 0;
        mute.id = "activated";
        mute.innerHTML = "Unmute";
      } else {
        gainNode.gain.value = previousGain;
        mute.id = "";
        mute.innerHTML = "Mute";
      }
    }
  }

  this.init = init;
  this.injectPanel = injectPanel;
  this.mapping = mapping;
};

export { Audio };
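// Minimal usage sketch (hypothetical host wiring; `tp` and `record` must
// provide the interface described in the comments above):
//
//   import { Audio } from './audio.js';
//   const audio = new Audio(tp, record);
//   audio.injectPanel(layer); // adds an "audio" toggle per animatable prop
//   // clicking a toggle (or calling audio.init()) starts mic capture
//   // and begins driving the mapped props from the analyser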