diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js
index 088b899..d11b8c7 100644
--- a/bin/web/js/audio.js
+++ b/bin/web/js/audio.js
@@ -5,36 +5,10 @@ import {
 window.mapValue = mapValue;
-
-const AudioMappingOptions = function() {
-  this.freq_min = 0.0;
-  this.freq_max = config.audio.fftBandsUsed;
-  this.min_out = 0.0;
-  this.max_out = 1.0;
-  this.smoothing = config.audio.defaultSmoothing;
-  this.sync = 'volume';
-  this.value = 0.0;
-};
-
 const Audio = function(tp, record) {
-
   const audioDom = document.querySelector('.audioWrapper');
-  let audioCtx = false;
   const heading = audioDom.querySelector("h1");
   heading.textContent = "CLICK HERE TO START";
-
-  // an array of possible sync options.
-  const audio_sync_options = ['volume', 'pitch', 'frequency'];
-  // could also be an enum
-  // like that
-  //const AudioSyncOptions = Object.freeze({
-    //RED: Symbol("volume"),
-    //BLUE: Symbol("pitch"),
-    //GREEN: Symbol("frequency"),
-    //toString: (e) => {
-      //return e.toString.match(/\(([\S\s]*)\)/)[1]
-    //},
-  //});
 
   //document.body.addEventListener("click", init);
   let started = false;
@@ -42,66 +16,14 @@ const Audio = function(tp, record) {
   const canvass = [];
   const canvasCtxs = [];
 
-  const isMapped = (layer, propTitle) => {
-    if (!mapping.hasOwnProperty(layer.id())) {
-      return false;
-    }
-    if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
-      return false;
-    }
-    return true;
-  };
-
-  const addAudioMapping = (layer, propTitle, options = new AudioMappingOptions()) => {
-    if (!mapping.hasOwnProperty(layer.id())) {
-      mapping[layer.id()] = {};
-    }
-    if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
-      mapping[layer.id()][propTitle] = options;
-      return true;
-    } else {
-      // already there
-      return false;
-    }
-  };
-
-  const removeAudioMapping = (layer = false, propTitle = false) => {
-    if (!layer && !propTitle) {
-      Object.keys(mapping).forEach((layerID) => {
-        Object.keys(mapping[layerID]).forEach((propTitle) => {
-          delete mapping[layerID][propTitle];
-        });
-        delete mapping[layerID];
-      });
-      return true;
-    }
-    if (!mapping.hasOwnProperty(layer.id())) {
-      // no layer
-      return false;
-    }
-    if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
-      // no propTitle
-      return false;
-    }
-    delete mapping[layer.id()][propTitle];
-    if (Object.keys(mapping[layer.id()]).length === 0) {
-      delete mapping[layer.id()];
-    }
-    return true;
-  }
-
   const addAudioOptions = (layer, propTitle) => {
-    if (!started) {
-      // audioOptions need a started init
-      init();
-    }
     const panelPropTitle = tp.getPanelPropTitle(propTitle);
     if (panelPropTitle === null) {
       console.log('Audio::addAudioOptions::error',`cannot find panelPropTitle "${propTitle}"`);
       return;
     }
     if (tp.getPanel().querySelector(`.audioOptions${propTitle}`) !== null) {
-      //console.log('Audio::addAudioOptions::error',`audioOptions already exist for "${propTitle}"`);
+      console.log('Audio::addAudioOptions::error',`audioOptions already exist for "${propTitle}"`);
       return;
     }
     const container = tp.getPanelPropContainer(panelPropTitle);
@@ -116,6 +38,9 @@ const Audio = function(tp, record) {
     audioOptions.style.background = 'rgba(0,255,255,0.2)';
     audioOptions.style.order = parseInt(container.style.order) + 1;
 
+    mappingOptions.freq_min = 0;
+    mappingOptions.freq_max = config.audio.fftBandsUsed;
+
     const updateMappingOptions = () => {
       mappingOptions.min_out = parseFloat(panel.querySelector(`#audio_min${propTitle}`).value);
       mappingOptions.max_out = parseFloat(panel.querySelector(`#audio_max${propTitle}`).value);
@@ -134,7 +59,7 @@ const Audio = function(tp, record) {
     min_inputDom.type = 'number';
     min_inputDom.name = `audio_min${propTitle}`;
     min_inputDom.id = `audio_min${propTitle}`;
-    min_inputDom.value = `${mappingOptions.min_out}`;
+    min_inputDom.value = '0';
     const max_inputDom_label = document.createElement('label');
     max_inputDom_label.for = 'audio_max';
     max_inputDom_label.innerHTML = 'audio_max';
@@ -142,7 +67,7 @@ const Audio = function(tp, record) {
     max_inputDom.type = 'number';
     max_inputDom.name = `audio_max${propTitle}`;
     max_inputDom.id = `audio_max${propTitle}`;
-    max_inputDom.value = `${mappingOptions.max_out}`;
+    max_inputDom.value = '255';
     const smoothing_inputDom_label = document.createElement('label');
     smoothing_inputDom_label.for = 'audio_smoothing';
     smoothing_inputDom_label.innerHTML = 'audio_smoothing';
@@ -150,7 +75,7 @@ const Audio = function(tp, record) {
     smoothing_inputDom.type = 'number';
     smoothing_inputDom.name = `audio_smoothing${propTitle}`;
     smoothing_inputDom.id = `audio_smoothing${propTitle}`;
-    smoothing_inputDom.value = mappingOptions.smoothing;
+    smoothing_inputDom.value = config.audio.defaultSmoothing;
     smoothing_inputDom.min = 0;
     smoothing_inputDom.max = 1;
     smoothing_inputDom.step = 0.01;
@@ -167,7 +92,8 @@ const Audio = function(tp, record) {
     sync_titleDom.innerHTML = 'sync with:';
     sync_Dom.append(sync_titleDom);
 
-    audio_sync_options.forEach((o, oi) => {
+    const sync_options = ['volume', 'pitch', 'frequency'];
+    sync_options.forEach((o, oi) => {
       const sync_inputDom_label = document.createElement('label');
       sync_inputDom_label.for = `audio_sync${o}`;
       sync_inputDom_label.innerHTML = o;
@@ -177,7 +103,7 @@ const Audio = function(tp, record) {
       sync_inputDom.id = `audio_sync${propTitle}${o}`;
       sync_inputDom.value = o;
       // default select first option
-      if (o === mappingOptions.sync) {
+      if (oi === 0) {
         sync_inputDom.checked = '1';
       }
       sync_Dom.append(sync_inputDom_label);
@@ -232,39 +158,26 @@ const Audio = function(tp, record) {
     //removeAudioOptions();
     container.after(audioOptions);
 
-    const audioButton = container.querySelector('.audioButton');
-    audioButton.classList.add('active');
-
     canvass.push(fft_imgDom);
     canvasCtxs.push(fft_imgDom.getContext("2d"));
     updateMappingOptions();
+    mappingOptions.value = mappingOptions.min_out;
   };
 
-  const removeAudioOptions = (layer = false, propTitle = false) => {
+  const removeAudioOptions = (propTitle = '') => {
     const panel = tp.getPanel();
-    if (!layer && !propTitle) {
-      const allAudioOptions = panel.querySelectorAll('.audioOptions');
-      if (allAudioOptions !== null) {
-        for (let i = 0; i < allAudioOptions.length; i++) {
-          allAudioOptions[i].remove();
+    if (propTitle === '') {
+      const otherAudioOptions = panel.querySelectorAll('.audioOptions');
+      if (otherAudioOptions !== null) {
+        for (let i = 0; i < otherAudioOptions.length; i++) {
+          otherAudioOptions[i].remove();
         }
       }
-      panel.querySelectorAll('.audioButton').forEach((button) => {
-        button.classList.remove('active');
-      });
     } else {
-      // only selected layers have options
-      // otherwise the ui is not there
-      if (layer.isSelected()) {
-        const audioOptions = panel.querySelector(`.audioOptions${propTitle}`);
-        if (audioOptions !== null) {
-          audioOptions.remove();
-        }
-        const audioButton = panel.querySelector(`.audioButton${propTitle}`);
-        if (audioButton !== null) {
-          audioButton.classList.remove('active');
-        }
+      const audioOptions = panel.querySelector(`.audioOptions${propTitle}`);
+      if (audioOptions !== null) {
+        audioOptions.remove();
       }
     }
   };
@@ -292,16 +205,24 @@ const Audio = function(tp, record) {
       if (!started) {
        init();
      }
-      if (!isMapped(layer, propTitle)) {
-
addAudioMapping(layer, propTitle); + if (!mapping.hasOwnProperty(layer.id())) { + mapping[layer.id()] = {}; + } + if (!mapping[layer.id()].hasOwnProperty(propTitle)) { + mapping[layer.id()][propTitle] = {}; + button.classList.add('active'); addAudioOptions(layer, propTitle); } else { - removeAudioMapping(layer, propTitle); - removeAudioOptions(layer, propTitle); + delete mapping[layer.id()][propTitle]; + if (Object.keys(mapping[layer.id()]).length === 0) { + delete mapping[layer.id()]; + } + button.classList.remove('active'); + removeAudioOptions(propTitle); } }); if (isActive) { - addAudioMapping(layer, propTitle); + button.classList.add('active'); addAudioOptions(layer, propTitle); } } @@ -326,479 +247,446 @@ const Audio = function(tp, record) { }); }; - const init = () => { - if (!started) { - started = true; - if (audioCtx !== false && audioCtx.state === 'suspended') { - audioCtx.resume(); - return; - } - heading.textContent = "Voice-change-O-matic"; - //document.body.removeEventListener("click", init); + function init() { + started = true; + heading.textContent = "Voice-change-O-matic"; + //document.body.removeEventListener("click", init); - // Older browsers might not implement mediaDevices at all, so we set an empty object first - if (navigator.mediaDevices === undefined) { - navigator.mediaDevices = {}; - } + // Older browsers might not implement mediaDevices at all, so we set an empty object first + if (navigator.mediaDevices === undefined) { + navigator.mediaDevices = {}; + } - // Some browsers partially implement mediaDevices. We can't assign an object - // with getUserMedia as it would overwrite existing properties. - // Add the getUserMedia property if it's missing. - if (navigator.mediaDevices.getUserMedia === undefined) { - navigator.mediaDevices.getUserMedia = function(constraints) { - // First get ahold of the legacy getUserMedia, if present - const getUserMedia = - navigator.webkitGetUserMedia || - navigator.mozGetUserMedia || - navigator.msGetUserMedia; + // Some browsers partially implement mediaDevices. We can't assign an object + // with getUserMedia as it would overwrite existing properties. + // Add the getUserMedia property if it's missing. + if (navigator.mediaDevices.getUserMedia === undefined) { + navigator.mediaDevices.getUserMedia = function(constraints) { + // First get ahold of the legacy getUserMedia, if present + const getUserMedia = + navigator.webkitGetUserMedia || + navigator.mozGetUserMedia || + navigator.msGetUserMedia; - // Some browsers just don't implement it - return a rejected promise with an error - // to keep a consistent interface - if (!getUserMedia) { - return Promise.reject( - new Error("getUserMedia is not implemented in this browser") - ); - } - - // Otherwise, wrap the call to the old navigator.getUserMedia with a Promise - return new Promise(function(resolve, reject) { - getUserMedia.call(navigator, constraints, resolve, reject); - }); - }; - } - - // Set up forked web audio context, for multiple browsers - // window. 
is needed otherwise Safari explodes - audioCtx = new(window.AudioContext || window.webkitAudioContext)(); - const voiceSelect = audioDom.querySelector("#voice"); - let source; - let stream; - - // Grab the mute button to use below - const mute = audioDom.querySelector(".mute"); - - // Set up the different audio nodes we will use for the app - const analyser = audioCtx.createAnalyser(); - analyser.minDecibels = -90; - analyser.maxDecibels = -10; - analyser.smoothingTimeConstant = 0.85; - window.analyser = analyser; - - const distortion = audioCtx.createWaveShaper(); - const gainNode = audioCtx.createGain(); - const biquadFilter = audioCtx.createBiquadFilter(); - const convolver = audioCtx.createConvolver(); - - const echoDelay = createEchoDelayEffect(audioCtx); - - // Distortion curve for the waveshaper, thanks to Kevin Ennis - // http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion - function makeDistortionCurve(amount) { - let k = typeof amount === "number" ? amount : 50, - n_samples = 44100, - curve = new Float32Array(n_samples), - deg = Math.PI / 180, - i = 0, - x; - for (; i < n_samples; ++i) { - x = (i * 2) / n_samples - 1; - curve[i] = ((3 + k) * x * 20 * deg) / (Math.PI + k * Math.abs(x)); + // Some browsers just don't implement it - return a rejected promise with an error + // to keep a consistent interface + if (!getUserMedia) { + return Promise.reject( + new Error("getUserMedia is not implemented in this browser") + ); } - return curve; - } - // Grab audio track via XHR for convolver node - let soundSource; - const ajaxRequest = new XMLHttpRequest(); - - ajaxRequest.open( - "GET", - "https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg", - true - ); - - ajaxRequest.responseType = "arraybuffer"; - - ajaxRequest.onload = function() { - const audioData = ajaxRequest.response; - - audioCtx.decodeAudioData( - audioData, - function(buffer) { - soundSource = audioCtx.createBufferSource(); - convolver.buffer = buffer; - }, - function(e) { - console.log("Error with decoding audio data" + e.err); - } - ); + // Otherwise, wrap the call to the old navigator.getUserMedia with a Promise + return new Promise(function(resolve, reject) { + getUserMedia.call(navigator, constraints, resolve, reject); + }); }; + } - ajaxRequest.send(); + // Set up forked web audio context, for multiple browsers + // window. 
is needed otherwise Safari explodes + const audioCtx = new(window.AudioContext || window.webkitAudioContext)(); + const voiceSelect = audioDom.querySelector("#voice"); + let source; + let stream; - // Set up canvas context for visualizer - const canvas = audioDom.querySelector(".visualizer"); - const canvasCtx = canvas.getContext("2d"); + // Grab the mute button to use below + const mute = audioDom.querySelector(".mute"); - const intendedWidth = audioDom.clientWidth; - canvas.setAttribute("width", config.audio.fftBandsUsed); - const visualSelect = audioDom.querySelector("#visual"); - let drawVisual; + // Set up the different audio nodes we will use for the app + const analyser = audioCtx.createAnalyser(); + analyser.minDecibels = -90; + analyser.maxDecibels = -10; + analyser.smoothingTimeConstant = 0.85; + window.analyser = analyser; - // Main block for doing the audio recording - if (navigator.mediaDevices.getUserMedia) { - console.log("getUserMedia supported."); - const constraints = { - audio: true - }; - navigator.mediaDevices - .getUserMedia(constraints) - .then(function(stream) { - source = audioCtx.createMediaStreamSource(stream); - source.connect(distortion); - distortion.connect(biquadFilter); - biquadFilter.connect(gainNode); - convolver.connect(gainNode); - echoDelay.placeBetween(gainNode, analyser); - analyser.connect(audioCtx.destination); + const distortion = audioCtx.createWaveShaper(); + const gainNode = audioCtx.createGain(); + const biquadFilter = audioCtx.createBiquadFilter(); + const convolver = audioCtx.createConvolver(); - visualize(); - voiceChange(); - }) - .catch(function(err) { - console.log("The following gUM error occured: " + err); - }); - } else { - console.log("getUserMedia not supported on your browser!"); + const echoDelay = createEchoDelayEffect(audioCtx); + + // Distortion curve for the waveshaper, thanks to Kevin Ennis + // http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion + function makeDistortionCurve(amount) { + let k = typeof amount === "number" ? 
amount : 50, + n_samples = 44100, + curve = new Float32Array(n_samples), + deg = Math.PI / 180, + i = 0, + x; + for (; i < n_samples; ++i) { + x = (i * 2) / n_samples - 1; + curve[i] = ((3 + k) * x * 20 * deg) / (Math.PI + k * Math.abs(x)); } + return curve; + } - function visualize() { - const WIDTH = canvas.width; - const HEIGHT = canvas.height; + // Grab audio track via XHR for convolver node + let soundSource; + const ajaxRequest = new XMLHttpRequest(); - const visualSetting = visualSelect.value; + ajaxRequest.open( + "GET", + "https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg", + true + ); - if (visualSetting === "sinewave") { - analyser.fftSize = 2048; - const bufferLength = analyser.fftSize; + ajaxRequest.responseType = "arraybuffer"; - // We can use Float32Array instead of Uint8Array if we want higher precision - // const dataArray = new Float32Array(bufferLength); - const dataArray = new Uint8Array(bufferLength); + ajaxRequest.onload = function() { + const audioData = ajaxRequest.response; - canvasCtx.clearRect(0, 0, WIDTH, HEIGHT); + audioCtx.decodeAudioData( + audioData, + function(buffer) { + soundSource = audioCtx.createBufferSource(); + convolver.buffer = buffer; + }, + function(e) { + console.log("Error with decoding audio data" + e.err); + } + ); + }; - const draw = function() { - drawVisual = requestAnimationFrame(draw); + ajaxRequest.send(); - analyser.getByteTimeDomainData(dataArray); + // Set up canvas context for visualizer + const canvas = audioDom.querySelector(".visualizer"); + const canvasCtx = canvas.getContext("2d"); - canvasCtx.fillStyle = "rgb(200, 200, 200)"; - canvasCtx.fillRect(0, 0, WIDTH, HEIGHT); + const intendedWidth = audioDom.clientWidth; + canvas.setAttribute("width", config.audio.fftBandsUsed); + const visualSelect = audioDom.querySelector("#visual"); + let drawVisual; - canvasCtx.lineWidth = 2; - canvasCtx.strokeStyle = "rgb(0, 0, 0)"; + // Main block for doing the audio recording + if (navigator.mediaDevices.getUserMedia) { + console.log("getUserMedia supported."); + const constraints = { + audio: true + }; + navigator.mediaDevices + .getUserMedia(constraints) + .then(function(stream) { + source = audioCtx.createMediaStreamSource(stream); + source.connect(distortion); + distortion.connect(biquadFilter); + biquadFilter.connect(gainNode); + convolver.connect(gainNode); + echoDelay.placeBetween(gainNode, analyser); + analyser.connect(audioCtx.destination); - canvasCtx.beginPath(); + visualize(); + voiceChange(); + }) + .catch(function(err) { + console.log("The following gUM error occured: " + err); + }); + } else { + console.log("getUserMedia not supported on your browser!"); + } - const sliceWidth = (WIDTH * 1.0) / bufferLength; - let x = 0; + function visualize() { + const WIDTH = canvas.width; + const HEIGHT = canvas.height; - for (let i = 0; i < bufferLength; i++) { - let v = dataArray[i] / 128.0; - let y = (v * HEIGHT) / 2; + const visualSetting = visualSelect.value; - if (i === 0) { - canvasCtx.moveTo(x, y); - } else { - canvasCtx.lineTo(x, y); - } + if (visualSetting === "sinewave") { + analyser.fftSize = 2048; + const bufferLength = analyser.fftSize; - x += sliceWidth; + // We can use Float32Array instead of Uint8Array if we want higher precision + // const dataArray = new Float32Array(bufferLength); + const dataArray = new Uint8Array(bufferLength); + + canvasCtx.clearRect(0, 0, WIDTH, HEIGHT); + + const draw = function() { + drawVisual = requestAnimationFrame(draw); + + analyser.getByteTimeDomainData(dataArray); + + 
canvasCtx.fillStyle = "rgb(200, 200, 200)"; + canvasCtx.fillRect(0, 0, WIDTH, HEIGHT); + + canvasCtx.lineWidth = 2; + canvasCtx.strokeStyle = "rgb(0, 0, 0)"; + + canvasCtx.beginPath(); + + const sliceWidth = (WIDTH * 1.0) / bufferLength; + let x = 0; + + for (let i = 0; i < bufferLength; i++) { + let v = dataArray[i] / 128.0; + let y = (v * HEIGHT) / 2; + + if (i === 0) { + canvasCtx.moveTo(x, y); + } else { + canvasCtx.lineTo(x, y); } - canvasCtx.lineTo(canvas.width, canvas.height / 2); - canvasCtx.stroke(); - }; + x += sliceWidth; + } - draw(); - } else if (visualSetting == "frequencybars") { - analyser.fftSize = config.audio.fftBandsAnalysed; - const w = config.audio.fftBandsUsed; - const h = config.audio.fftHeight; - const bufferLengthAlt = analyser.frequencyBinCount / 2; + canvasCtx.lineTo(canvas.width, canvas.height / 2); + canvasCtx.stroke(); + }; - // See comment above for Float32Array() - const dataArrayAlt = new Uint8Array(bufferLengthAlt); + draw(); + } else if (visualSetting == "frequencybars") { + analyser.fftSize = config.audio.fftBandsAnalysed; + const w = config.audio.fftBandsUsed; + const h = config.audio.fftHeight; + const bufferLengthAlt = analyser.frequencyBinCount / 2; + + // See comment above for Float32Array() + const dataArrayAlt = new Uint8Array(bufferLengthAlt); + + for (let i = 0; i < canvasCtxs.length; i++) { + canvasCtxs[i].clearRect(0, 0, w, h); + } + + let frameCount = 0; + const drawAlt = function() { + drawVisual = requestAnimationFrame(drawAlt); + + analyser.getByteFrequencyData(dataArrayAlt); for (let i = 0; i < canvasCtxs.length; i++) { - canvasCtxs[i].clearRect(0, 0, w, h); + canvasCtxs[i].fillStyle = "rgb(0, 0, 0)"; + canvasCtxs[i].fillRect(0, 0, w, h); } - let frameCount = 0; - const drawAlt = function() { - drawVisual = requestAnimationFrame(drawAlt); + const barWidth = (w / bufferLengthAlt) * 2.5; + let barHeight; + let x = 0; - analyser.getByteFrequencyData(dataArrayAlt); + let max_i = 0; + let max_v = 0; + for (let i = 0; i < bufferLengthAlt; i++) { + barHeight = dataArrayAlt[i]; + if (barHeight > max_v) { + max_v = barHeight; + max_i = i; + } for (let i = 0; i < canvasCtxs.length; i++) { - canvasCtxs[i].fillStyle = "rgb(0, 0, 0)"; - canvasCtxs[i].fillRect(0, 0, w, h); + canvasCtxs[i].fillStyle = "rgb(" + (barHeight + 100) + ",50,50)"; + canvasCtxs[i].fillRect( + x, + h - barHeight / 2, + barWidth, + barHeight / 2 + ); } - const barWidth = (w / bufferLengthAlt) * 2.5; - let barHeight; - let x = 0; - - let max_i = 0; - let max_v = 0; - for (let i = 0; i < bufferLengthAlt; i++) { - barHeight = dataArrayAlt[i]; - - if (barHeight > max_v) { - max_v = barHeight; - max_i = i; - } - for (let i = 0; i < canvasCtxs.length; i++) { - canvasCtxs[i].fillStyle = "rgb(" + (barHeight + 100) + ",50,50)"; - canvasCtxs[i].fillRect( - x, - h - barHeight / 2, - barWidth, - barHeight / 2 - ); - } - - x += barWidth + 1; - } - const propsToSet = []; - getLayers().forEach((layer) => { - if (mapping.hasOwnProperty(layer.id())) { - Object.keys(mapping[layer.id()]).forEach((propTitle) => { - const m = mapping[layer.id()][propTitle]; - switch (m.sync) { - case 'volume': { - let a = mapValue(max_v, 0, 255, m.min_out, m.max_out, true); - m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; - propsToSet.push({ - layer, - id: layer.id(), - title: propTitle, - prop: layer.theatreObject.props[propTitle], - value: m.value, - }); - break; - } - case 'pitch': { - let a = mapValue(max_i, 0, bufferLengthAlt - 1, m.min_out, m.max_out, true); - m.value = m.value * m.smoothing + (1.0 
- m.smoothing) * a; - propsToSet.push({ - layer, - id: layer.id(), - title: propTitle, - prop: layer.theatreObject.props[propTitle], - value: m.value, - }); - break; - } - default: - break; + x += barWidth + 1; + } + const propsToSet = []; + getLayers().forEach((layer) => { + if (mapping.hasOwnProperty(layer.id())) { + Object.keys(mapping[layer.id()]).forEach((propTitle) => { + const m = mapping[layer.id()][propTitle]; + switch(m.sync) { + case 'volume': { + let a = mapValue(max_v, 0, 255, m.min_out, m.max_out, true); + m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; + propsToSet.push({ + layer, + id: layer.id(), + title: propTitle, + prop: layer.theatreObject.props[propTitle], + value: m.value, + }); + break; } - if (m.sync === 'volume') {} - }); - } - }); - if (propsToSet.length > 0 && frameCount % 2 === 0) { - // this is when to monitor live - if (!record.isRecording()) { - if (!tp.core.val(tp.sheet.sequence.pointer.playing)) { - if (typeof window.immediateUpdate !== 'function') { - window.immediateUpdate = (layer, values) => { - const v = { - ...layer.theatreObject.value, - ...values - }; - const p = layer.values2cppProps(v); - if (p !== false) { - const id = layer.id(); - if (id !== 'artboard') { - Module.setProps(p, layer.id()); - } else { - Module.setArtboardProps(p, layer.id()); - } - } - }; + case 'pitch': { + let a = mapValue(max_i, 0, bufferLengthAlt-1, m.min_out, m.max_out, true); + m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; + propsToSet.push({ + layer, + id: layer.id(), + title: propTitle, + prop: layer.theatreObject.props[propTitle], + value: m.value, + }); + break; } - let values = {}; - propsToSet.forEach((p) => { - const newValues = { - [p.title]: p.value - }; - if (!values.hasOwnProperty(p.id)) { - values[p.id] = {}; - } - values[p.id] = { - ...values[p.id], - ...newValues, - }; - }); - Object.keys(values).forEach((layerID) => { - immediateUpdate(getLayer(layerID), values[layerID]); - }); + default: + break; } - } else { - propsToSet.forEach((p) => { - const title = tp - .getPanelPropContainer(p.title); - - if (title !== null) { - const inputElement = title - .querySelector('input.recording'); - - if (inputElement !== null) { - inputElement.value = p.value; - inputElement.dispatchEvent(new Event('change')); + if (m.sync === 'volume') { + } + }); + } + }); + if (propsToSet.length > 0 && frameCount % 2 === 0) { + // this is when to monitor live + if (!record.isRecording()) { + if (!tp.core.val(tp.sheet.sequence.pointer.playing)) { + if (typeof window.immediateUpdate !== 'function') { + window.immediateUpdate = (layer, values) => { + const v = { + ...layer.theatreObject.value, + ...values + }; + const p = layer.values2cppProps(v); + if (p !== false) { + const id = layer.id(); + if (id !== 'artboard') { + Module.setProps(p, layer.id()); + } else { + Module.setArtboardProps(p, layer.id()); + } } - } + }; + } + propsToSet.forEach((p) => { + immediateUpdate(p.layer, { + [p.title]: p.value + }); }); } + } else { + propsToSet.forEach((p) => { + const title = tp + .getPanelPropContainer(p.title); + + if (title !== null) { + const inputElement = title + .querySelector('input.recording'); + + if (inputElement !== null) { + inputElement.value = p.value; + inputElement.dispatchEvent(new Event('change')); + } + } + }); } - //const panel = tp.getPanel(); - //const fft_images = panel.querySelectorAll('.audio_fft'); - //if (fft_images !== null) { + } + //const panel = tp.getPanel(); + //const fft_images = panel.querySelectorAll('.audio_fft'); + //if (fft_images 
!== null) { //const src = canvas.toDataURL(); //fft_images.forEach((e) => { - //e.src = src; + //e.src = src; //}); - //} - frameCount++; - }; - drawAlt(); - } else if (visualSetting == "off") { - canvasCtx.clearRect(0, 0, WIDTH, HEIGHT); - canvasCtx.fillStyle = "red"; - canvasCtx.fillRect(0, 0, WIDTH, HEIGHT); - } - } - - function voiceChange() { - distortion.oversample = "4x"; - biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0); - - const voiceSetting = voiceSelect.value; - - if (echoDelay.isApplied()) { - echoDelay.discard(); - } - - // When convolver is selected it is connected back into the audio path - if (voiceSetting == "convolver") { - biquadFilter.disconnect(0); - biquadFilter.connect(convolver); - } else { - biquadFilter.disconnect(0); - biquadFilter.connect(gainNode); - - if (voiceSetting == "distortion") { - distortion.curve = makeDistortionCurve(400); - } else if (voiceSetting == "biquad") { - biquadFilter.type = "lowshelf"; - biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0); - biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0); - } else if (voiceSetting == "delay") { - echoDelay.apply(); - } else if (voiceSetting == "off") { - console.log("Voice settings turned off"); - } - } - } - - function createEchoDelayEffect(audioContext) { - const delay = audioContext.createDelay(1); - const dryNode = audioContext.createGain(); - const wetNode = audioContext.createGain(); - const mixer = audioContext.createGain(); - const filter = audioContext.createBiquadFilter(); - - delay.delayTime.value = 0.75; - dryNode.gain.value = 1; - wetNode.gain.value = 0; - filter.frequency.value = 1100; - filter.type = "highpass"; - - return { - apply: function() { - wetNode.gain.setValueAtTime(0.75, audioContext.currentTime); - }, - discard: function() { - wetNode.gain.setValueAtTime(0, audioContext.currentTime); - }, - isApplied: function() { - return wetNode.gain.value > 0; - }, - placeBetween: function(inputNode, outputNode) { - inputNode.connect(delay); - delay.connect(wetNode); - wetNode.connect(filter); - filter.connect(delay); - - inputNode.connect(dryNode); - dryNode.connect(mixer); - wetNode.connect(mixer); - mixer.connect(outputNode); - }, + //} + frameCount++; }; + drawAlt(); + } else if (visualSetting == "off") { + canvasCtx.clearRect(0, 0, WIDTH, HEIGHT); + canvasCtx.fillStyle = "red"; + canvasCtx.fillRect(0, 0, WIDTH, HEIGHT); + } + } + + function voiceChange() { + distortion.oversample = "4x"; + biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0); + + const voiceSetting = voiceSelect.value; + + if (echoDelay.isApplied()) { + echoDelay.discard(); } - // Event listeners to change visualize and voice settings - visualSelect.onchange = function() { - window.cancelAnimationFrame(drawVisual); - visualize(); - }; + // When convolver is selected it is connected back into the audio path + if (voiceSetting == "convolver") { + biquadFilter.disconnect(0); + biquadFilter.connect(convolver); + } else { + biquadFilter.disconnect(0); + biquadFilter.connect(gainNode); - voiceSelect.onchange = function() { - voiceChange(); - }; - - mute.onclick = voiceMute; - - let previousGain; - - function voiceMute() { - if (mute.id === "") { - previousGain = gainNode.gain.value; - gainNode.gain.value = 0; - mute.id = "activated"; - mute.innerHTML = "Unmute"; - } else { - gainNode.gain.value = previousGain; - mute.id = ""; - mute.innerHTML = "Mute"; + if (voiceSetting == "distortion") { + distortion.curve = makeDistortionCurve(400); + } else if (voiceSetting == 
"biquad") { + biquadFilter.type = "lowshelf"; + biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0); + biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0); + } else if (voiceSetting == "delay") { + echoDelay.apply(); + } else if (voiceSetting == "off") { + console.log("Voice settings turned off"); } } } + + function createEchoDelayEffect(audioContext) { + const delay = audioContext.createDelay(1); + const dryNode = audioContext.createGain(); + const wetNode = audioContext.createGain(); + const mixer = audioContext.createGain(); + const filter = audioContext.createBiquadFilter(); + + delay.delayTime.value = 0.75; + dryNode.gain.value = 1; + wetNode.gain.value = 0; + filter.frequency.value = 1100; + filter.type = "highpass"; + + return { + apply: function() { + wetNode.gain.setValueAtTime(0.75, audioContext.currentTime); + }, + discard: function() { + wetNode.gain.setValueAtTime(0, audioContext.currentTime); + }, + isApplied: function() { + return wetNode.gain.value > 0; + }, + placeBetween: function(inputNode, outputNode) { + inputNode.connect(delay); + delay.connect(wetNode); + wetNode.connect(filter); + filter.connect(delay); + + inputNode.connect(dryNode); + dryNode.connect(mixer); + wetNode.connect(mixer); + mixer.connect(outputNode); + }, + }; + } + + // Event listeners to change visualize and voice settings + visualSelect.onchange = function() { + window.cancelAnimationFrame(drawVisual); + visualize(); + }; + + voiceSelect.onchange = function() { + voiceChange(); + }; + + mute.onclick = voiceMute; + + let previousGain; + + function voiceMute() { + if (mute.id === "") { + previousGain = gainNode.gain.value; + gainNode.gain.value = 0; + mute.id = "activated"; + mute.innerHTML = "Unmute"; + } else { + gainNode.gain.value = previousGain; + mute.id = ""; + mute.innerHTML = "Mute"; + } + } } - const deinit = () => { - if (started) { - if (audioCtx !== false) { - audioCtx.suspend(); - } - started = false; - } - }; - this.getContext = () => { - return audioCtx; - }; this.init = init; - this.deinit = deinit; this.injectPanel = injectPanel; this.mapping = mapping; - this.addAudioMapping = addAudioMapping; - this.removeAudioMapping = removeAudioMapping; - this.addAudioOptions = addAudioOptions; - this.removeAudioOptions = removeAudioOptions; - this.AudioMappingOptions = AudioMappingOptions; }; export { diff --git a/bin/web/js/config.js b/bin/web/js/config.js index c3fc4bc..be09a8e 100644 --- a/bin/web/js/config.js +++ b/bin/web/js/config.js @@ -91,7 +91,6 @@ const config = { }, record: { ignoreProps: ['fontVariationAxes','letterDelays','color'], - recordMapped: true, }, midi: { touchTimeThreshold_s: 0.5, diff --git a/bin/web/js/main.js b/bin/web/js/main.js index 0bc1e9e..b991476 100644 --- a/bin/web/js/main.js +++ b/bin/web/js/main.js @@ -188,6 +188,7 @@ window.onload = () => { alert('Sorry, Variable Time is a tool currently designed to be used on desktop!'); } window.addEventListener('panelEvent', (e) => { + console.log('debug panelEvent received', e); clearTimeout(window.panelFinderTimeout); let target = false; if (e.detail.panelID === 'artboard') { @@ -203,6 +204,7 @@ window.onload = () => { } }); window.addEventListener('sequenceEvent', (e) => { + console.log('debug sequenceEvent received', e); let target = false; if (e.detail.panelID === 'artboard') { target = artboard; diff --git a/bin/web/js/record.js b/bin/web/js/record.js index 1a654d5..b1dca97 100644 --- a/bin/web/js/record.js +++ b/bin/web/js/record.js @@ -137,7 +137,7 @@ const Record = function(tp) { 
return hot.hasOwnProperty(layerID) && hot[layerID].hasOwnProperty(propTitle);
   };
-  const addHot = (layerID, propTitle) => {
+  const makeHot = (layerID, propTitle) => {
     if (!isHot(layerID, propTitle)) {
       if (!hot.hasOwnProperty(layerID)) {
         hot[layerID] = {};
       }
@@ -146,36 +146,11 @@ const Record = function(tp) {
         recording: [],
       };
     }
-    buffy.register(layerID);
-    // handle UI only if layer is selected
-    if (getLayer(layerID).isSelected()) {
-      const button = tp
-        .getPanelPropContainer(propTitle)
-        .querySelector('.recordButton');
-      if (button !== null) {
-        button.classList.add('active');
-      }
-    }
-  };
-  const removeHot = (layerID, propTitle) => {
-    if (isHot(layerID, propTitle)) {
-      delete hot[layerID][propTitle];
-    }
-    // what if it is the last prop in the layer
-    if (hot.hasOwnProperty(layerID)) {
-      if (Object.keys(hot[layerID]).length === 0) {
-        delete hot[layerID];
-        buffy.deregister(layerID);
-      }
-    }
-    // handle UI only if layer is selected
-    if (getLayer(layerID).isSelected()) {
-      const button = tp
-        .getPanelPropContainer(propTitle)
-        .querySelector('.recordButton');
-      if (button !== null) {
-        button.classList.remove('active');
-      }
+    const button = tp
+      .getPanelPropContainer(propTitle)
+      .querySelector('.recordButton');
+    if (button !== null) {
+      button.classList.add('active');
     }
   };
   //const makeNotHot = (layerID, propTitle) => {
@@ -206,27 +181,21 @@ const Record = function(tp) {
           if(isRecording) {
             stopRecording();
           } else {
-            if (config.record.recordMapped) {
-              // make all mapped props hot and
-              Object.keys(audio.mapping)
-                .forEach((layerID) => {
-                  if (getLayer(layerID).isSelected()) {
-                    Object.keys(audio.mapping[layerID])
-                      .forEach((propTitle) => {
-                        addHot(layerID, propTitle);
-                      });
-                  }
+            Object.keys(audio.mapping)
+              .forEach((layerID) => {
+                if (getLayer(layerID).isSelected()) {
+                  Object.keys(audio.mapping[layerID])
+                    .forEach((propTitle) => {
+                      makeHot(layerID, propTitle);
                     });
-            } else {
-              // only make this propTitle hot and
-              // register its layer for recording
-              addHot(layer.id(), propTitle);
-            }
+                  buffy.register(layerID);
+                }
+              });
             startRecording();
           }
         });
-        //console.log("Record::addRecordButton",
-        //`added a record button for ${propTitle}`);
+        console.log("Record::addRecordButton",
+          `added a record button for ${propTitle}`);
       }
     } else {
       console.log("Record::addRecordButton",
@@ -304,7 +273,6 @@ const Record = function(tp) {
   };
 
   const startRecording = () => {
-    console.log('Record::startRecording');
     tp.sheet.sequence.pause();
     const layerKeys = Object.keys(hot);
     layerKeys.forEach((layerID) => {
@@ -328,12 +296,11 @@ const Record = function(tp) {
             [propTitle]: value,
           };
           buffy.addValues(layerID, recording, position, lastPosition);
-          const merged = buffy.getValues(layerID, position);
-          liveUpdater.immediateUpdate(layer, merged);
+          liveUpdater.immediateUpdate(layer, recording);
           lastPosition = position;
         });
       } else {
-        console.log('Record::startRecording', `whoops input_clone for ${propTitle} is null`);
+        console.log('whoops input_clone is null');
       }
     });
     tp.sheet.sequence.position = 0;
@@ -343,7 +310,11 @@ const Record = function(tp) {
  };
  const stopRecording = () => {
    return new Promise((resolve) => {
+      console.log('stoprecording');
      const layerKeys = Object.keys(hot);
+      console.log('stopRecording', 'layerKeys', {
+        layerKeys
+      }, 'hot', JSON.stringify(hot));
      const promises = [];
      promises.push(() => {
        return new Promise((subResolve) => {
@@ -358,19 +329,23 @@ const Record = function(tp) {
        });
      });
      layerKeys.forEach((layerID) => {
+        console.log('stopRecording', layerID);
        const layer = getLayer(layerID);
        const propTitles = Object.keys(hot[layerID]);
        const keyframes = [];
        propTitles.forEach((propTitle) => {
+          console.log('stopRecording', propTitle);
          // NOTE: layerID is not actually used atm
          // and should be the layer anyways
          uncloneInput(layerID, propTitle);
+          console.log('stopRecording', 'should have uncloned input for ' + propTitle);
          keyframes.push({
            path: [propTitle],
            keyframes: hot[layerID][propTitle].recording,
          });
        });
        //setTimeout(() => {
+        console.log('stopRecording', 'adding the keyframes now because we wnat it to happen right now please', keyframes);
        promises.push(() => {
          return new Promise((subResolve) => {
            tp.setKeyframes(layer, keyframes).then(() => {
@@ -383,12 +358,17 @@ const Record = function(tp) {
        });
      });
      sequencialPromises(promises, () => {
        Object.keys(hot).forEach((layerID) => {
-          Object.keys(hot[layerID]).forEach((propTitle) => {
-            removeHot(layerID, propTitle);
-          });
          buffy.deregister(layerID);
+          Object.keys(hot[layerID]).forEach((propTitle) => {
+            delete hot[layerID][propTitle];
+            if (Object.keys(hot[layerID]).length === 0) {
+              delete hot[layerID];
+            }
+            const button = tp.getPanel().querySelector(`.recordButton${propTitle}`);
+            button.classList.remove('active');
+          });
        });
-        console.log('Record::stopRecording', 'stopped recording');
+        console.log('stopRecording', 'absolutely stopped recording');
        isRecording = false;
        resolve();
      });
@@ -397,8 +377,6 @@ const Record = function(tp) {
 
   // public
   this.addRecordButton = addRecordButton;
-  this.addHot = addHot;
-  this.removeHot = removeHot;
   this.getHot = () => {
     return hot;
   };
diff --git a/bin/web/js/theatre-play.js b/bin/web/js/theatre-play.js
index 09e268b..920d0d4 100644
--- a/bin/web/js/theatre-play.js
+++ b/bin/web/js/theatre-play.js
@@ -135,84 +135,48 @@ const TheatrePlay = function(autoInit = false) {
     return keyframes;
   };
   const getSequenceButton = (path) => {
-    let t = getPanelPropTitle(Array.isArray(path) ? path.join('.') : path);
+    let t = getPanelPropTitle(path.join('.'));
     if (t === null) {
       return null;
     }
     return t.parentElement.querySelector('[title="Sequence this prop"]');
   };
-  const isSequenced = (path) => {
-    return getSequenceButton(path) === null;
-  };
-  const setSequenced = (propTitle, sequenced, metaResolve = false) => {
-    const f = (resolve) => {
-      const propIsSequenced = isSequenced(propTitle);
-      const somethingToDo = sequenced !== propIsSequenced;
+  const setSequenced = (propTitle, sequenced) => {
+    return new Promise((resolve) => {
+      const contextItem = sequenced ? 'sequence' : 'make static';
+      const antiContextItem = sequenced ? 'make static' : 'sequence';
 
-      if (somethingToDo) {
-        const contextItem = sequenced ? 'sequence' : 'make static';
-        const antiContextItem = sequenced ?
'make static' : 'sequence'; + const finishedSequencedEvent = (e) => { + tp.getPanel().removeEventListener('injected', finishedSequencedEvent); + console.log('debug FINISHED SEQUENCED EVENT', e, propTitle); + resolve(true); + }; - const finishedSequencedEvent = (e) => { - // only care about events from our prop - if (propTitle === e.detail.prop.join('.')) { - // if we un-sequence, we listen to stateEditors' event - if (!sequenced && e.detail.origin === 'stateEditors.ts' && e.detail.sequenced === sequenced) { - window.removeEventListener('sequenceEvent', finishedSequencedEvent); - resolve(true); - - // if we sequence, then we wait until the track is there - } else if (sequenced && e.detail.origin === 'BasicKeyframedTrack.tsx' && e.detail.sequenced === sequenced) { - window.removeEventListener('sequenceEvent', finishedSequencedEvent); - resolve(true); - } else { - console.log('TheatrePlayu::setSequenced', 'ignored event', e, e.detail); - } + const clickContextMenu = () => { + let done = false; + tp.getPanelPropTitle(propTitle).removeEventListener('contextmenu', clickContextMenu); + tp.shadowRoot.querySelectorAll('ul li span').forEach((s) => { + if (s.innerHTML.toLowerCase() === contextItem.toLowerCase()) { + tp.getPanel().addEventListener('injected', finishedSequencedEvent); + s.click(); + console.log('debug click'); + done = true; + } else if (s.innerHTML.toLowerCase() === antiContextItem.toLowerCase()) { + done = true; + resolve(false); } - }; + }); + if (!done) { + setTimeout(() => { + clickContextMenu(); + }, 100); + } + }; - let counter = 0; - const clickContextMenu = (e) => { - let done = false; - if (e.target !== null) { - e.target.removeEventListener('contextmenu', clickContextMenu); - } - tp.shadowRoot.querySelectorAll('ul li span').forEach((s) => { - if (s.innerHTML.toLowerCase() === contextItem.toLowerCase()) { - window.addEventListener('sequenceEvent', finishedSequencedEvent); - s.click(); - done = true; - } else if (s.innerHTML.toLowerCase() === antiContextItem.toLowerCase()) { - done = true; - resolve(false); - } - }); - if (!done) { - setTimeout(() => { - if (counter < 4) { - clickContextMenu(e); - counter++; - } else { - setSequenced(propTitle, sequenced, resolve); - } - }, 100); - } - }; - - getPanelPropTitle(propTitle).addEventListener('contextmenu', clickContextMenu); - getPanelPropTitle(propTitle).dispatchEvent(new Event('contextmenu')); - } else { - resolve(); - } - }; - if (!metaResolve) { - return new Promise((resolve) => { - f(resolve); - }); - } else { - f(metaResolve); - } + getPanelPropTitle(propTitle).addEventListener('contextmenu', clickContextMenu); + getPanelPropTitle(propTitle).dispatchEvent(new Event('contextmenu')); + }); }; const addKeyframes = (layer, keyframes) => { @@ -223,53 +187,48 @@ const TheatrePlay = function(autoInit = false) { } const existingKeyframes = getKeyframes(layer); const promises = []; - const ms = 0; //config.tp.addKeyframesTimeout_s * 1000; + const ms = 0;//config.tp.addKeyframesTimeout_s * 1000; keyframes.forEach((k) => { let prop = layer.theatreObject.props; for (let i = 0; i < k.path.length; i++) { prop = prop[k.path[i]]; } const position = tp.sheet.sequence.position; - promises.push(() => { - return new Promise((subResolve) => { + // NOTE: can we sequence values without pretend clicking? 
+ const sequenceButton = getSequenceButton(k.path); + if (sequenceButton !== null) { + promises.push(() => { return new Promise((subResolve) => { setTimeout(() => { - if (layer.isSelected()) { - setSequenced(k.path.join('.'), true) - .then(() => { + sequenceButton.click(); + const detectSE = (e) => { + if (e.detail.panelID === layer.id()) { + window.removeEventListener('sequenceEvent',detectSE); + console.log('received sequenceEvent',e); + const f = (e) => { + tp.getPanel().removeEventListener('injected', f); subResolve(); - }); - } else { - // we cannot select layers without pseudoclicking - // so let's wait for a happy 'injected' event that - // closes off the selection - // - // first, the listener callback - const f = () => { - tp.getPanel().removeEventListener('injected', f); - setSequenced(k.path.join('.'), true) - .then(() => { - subResolve(); - }); - }; - // then add it - tp.getPanel().addEventListener('injected', f); - // and fire the click - layer.select(); - } - }, ms); // * promises.length); - }) - }); - let propHasKeyframesAt = -1; - if (existingKeyframes !== null && - existingKeyframes !== false && - typeof existingKeyframes !== 'undefined' && - Array.isArray(existingKeyframes)) { - existingKeyframes.forEach((existingK, existingKI) => { - if (arraysEqual(k.path, existingK.path)) { - propHasKeyframesAt = existingKI; - } - }); + }; + tp.getPanel().addEventListener('injected', f); + } + }; + window.addEventListener('sequenceEvent', detectSE); + }, ms);// * promises.length); + })}); + } else { + //console.error(k.path, 'did not find sequence button'); + // is (probably) already sequenced } + let propHasKeyframesAt = -1; + if (existingKeyframes !== null && + existingKeyframes !== false && + typeof existingKeyframes !== 'undefined' && + Array.isArray(existingKeyframes)) { + existingKeyframes.forEach((existingK, existingKI) => { + if (arraysEqual(k.path, existingK.path)) { + propHasKeyframesAt = existingKI; + } + }); + } k.keyframes.forEach((keyframe) => { let alreadyThere = false; if (propHasKeyframesAt >= 0) { @@ -281,33 +240,29 @@ const TheatrePlay = function(autoInit = false) { }); } if (!alreadyThere) { - promises.push(() => { - return new Promise((subResolve) => { - setTimeout(() => { - tp.sheet.sequence.position = keyframe.position; - this.studio.transaction(({ - set - }) => { - set(prop, keyframe.value); - subResolve(); - }); - }, ms); // * promises.length); - }) - }); + promises.push(() => { return new Promise((subResolve) => { + setTimeout(() => { + tp.sheet.sequence.position = keyframe.position; + this.studio.transaction(({ + set + }) => { + set(prop, keyframe.value); + subResolve(); + }); + }, ms);// * promises.length); + })}); } }); - promises.push(() => { - return new Promise((subResolve) => { - setTimeout(() => { - tp.sheet.sequence.position = position; - subResolve(); - }, ms); // * promises.length); - }) - }); + promises.push(() => { return new Promise((subResolve) => { + setTimeout(() => { + tp.sheet.sequence.position = position; + subResolve(); + }, ms);// * promises.length); + })}); }); sequencialPromises(promises, resolve); //Promise.all(promises).then(() => { - //resolve(); + //resolve(); //}); }); }; @@ -318,28 +273,19 @@ const TheatrePlay = function(autoInit = false) { return false; } const promises = []; - let waitify = false; keyframes.forEach((k) => { - const propTitle = k.path.join('.'); - if (isSequenced(propTitle)) { - waitify = true; - promises.push(() => { - return new Promise((subResolve) => { - setSequenced(propTitle, false) - .then(() => { - 
subResolve();
-                });
-            });
-          });
-        }
+      promises.push(new Promise((subResolve) => {
+        const propTitle = k.path.join('.');
+        setSequenced(propTitle, false)
+          .then(subResolve);
+      }));
     });
-    sequencialPromises(promises, () => {
-      const timeout_ms = waitify ? 1000 : 0;
-      setTimeout(() => {
+    Promise
+      .all(promises)
+      .then(() => {
        addKeyframes(layer, keyframes)
          .then(resolve);
-      }, timeout_ms);
-    });
+      });
  });
};
@@ -412,8 +358,6 @@ const TheatrePlay = function(autoInit = false) {
     // remove object from objects list
     delete theatreObjects[name];
   };
-  this.isSequenced = isSequenced;
-  this.getSequenceButton = getSequenceButton;
   this.getSequencePanelLeft = getSequencePanelLeft;
   this.getPanel = getPanel;
   this.getPanelPropTitle = getPanelPropTitle;
diff --git a/bin/web/js/utils.js b/bin/web/js/utils.js
index 9b69b78..21c114a 100644
--- a/bin/web/js/utils.js
+++ b/bin/web/js/utils.js
@@ -395,21 +395,11 @@ const isMobile = () => {
   return false;
 };
 
-// NOTE:
-// promises must be delivered inside a function like:
-//
-// const promises = [];
-//
-// promises.push(() => { return new Promise((resolve) => { console.log('lalala ONE'); resolve() }); });
-// promises.push(() => { return new Promise((resolve) => { console.log('lalala TWO'); resolve() }); });
-// promises.push(() => { return new Promise((resolve) => { console.log('lalala THREE'); resolve() }); });
-//
-// sequencialPromises(promises, () => { console.log('i am done'); });
 const sequencialPromises = async (iterable, callback = false) => {
   for (const x of iterable) {
     await x();
   }
-  if (typeof callback === 'function') {
+  if (callback !== false) {
     callback();
   }
 };