From 62f03862d678cb10272b4dae2bd3ab1b6c0974a0 Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Tue, 10 Oct 2023 13:28:40 +0200 Subject: [PATCH 1/9] better click handling --- bin/web/js/audio.js | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 11546bc..94c7687 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -361,12 +361,24 @@ const Audio = function(tp, record) { mappingOptions.max_in = (bb.height - min_y) * y_factor; } }); - fft_Dom.addEventListener('mouseup', (e) => { + const unset = (e) => { setFrequency = false; + }; + const unsetFromOutside = (e) => { + document.removeEventListener('mouseup', unsetFromOutside); + unset(e); + }; + fft_Dom.addEventListener('mouseup', unset); + fft_Dom.addEventListener('mouseleave', (e) => { + if (setFrequency) { + document.addEventListener('mouseup', unsetFromOutside); + } + }); + fft_Dom.addEventListener('mouseenter', (e) => { + if (setFrequency) { + document.removeEventListener('mouseup', unsetFromOutside); + } }); - //fft_Dom.addEventListener('mouseout', (e) => { - //setFrequency = false; - //}); container.after(audioOptions); From 64af8d49d1020105a086e3bee32dd0135141cf91 Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Tue, 10 Oct 2023 15:13:53 +0200 Subject: [PATCH 2/9] recording notice, default mapping range --- assets/template.html | 6 ++++ bin/web/css/demo.css | 23 ++++++++++++ bin/web/js/audio.js | 86 +++++++++++++++++++++++++++++++++++--------- bin/web/js/config.js | 13 ++++++- bin/web/js/record.js | 46 +++++++++++++++++++++--- 5 files changed, 151 insertions(+), 23 deletions(-) diff --git a/assets/template.html b/assets/template.html index bdf963d..853d981 100644 --- a/assets/template.html +++ b/assets/template.html @@ -359,6 +359,12 @@

+        <div id="notice_recording">
+          <div class="content">
+            <div class="what"><p>recording</p></div>
+            <div class="details"><p>please wait</p></div>
+          </div>
+        </div>
diff --git a/bin/web/css/demo.css b/bin/web/css/demo.css index 6012523..9d6d978 100755 --- a/bin/web/css/demo.css +++ b/bin/web/css/demo.css @@ -223,6 +223,29 @@ body.debug div:not(.centerLine) { #notice .content .details p { color: black; } +#notice_recording { + position: fixed; + top: 0px; + left: 0px; + width: 100%; + height: 100%; + background-color: rgba(0,0,0,0.1); + z-index: 2000; + display: none; + justify-content: center; + align-items: center; + font-family: "Tonka"; + font-variation-settings: 'wght' 500; + font-size: 0.8em; + pointer-events: none; +} +#notice_recording.visible { + display: flex; +} +#notice_recording.impenetrable { + pointer-events: all; + background-color: rgba(0,0,0,0.5); +} .exporterChild * { font-family: "Tonka"; diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 94c7687..57a47d7 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -50,16 +50,20 @@ const Audio = function(tp, record) { const mutationObserver = new MutationObserver(function(e) { if (e[0].removedNodes) { e[0].removedNodes.forEach((n) => { - if (n.hasAttribute('data-propTitle')) { - const propTitle = n.getAttribute('data-propTitle'); - delete canvasCombos[propTitle]; - } else { - const subProps = n.querySelectorAll('[data-propTitle]'); - if (subProps.length > 0) { - subProps.forEach((sp) => { - const propTitle = sp.getAttribute('data-propTitle'); - delete canvasCombos[propTitle]; - }); + if (typeof n === 'object' && + n.hasOwnProperty('hasAttribute') && + n.hasOwnProperty('querySelectorAll')) { + if (n.hasAttribute('data-propTitle')) { + const propTitle = n.getAttribute('data-propTitle'); + delete canvasCombos[propTitle]; + } else { + const subProps = n.querySelectorAll('[data-propTitle]'); + if (subProps.length > 0) { + subProps.forEach((sp) => { + const propTitle = sp.getAttribute('data-propTitle'); + delete canvasCombos[propTitle]; + }); + } } } }); @@ -85,24 +89,68 @@ const Audio = function(tp, record) { return true; }; + const getDefaultRange = (layer, propTitle) => { + if (config.audio.defaultRange.hasOwnProperty(propTitle)) { + return config.audio.defaultRange[propTitle]; + } else if (propTitle.indexOf('width') === 0) { + return [ + getArtboard().theatreObject.value.width / 2, + getArtboard().theatreObject.value.width + ]; + } else if (propTitle.indexOf('y') === 0) { + return [ + 0, + getArtboard().theatreObject.value.height / 2 + ]; + } else if (propTitle.indexOf('x') === 0) { + return [ + 0, + getArtboard().theatreObject.value.width / 2 + ]; + } else if (propTitle.indexOf('y') === 0) { + return [ + 0, + getArtboard().theatreObject.value.height / 2 + ]; + } else if (propTitle.indexOf('letterDelay') === 0) { + return [ + config.audio.defaultRange.letterDelay[0], + config.audio.defaultRange.letterDelay[1] + ]; + } else if (propTitle.split('.')[0] === 'fontVariationAxes') { + return layer.props.fontVariationAxes + .props[propTitle.split('.')[1]].range; + } + }; + const getAudioMappingOptions = (layer, propTitle) => { if (propTitle === 'color') { + const mm = getDefaultRange(layer, 'color'); if (config.audio.colorSeparateRGBA) { const r = new AudioMappingOptions(); + r.min_out = mm[0]; + r.max_out = mm[1]; const g = new AudioMappingOptions(); + g.min_out = mm[0]; + g.max_out = mm[1]; const b = new AudioMappingOptions(); + b.min_out = mm[0]; + b.max_out = mm[1]; const a = new AudioMappingOptions(); + a.min_out = mm[0]; + a.max_out = mm[1]; return [{r}, {g}, {b}, {a}]; } else { - const rgba = new AudioMappingOptions(); - rgba.min_out = {r: 0, b: 0, g: 0, a: 0}; - 
rgba.max_out = {r: 1, b: 1, g: 1, a: 1}; - return rgba; + const o = new AudioMappingOptions(); + o.min_out = {r: mm[0], b: mm[0], g: mm[0], a: mm[0]}; + o.max_out = {r: mm[1], b: mm[1], g: mm[1], a: mm[1]}; + return o; } } else { const o = new AudioMappingOptions(); - // TODO: get min_out, max_out from layer.props - // check for typeof layer.props[propTitle.split('.')[0]] blabla + const mm = getDefaultRange(layer, propTitle); + o.min_out = mm[0]; + o.max_out = mm[1]; return o; } }; @@ -909,7 +957,11 @@ const Audio = function(tp, record) { } } record.addValue(p.id, p.title, p.value, position); - if (!config.audio.colorSeparateRGBA || p.title === 'color.a') { + if (p.title.indexOf('color') === 0) { + if (!config.audio.colorSeparateRGBA || p.title === 'color.a') { + record.liveUpdate(p.layer, position); + } + } else { record.liveUpdate(p.layer, position); } }); diff --git a/bin/web/js/config.js b/bin/web/js/config.js index 3d2ecec..11f13b8 100644 --- a/bin/web/js/config.js +++ b/bin/web/js/config.js @@ -83,7 +83,18 @@ const config = { zoomDynamicMax: 42, }, audio: { - ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'], + defaultRange: { // check audio.getDefaultRange for dynamic defaults + 'textAlignment': [0, 1], + 'fontSize_px': [42, 100], + 'letterSpacing': [0, 1], + 'lineHeight': [0, 1], + 'rotation': [0, 180], + 'mirror_x_distance': [0, 200], + 'mirror_y_distance': [0, 70], + 'color': [0, 1], + 'letterDelays': [0, 1000], + }, + ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy', 'height'], defaultSmoothing: 0.7, fftBandsAnalysed: 256 * 8, fftBandsUsed: 256 / 2, diff --git a/bin/web/js/record.js b/bin/web/js/record.js index 5ba73bc..6786c46 100644 --- a/bin/web/js/record.js +++ b/bin/web/js/record.js @@ -144,10 +144,27 @@ const LiveUpdater = function(tp, buffy) { const Record = function(tp) { + const NOT_RECORDING = 0; + const STARTING_RECORDING = 1; + const RECORDING = 2; + const STOPPING_RECORDING = 3; + const hot = {}; - let isRecording = false; + let isRecording = NOT_RECORDING; const buffy = new LiveBuffer(); const liveUpdater = new LiveUpdater(tp, buffy); + let isInitialized = false; + + const init = () => { + if (!isInitialized) { + tp.core.onChange(tp.sheet.sequence.pointer.playing, (playing) => { + if (isRecording === RECORDING && !playing) { + stopRecording(); + } + }); + isInitialized = true; + } + }; const isHot = (layerID, propTitle) => { return hot.hasOwnProperty(layerID) @@ -231,7 +248,7 @@ const Record = function(tp) { button.innerHTML = `record`; container.append(button); button.addEventListener('click', () => { - if(isRecording) { + if(isRecording === RECORDING) { stopRecording(); } else { if (config.record.recordMapped) { @@ -369,7 +386,17 @@ const Record = function(tp) { }; const startRecording = () => { + isRecording = STARTING_RECORDING; console.log('Record::startRecording'); + document.querySelector('#notice_recording') + .classList.add('visible'); + document.querySelector('#notice_recording') + .classList.remove('imprenetrable'); + document.querySelector('#notice_recording .what p').innerHTML = 'recording'; + document.querySelector('#notice_recording .details p').innerHTML = ''; + if (!isInitialized) { + init(); + } lastPositions = {}; tp.sheet.sequence.pause(); const layerKeys = Object.keys(hot); @@ -398,9 +425,16 @@ const Record = function(tp) { tp.sheet.sequence.position = 0; tp.sheet.sequence.play(); }); - isRecording = true; + isRecording = RECORDING; }; const stopRecording = () 
=> { + document.querySelector('#notice_recording') + .classList.add('visible'); + document.querySelector('#notice_recording') + .classList.add('imprenetrable'); + document.querySelector('#notice_recording .what p').innerHTML = 'digesting recording'; + document.querySelector('#notice_recording .details p').innerHTML = 'please wait'; + isRecording = STOPPING_RECORDING; return new Promise((resolve) => { const layerKeys = Object.keys(hot); const promises = []; @@ -474,8 +508,10 @@ const Record = function(tp) { }); buffy.deregister(layerID); }); + document.querySelector('#notice_recording') + .classList.remove('visible'); console.log('Record::stopRecording', 'stopped recording'); - isRecording = false; + isRecording = NOT_RECORDING; resolve(); }); }); @@ -493,7 +529,7 @@ const Record = function(tp) { return hot; }; this.isRecording = () => { - return isRecording; + return isRecording != NOT_RECORDING; }; this.injectPanel = injectPanel; this.startRecording = startRecording; From 6aba91b6ca695c49d264f4fedefc1a42c5f29ea6 Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Tue, 10 Oct 2023 16:45:16 +0200 Subject: [PATCH 3/9] hide ui and remove audio effects/monitoring --- assets/template.html | 1 + bin/web/js/audio.js | 18 ++++++------------ bin/web/js/main.js | 33 +++++++++++++++++++++++++++++++-- bin/web/js/theatre-play.js | 2 +- 4 files changed, 39 insertions(+), 15 deletions(-) diff --git a/assets/template.html b/assets/template.html index 853d981..7fcf68c 100644 --- a/assets/template.html +++ b/assets/template.html @@ -290,6 +290,7 @@ + diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 57a47d7..57cdec0 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -621,12 +621,12 @@ const Audio = function(tp, record) { analyser.smoothingTimeConstant = 0.85; window.analyser = analyser; - const distortion = audioCtx.createWaveShaper(); - const gainNode = audioCtx.createGain(); - const biquadFilter = audioCtx.createBiquadFilter(); - const convolver = audioCtx.createConvolver(); + //const distortion = audioCtx.createWaveShaper(); + //const gainNode = audioCtx.createGain(); + //const biquadFilter = audioCtx.createBiquadFilter(); + //const convolver = audioCtx.createConvolver(); - const echoDelay = createEchoDelayEffect(audioCtx); + //const echoDelay = createEchoDelayEffect(audioCtx); // Distortion curve for the waveshaper, thanks to Kevin Ennis // http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion @@ -663,7 +663,6 @@ const Audio = function(tp, record) { audioData, function(buffer) { soundSource = audioCtx.createBufferSource(); - convolver.buffer = buffer; }, function(e) { console.log("Audio::audioCtx.decodeAudioData", "Error with decoding audio data" + e.err); @@ -692,12 +691,7 @@ const Audio = function(tp, record) { .getUserMedia(constraints) .then(function(stream) { source = audioCtx.createMediaStreamSource(stream); - source.connect(distortion); - distortion.connect(biquadFilter); - biquadFilter.connect(gainNode); - convolver.connect(gainNode); - echoDelay.placeBetween(gainNode, analyser); - analyser.connect(audioCtx.destination); + source.connect(analyser); visualize(); voiceChange(); diff --git a/bin/web/js/main.js b/bin/web/js/main.js index cbbf5ed..3719162 100644 --- a/bin/web/js/main.js +++ b/bin/web/js/main.js @@ -151,6 +151,11 @@ const findInjectPanel = () => { bottomButtonsContainer.classList.add("bottomButtonsContainer"); panel.append(bottomButtonsContainer); } + const hideuiButton = document.querySelector('#hide_ui'); + if 
(hideuiButton !== null) { + bottomButtonsContainer.append(hideuiButton); + hideuiButton.classList.add("main_panel_button"); + } const exportButton = document.querySelector('#exporter_open'); if (exportButton !== null) { bottomButtonsContainer.append(exportButton); @@ -531,6 +536,30 @@ window.renderFrames = exporter.renderFrames; const layer_panel = document.querySelector('#layer_panel'); -const initPanels = () => { - //makeDraggable(layer_panel); +const ui = (show) => { + if (show && tp.studio.ui.isHidden) { + tp.studio.ui.restore(); + } else if (!show && !tp.studio.ui.isHidden) { + tp.studio.ui.hide(); + } +}; + +const handleUiKeypress = (e) => { + if (e.key.toLowerCase() === 'q') { + document.removeEventListener('keypress', handleUiKeypress); + ui(true); + } +}; + +const initPanels = () => { + let hideuiButton = document.querySelector('#hide_ui'); + if (hideuiButton === null) { + hideuiButton = tp.getPanel().querySelector('#hide_ui'); + } + if (hideuiButton !== null) { + hideuiButton.addEventListener('click', () => { + ui(false); + document.addEventListener('keypress', handleUiKeypress); + }); + } }; diff --git a/bin/web/js/theatre-play.js b/bin/web/js/theatre-play.js index 0df8955..97e5b9a 100644 --- a/bin/web/js/theatre-play.js +++ b/bin/web/js/theatre-play.js @@ -97,7 +97,7 @@ const TheatrePlay = function(autoInit = false) { if (typeof value === 'undefined') { return false; } - }; + } return this.sheet.sequence.__experimental_getKeyframes(prop); }; // wtf, this function was being written in one go From b46d6bb5d42c7ebb894502121446c8f860d75b49 Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Tue, 10 Oct 2023 17:00:34 +0200 Subject: [PATCH 4/9] almost multilayer recording --- bin/web/js/audio.js | 1 + bin/web/js/record.js | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 57cdec0..cb58974 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -950,6 +950,7 @@ const Audio = function(tp, record) { inputElement.dispatchEvent(new Event('change')); } } + console.log(p, clone(record.getHot())); record.addValue(p.id, p.title, p.value, position); if (p.title.indexOf('color') === 0) { if (!config.audio.colorSeparateRGBA || p.title === 'color.a') { diff --git a/bin/web/js/record.js b/bin/web/js/record.js index 6786c46..91db08b 100644 --- a/bin/web/js/record.js +++ b/bin/web/js/record.js @@ -255,12 +255,12 @@ const Record = function(tp) { // make all mapped props hot and Object.keys(audio.mapping) .forEach((layerID) => { - if (getLayer(layerID).isSelected()) { + //if (getLayer(layerID).isSelected()) { // NOTE: multilayer recording Object.keys(audio.mapping[layerID]) .forEach((propTitle) => { addHot(layerID, propTitle); }); - } + //} }); } else { // only make this propTitle hot and @@ -356,6 +356,10 @@ const Record = function(tp) { value, position = tp.sheet.sequence.position, lastPosition = buffy.NO_TIME) => { + // NOTE: multilayer recording + if (!hot.hasOwnProperty(layerID) || !hot[layerID].hasOwnProperty(propTitle)) { + return; + } hot[layerID][propTitle].recording.push({ position, value, From 5860343f70574b73bcff4982a6b0b35a561d4e8a Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Tue, 10 Oct 2023 17:01:28 +0200 Subject: [PATCH 5/9] remove logs --- bin/web/js/audio.js | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index cb58974..57cdec0 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -950,7 +950,6 @@ const Audio = function(tp, record) { 
inputElement.dispatchEvent(new Event('change')); } } - console.log(p, clone(record.getHot())); record.addValue(p.id, p.title, p.value, position); if (p.title.indexOf('color') === 0) { if (!config.audio.colorSeparateRGBA || p.title === 'color.a') { From e6e705f86f186a99c429ad7e08cc9c5bcc419efd Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Thu, 12 Oct 2023 18:04:48 +0200 Subject: [PATCH 6/9] add letterDelay audio, fix audio update when playing, misc --- bin/web/js/audio.js | 49 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 57cdec0..036856f 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -14,7 +14,7 @@ const AudioMappingOptions = function() { this.min_freq = 0.0; this.max_freq = config.audio.fftBandsUsed; this.min_in = 0.0; - this.max_in = 255.0 / 2; + this.max_in = 255.0; this.min_out = 0.0; this.max_out = 1.0; this.smoothing = config.audio.defaultSmoothing; @@ -217,6 +217,10 @@ const Audio = function(tp, record) { const createAudioOptions = (layer, propTitle, container) => { const mappingOptions = mapping[layer.id()][propTitle]; + let hasLetterDelay = config + .layer.letterDelayProps + .indexOf(propTitle.split('.')[0]) >= 0 + && tp.isSequenced([...[layer.id()], ...propTitle.split('.')]); const panel = tp.getPanel(); if (!areMutationsObserved) { mutationObserver.observe(panel, { childList: true, subtree: true }); @@ -262,6 +266,10 @@ const Audio = function(tp, record) { panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value; const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`,'#')).value; mappingOptions.smoothing = parseFloat(s); + if (hasLetterDelay) { + const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`,'#')); + mappingOptions.letterDelay = typeof ld.value === 'number' ? 
ld.value : parseInt(ld.value); + } }; const min_max_Dom = document.createElement('div'); @@ -288,7 +296,7 @@ const Audio = function(tp, record) { max_inputDom.value = `${mappingOptions.max_out}`; const smoothing_inputDom_label = document.createElement('label'); smoothing_inputDom_label.for = 'audio_smoothing'; - smoothing_inputDom_label.innerHTML = 'audio smoothing '; + smoothing_inputDom_label.innerHTML = 'audio smoothing'; const smoothing_inputDom = document.createElement('input'); smoothing_inputDom.type = 'number'; smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`); @@ -305,6 +313,23 @@ const Audio = function(tp, record) { min_max_Dom.append(max_Cont); max_Cont.append(max_inputDom_label); max_Cont.append(max_inputDom); + if (hasLetterDelay) { + const letterDelayCont = document.createElement('div'); + const letterDelay_inputDom_label = document.createElement('label'); + letterDelay_inputDom_label.for = 'audio_letterDelay'; + letterDelay_inputDom_label.innerHTML = 'letterDelay'; + const letterDelay_inputDom = document.createElement('input'); + letterDelay_inputDom.type = 'number'; + letterDelay_inputDom.name = toCssClass(`audio_letterDelay${propTitle}`); + letterDelay_inputDom.id = toCssClass(`audio_letterDelay${propTitle}`); + letterDelay_inputDom.value = 0; + letterDelay_inputDom.min = 0; + letterDelay_inputDom.step = 1; + letterDelayCont.append(letterDelay_inputDom_label); + letterDelayCont.append(letterDelay_inputDom); + min_max_Dom.append(letterDelayCont); + letterDelay_inputDom.addEventListener('change', updateMappingOptions); + } audioOptions.append(min_max_Dom); const sync_Dom = document.createElement('div'); @@ -314,7 +339,6 @@ const Audio = function(tp, record) { sync_titleDom_Cont.classList.add('sync_titleDom_Cont'); sync_titleDom.innerHTML = 'sync with:'; sync_Dom.append(sync_titleDom); - audio_sync_options.forEach((o) => { const sync_inputDom_Cont = document.createElement('div'); @@ -530,9 +554,11 @@ const Audio = function(tp, record) { if (!isMapped(layer, propTitle)) { addAudioMapping(layer, propTitle); addAudioOptions(layer, propTitle); + layer.updateValuesViaTheatre(false); } else { removeAudioMapping(layer, propTitle); removeAudioOptions(layer, propTitle); + layer.updateValuesViaTheatre(true); } }); if (isActive) { @@ -884,7 +910,6 @@ const Audio = function(tp, record) { layer, id: layer.id(), title: propTitle, - prop: layer.theatreObject.props[propTitle], value: m.value, }); break; @@ -899,7 +924,6 @@ const Audio = function(tp, record) { layer, id: layer.id(), title: propTitle, - prop: layer.theatreObject.props[propTitle], value: m.value, }); break; @@ -907,13 +931,22 @@ const Audio = function(tp, record) { default: break; } + if (m.letterDelay) { + const pt = `letterDelays.${propTitle}`; + propsToSet.push({ + layer, + id: layer.id(), + title: pt, + value: m.letterDelay, + }); + } }); } }); if (propsToSet.length > 0 && frameCount % 2 === 0) { // this is when to monitor live if (!record.isRecording()) { - if (!tp.core.val(tp.sheet.sequence.pointer.playing)) { + //if (!tp.core.val(tp.sheet.sequence.pointer.playing)) { let values = {}; propsToSet.forEach((p) => { const newValues = { @@ -928,12 +961,10 @@ const Audio = function(tp, record) { }; }); Object.keys(values).forEach((layerID) => { - window.debugPreValues = clone(values[layerID]); deFlattenObject(values[layerID]); - window.debugValues = clone(values[layerID]); record.liveUpdater.immediateUpdate(getLayer(layerID), values[layerID]); }); - } + //} } else { const position = tp.sheet.sequence.position; 
propsToSet.forEach((p) => { From 16124c755d0bf393e6b5bd356f50e661795190d1 Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Thu, 12 Oct 2023 21:32:38 +0200 Subject: [PATCH 7/9] intermediate audiofile test hardcoded filepath watch out, also it automatically starts playing and the fft visualisation shows only one file --- assets/template.html | 1 + bin/web/css/demo.css | 3 ++ bin/web/js/audio.js | 78 +++++++++++++++++++++++++++++++++++++++++- bin/web/js/config.js | 1 + bin/web/js/main.js | 22 ++++++++++++ bin/web/js/moduleFS.js | 16 +++++++++ bin/web/js/utils.js | 2 +- 7 files changed, 121 insertions(+), 2 deletions(-) diff --git a/assets/template.html b/assets/template.html index 7fcf68c..ff07a2d 100644 --- a/assets/template.html +++ b/assets/template.html @@ -292,6 +292,7 @@ + diff --git a/bin/web/css/demo.css b/bin/web/css/demo.css index 9d6d978..7ebf71d 100755 --- a/bin/web/css/demo.css +++ b/bin/web/css/demo.css @@ -964,3 +964,6 @@ h4{ margin-bottom: -3px; box-shadow: 0 -2px 4px rgba(0, 0, 0, 0.7), 0 3px 4px rgba(0, 0, 0, 0.7); } +.invisible { + display: none; +} diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 036856f..2dbfeaa 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -19,6 +19,7 @@ const AudioMappingOptions = function() { this.max_out = 1.0; this.smoothing = config.audio.defaultSmoothing; this.sync = 'volume'; + this.source = 'microphone'; this.value = 0.0; }; @@ -270,8 +271,26 @@ const Audio = function(tp, record) { const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`,'#')); mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value); } + mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`, '#')).value; }; + const source_Dom = document.createElement('select'); + source_Dom.id = toCssClass(`audio_source${propTitle}`); + const source_mic = document.createElement('option'); + source_mic.value = 'microphone'; + source_mic.innerHTML = 'microphone'; + source_Dom.append(source_mic); + FS.readdir(config.fs.idbfsAudioDir) + .forEach((file) => { + if (file[0] !== '.') { + const source_file = document.createElement('option'); + source_file.value = file; + source_file.innerHTML = file; + source_Dom.append(source_file); + } + }); + audioOptions.append(source_Dom); + const min_max_Dom = document.createElement('div'); min_max_Dom.classList.add('audio_min_max'); const min_Cont = document.createElement('div'); @@ -391,6 +410,7 @@ const Audio = function(tp, record) { fft_Dom.append(fft_imgDom); fft_Dom.append(fft_selectDom); audioOptions.append(fft_Dom); + source_Dom.addEventListener('change', updateMappingOptions); min_inputDom.addEventListener('change', updateMappingOptions); max_inputDom.addEventListener('change', updateMappingOptions); smoothing_inputDom.addEventListener('change', updateMappingOptions); @@ -588,6 +608,56 @@ const Audio = function(tp, record) { } }); }; + const audioFileStuff = {}; + const readAudioFiles = () => { + FS.readdir(config.fs.idbfsAudioDir).forEach((file) => { + if (file.indexOf('.') !== 0 && !audioFileStuff.hasOwnProperty(file)) { + const audioElement = document.createElement('audio'); + audioElement.classList.add('invisible'); + audioElement.classList.add('audio_file'); + audioElement.classList.add(toCssClass(`audio_file${file}`)); + document.querySelector('body').append(audioElement); + + const arr = FS.readFile(`${config.fs.idbfsAudioDir}/${file}`); + let type = 'audio/wav'; + const filesplit = file.split('.'); + const extension = 
filesplit[filesplit.length - 1]; + if (extension === 'wav') { + type = 'audio/wav'; + } else if (extension === 'mp3') { + type = 'audio/mpeg'; + } else if (extension === 'ogg') { + type = 'audio/ogg'; + } + + const src = URL.createObjectURL( + new Blob([arr], { + type + }) + ); + + audioElement.src = src; + + const source = audioCtx.createMediaElementSource(audioElement); + source.connect(audioCtx.destination); + const analyser = audioCtx.createAnalyser(); + analyser.fftSize = config.audio.fftBandsAnalysed; + const bufferLength = analyser.frequencyBinCount; + const dataArray = new Uint8Array(bufferLength); + + source.connect(analyser); + + audioElement.play(); + + audioFileStuff[file] = { + dataArray, + analyser, + audioElement, + }; + } + }); + }; + const init = () => { if (!started) { @@ -647,6 +717,8 @@ const Audio = function(tp, record) { analyser.smoothingTimeConstant = 0.85; window.analyser = analyser; + readAudioFiles(); + //const distortion = audioCtx.createWaveShaper(); //const gainNode = audioCtx.createGain(); //const biquadFilter = audioCtx.createBiquadFilter(); @@ -800,7 +872,11 @@ const Audio = function(tp, record) { canvasKeys = Object.keys(canvasCombos); drawVisual = requestAnimationFrame(drawAlt); - analyser.getByteFrequencyData(dataArrayAlt); + //analyser.getByteFrequencyData(dataArrayAlt); + //Object.keys(audioFileStuff).forEach((afs) => { + //afs.analyser.ByteFrequencyData(afs.dataArray); + //}); + audioFileStuff['hito_steyerl_about_suicide_cameras.ogg'].analyser.getByteFrequencyData(dataArrayAlt); for (let i = 0; i < canvasKeys.length; i++) { canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR diff --git a/bin/web/js/config.js b/bin/web/js/config.js index 11f13b8..5da5973 100644 --- a/bin/web/js/config.js +++ b/bin/web/js/config.js @@ -114,6 +114,7 @@ const config = { fs: { idbfsDir: '/idbfs', idbfsFontDir: '/idbfs/fonts', + idbfsAudioDir: '/idbfs/audio', idbfsTmpDir: '/idbfs/tmp', }, timeline: { diff --git a/bin/web/js/main.js b/bin/web/js/main.js index 3719162..1b9a936 100644 --- a/bin/web/js/main.js +++ b/bin/web/js/main.js @@ -156,6 +156,11 @@ const findInjectPanel = () => { bottomButtonsContainer.append(hideuiButton); hideuiButton.classList.add("main_panel_button"); } + const audiofileButton = document.querySelector('#upload_audio'); + if (audiofileButton !== null) { + bottomButtonsContainer.append(audiofileButton); + audiofileButton.classList.add("main_panel_button"); + } const exportButton = document.querySelector('#exporter_open'); if (exportButton !== null) { bottomButtonsContainer.append(exportButton); @@ -562,4 +567,21 @@ const initPanels = () => { document.addEventListener('keypress', handleUiKeypress); }); } + let audiofileButton = document.querySelector('#upload_audio'); + if (audiofileButton === null) { + audiofileButton = tp.getPanel().querySelector('#upload_audio'); + } + if (audiofileButton !== null) { + audiofileButton.addEventListener('click', () => { + uploadFile('audio') + .then((file) => { + moduleFS + .save(file) + .then(() => { + console.log('ermh... 
done uploading?', file); + }); + }); + }); + } + }; diff --git a/bin/web/js/moduleFS.js b/bin/web/js/moduleFS.js index 3611916..a3d872d 100644 --- a/bin/web/js/moduleFS.js +++ b/bin/web/js/moduleFS.js @@ -13,6 +13,9 @@ const ModuleFS = function() { if (!FS.analyzePath(config.fs.idbfsFontDir).exists) { FS.mkdir(config.fs.idbfsFontDir); } + if (!FS.analyzePath(config.fs.idbfsAudioDir).exists) { + FS.mkdir(config.fs.idbfsAudioDir); + } if (!FS.analyzePath(config.fs.idbfsTmpDir).exists) { FS.mkdir(config.fs.idbfsTmpDir); } @@ -59,6 +62,19 @@ const ModuleFS = function() { .then(() => { resolve(filePath); }); + } else if (file.type.indexOf('audio') === 0) { + var uint8View = new Uint8Array(file.arrayBuffer); + console.log('trying to save the audio file, file, uint8View', file, uint8View); + if (!FS.analyzePath(`${config.fs.idbfsAudioDir}/${file.name}`).exists) { + FS.createDataFile(config.fs.idbfsAudioDir, file.name, uint8View, true, true); + this.syncfs(MODE_WRITE_TO_PERSISTENT) + .then(() => { + resolve(true); + }); + } else { + alert(`It seems as if an audiofile with the name "${file.name}" already exists. Please rename your file and upload again, thanks <3`); + resolve(false); + } } else { resolve(false); } diff --git a/bin/web/js/utils.js b/bin/web/js/utils.js index 958ce23..166753c 100644 --- a/bin/web/js/utils.js +++ b/bin/web/js/utils.js @@ -119,7 +119,7 @@ function uploadFile(expectedType = 'application/json') { let reader = new FileReader(); - if (expectedType === 'application/zip' || file.type === 'application/zip') { + if (expectedType === 'application/zip' || file.type === 'application/zip' || file.type.indexOf('audio') === 0) { reader.onload = (e) => { const f = e.target.result; console.log(e, file.name, file.size, file.type, f); From 748af243fafd000d0444ac519a437a830e0afac2 Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Fri, 13 Oct 2023 11:29:18 +0200 Subject: [PATCH 8/9] individual audiofiles <-> prop mapping --- bin/web/js/audio.js | 281 +++++++++++++++++++++---------------------- bin/web/js/config.js | 1 + bin/web/js/main.js | 1 + 3 files changed, 136 insertions(+), 147 deletions(-) diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 2dbfeaa..92b7273 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -21,6 +21,7 @@ const AudioMappingOptions = function() { this.sync = 'volume'; this.source = 'microphone'; this.value = 0.0; + this.muted = true; }; const Audio = function(tp, record) { @@ -608,10 +609,10 @@ const Audio = function(tp, record) { } }); }; - const audioFileStuff = {}; + const audioSourceCombo = {}; const readAudioFiles = () => { FS.readdir(config.fs.idbfsAudioDir).forEach((file) => { - if (file.indexOf('.') !== 0 && !audioFileStuff.hasOwnProperty(file)) { + if (file.indexOf('.') !== 0 && !audioSourceCombo.hasOwnProperty(file)) { const audioElement = document.createElement('audio'); audioElement.classList.add('invisible'); audioElement.classList.add('audio_file'); @@ -637,19 +638,23 @@ const Audio = function(tp, record) { ); audioElement.src = src; + audioElement.loop = true; const source = audioCtx.createMediaElementSource(audioElement); source.connect(audioCtx.destination); const analyser = audioCtx.createAnalyser(); + analyser.minDecibels = -90; + analyser.maxDecibels = -10; + analyser.smoothingTimeConstant = 0.85; analyser.fftSize = config.audio.fftBandsAnalysed; - const bufferLength = analyser.frequencyBinCount; + const bufferLength = analyser.frequencyBinCount / 2; const dataArray = new Uint8Array(bufferLength); 
source.connect(analyser); audioElement.play(); - audioFileStuff[file] = { + audioSourceCombo[file] = { dataArray, analyser, audioElement, @@ -715,7 +720,14 @@ const Audio = function(tp, record) { analyser.minDecibels = -90; analyser.maxDecibels = -10; analyser.smoothingTimeConstant = 0.85; - window.analyser = analyser; + analyser.fftSize = config.audio.fftBandsAnalysed; + const bufferLength = analyser.frequencyBinCount / 2; + + audioSourceCombo['microphone'] = { + analyser, + dataArray: new Uint8Array(bufferLength), + audioElement: null, + }; readAudioFiles(); @@ -778,6 +790,7 @@ const Audio = function(tp, record) { canvas.setAttribute("width", config.audio.fftBandsUsed); const visualSelect = audioDom.querySelector("#visual"); let drawVisual; + let previousPosition = -1; // Main block for doing the audio recording if (navigator.mediaDevices.getUserMedia) { @@ -792,7 +805,6 @@ const Audio = function(tp, record) { source.connect(analyser); visualize(); - voiceChange(); }) .catch(function(err) { console.log("The following gUM error occured: " + err); @@ -802,57 +814,8 @@ const Audio = function(tp, record) { } const visualize = () => { - const WIDTH = canvas.width; - const HEIGHT = canvas.height; - const visualSetting = visualSelect.value; - - if (visualSetting === "sinewave") { - //analyser.fftSize = 2048; - //const bufferLength = analyser.fftSize; - - //// We can use Float32Array instead of Uint8Array if we want higher precision - //// const dataArray = new Float32Array(bufferLength); - //const dataArray = new Uint8Array(bufferLength); - - //canvasCtx.clearRect(0, 0, WIDTH, HEIGHT); - - //const draw = function() { - //drawVisual = requestAnimationFrame(draw); - - //analyser.getByteTimeDomainData(dataArray); - - //canvasCtx.fillStyle = "rgb(200, 200, 200)"; - //canvasCtx.fillRect(0, 0, WIDTH, HEIGHT); - - //canvasCtx.lineWidth = 2; - //canvasCtx.strokeStyle = "rgb(0, 0, 0)"; - - //canvasCtx.beginPath(); - - //const sliceWidth = (WIDTH * 1.0) / bufferLength; - //let x = 0; - - //for (let i = 0; i < bufferLength; i++) { - //let v = dataArray[i] / 128.0; - //let y = (v * HEIGHT) / 2; - - //if (i === 0) { - //canvasCtx.moveTo(x, y); - //} else { - //canvasCtx.lineTo(x, y); - //} - - //x += sliceWidth; - //} - - //canvasCtx.lineTo(canvas.width, canvas.height / 2); - //canvasCtx.stroke(); - //}; - - //draw(); - } else if (visualSetting == "frequencybars") { - analyser.fftSize = config.audio.fftBandsAnalysed; + //analyser.fftSize = config.audio.fftBandsAnalysed; const w = config.audio.fftBandsUsed; const h = config.audio.fftHeight; const verticalFactor = h / 256.0; @@ -869,110 +832,137 @@ const Audio = function(tp, record) { let frameCount = 0; const drawAlt = function() { + const position = tp.sheet.sequence.position; + let positionRollover = false; + if (config.audio.rolloverResetLoop && position < previousPosition) { + positionRollover = true; + } + previousPosition = position; canvasKeys = Object.keys(canvasCombos); drawVisual = requestAnimationFrame(drawAlt); + canvasKeys.forEach((k) => { + canvasCombos[k][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR + canvasCombos[k][1].fillRect(0, 0, w, h); + const layerID = canvasCombos[k][2]; + const m = mapping[layerID][k]; + if (m.sync === 'volume') { + const sx = m.min_freq; + const sw = m.max_freq - m.min_freq; + const sy = h - (m.max_in * verticalFactor); + const sh = (m.max_in - m.min_in) * verticalFactor; + canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR + canvasCombos[k][1].fillRect(sx, sy, sw, sh); + } else if (m.sync === 'pitch') 
{ + const sx = m.min_freq; + const sw = m.max_freq - m.min_freq; + const sy = 0; + const sh = h; + canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR + canvasCombos[k][1].fillRect(sx, sy, sw, sh); + } + }); + //analyser.getByteFrequencyData(dataArrayAlt); - //Object.keys(audioFileStuff).forEach((afs) => { - //afs.analyser.ByteFrequencyData(afs.dataArray); - //}); - audioFileStuff['hito_steyerl_about_suicide_cameras.ogg'].analyser.getByteFrequencyData(dataArrayAlt); - - for (let i = 0; i < canvasKeys.length; i++) { - canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR - canvasCombos[canvasKeys[i]][1].fillRect(0, 0, w, h); - const layerID = canvasCombos[canvasKeys[i]][2]; - const m = mapping[layerID][canvasKeys[i]]; - if (m.sync === 'volume') { - const sx = m.min_freq; - const sw = m.max_freq - m.min_freq; - const sy = h - (m.max_in * verticalFactor); - const sh = (m.max_in - m.min_in) * verticalFactor; - canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR - canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh); - } else if (m.sync === 'pitch') { - const sx = m.min_freq; - const sw = m.max_freq - m.min_freq; - const sy = 0; - const sh = h; - canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR - canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh); - } - } - - const barWidth = 1;//(w / bufferLengthAlt) * 2.5; - let barHeight; - let x = 0; - - let max_i = 0; - let max_ri = 0; - let total_v = 0; - let max_v = 0; - for (let k = 0; k < canvasKeys.length; k++) { - const layerID = canvasCombos[canvasKeys[k]][2]; - const m = mapping[layerID][canvasKeys[k]]; - m.max_v = max_v; - m.max_i = max_i; - m.max_ri = max_ri; - m.total_v = total_v; - } - for (let i = 0; i < w; i++) { - barHeight = dataArrayAlt[i]; - total_v += barHeight; - max_ri = barHeight * i; - - if (barHeight > max_v) { - max_v = barHeight; - max_i = i; - } - for (let k = 0; k < canvasKeys.length; k++) { - const layerID = canvasCombos[canvasKeys[k]][2]; - const m = mapping[layerID][canvasKeys[k]]; - let fillStyle = "rgb(200,200,200)"; // AUDIO COLOR - if (m.min_freq <= i && m.max_freq >= i) { - m.total_v += barHeight; - if (barHeight > m.max_v) { - m.max_v = barHeight; - m.max_i = i; - m.max_ri = barHeight * i; - } - fillStyle = "rgb(255,255,255)"; // AUDIO COLOR + const usedSourceCombos = []; + const analysedResults = {}; + Object.keys(mapping).forEach((layerID) => { + Object.keys(mapping[layerID]).forEach((propTitle) => { + const m = mapping[layerID][propTitle]; + const source = m.source; + if (usedSourceCombos.indexOf(source) < 0) { + usedSourceCombos.push(source); + analysedResults[source] = { + max_i: 0, + max_ri: 0, + max_v: 0, + total_v: 0, + mappings: [], + }; + } + m.max_v = 0; + m.max_i = 0; + m.max_ri = 0; + m.total_v = 0; + analysedResults[source].mappings.push(m); + }); + }); + Object.keys(audioSourceCombo).forEach((k) => { + const asc = audioSourceCombo[k]; + if (asc.audioElement !== null) { + if (usedSourceCombos.indexOf(k) >= 0) { + if (positionRollover || asc.audioElement.paused) { + asc.audioElement.currentTime = position % asc.audioElement.duration; + asc.audioElement.play(); + } + } else if (!asc.audioElement.paused) { + asc.audioElement.pause(); } - canvasCombos[canvasKeys[k]][1].fillStyle = fillStyle; - canvasCombos[canvasKeys[k]][1].fillRect( - x, - h - (barHeight * verticalFactor), - barWidth, - (barHeight * verticalFactor) - ); } - - x += barWidth; - } - max_ri /= total_v; - for (let k = 0; k < canvasKeys.length; k++) { - 
const layerID = canvasCombos[canvasKeys[k]][2]; - const m = mapping[layerID][canvasKeys[k]]; - m.max_ri /= m.total_v; + }); + usedSourceCombos.forEach((source) => { + const afs = audioSourceCombo[source]; + const r = analysedResults[source]; + afs.analyser.getByteFrequencyData(afs.dataArray); + for (let f = 0; f < w; f++) { + const v = afs.dataArray[f]; + r.total_v += v; + if (r.max_v < v) { + r.max_v = v; + r.max_i = v; + } + r.max_ri += v * f; + let fillStyle = 'rgb(200,200,200)'; + for (let k_i = 0; k_i < canvasKeys.length; k_i++) { + const k = canvasKeys[k_i]; + const x = f; + canvasCombos[k][1].fillStyle = fillStyle; + canvasCombos[k][1].fillRect( + x, + h - (v * verticalFactor), + 1, + (v * verticalFactor) + ); + } + analysedResults[source].mappings.forEach((m) => { + if (m.min_freq <= f && m.max_freq >= f) { + m.total_v += v; + if (m.max_v < v) { + m.max_v = v; + m.max_i = f; + } + m.max_ri += v * f; + } + }); + } + r.max_ri /= r.total_v; + analysedResults[source].mappings.forEach((m) => { + m.max_ri /= m.total_v; + }); + }); + for (let k_i = 0; k_i < canvasKeys.length; k_i++) { + const k = canvasKeys[k_i]; + const layerID = canvasCombos[k][2]; + const m = mapping[layerID][k]; if (m.sync === 'volume') { const sx = m.min_freq; const sw = m.max_freq - m.min_freq; const sy = h - (m.max_in * verticalFactor); const sh = (m.max_in - m.min_in) * verticalFactor; - canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR - canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR - canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh); + canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR + canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR + canvasCombos[k][1].strokeRect(sx, sy, sw, sh); } else if (m.sync === 'pitch') { - const m = mapping[layerID][canvasKeys[k]]; const sx = m.min_freq; const sw = m.max_freq - m.min_freq; const sy = 0; const sh = h; - canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR - canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR - canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh); + canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR + canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR + canvasCombos[k][1].strokeRect(sx, sy, sw, sh); } } + const propsToSet = []; getLayers().forEach((layer) => { if (mapping.hasOwnProperty(layer.id())) { @@ -1079,11 +1069,6 @@ const Audio = function(tp, record) { frameCount++; }; drawAlt(); - } else if (visualSetting == "off") { - canvasCtx.clearRect(0, 0, WIDTH, HEIGHT); - canvasCtx.fillStyle = "red"; - canvasCtx.fillRect(0, 0, WIDTH, HEIGHT); - } } const voiceChange = () => { @@ -1204,9 +1189,11 @@ const Audio = function(tp, record) { this.addAudioOptions = addAudioOptions; this.removeAudioOptions = removeAudioOptions; this.AudioMappingOptions = AudioMappingOptions; + this.readAudioFiles = readAudioFiles; // debug this.canvasCombos = canvasCombos; + this.audioSourceCombo = audioSourceCombo; }; export { diff --git a/bin/web/js/config.js b/bin/web/js/config.js index 5da5973..83fe647 100644 --- a/bin/web/js/config.js +++ b/bin/web/js/config.js @@ -102,6 +102,7 @@ const config = { colorSeparateRGBA: true, ignoreOutboundFrequencies: true, pitchCombineFrequencies: false, + rolloverResetLoop: true, }, record: { ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'], diff --git a/bin/web/js/main.js b/bin/web/js/main.js index 1b9a936..f88690c 100644 --- a/bin/web/js/main.js +++ b/bin/web/js/main.js @@ 
-579,6 +579,7 @@ const initPanels = () => { .save(file) .then(() => { console.log('ermh... done uploading?', file); + audio.readAudioFiles(); }); }); }); From 0ca73bca0544b4f43e15ce56e4e35288b718ecfb Mon Sep 17 00:00:00 2001 From: themancalledjakob Date: Fri, 13 Oct 2023 16:24:34 +0200 Subject: [PATCH 9/9] mute, letterDelay and other fixes --- bin/web/js/audio.js | 354 +++++++++++++++++-------------------------- bin/web/js/config.js | 7 + bin/web/js/main.js | 14 +- bin/web/js/record.js | 6 +- 4 files changed, 158 insertions(+), 223 deletions(-) diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js index 92b7273..6157619 100644 --- a/bin/web/js/audio.js +++ b/bin/web/js/audio.js @@ -32,7 +32,7 @@ const Audio = function(tp, record) { heading.textContent = "CLICK HERE TO START"; // an array of possible sync options. - const audio_sync_options = ['volume', 'pitch', 'frequency']; + const audio_sync_options = ['volume', 'pitch', 'clarity']; // could also be an enum // like that //const AudioSyncOptions = Object.freeze({ @@ -139,8 +139,8 @@ const Audio = function(tp, record) { b.min_out = mm[0]; b.max_out = mm[1]; const a = new AudioMappingOptions(); - a.min_out = mm[0]; - a.max_out = mm[1]; + a.min_out = 1.0; // NOTE: dirty, dirty + a.max_out = 1.0; // hardcoded value, you return [{r}, {g}, {b}, {a}]; } else { const o = new AudioMappingOptions(); @@ -219,10 +219,11 @@ const Audio = function(tp, record) { const createAudioOptions = (layer, propTitle, container) => { const mappingOptions = mapping[layer.id()][propTitle]; - let hasLetterDelay = config + let hasLetterDelay = //false; + config .layer.letterDelayProps - .indexOf(propTitle.split('.')[0]) >= 0 - && tp.isSequenced([...[layer.id()], ...propTitle.split('.')]); + .indexOf(propTitle.split('.')[0]) >= 0 && propTitle.indexOf('color') < 0; + //&& tp.isSequenced([...[layer.id()], ...propTitle.split('.')]); const panel = tp.getPanel(); if (!areMutationsObserved) { mutationObserver.observe(panel, { childList: true, subtree: true }); @@ -272,7 +273,8 @@ const Audio = function(tp, record) { const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`,'#')); mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value); } - mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`, '#')).value; + mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`,'#')).value; + mappingOptions.muted = panel.querySelector(toCssClass(`audio_mute${propTitle}`,'#')).checked; }; const source_Dom = document.createElement('select'); @@ -286,12 +288,27 @@ const Audio = function(tp, record) { if (file[0] !== '.') { const source_file = document.createElement('option'); source_file.value = file; - source_file.innerHTML = file; + if (file.length > config.audio.maxFilenameLength) { + source_file.innerHTML = file.substr(0,6) + '..' 
+ file.substr(file.length - 6, 6); + } else { + source_file.innerHTML = file; + } source_Dom.append(source_file); } }); audioOptions.append(source_Dom); + const muteDom = document.createElement('input'); + const muteDom_label = document.createElement('label'); + muteDom.id = toCssClass(`audio_mute${propTitle}`); + muteDom.name = toCssClass(`audio_mute${propTitle}`); + muteDom.type = 'checkbox'; + muteDom.checked = true; + muteDom_label.for = toCssClass(`audio_mute${propTitle}`); + muteDom_label.innerHTML = 'muted'; + audioOptions.append(muteDom); + audioOptions.append(muteDom_label); + const min_max_Dom = document.createElement('div'); min_max_Dom.classList.add('audio_min_max'); const min_Cont = document.createElement('div'); @@ -412,6 +429,7 @@ const Audio = function(tp, record) { fft_Dom.append(fft_selectDom); audioOptions.append(fft_Dom); source_Dom.addEventListener('change', updateMappingOptions); + muteDom.addEventListener('change', updateMappingOptions); min_inputDom.addEventListener('change', updateMappingOptions); max_inputDom.addEventListener('change', updateMappingOptions); smoothing_inputDom.addEventListener('change', updateMappingOptions); @@ -609,10 +627,10 @@ const Audio = function(tp, record) { } }); }; - const audioSourceCombo = {}; + const audioSourceCombos = {}; const readAudioFiles = () => { FS.readdir(config.fs.idbfsAudioDir).forEach((file) => { - if (file.indexOf('.') !== 0 && !audioSourceCombo.hasOwnProperty(file)) { + if (file.indexOf('.') !== 0 && !audioSourceCombos.hasOwnProperty(file)) { const audioElement = document.createElement('audio'); audioElement.classList.add('invisible'); audioElement.classList.add('audio_file'); @@ -641,12 +659,12 @@ const Audio = function(tp, record) { audioElement.loop = true; const source = audioCtx.createMediaElementSource(audioElement); - source.connect(audioCtx.destination); - const analyser = audioCtx.createAnalyser(); - analyser.minDecibels = -90; - analyser.maxDecibels = -10; - analyser.smoothingTimeConstant = 0.85; - analyser.fftSize = config.audio.fftBandsAnalysed; + const gain = audioCtx.createGain(); + gain.gain.value = 0; + source.connect(gain); + gain.connect(audioCtx.destination); + //source.connect(audioCtx.destination); + const analyser = new AnalyserNode(audioCtx, config.audio.analyser); const bufferLength = analyser.frequencyBinCount / 2; const dataArray = new Uint8Array(bufferLength); @@ -654,7 +672,9 @@ const Audio = function(tp, record) { audioElement.play(); - audioSourceCombo[file] = { + audioSourceCombos[file] = { + gain, + source, dataArray, analyser, audioElement, @@ -709,25 +729,22 @@ const Audio = function(tp, record) { // window. 
is needed otherwise Safari explodes audioCtx = new(window.AudioContext || window.webkitAudioContext)(); const voiceSelect = audioDom.querySelector("#voice"); - let source; - let stream; // Grab the mute button to use below const mute = audioDom.querySelector(".mute"); // Set up the different audio nodes we will use for the app - const analyser = audioCtx.createAnalyser(); - analyser.minDecibels = -90; - analyser.maxDecibels = -10; - analyser.smoothingTimeConstant = 0.85; - analyser.fftSize = config.audio.fftBandsAnalysed; - const bufferLength = analyser.frequencyBinCount / 2; + { + const analyser = new AnalyserNode(audioCtx, config.audio.analyser); + const bufferLength = analyser.frequencyBinCount / 2; - audioSourceCombo['microphone'] = { - analyser, - dataArray: new Uint8Array(bufferLength), - audioElement: null, - }; + audioSourceCombos['microphone'] = { + // source: see below when we actually get the microphone + analyser, + dataArray: new Uint8Array(bufferLength), + audioElement: null, + }; + } readAudioFiles(); @@ -754,34 +771,6 @@ const Audio = function(tp, record) { return curve; } - // Grab audio track via XHR for convolver node - let soundSource; - const ajaxRequest = new XMLHttpRequest(); - - ajaxRequest.open( - "GET", - "https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg", - true - ); - - ajaxRequest.responseType = "arraybuffer"; - - ajaxRequest.onload = function() { - const audioData = ajaxRequest.response; - - audioCtx.decodeAudioData( - audioData, - function(buffer) { - soundSource = audioCtx.createBufferSource(); - }, - function(e) { - console.log("Audio::audioCtx.decodeAudioData", "Error with decoding audio data" + e.err); - } - ); - }; - - ajaxRequest.send(); - // Set up canvas context for visualizer const canvas = audioDom.querySelector(".visualizer"); const canvasCtx = canvas.getContext("2d"); @@ -801,8 +790,14 @@ const Audio = function(tp, record) { navigator.mediaDevices .getUserMedia(constraints) .then(function(stream) { - source = audioCtx.createMediaStreamSource(stream); - source.connect(analyser); + const source = audioCtx.createMediaStreamSource(stream); + const gain = audioCtx.createGain(); + gain.gain.value = 0; + source.connect(gain); + gain.connect(audioCtx.destination); + source.connect(audioSourceCombos['microphone'].analyser); + audioSourceCombos['microphone'].source = source; + audioSourceCombos['microphone'].gain = gain; visualize(); }) @@ -819,11 +814,8 @@ const Audio = function(tp, record) { const w = config.audio.fftBandsUsed; const h = config.audio.fftHeight; const verticalFactor = h / 256.0; - const bufferLengthAlt = analyser.frequencyBinCount / 2; // See comment above for Float32Array() - const dataArrayAlt = new Uint8Array(bufferLengthAlt); - let canvasKeys = Object.keys(canvasCombos); for (let i = 0; i < canvasKeys.length; i++) { @@ -853,7 +845,7 @@ const Audio = function(tp, record) { const sh = (m.max_in - m.min_in) * verticalFactor; canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR canvasCombos[k][1].fillRect(sx, sy, sw, sh); - } else if (m.sync === 'pitch') { + } else if (m.sync === 'pitch' || m.sync === 'clarity') { const sx = m.min_freq; const sw = m.max_freq - m.min_freq; const sy = 0; @@ -863,13 +855,18 @@ const Audio = function(tp, record) { } }); - //analyser.getByteFrequencyData(dataArrayAlt); const usedSourceCombos = []; const analysedResults = {}; + const unmuted = []; Object.keys(mapping).forEach((layerID) => { Object.keys(mapping[layerID]).forEach((propTitle) => { const m = mapping[layerID][propTitle]; 
const source = m.source; + if (!m.muted) { + if (unmuted.indexOf(source) < 0) { + unmuted.push(source); + } + } if (usedSourceCombos.indexOf(source) < 0) { usedSourceCombos.push(source); analysedResults[source] = { @@ -887,8 +884,8 @@ const Audio = function(tp, record) { analysedResults[source].mappings.push(m); }); }); - Object.keys(audioSourceCombo).forEach((k) => { - const asc = audioSourceCombo[k]; + Object.keys(audioSourceCombos).forEach((k) => { + const asc = audioSourceCombos[k]; if (asc.audioElement !== null) { if (usedSourceCombos.indexOf(k) >= 0) { if (positionRollover || asc.audioElement.paused) { @@ -899,9 +896,14 @@ const Audio = function(tp, record) { asc.audioElement.pause(); } } + if (unmuted.indexOf(k) < 0) { + asc.gain.gain.value = 0; + } else { + asc.gain.gain.value = 1; + } }); usedSourceCombos.forEach((source) => { - const afs = audioSourceCombo[source]; + const afs = audioSourceCombos[source]; const r = analysedResults[source]; afs.analyser.getByteFrequencyData(afs.dataArray); for (let f = 0; f < w; f++) { @@ -914,15 +916,18 @@ const Audio = function(tp, record) { r.max_ri += v * f; let fillStyle = 'rgb(200,200,200)'; for (let k_i = 0; k_i < canvasKeys.length; k_i++) { + // NOTE: this is not the most efficient way to do it const k = canvasKeys[k_i]; - const x = f; - canvasCombos[k][1].fillStyle = fillStyle; - canvasCombos[k][1].fillRect( - x, - h - (v * verticalFactor), - 1, - (v * verticalFactor) - ); + const layerID = canvasCombos[k][2]; + if (mapping[layerID][k].source === source) { + canvasCombos[k][1].fillStyle = fillStyle; + canvasCombos[k][1].fillRect( + f, + h - (v * verticalFactor), + 1, + (v * verticalFactor) + ); + } } analysedResults[source].mappings.forEach((m) => { if (m.min_freq <= f && m.max_freq >= f) { @@ -952,7 +957,7 @@ const Audio = function(tp, record) { canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR canvasCombos[k][1].strokeRect(sx, sy, sw, sh); - } else if (m.sync === 'pitch') { + } else if (m.sync === 'pitch' || m.sync === 'clarity') { const sx = m.min_freq; const sw = m.max_freq - m.min_freq; const sy = 0; @@ -964,50 +969,60 @@ const Audio = function(tp, record) { } const propsToSet = []; - getLayers().forEach((layer) => { - if (mapping.hasOwnProperty(layer.id())) { - Object.keys(mapping[layer.id()]).forEach((propTitle) => { - const m = mapping[layer.id()][propTitle]; - switch (m.sync) { - case 'volume': { - let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true); - m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; - propsToSet.push({ - layer, - id: layer.id(), - title: propTitle, - value: m.value, - }); - break; - } - case 'pitch': { - const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : max_i; - const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : max_ri; - const fi = config.audio.pitchCombineFrequencies ? 
ri : mi; - let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true); - m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; - propsToSet.push({ - layer, - id: layer.id(), - title: propTitle, - value: m.value, - }); - break; - } - default: - break; - } - if (m.letterDelay) { - const pt = `letterDelays.${propTitle}`; + Object.keys(mapping).forEach((layerID) => { + Object.keys(mapping[layerID]).forEach((propTitle) => { + const m = mapping[layerID][propTitle]; + switch (m.sync) { + case 'volume': { + let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true); + m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; propsToSet.push({ - layer, - id: layer.id(), - title: pt, - value: m.letterDelay, + id: layerID, + title: propTitle, + value: m.value, }); + break; } - }); - } + case 'pitch': { + const r = analysedResults[m.source]; + const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : r.max_i; + const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : r.max_ri; + const fi = config.audio.pitchCombineFrequencies ? ri : mi; + let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true); + if (!isNaN(a)) { + m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; + propsToSet.push({ + id: layerID, + title: propTitle, + value: m.value, + }); + } + break; + } + case 'clarity': { + const clarity = m.max_v / m.total_v; + const a = mapValue(clarity, 0.01, 0.05, m.min_out, m.max_out, true); + if (!isNaN(a)) { + m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a; + propsToSet.push({ + id: layerID, + title: propTitle, + value: m.value, + }); + } + } + default: + break; + } + if (m.letterDelay) { + const pt = `letterDelays.${propTitle}`; + propsToSet.push({ + id: layerID, + title: pt, + value: m.letterDelay, + }); + } + }); }); if (propsToSet.length > 0 && frameCount % 2 === 0) { // this is when to monitor live @@ -1036,6 +1051,7 @@ const Audio = function(tp, record) { propsToSet.forEach((p) => { const title = tp .getPanelPropTitle(p.title); + const layer = getLayer(p.id); if (title !== null) { const inputElement = title @@ -1050,10 +1066,10 @@ const Audio = function(tp, record) { record.addValue(p.id, p.title, p.value, position); if (p.title.indexOf('color') === 0) { if (!config.audio.colorSeparateRGBA || p.title === 'color.a') { - record.liveUpdate(p.layer, position); + record.liveUpdate(layer, position); } } else { - record.liveUpdate(p.layer, position); + record.liveUpdate(layer, position); } }); } @@ -1070,102 +1086,6 @@ const Audio = function(tp, record) { }; drawAlt(); } - - const voiceChange = () => { - distortion.oversample = "4x"; - biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0); - - const voiceSetting = voiceSelect.value; - - if (echoDelay.isApplied()) { - echoDelay.discard(); - } - - // When convolver is selected it is connected back into the audio path - if (voiceSetting == "convolver") { - biquadFilter.disconnect(0); - biquadFilter.connect(convolver); - } else { - biquadFilter.disconnect(0); - biquadFilter.connect(gainNode); - - if (voiceSetting == "distortion") { - distortion.curve = makeDistortionCurve(400); - } else if (voiceSetting == "biquad") { - biquadFilter.type = "lowshelf"; - biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0); - biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0); - } else if (voiceSetting == "delay") { - echoDelay.apply(); - } else if (voiceSetting == "off") { - console.log("Voice settings turned off"); - } - } - } - - function 
createEchoDelayEffect(audioContext) { - const delay = audioContext.createDelay(1); - const dryNode = audioContext.createGain(); - const wetNode = audioContext.createGain(); - const mixer = audioContext.createGain(); - const filter = audioContext.createBiquadFilter(); - - delay.delayTime.value = 0.75; - dryNode.gain.value = 1; - wetNode.gain.value = 0; - filter.frequency.value = 1100; - filter.type = "highpass"; - - return { - apply: function() { - wetNode.gain.setValueAtTime(0.75, audioContext.currentTime); - }, - discard: function() { - wetNode.gain.setValueAtTime(0, audioContext.currentTime); - }, - isApplied: function() { - return wetNode.gain.value > 0; - }, - placeBetween: function(inputNode, outputNode) { - inputNode.connect(delay); - delay.connect(wetNode); - wetNode.connect(filter); - filter.connect(delay); - - inputNode.connect(dryNode); - dryNode.connect(mixer); - wetNode.connect(mixer); - mixer.connect(outputNode); - }, - }; - } - - // Event listeners to change visualize and voice settings - visualSelect.onchange = function() { - window.cancelAnimationFrame(drawVisual); - visualize(); - }; - - voiceSelect.onchange = function() { - voiceChange(); - }; - - mute.onclick = voiceMute; - - let previousGain; - - function voiceMute() { - if (mute.id === "") { - previousGain = gainNode.gain.value; - gainNode.gain.value = 0; - mute.id = "activated"; - mute.innerHTML = "Unmute"; - } else { - gainNode.gain.value = previousGain; - mute.id = ""; - mute.innerHTML = "Mute"; - } - } } } const deinit = () => { @@ -1193,7 +1113,7 @@ const Audio = function(tp, record) { // debug this.canvasCombos = canvasCombos; - this.audioSourceCombo = audioSourceCombo; + this.audioSourceCombos = audioSourceCombos; }; export { diff --git a/bin/web/js/config.js b/bin/web/js/config.js index 83fe647..9411388 100644 --- a/bin/web/js/config.js +++ b/bin/web/js/config.js @@ -95,7 +95,14 @@ const config = { 'letterDelays': [0, 1000], }, ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy', 'height'], + maxFilenameLength: 24, defaultSmoothing: 0.7, + analyser: { + fftSize: 256 * 8, + minDecibels: -90, + maxDecibels: -10, + smoothingTimeConstant: 0.85, + }, fftBandsAnalysed: 256 * 8, fftBandsUsed: 256 / 2, fftHeight: 256 / 4, diff --git a/bin/web/js/main.js b/bin/web/js/main.js index f88690c..b243ad4 100644 --- a/bin/web/js/main.js +++ b/bin/web/js/main.js @@ -416,12 +416,20 @@ const listAvailableFontsAndAxes = () => { window.listAvailableFontsAndAxes = listAvailableFontsAndAxes; window.getFontsAndAxes = getFontsAndAxes; +window.getArtboard = () => { + return artboard; +}; + window.getLayers = () => { return layers; }; window.getLayer = (layerID) => { - return layers.find((layer) => layer.id() === layerID); + if (layerID === 'artboard') { + return artboard; + } else { + return layers.find((layer) => layer.id() === layerID); + } }; window.moveLayerUp = (layerID) => { @@ -432,10 +440,6 @@ window.moveLayerDown = (layerID) => { layerOrder.moveDown(layerID); }; -window.getArtboard = () => { - return artboard; -}; - const addLayer = (autoInit = true) => { const layerID = Module.addNewLayer(); const layer = new Layer(tp, layerID, fontsAndAxes, autoInit); diff --git a/bin/web/js/record.js b/bin/web/js/record.js index 91db08b..ae77ffe 100644 --- a/bin/web/js/record.js +++ b/bin/web/js/record.js @@ -117,6 +117,7 @@ const LiveUpdater = function(tp, buffy) { }; this.immediateUpdate = (layer, values) => { const cv = clone(values); + const ctv = clone(layer.theatreObject.value); if 
(cv.hasOwnProperty('color.r')) { cv['color'] = { r: cv['color.r'], @@ -129,7 +130,10 @@ const LiveUpdater = function(tp, buffy) { delete cv['color.b']; delete cv['color.a']; } - const v = {...layer.theatreObject.value, ...cv}; + flattenObject(cv, ['color']); + flattenObject(ctv, ['color']); + const v = {...ctv, ...cv}; + deFlattenObject(v, ['color']); const p = layer.values2cppProps(v); if (p !== false) { const id = layer.id();