diff --git a/assets/template.html b/assets/template.html
index bdf963d..ff07a2d 100644
--- a/assets/template.html
+++ b/assets/template.html
@@ -290,7 +290,9 @@
+
+
@@ -359,6 +361,12 @@
diff --git a/bin/web/css/demo.css b/bin/web/css/demo.css
index 6012523..7ebf71d 100755
--- a/bin/web/css/demo.css
+++ b/bin/web/css/demo.css
@@ -223,6 +223,29 @@ body.debug div:not(.centerLine) {
#notice .content .details p {
color: black;
}
+#notice_recording {
+ position: fixed;
+ top: 0px;
+ left: 0px;
+ width: 100%;
+ height: 100%;
+ background-color: rgba(0,0,0,0.1);
+ z-index: 2000;
+ display: none;
+ justify-content: center;
+ align-items: center;
+ font-family: "Tonka";
+ font-variation-settings: 'wght' 500;
+ font-size: 0.8em;
+ pointer-events: none;
+}
+#notice_recording.visible {
+ display: flex;
+}
+#notice_recording.impenetrable {
+ pointer-events: all;
+ background-color: rgba(0,0,0,0.5);
+}
.exporterChild * {
font-family: "Tonka";
@@ -941,3 +964,6 @@ h4{
margin-bottom: -3px;
box-shadow: 0 -2px 4px rgba(0, 0, 0, 0.7), 0 3px 4px rgba(0, 0, 0, 0.7);
}
+.invisible {
+ display: none;
+}
diff --git a/bin/web/js/audio.js b/bin/web/js/audio.js
index 11546bc..6157619 100644
--- a/bin/web/js/audio.js
+++ b/bin/web/js/audio.js
@@ -14,12 +14,14 @@ const AudioMappingOptions = function() {
this.min_freq = 0.0;
this.max_freq = config.audio.fftBandsUsed;
this.min_in = 0.0;
- this.max_in = 255.0 / 2;
+ this.max_in = 255.0;
this.min_out = 0.0;
this.max_out = 1.0;
this.smoothing = config.audio.defaultSmoothing;
this.sync = 'volume';
+ this.source = 'microphone';
this.value = 0.0;
+ this.muted = true;
};
const Audio = function(tp, record) {
@@ -30,7 +32,7 @@ const Audio = function(tp, record) {
heading.textContent = "CLICK HERE TO START";
// an array of possible sync options.
- const audio_sync_options = ['volume', 'pitch', 'frequency'];
+ const audio_sync_options = ['volume', 'pitch', 'clarity'];
// could also be an enum
// like that
//const AudioSyncOptions = Object.freeze({
@@ -50,16 +52,20 @@ const Audio = function(tp, record) {
const mutationObserver = new MutationObserver(function(e) {
if (e[0].removedNodes) {
e[0].removedNodes.forEach((n) => {
- if (n.hasAttribute('data-propTitle')) {
- const propTitle = n.getAttribute('data-propTitle');
- delete canvasCombos[propTitle];
- } else {
- const subProps = n.querySelectorAll('[data-propTitle]');
- if (subProps.length > 0) {
- subProps.forEach((sp) => {
- const propTitle = sp.getAttribute('data-propTitle');
- delete canvasCombos[propTitle];
- });
+ if (typeof n === 'object' &&
+ typeof n.hasAttribute === 'function' &&
+ typeof n.querySelectorAll === 'function') {
+ if (n.hasAttribute('data-propTitle')) {
+ const propTitle = n.getAttribute('data-propTitle');
+ delete canvasCombos[propTitle];
+ } else {
+ const subProps = n.querySelectorAll('[data-propTitle]');
+ if (subProps.length > 0) {
+ subProps.forEach((sp) => {
+ const propTitle = sp.getAttribute('data-propTitle');
+ delete canvasCombos[propTitle];
+ });
+ }
}
}
});
@@ -85,24 +91,68 @@ const Audio = function(tp, record) {
return true;
};
+ const getDefaultRange = (layer, propTitle) => {
+ if (config.audio.defaultRange.hasOwnProperty(propTitle)) {
+ return config.audio.defaultRange[propTitle];
+ } else if (propTitle.indexOf('width') === 0) {
+ return [
+ getArtboard().theatreObject.value.width / 2,
+ getArtboard().theatreObject.value.width
+ ];
+ } else if (propTitle.indexOf('y') === 0) {
+ return [
+ 0,
+ getArtboard().theatreObject.value.height / 2
+ ];
+ } else if (propTitle.indexOf('x') === 0) {
+ return [
+ 0,
+ getArtboard().theatreObject.value.width / 2
+ ];
+ } else if (propTitle.indexOf('y') === 0) {
+ return [
+ 0,
+ getArtboard().theatreObject.value.height / 2
+ ];
+ } else if (propTitle.indexOf('letterDelay') === 0) {
+ return [
+ config.audio.defaultRange.letterDelays[0],
+ config.audio.defaultRange.letterDelays[1]
+ ];
+ } else if (propTitle.split('.')[0] === 'fontVariationAxes') {
+ return layer.props.fontVariationAxes
+ .props[propTitle.split('.')[1]].range;
+ }
+ };
+
const getAudioMappingOptions = (layer, propTitle) => {
if (propTitle === 'color') {
+ const mm = getDefaultRange(layer, 'color');
if (config.audio.colorSeparateRGBA) {
const r = new AudioMappingOptions();
+ r.min_out = mm[0];
+ r.max_out = mm[1];
const g = new AudioMappingOptions();
+ g.min_out = mm[0];
+ g.max_out = mm[1];
const b = new AudioMappingOptions();
+ b.min_out = mm[0];
+ b.max_out = mm[1];
const a = new AudioMappingOptions();
+ a.min_out = 1.0; // NOTE: dirty, dirty
+ a.max_out = 1.0; // hardcoded value, you
return [{r}, {g}, {b}, {a}];
} else {
- const rgba = new AudioMappingOptions();
- rgba.min_out = {r: 0, b: 0, g: 0, a: 0};
- rgba.max_out = {r: 1, b: 1, g: 1, a: 1};
- return rgba;
+ const o = new AudioMappingOptions();
+ o.min_out = {r: mm[0], b: mm[0], g: mm[0], a: mm[0]};
+ o.max_out = {r: mm[1], b: mm[1], g: mm[1], a: mm[1]};
+ return o;
}
} else {
const o = new AudioMappingOptions();
- // TODO: get min_out, max_out from layer.props
- // check for typeof layer.props[propTitle.split('.')[0]] blabla
+ const mm = getDefaultRange(layer, propTitle);
+ o.min_out = mm[0];
+ o.max_out = mm[1];
return o;
}
};
@@ -169,6 +219,11 @@ const Audio = function(tp, record) {
const createAudioOptions = (layer, propTitle, container) => {
const mappingOptions = mapping[layer.id()][propTitle];
+ let hasLetterDelay = //false;
+ config
+ .layer.letterDelayProps
+ .indexOf(propTitle.split('.')[0]) >= 0 && propTitle.indexOf('color') < 0;
+ //&& tp.isSequenced([...[layer.id()], ...propTitle.split('.')]);
const panel = tp.getPanel();
if (!areMutationsObserved) {
mutationObserver.observe(panel, { childList: true, subtree: true });
@@ -214,8 +269,46 @@ const Audio = function(tp, record) {
panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value;
const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`,'#')).value;
mappingOptions.smoothing = parseFloat(s);
+ if (hasLetterDelay) {
+ const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`,'#'));
+ mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value, 10);
+ }
+ mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`,'#')).value;
+ mappingOptions.muted = panel.querySelector(toCssClass(`audio_mute${propTitle}`,'#')).checked;
};
+ const source_Dom = document.createElement('select');
+ source_Dom.id = toCssClass(`audio_source${propTitle}`);
+ const source_mic = document.createElement('option');
+ source_mic.value = 'microphone';
+ source_mic.innerHTML = 'microphone';
+ source_Dom.append(source_mic);
+ FS.readdir(config.fs.idbfsAudioDir)
+ .forEach((file) => {
+ if (file[0] !== '.') {
+ const source_file = document.createElement('option');
+ source_file.value = file;
+ if (file.length > config.audio.maxFilenameLength) {
+ source_file.innerHTML = file.substr(0,6) + '..' + file.substr(file.length - 6, 6);
+ } else {
+ source_file.innerHTML = file;
+ }
+ source_Dom.append(source_file);
+ }
+ });
+ audioOptions.append(source_Dom);
+
+ const muteDom = document.createElement('input');
+ const muteDom_label = document.createElement('label');
+ muteDom.id = toCssClass(`audio_mute${propTitle}`);
+ muteDom.name = toCssClass(`audio_mute${propTitle}`);
+ muteDom.type = 'checkbox';
+ muteDom.checked = true;
+ muteDom_label.htmlFor = toCssClass(`audio_mute${propTitle}`);
+ muteDom_label.innerHTML = 'muted';
+ audioOptions.append(muteDom);
+ audioOptions.append(muteDom_label);
+
const min_max_Dom = document.createElement('div');
min_max_Dom.classList.add('audio_min_max');
const min_Cont = document.createElement('div');
@@ -240,7 +333,7 @@ const Audio = function(tp, record) {
max_inputDom.value = `${mappingOptions.max_out}`;
const smoothing_inputDom_label = document.createElement('label');
smoothing_inputDom_label.for = 'audio_smoothing';
- smoothing_inputDom_label.innerHTML = 'audio smoothing ';
+ smoothing_inputDom_label.innerHTML = 'audio smoothing';
const smoothing_inputDom = document.createElement('input');
smoothing_inputDom.type = 'number';
smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`);
@@ -257,6 +350,23 @@ const Audio = function(tp, record) {
min_max_Dom.append(max_Cont);
max_Cont.append(max_inputDom_label);
max_Cont.append(max_inputDom);
+ if (hasLetterDelay) {
+ const letterDelayCont = document.createElement('div');
+ const letterDelay_inputDom_label = document.createElement('label');
+ letterDelay_inputDom_label.htmlFor = toCssClass(`audio_letterDelay${propTitle}`);
+ letterDelay_inputDom_label.innerHTML = 'letterDelay';
+ const letterDelay_inputDom = document.createElement('input');
+ letterDelay_inputDom.type = 'number';
+ letterDelay_inputDom.name = toCssClass(`audio_letterDelay${propTitle}`);
+ letterDelay_inputDom.id = toCssClass(`audio_letterDelay${propTitle}`);
+ letterDelay_inputDom.value = 0;
+ letterDelay_inputDom.min = 0;
+ letterDelay_inputDom.step = 1;
+ letterDelayCont.append(letterDelay_inputDom_label);
+ letterDelayCont.append(letterDelay_inputDom);
+ min_max_Dom.append(letterDelayCont);
+ letterDelay_inputDom.addEventListener('change', updateMappingOptions);
+ }
audioOptions.append(min_max_Dom);
const sync_Dom = document.createElement('div');
@@ -266,7 +376,6 @@ const Audio = function(tp, record) {
sync_titleDom_Cont.classList.add('sync_titleDom_Cont');
sync_titleDom.innerHTML = 'sync with:';
sync_Dom.append(sync_titleDom);
-
audio_sync_options.forEach((o) => {
const sync_inputDom_Cont = document.createElement('div');
@@ -319,6 +428,8 @@ const Audio = function(tp, record) {
fft_Dom.append(fft_imgDom);
fft_Dom.append(fft_selectDom);
audioOptions.append(fft_Dom);
+ source_Dom.addEventListener('change', updateMappingOptions);
+ muteDom.addEventListener('change', updateMappingOptions);
min_inputDom.addEventListener('change', updateMappingOptions);
max_inputDom.addEventListener('change', updateMappingOptions);
smoothing_inputDom.addEventListener('change', updateMappingOptions);
@@ -361,12 +472,24 @@ const Audio = function(tp, record) {
mappingOptions.max_in = (bb.height - min_y) * y_factor;
}
});
- fft_Dom.addEventListener('mouseup', (e) => {
+ const unset = (e) => {
setFrequency = false;
+ };
+ const unsetFromOutside = (e) => {
+ document.removeEventListener('mouseup', unsetFromOutside);
+ unset(e);
+ };
+ fft_Dom.addEventListener('mouseup', unset);
+ fft_Dom.addEventListener('mouseleave', (e) => {
+ if (setFrequency) {
+ document.addEventListener('mouseup', unsetFromOutside);
+ }
+ });
+ fft_Dom.addEventListener('mouseenter', (e) => {
+ if (setFrequency) {
+ document.removeEventListener('mouseup', unsetFromOutside);
+ }
});
- //fft_Dom.addEventListener('mouseout', (e) => {
- //setFrequency = false;
- //});
container.after(audioOptions);
@@ -470,9 +593,11 @@ const Audio = function(tp, record) {
if (!isMapped(layer, propTitle)) {
addAudioMapping(layer, propTitle);
addAudioOptions(layer, propTitle);
+ layer.updateValuesViaTheatre(false);
} else {
removeAudioMapping(layer, propTitle);
removeAudioOptions(layer, propTitle);
+ layer.updateValuesViaTheatre(true);
}
});
if (isActive) {
@@ -502,6 +627,62 @@ const Audio = function(tp, record) {
}
});
};
+ const audioSourceCombos = {};
+ const readAudioFiles = () => {
+ FS.readdir(config.fs.idbfsAudioDir).forEach((file) => {
+ if (file.indexOf('.') !== 0 && !audioSourceCombos.hasOwnProperty(file)) {
+ const audioElement = document.createElement('audio');
+ audioElement.classList.add('invisible');
+ audioElement.classList.add('audio_file');
+ audioElement.classList.add(toCssClass(`audio_file${file}`));
+ document.querySelector('body').append(audioElement);
+
+ const arr = FS.readFile(`${config.fs.idbfsAudioDir}/${file}`);
+ let type = 'audio/wav';
+ const filesplit = file.split('.');
+ const extension = filesplit[filesplit.length - 1];
+ if (extension === 'wav') {
+ type = 'audio/wav';
+ } else if (extension === 'mp3') {
+ type = 'audio/mpeg';
+ } else if (extension === 'ogg') {
+ type = 'audio/ogg';
+ }
+
+ const src = URL.createObjectURL(
+ new Blob([arr], {
+ type
+ })
+ );
+
+ audioElement.src = src;
+ audioElement.loop = true;
+
+ const source = audioCtx.createMediaElementSource(audioElement);
+ const gain = audioCtx.createGain();
+ gain.gain.value = 0;
+ source.connect(gain);
+ gain.connect(audioCtx.destination);
+ //source.connect(audioCtx.destination);
+ const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
+ const bufferLength = analyser.frequencyBinCount / 2;
+ const dataArray = new Uint8Array(bufferLength);
+
+ source.connect(analyser);
+
+ audioElement.play();
+
+ audioSourceCombos[file] = {
+ gain,
+ source,
+ dataArray,
+ analyser,
+ audioElement,
+ };
+ }
+ });
+ };
+
const init = () => {
if (!started) {
@@ -548,25 +729,31 @@ const Audio = function(tp, record) {
// window. is needed otherwise Safari explodes
audioCtx = new(window.AudioContext || window.webkitAudioContext)();
const voiceSelect = audioDom.querySelector("#voice");
- let source;
- let stream;
// Grab the mute button to use below
const mute = audioDom.querySelector(".mute");
// Set up the different audio nodes we will use for the app
- const analyser = audioCtx.createAnalyser();
- analyser.minDecibels = -90;
- analyser.maxDecibels = -10;
- analyser.smoothingTimeConstant = 0.85;
- window.analyser = analyser;
+ {
+ const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
+ const bufferLength = analyser.frequencyBinCount / 2;
- const distortion = audioCtx.createWaveShaper();
- const gainNode = audioCtx.createGain();
- const biquadFilter = audioCtx.createBiquadFilter();
- const convolver = audioCtx.createConvolver();
+ audioSourceCombos['microphone'] = {
+ // source: see below when we actually get the microphone
+ analyser,
+ dataArray: new Uint8Array(bufferLength),
+ audioElement: null,
+ };
+ }
- const echoDelay = createEchoDelayEffect(audioCtx);
+ readAudioFiles();
+
+ //const distortion = audioCtx.createWaveShaper();
+ //const gainNode = audioCtx.createGain();
+ //const biquadFilter = audioCtx.createBiquadFilter();
+ //const convolver = audioCtx.createConvolver();
+
+ //const echoDelay = createEchoDelayEffect(audioCtx);
// Distortion curve for the waveshaper, thanks to Kevin Ennis
// http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
@@ -584,35 +771,6 @@ const Audio = function(tp, record) {
return curve;
}
- // Grab audio track via XHR for convolver node
- let soundSource;
- const ajaxRequest = new XMLHttpRequest();
-
- ajaxRequest.open(
- "GET",
- "https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg",
- true
- );
-
- ajaxRequest.responseType = "arraybuffer";
-
- ajaxRequest.onload = function() {
- const audioData = ajaxRequest.response;
-
- audioCtx.decodeAudioData(
- audioData,
- function(buffer) {
- soundSource = audioCtx.createBufferSource();
- convolver.buffer = buffer;
- },
- function(e) {
- console.log("Audio::audioCtx.decodeAudioData", "Error with decoding audio data" + e.err);
- }
- );
- };
-
- ajaxRequest.send();
-
// Set up canvas context for visualizer
const canvas = audioDom.querySelector(".visualizer");
const canvasCtx = canvas.getContext("2d");
@@ -621,6 +779,7 @@ const Audio = function(tp, record) {
canvas.setAttribute("width", config.audio.fftBandsUsed);
const visualSelect = audioDom.querySelector("#visual");
let drawVisual;
+ let previousPosition = -1;
// Main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
@@ -631,16 +790,16 @@ const Audio = function(tp, record) {
navigator.mediaDevices
.getUserMedia(constraints)
.then(function(stream) {
- source = audioCtx.createMediaStreamSource(stream);
- source.connect(distortion);
- distortion.connect(biquadFilter);
- biquadFilter.connect(gainNode);
- convolver.connect(gainNode);
- echoDelay.placeBetween(gainNode, analyser);
- analyser.connect(audioCtx.destination);
+ const source = audioCtx.createMediaStreamSource(stream);
+ const gain = audioCtx.createGain();
+ gain.gain.value = 0;
+ source.connect(gain);
+ gain.connect(audioCtx.destination);
+ source.connect(audioSourceCombos['microphone'].analyser);
+ audioSourceCombos['microphone'].source = source;
+ audioSourceCombos['microphone'].gain = gain;
visualize();
- voiceChange();
})
.catch(function(err) {
console.log("The following gUM error occured: " + err);
@@ -650,65 +809,13 @@ const Audio = function(tp, record) {
}
const visualize = () => {
- const WIDTH = canvas.width;
- const HEIGHT = canvas.height;
- const visualSetting = visualSelect.value;
-
- if (visualSetting === "sinewave") {
- //analyser.fftSize = 2048;
- //const bufferLength = analyser.fftSize;
-
- //// We can use Float32Array instead of Uint8Array if we want higher precision
- //// const dataArray = new Float32Array(bufferLength);
- //const dataArray = new Uint8Array(bufferLength);
-
- //canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
-
- //const draw = function() {
- //drawVisual = requestAnimationFrame(draw);
-
- //analyser.getByteTimeDomainData(dataArray);
-
- //canvasCtx.fillStyle = "rgb(200, 200, 200)";
- //canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
-
- //canvasCtx.lineWidth = 2;
- //canvasCtx.strokeStyle = "rgb(0, 0, 0)";
-
- //canvasCtx.beginPath();
-
- //const sliceWidth = (WIDTH * 1.0) / bufferLength;
- //let x = 0;
-
- //for (let i = 0; i < bufferLength; i++) {
- //let v = dataArray[i] / 128.0;
- //let y = (v * HEIGHT) / 2;
-
- //if (i === 0) {
- //canvasCtx.moveTo(x, y);
- //} else {
- //canvasCtx.lineTo(x, y);
- //}
-
- //x += sliceWidth;
- //}
-
- //canvasCtx.lineTo(canvas.width, canvas.height / 2);
- //canvasCtx.stroke();
- //};
-
- //draw();
- } else if (visualSetting == "frequencybars") {
- analyser.fftSize = config.audio.fftBandsAnalysed;
+ //analyser.fftSize = config.audio.fftBandsAnalysed;
const w = config.audio.fftBandsUsed;
const h = config.audio.fftHeight;
const verticalFactor = h / 256.0;
- const bufferLengthAlt = analyser.frequencyBinCount / 2;
// See comment above for Float32Array()
- const dataArrayAlt = new Uint8Array(bufferLengthAlt);
-
let canvasKeys = Object.keys(canvasCombos);
for (let i = 0; i < canvasKeys.length; i++) {
@@ -717,149 +824,210 @@ const Audio = function(tp, record) {
let frameCount = 0;
const drawAlt = function() {
+ const position = tp.sheet.sequence.position;
+ let positionRollover = false;
+ if (config.audio.rolloverResetLoop && position < previousPosition) {
+ positionRollover = true;
+ }
+ previousPosition = position;
canvasKeys = Object.keys(canvasCombos);
drawVisual = requestAnimationFrame(drawAlt);
- analyser.getByteFrequencyData(dataArrayAlt);
-
- for (let i = 0; i < canvasKeys.length; i++) {
- canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
- canvasCombos[canvasKeys[i]][1].fillRect(0, 0, w, h);
- const layerID = canvasCombos[canvasKeys[i]][2];
- const m = mapping[layerID][canvasKeys[i]];
+ canvasKeys.forEach((k) => {
+ canvasCombos[k][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
+ canvasCombos[k][1].fillRect(0, 0, w, h);
+ const layerID = canvasCombos[k][2];
+ const m = mapping[layerID][k];
if (m.sync === 'volume') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = h - (m.max_in * verticalFactor);
const sh = (m.max_in - m.min_in) * verticalFactor;
- canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
- canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
- } else if (m.sync === 'pitch') {
+ canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
+ canvasCombos[k][1].fillRect(sx, sy, sw, sh);
+ } else if (m.sync === 'pitch' || m.sync === 'clarity') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = 0;
const sh = h;
- canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
- canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
+ canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
+ canvasCombos[k][1].fillRect(sx, sy, sw, sh);
}
- }
+ });
- const barWidth = 1;//(w / bufferLengthAlt) * 2.5;
- let barHeight;
- let x = 0;
-
- let max_i = 0;
- let max_ri = 0;
- let total_v = 0;
- let max_v = 0;
- for (let k = 0; k < canvasKeys.length; k++) {
- const layerID = canvasCombos[canvasKeys[k]][2];
- const m = mapping[layerID][canvasKeys[k]];
- m.max_v = max_v;
- m.max_i = max_i;
- m.max_ri = max_ri;
- m.total_v = total_v;
- }
- for (let i = 0; i < w; i++) {
- barHeight = dataArrayAlt[i];
- total_v += barHeight;
- max_ri = barHeight * i;
-
- if (barHeight > max_v) {
- max_v = barHeight;
- max_i = i;
- }
- for (let k = 0; k < canvasKeys.length; k++) {
- const layerID = canvasCombos[canvasKeys[k]][2];
- const m = mapping[layerID][canvasKeys[k]];
- let fillStyle = "rgb(200,200,200)"; // AUDIO COLOR
- if (m.min_freq <= i && m.max_freq >= i) {
- m.total_v += barHeight;
- if (barHeight > m.max_v) {
- m.max_v = barHeight;
- m.max_i = i;
- m.max_ri = barHeight * i;
+ const usedSourceCombos = [];
+ const analysedResults = {};
+ const unmuted = [];
+ Object.keys(mapping).forEach((layerID) => {
+ Object.keys(mapping[layerID]).forEach((propTitle) => {
+ const m = mapping[layerID][propTitle];
+ const source = m.source;
+ if (!m.muted) {
+ if (unmuted.indexOf(source) < 0) {
+ unmuted.push(source);
}
- fillStyle = "rgb(255,255,255)"; // AUDIO COLOR
}
- canvasCombos[canvasKeys[k]][1].fillStyle = fillStyle;
- canvasCombos[canvasKeys[k]][1].fillRect(
- x,
- h - (barHeight * verticalFactor),
- barWidth,
- (barHeight * verticalFactor)
- );
+ if (usedSourceCombos.indexOf(source) < 0) {
+ usedSourceCombos.push(source);
+ analysedResults[source] = {
+ max_i: 0,
+ max_ri: 0,
+ max_v: 0,
+ total_v: 0,
+ mappings: [],
+ };
+ }
+ m.max_v = 0;
+ m.max_i = 0;
+ m.max_ri = 0;
+ m.total_v = 0;
+ analysedResults[source].mappings.push(m);
+ });
+ });
+ Object.keys(audioSourceCombos).forEach((k) => {
+ const asc = audioSourceCombos[k];
+ if (asc.audioElement !== null) {
+ if (usedSourceCombos.indexOf(k) >= 0) {
+ if (positionRollover || asc.audioElement.paused) {
+ asc.audioElement.currentTime = position % asc.audioElement.duration;
+ asc.audioElement.play();
+ }
+ } else if (!asc.audioElement.paused) {
+ asc.audioElement.pause();
+ }
}
-
- x += barWidth;
- }
- max_ri /= total_v;
- for (let k = 0; k < canvasKeys.length; k++) {
- const layerID = canvasCombos[canvasKeys[k]][2];
- const m = mapping[layerID][canvasKeys[k]];
- m.max_ri /= m.total_v;
- if (m.sync === 'volume') {
- const sx = m.min_freq;
- const sw = m.max_freq - m.min_freq;
- const sy = h - (m.max_in * verticalFactor);
- const sh = (m.max_in - m.min_in) * verticalFactor;
- canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
- canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
- canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
- } else if (m.sync === 'pitch') {
- const m = mapping[layerID][canvasKeys[k]];
- const sx = m.min_freq;
- const sw = m.max_freq - m.min_freq;
- const sy = 0;
- const sh = h;
- canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
- canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
- canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
+ if (unmuted.indexOf(k) < 0) {
+ asc.gain.gain.value = 0;
+ } else {
+ asc.gain.gain.value = 1;
}
- }
- const propsToSet = [];
- getLayers().forEach((layer) => {
- if (mapping.hasOwnProperty(layer.id())) {
- Object.keys(mapping[layer.id()]).forEach((propTitle) => {
- const m = mapping[layer.id()][propTitle];
- switch (m.sync) {
- case 'volume': {
- let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
- m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
- propsToSet.push({
- layer,
- id: layer.id(),
- title: propTitle,
- prop: layer.theatreObject.props[propTitle],
- value: m.value,
- });
- break;
+ });
+ usedSourceCombos.forEach((source) => {
+ const afs = audioSourceCombos[source];
+ const r = analysedResults[source];
+ afs.analyser.getByteFrequencyData(afs.dataArray);
+ for (let f = 0; f < w; f++) {
+ const v = afs.dataArray[f];
+ r.total_v += v;
+ if (r.max_v < v) {
+ r.max_v = v;
+ r.max_i = f;
+ }
+ r.max_ri += v * f;
+ let fillStyle = 'rgb(200,200,200)';
+ for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
+ // NOTE: this is not the most efficient way to do it
+ const k = canvasKeys[k_i];
+ const layerID = canvasCombos[k][2];
+ if (mapping[layerID][k].source === source) {
+ canvasCombos[k][1].fillStyle = fillStyle;
+ canvasCombos[k][1].fillRect(
+ f,
+ h - (v * verticalFactor),
+ 1,
+ (v * verticalFactor)
+ );
+ }
+ }
+ analysedResults[source].mappings.forEach((m) => {
+ if (m.min_freq <= f && m.max_freq >= f) {
+ m.total_v += v;
+ if (m.max_v < v) {
+ m.max_v = v;
+ m.max_i = f;
}
- case 'pitch': {
- const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : max_i;
- const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : max_ri;
- const fi = config.audio.pitchCombineFrequencies ? ri : mi;
- let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
- m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
- propsToSet.push({
- layer,
- id: layer.id(),
- title: propTitle,
- prop: layer.theatreObject.props[propTitle],
- value: m.value,
- });
- break;
- }
- default:
- break;
+ m.max_ri += v * f;
}
});
}
+ r.max_ri /= r.total_v;
+ analysedResults[source].mappings.forEach((m) => {
+ m.max_ri /= m.total_v;
+ });
+ });
+ for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
+ const k = canvasKeys[k_i];
+ const layerID = canvasCombos[k][2];
+ const m = mapping[layerID][k];
+ if (m.sync === 'volume') {
+ const sx = m.min_freq;
+ const sw = m.max_freq - m.min_freq;
+ const sy = h - (m.max_in * verticalFactor);
+ const sh = (m.max_in - m.min_in) * verticalFactor;
+ canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
+ canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
+ canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
+ } else if (m.sync === 'pitch' || m.sync === 'clarity') {
+ const sx = m.min_freq;
+ const sw = m.max_freq - m.min_freq;
+ const sy = 0;
+ const sh = h;
+ canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
+ canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
+ canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
+ }
+ }
+
+ const propsToSet = [];
+ Object.keys(mapping).forEach((layerID) => {
+ Object.keys(mapping[layerID]).forEach((propTitle) => {
+ const m = mapping[layerID][propTitle];
+ switch (m.sync) {
+ case 'volume': {
+ let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
+ m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
+ propsToSet.push({
+ id: layerID,
+ title: propTitle,
+ value: m.value,
+ });
+ break;
+ }
+ case 'pitch': {
+ const r = analysedResults[m.source];
+ const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : r.max_i;
+ const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : r.max_ri;
+ const fi = config.audio.pitchCombineFrequencies ? ri : mi;
+ let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
+ if (!isNaN(a)) {
+ m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
+ propsToSet.push({
+ id: layerID,
+ title: propTitle,
+ value: m.value,
+ });
+ }
+ break;
+ }
+ case 'clarity': {
+ const clarity = m.max_v / m.total_v;
+ const a = mapValue(clarity, 0.01, 0.05, m.min_out, m.max_out, true);
+ if (!isNaN(a)) {
+ m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
+ propsToSet.push({
+ id: layerID,
+ title: propTitle,
+ value: m.value,
+ });
+ }
+ }
+ default:
+ break;
+ }
+ if (m.letterDelay) {
+ const pt = `letterDelays.${propTitle}`;
+ propsToSet.push({
+ id: layerID,
+ title: pt,
+ value: m.letterDelay,
+ });
+ }
+ });
});
if (propsToSet.length > 0 && frameCount % 2 === 0) {
// this is when to monitor live
if (!record.isRecording()) {
- if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
+ //if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
let values = {};
propsToSet.forEach((p) => {
const newValues = {
@@ -874,17 +1042,16 @@ const Audio = function(tp, record) {
};
});
Object.keys(values).forEach((layerID) => {
- window.debugPreValues = clone(values[layerID]);
deFlattenObject(values[layerID]);
- window.debugValues = clone(values[layerID]);
record.liveUpdater.immediateUpdate(getLayer(layerID), values[layerID]);
});
- }
+ //}
} else {
const position = tp.sheet.sequence.position;
propsToSet.forEach((p) => {
const title = tp
.getPanelPropTitle(p.title);
+ const layer = getLayer(p.id);
if (title !== null) {
const inputElement = title
@@ -897,8 +1064,12 @@ const Audio = function(tp, record) {
}
}
record.addValue(p.id, p.title, p.value, position);
- if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
- record.liveUpdate(p.layer, position);
+ if (p.title.indexOf('color') === 0) {
+ if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
+ record.liveUpdate(layer, position);
+ }
+ } else {
+ record.liveUpdate(layer, position);
}
});
}
@@ -914,107 +1085,6 @@ const Audio = function(tp, record) {
frameCount++;
};
drawAlt();
- } else if (visualSetting == "off") {
- canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
- canvasCtx.fillStyle = "red";
- canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
- }
- }
-
- const voiceChange = () => {
- distortion.oversample = "4x";
- biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0);
-
- const voiceSetting = voiceSelect.value;
-
- if (echoDelay.isApplied()) {
- echoDelay.discard();
- }
-
- // When convolver is selected it is connected back into the audio path
- if (voiceSetting == "convolver") {
- biquadFilter.disconnect(0);
- biquadFilter.connect(convolver);
- } else {
- biquadFilter.disconnect(0);
- biquadFilter.connect(gainNode);
-
- if (voiceSetting == "distortion") {
- distortion.curve = makeDistortionCurve(400);
- } else if (voiceSetting == "biquad") {
- biquadFilter.type = "lowshelf";
- biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0);
- biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0);
- } else if (voiceSetting == "delay") {
- echoDelay.apply();
- } else if (voiceSetting == "off") {
- console.log("Voice settings turned off");
- }
- }
- }
-
- function createEchoDelayEffect(audioContext) {
- const delay = audioContext.createDelay(1);
- const dryNode = audioContext.createGain();
- const wetNode = audioContext.createGain();
- const mixer = audioContext.createGain();
- const filter = audioContext.createBiquadFilter();
-
- delay.delayTime.value = 0.75;
- dryNode.gain.value = 1;
- wetNode.gain.value = 0;
- filter.frequency.value = 1100;
- filter.type = "highpass";
-
- return {
- apply: function() {
- wetNode.gain.setValueAtTime(0.75, audioContext.currentTime);
- },
- discard: function() {
- wetNode.gain.setValueAtTime(0, audioContext.currentTime);
- },
- isApplied: function() {
- return wetNode.gain.value > 0;
- },
- placeBetween: function(inputNode, outputNode) {
- inputNode.connect(delay);
- delay.connect(wetNode);
- wetNode.connect(filter);
- filter.connect(delay);
-
- inputNode.connect(dryNode);
- dryNode.connect(mixer);
- wetNode.connect(mixer);
- mixer.connect(outputNode);
- },
- };
- }
-
- // Event listeners to change visualize and voice settings
- visualSelect.onchange = function() {
- window.cancelAnimationFrame(drawVisual);
- visualize();
- };
-
- voiceSelect.onchange = function() {
- voiceChange();
- };
-
- mute.onclick = voiceMute;
-
- let previousGain;
-
- function voiceMute() {
- if (mute.id === "") {
- previousGain = gainNode.gain.value;
- gainNode.gain.value = 0;
- mute.id = "activated";
- mute.innerHTML = "Unmute";
- } else {
- gainNode.gain.value = previousGain;
- mute.id = "";
- mute.innerHTML = "Mute";
- }
}
}
}
@@ -1039,9 +1109,11 @@ const Audio = function(tp, record) {
this.addAudioOptions = addAudioOptions;
this.removeAudioOptions = removeAudioOptions;
this.AudioMappingOptions = AudioMappingOptions;
+ this.readAudioFiles = readAudioFiles;
// debug
this.canvasCombos = canvasCombos;
+ this.audioSourceCombos = audioSourceCombos;
};
export {
diff --git a/bin/web/js/config.js b/bin/web/js/config.js
index 3d2ecec..9411388 100644
--- a/bin/web/js/config.js
+++ b/bin/web/js/config.js
@@ -83,14 +83,33 @@ const config = {
zoomDynamicMax: 42,
},
audio: {
- ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'],
+ defaultRange: { // check audio.getDefaultRange for dynamic defaults
+ 'textAlignment': [0, 1],
+ 'fontSize_px': [42, 100],
+ 'letterSpacing': [0, 1],
+ 'lineHeight': [0, 1],
+ 'rotation': [0, 180],
+ 'mirror_x_distance': [0, 200],
+ 'mirror_y_distance': [0, 70],
+ 'color': [0, 1],
+ 'letterDelays': [0, 1000],
+ },
+ ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy', 'height'],
+ maxFilenameLength: 24,
defaultSmoothing: 0.7,
+ analyser: {
+ fftSize: 256 * 8,
+ minDecibels: -90,
+ maxDecibels: -10,
+ smoothingTimeConstant: 0.85,
+ },
fftBandsAnalysed: 256 * 8,
fftBandsUsed: 256 / 2,
fftHeight: 256 / 4,
colorSeparateRGBA: true,
ignoreOutboundFrequencies: true,
pitchCombineFrequencies: false,
+ rolloverResetLoop: true,
},
record: {
ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'],
@@ -103,6 +122,7 @@ const config = {
fs: {
idbfsDir: '/idbfs',
idbfsFontDir: '/idbfs/fonts',
+ idbfsAudioDir: '/idbfs/audio',
idbfsTmpDir: '/idbfs/tmp',
},
timeline: {
diff --git a/bin/web/js/main.js b/bin/web/js/main.js
index cbbf5ed..b243ad4 100644
--- a/bin/web/js/main.js
+++ b/bin/web/js/main.js
@@ -151,6 +151,16 @@ const findInjectPanel = () => {
bottomButtonsContainer.classList.add("bottomButtonsContainer");
panel.append(bottomButtonsContainer);
}
+ const hideuiButton = document.querySelector('#hide_ui');
+ if (hideuiButton !== null) {
+ bottomButtonsContainer.append(hideuiButton);
+ hideuiButton.classList.add("main_panel_button");
+ }
+ const audiofileButton = document.querySelector('#upload_audio');
+ if (audiofileButton !== null) {
+ bottomButtonsContainer.append(audiofileButton);
+ audiofileButton.classList.add("main_panel_button");
+ }
const exportButton = document.querySelector('#exporter_open');
if (exportButton !== null) {
bottomButtonsContainer.append(exportButton);
@@ -406,12 +416,20 @@ const listAvailableFontsAndAxes = () => {
window.listAvailableFontsAndAxes = listAvailableFontsAndAxes;
window.getFontsAndAxes = getFontsAndAxes;
+window.getArtboard = () => {
+ return artboard;
+};
+
window.getLayers = () => {
return layers;
};
window.getLayer = (layerID) => {
- return layers.find((layer) => layer.id() === layerID);
+ if (layerID === 'artboard') {
+ return artboard;
+ } else {
+ return layers.find((layer) => layer.id() === layerID);
+ }
};
window.moveLayerUp = (layerID) => {
@@ -422,10 +440,6 @@ window.moveLayerDown = (layerID) => {
layerOrder.moveDown(layerID);
};
-window.getArtboard = () => {
- return artboard;
-};
-
const addLayer = (autoInit = true) => {
const layerID = Module.addNewLayer();
const layer = new Layer(tp, layerID, fontsAndAxes, autoInit);
@@ -531,6 +545,48 @@ window.renderFrames = exporter.renderFrames;
const layer_panel = document.querySelector('#layer_panel');
-const initPanels = () => {
- //makeDraggable(layer_panel);
+const ui = (show) => {
+ if (show && tp.studio.ui.isHidden) {
+ tp.studio.ui.restore();
+ } else if (!show && !tp.studio.ui.isHidden) {
+ tp.studio.ui.hide();
+ }
+};
+
+const handleUiKeypress = (e) => {
+ if (e.key.toLowerCase() === 'q') {
+ document.removeEventListener('keypress', handleUiKeypress);
+ ui(true);
+ }
+};
+
+const initPanels = () => {
+ let hideuiButton = document.querySelector('#hide_ui');
+ if (hideuiButton === null) {
+ hideuiButton = tp.getPanel().querySelector('#hide_ui');
+ }
+ if (hideuiButton !== null) {
+ hideuiButton.addEventListener('click', () => {
+ ui(false);
+ document.addEventListener('keypress', handleUiKeypress);
+ });
+ }
+ let audiofileButton = document.querySelector('#upload_audio');
+ if (audiofileButton === null) {
+ audiofileButton = tp.getPanel().querySelector('#upload_audio');
+ }
+ if (audiofileButton !== null) {
+ audiofileButton.addEventListener('click', () => {
+ uploadFile('audio')
+ .then((file) => {
+ moduleFS
+ .save(file)
+ .then(() => {
+ console.log('ermh... done uploading?', file);
+ audio.readAudioFiles();
+ });
+ });
+ });
+ }
+
};
diff --git a/bin/web/js/moduleFS.js b/bin/web/js/moduleFS.js
index 3611916..a3d872d 100644
--- a/bin/web/js/moduleFS.js
+++ b/bin/web/js/moduleFS.js
@@ -13,6 +13,9 @@ const ModuleFS = function() {
if (!FS.analyzePath(config.fs.idbfsFontDir).exists) {
FS.mkdir(config.fs.idbfsFontDir);
}
+ if (!FS.analyzePath(config.fs.idbfsAudioDir).exists) {
+ FS.mkdir(config.fs.idbfsAudioDir);
+ }
if (!FS.analyzePath(config.fs.idbfsTmpDir).exists) {
FS.mkdir(config.fs.idbfsTmpDir);
}
@@ -59,6 +62,19 @@ const ModuleFS = function() {
.then(() => {
resolve(filePath);
});
+ } else if (file.type.indexOf('audio') === 0) {
+ var uint8View = new Uint8Array(file.arrayBuffer);
+ console.log('trying to save the audio file, file, uint8View', file, uint8View);
+ if (!FS.analyzePath(`${config.fs.idbfsAudioDir}/${file.name}`).exists) {
+ FS.createDataFile(config.fs.idbfsAudioDir, file.name, uint8View, true, true);
+ this.syncfs(MODE_WRITE_TO_PERSISTENT)
+ .then(() => {
+ resolve(true);
+ });
+ } else {
+ alert(`It seems as if an audiofile with the name "${file.name}" already exists. Please rename your file and upload again, thanks <3`);
+ resolve(false);
+ }
} else {
resolve(false);
}
diff --git a/bin/web/js/record.js b/bin/web/js/record.js
index 5ba73bc..ae77ffe 100644
--- a/bin/web/js/record.js
+++ b/bin/web/js/record.js
@@ -117,6 +117,7 @@ const LiveUpdater = function(tp, buffy) {
};
this.immediateUpdate = (layer, values) => {
const cv = clone(values);
+ const ctv = clone(layer.theatreObject.value);
if (cv.hasOwnProperty('color.r')) {
cv['color'] = {
r: cv['color.r'],
@@ -129,7 +130,10 @@ const LiveUpdater = function(tp, buffy) {
delete cv['color.b'];
delete cv['color.a'];
}
- const v = {...layer.theatreObject.value, ...cv};
+ flattenObject(cv, ['color']);
+ flattenObject(ctv, ['color']);
+ const v = {...ctv, ...cv};
+ deFlattenObject(v, ['color']);
const p = layer.values2cppProps(v);
if (p !== false) {
const id = layer.id();
@@ -144,10 +148,27 @@ const LiveUpdater = function(tp, buffy) {
const Record = function(tp) {
+ const NOT_RECORDING = 0;
+ const STARTING_RECORDING = 1;
+ const RECORDING = 2;
+ const STOPPING_RECORDING = 3;
+
const hot = {};
- let isRecording = false;
+ let isRecording = NOT_RECORDING;
const buffy = new LiveBuffer();
const liveUpdater = new LiveUpdater(tp, buffy);
+ let isInitialized = false;
+
+ const init = () => {
+ if (!isInitialized) {
+ tp.core.onChange(tp.sheet.sequence.pointer.playing, (playing) => {
+ if (isRecording === RECORDING && !playing) {
+ stopRecording();
+ }
+ });
+ isInitialized = true;
+ }
+ };
const isHot = (layerID, propTitle) => {
return hot.hasOwnProperty(layerID)
@@ -231,19 +252,19 @@ const Record = function(tp) {
button.innerHTML = `

`;
container.append(button);
button.addEventListener('click', () => {
- if(isRecording) {
+ if(isRecording === RECORDING) {
stopRecording();
} else {
if (config.record.recordMapped) {
// make all mapped props hot and
Object.keys(audio.mapping)
.forEach((layerID) => {
- if (getLayer(layerID).isSelected()) {
+ //if (getLayer(layerID).isSelected()) { // NOTE: multilayer recording
Object.keys(audio.mapping[layerID])
.forEach((propTitle) => {
addHot(layerID, propTitle);
});
- }
+ //}
});
} else {
// only make this propTitle hot and
@@ -339,6 +360,10 @@ const Record = function(tp) {
value,
position = tp.sheet.sequence.position,
lastPosition = buffy.NO_TIME) => {
+ // NOTE: multilayer recording
+ if (!hot.hasOwnProperty(layerID) || !hot[layerID].hasOwnProperty(propTitle)) {
+ return;
+ }
hot[layerID][propTitle].recording.push({
position,
value,
@@ -369,7 +394,17 @@ const Record = function(tp) {
};
const startRecording = () => {
+ isRecording = STARTING_RECORDING;
console.log('Record::startRecording');
+ document.querySelector('#notice_recording')
+ .classList.add('visible');
+ document.querySelector('#notice_recording')
      .classList.remove('impenetrable');
+ document.querySelector('#notice_recording .what p').innerHTML = 'recording';
+ document.querySelector('#notice_recording .details p').innerHTML = '';
+ if (!isInitialized) {
+ init();
+ }
lastPositions = {};
tp.sheet.sequence.pause();
const layerKeys = Object.keys(hot);
@@ -398,9 +433,16 @@ const Record = function(tp) {
tp.sheet.sequence.position = 0;
tp.sheet.sequence.play();
});
- isRecording = true;
+ isRecording = RECORDING;
};
const stopRecording = () => {
+ document.querySelector('#notice_recording')
+ .classList.add('visible');
+ document.querySelector('#notice_recording')
      .classList.add('impenetrable');
+ document.querySelector('#notice_recording .what p').innerHTML = 'digesting recording';
+ document.querySelector('#notice_recording .details p').innerHTML = 'please wait';
+ isRecording = STOPPING_RECORDING;
return new Promise((resolve) => {
const layerKeys = Object.keys(hot);
const promises = [];
@@ -474,8 +516,10 @@ const Record = function(tp) {
});
buffy.deregister(layerID);
});
+ document.querySelector('#notice_recording')
+ .classList.remove('visible');
console.log('Record::stopRecording', 'stopped recording');
- isRecording = false;
+ isRecording = NOT_RECORDING;
resolve();
});
});
@@ -493,7 +537,7 @@ const Record = function(tp) {
return hot;
};
this.isRecording = () => {
- return isRecording;
+ return isRecording != NOT_RECORDING;
};
this.injectPanel = injectPanel;
this.startRecording = startRecording;
diff --git a/bin/web/js/theatre-play.js b/bin/web/js/theatre-play.js
index 0df8955..97e5b9a 100644
--- a/bin/web/js/theatre-play.js
+++ b/bin/web/js/theatre-play.js
@@ -97,7 +97,7 @@ const TheatrePlay = function(autoInit = false) {
if (typeof value === 'undefined') {
return false;
}
- };
+ }
return this.sheet.sequence.__experimental_getKeyframes(prop);
};
// wtf, this function was being written in one go
diff --git a/bin/web/js/utils.js b/bin/web/js/utils.js
index 958ce23..166753c 100644
--- a/bin/web/js/utils.js
+++ b/bin/web/js/utils.js
@@ -119,7 +119,7 @@ function uploadFile(expectedType = 'application/json') {
let reader = new FileReader();
- if (expectedType === 'application/zip' || file.type === 'application/zip') {
+ if (expectedType === 'application/zip' || file.type === 'application/zip' || file.type.indexOf('audio') === 0) {
reader.onload = (e) => {
const f = e.target.result;
console.log(e, file.name, file.size, file.type, f);