// variabletime/bin/web/js/audio.js

import {
mapValue,
mix,
toCssClass,
flattenObject,
deFlattenObject,
clone,
} from './utils.js';
window.mapValue = mapValue;
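// AudioMappingOptions describes how one analysed audio source drives one
// property: which slice of the FFT to look at (min_freq/max_freq), which
// input amplitude range to read (min_in/max_in, byte values 0..255), the
// output range written to the property (min_out/max_out), a smoothing
// factor (0..1), the analysis mode ('volume', 'pitch' or 'clarity'), the
// audio source ('microphone' or a file name) and whether that source is
// audible (muted).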
const AudioMappingOptions = function() {
this.min_freq = 0.0;
this.max_freq = config.audio.fftBandsUsed;
this.min_in = 0.0;
this.max_in = 255.0;
this.min_out = 0.0;
this.max_out = 1.0;
this.smoothing = config.audio.defaultSmoothing;
this.sync = 'volume';
this.source = 'microphone';
this.value = 0.0;
this.muted = true;
};
const Audio = function(tp, record) {
const audioDom = document.querySelector('.audioWrapper');
let audioCtx = false;
const heading = audioDom.querySelector("h1");
heading.textContent = "CLICK HERE TO START";
// an array of possible sync options.
const audio_sync_options = ['volume', 'pitch', 'clarity'];
// could also be an enum, like this:
//const AudioSyncOptions = Object.freeze({
//RED: Symbol("volume"),
//BLUE: Symbol("pitch"),
//GREEN: Symbol("frequency"),
//toString: (e) => {
//return e.toString.match(/\(([\S\s]*)\)/)[1]
//},
//});
//document.body.addEventListener("click", init);
let started = false;
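// mapping holds the active audio routing per layer and property, e.g.
// (shape only, values illustrative):
//   mapping[layer.id()][propTitle] = <AudioMappingOptions instance>
// savedMapping remembers the last options a user configured for a property,
// so re-enabling audio on that property restores them.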
let mapping = {};
let savedMapping = {};
//const canvass = [];
let canvasCombos = {};
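// Clean-up hook: when a property row (or a subtree containing rows) is
// removed from the control panel DOM, drop its entry from canvasCombos so
// the draw loop stops painting into detached canvases.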
const mutationObserver = new MutationObserver(function(e) {
if (e[0].removedNodes) {
e[0].removedNodes.forEach((n) => {
if (typeof n === 'object' &&
typeof n.hasAttribute === 'function' &&
typeof n.querySelectorAll === 'function') {
if (n.hasAttribute('data-propTitle')) {
const propTitle = n.getAttribute('data-propTitle');
delete canvasCombos[propTitle];
} else {
const subProps = n.querySelectorAll('[data-propTitle]');
if (subProps.length > 0) {
subProps.forEach((sp) => {
const propTitle = sp.getAttribute('data-propTitle');
delete canvasCombos[propTitle];
});
}
}
}
});
}
});
let areMutationsObserved = false;
const isMapped = (layer, propTitle) => {
if (!mapping.hasOwnProperty(layer.id())) {
return false;
}
if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
if (propTitle === 'color' &&
config.audio.colorSeparateRGBA &&
mapping[layer.id()].hasOwnProperty('color.r') &&
mapping[layer.id()].hasOwnProperty('color.g') &&
mapping[layer.id()].hasOwnProperty('color.b') &&
mapping[layer.id()].hasOwnProperty('color.a')) {
return true;
}
return false;
}
return true;
};
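// Default output ranges: an explicit per-property default from the config
// wins; otherwise width/x/y fall back to fractions of the artboard size,
// letterDelay to its configured range, and font variation axes to the range
// reported by the font.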
const getDefaultRange = (layer, propTitle) => {
if (config.audio.defaultRange.hasOwnProperty(propTitle)) {
return config.audio.defaultRange[propTitle];
} else if (propTitle.indexOf('width') === 0) {
return [
getArtboard().theatreObject.value.width / 2,
getArtboard().theatreObject.value.width
];
} else if (propTitle.indexOf('y') === 0) {
return [
0,
getArtboard().theatreObject.value.height / 2
];
} else if (propTitle.indexOf('x') === 0) {
return [
0,
getArtboard().theatreObject.value.width / 2
];
} else if (propTitle.indexOf('letterDelay') === 0) {
return [
config.audio.defaultRange.letterDelays[0],
config.audio.defaultRange.letterDelays[1]
];
} else if (propTitle.split('.')[0] === 'fontVariationAxes') {
return layer.props.fontVariationAxes
.props[propTitle.split('.')[1]].range;
}
};
const getAudioMappingOptions = (layer, propTitle) => {
if (savedMapping.hasOwnProperty(layer.id()) && savedMapping[layer.id()].hasOwnProperty(propTitle)) {
return savedMapping[layer.id()][propTitle];
} else if (propTitle === 'color') {
const mm = getDefaultRange(layer, 'color');
if (config.audio.colorSeparateRGBA) {
const r = new AudioMappingOptions();
r.min_out = mm[0];
r.max_out = mm[1];
const g = new AudioMappingOptions();
g.min_out = mm[0];
g.max_out = mm[1];
const b = new AudioMappingOptions();
b.min_out = mm[0];
b.max_out = mm[1];
const a = new AudioMappingOptions();
a.min_out = 1.0; // NOTE: dirty, hardcoded value;
a.max_out = 1.0; // alpha deliberately stays at full opacity
return [{r}, {g}, {b}, {a}];
} else {
const o = new AudioMappingOptions();
o.min_out = {r: mm[0], b: mm[0], g: mm[0], a: mm[0]};
o.max_out = {r: mm[1], b: mm[1], g: mm[1], a: mm[1]};
return o;
}
} else {
const o = new AudioMappingOptions();
const mm = getDefaultRange(layer, propTitle);
o.min_out = mm[0];
o.max_out = mm[1];
return o;
}
};
// potentially recursive
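// (when color is mapped with colorSeparateRGBA enabled, it expands into one
// sub-mapping per channel: color.r, color.g, color.b, color.a)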
const addAudioMapping = (layer, propTitle, options = false) => {
if (!options) {
options = getAudioMappingOptions(layer, propTitle);
if (Array.isArray(options)) {
let isGood = true;
options.forEach((o) => {
const subPropKey = Object.keys(o)[0];
const subPropTitle = `${propTitle}.${subPropKey}`;
isGood = addAudioMapping(layer, subPropTitle, o[subPropKey]) ? isGood : false;
});
return isGood;
}
}
if (!mapping.hasOwnProperty(layer.id())) {
mapping[layer.id()] = {};
}
if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
mapping[layer.id()][propTitle] = options;
return true;
} else {
// already there
return false;
}
};
const removeAudioMapping = (layer = false, propTitle = false) => {
if (!layer && !propTitle) {
Object.keys(mapping).forEach((layerID) => {
Object.keys(mapping[layerID]).forEach((propTitle) => {
delete mapping[layerID][propTitle];
});
delete mapping[layerID];
});
return true;
}
if (!mapping.hasOwnProperty(layer.id())) {
// no layer
return false;
}
if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
// no propTitle
// perhaps color?
if (config.audio.colorSeparateRGBA && propTitle === 'color') {
let isGood = true;
isGood = removeAudioMapping(layer, 'color.r');
isGood = removeAudioMapping(layer, 'color.g');
isGood = removeAudioMapping(layer, 'color.b');
isGood = removeAudioMapping(layer, 'color.a');
return isGood;
}
return false;
}
delete mapping[layer.id()][propTitle];
if (Object.keys(mapping[layer.id()]).length === 0) {
delete mapping[layer.id()];
}
return true;
}
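// Builds the per-property audio UI that sits below a property row in the
// panel: source select + mute, min/max output, smoothing, an optional
// letterDelay field, sync mode radios (volume / pitch / clarity) and an FFT
// canvas on which a frequency/amplitude window can be drag-selected.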
const createAudioOptions = (layer, propTitle, container) => {
const mappingOptions = mapping[layer.id()][propTitle];
const hasLetterDelay =
config.layer.letterDelayProps.indexOf(propTitle.split('.')[0]) >= 0 &&
propTitle.indexOf('color') < 0 &&
!tp.isSequenced(propTitle);
const panel = tp.getPanel();
if (!areMutationsObserved) {
mutationObserver.observe(panel, { childList: true, subtree: true });
areMutationsObserved = true;
}
const audioOptions = document.createElement('div');
audioOptions.setAttribute('data-propTitle',propTitle);
audioOptions.classList.add('audioOptions');
audioOptions.classList.add('audioOptionsTypeDefault');
audioOptions.classList.add(toCssClass(`audioOptions${propTitle}`));
audioOptions.style.position = 'relative';
audioOptions.style.width = '100%';
if (propTitle.split('.')[0] === 'color' && propTitle.split('.').length > 1) {
audioOptions.classList.add(toCssClass('audioOptionscolor'));
switch(propTitle.split('.')[1]) {
case 'r': {
audioOptions.style.background = 'rgba(255,0,0,0.2)'; // AUDIO COLOR
break;
}
case 'g': {
audioOptions.style.background = 'rgba(0,255,0,0.2)'; // AUDIO COLOR
break;
}
case 'b': {
audioOptions.style.background = 'rgba(0,0,255,0.2)'; // AUDIO COLOR
break;
}
case 'a': {
audioOptions.style.background = 'rgba(255,255,255,0.2)'; // AUDIO COLOR
break;
}
}
} else {
audioOptions.style.background = 'rgba(163, 163, 163, 0.2)'; // AUDIO COLOR
}
audioOptions.style.order = parseInt(container.style.order) + 1;
const updateMappingOptions = () => {
mappingOptions.min_out = parseFloat(panel.querySelector(toCssClass(`audio_min${propTitle}`,'#')).value);
mappingOptions.max_out = parseFloat(panel.querySelector(toCssClass(`audio_max${propTitle}`,'#')).value);
mappingOptions.sync =
panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value;
const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`,'#')).value;
mappingOptions.smoothing = parseFloat(s);
if (hasLetterDelay) {
const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`,'#'));
mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value);
}
mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`,'#')).value;
mappingOptions.muted = panel.querySelector(toCssClass(`audio_mute${propTitle}`,'#')).checked;
if (!savedMapping.hasOwnProperty(layer.id())) {
savedMapping[layer.id()] = {};
}
savedMapping[layer.id()][propTitle] = mappingOptions;
};
const source_Dom_Cont = document.createElement('div');
source_Dom_Cont.classList.add('source_Dom_Cont');
const source_Dom = document.createElement('select');
source_Dom.id = toCssClass(`audio_source${propTitle}`);
const source_mic = document.createElement('option');
source_mic.value = 'microphone';
source_mic.innerHTML = 'microphone';
source_Dom.append(source_mic);
FS.readdir(config.fs.idbfsAudioDir)
.forEach((file) => {
if (file[0] !== '.') {
const source_file = document.createElement('option');
source_file.value = file;
if (file.length > config.audio.maxFilenameLength) {
source_file.innerHTML = file.substr(0,6) + '..' + file.substr(file.length - 6, 6);
} else {
source_file.innerHTML = file;
}
source_Dom.append(source_file);
}
});
source_Dom_Cont.append(source_Dom);
audioOptions.append(source_Dom_Cont);
const muteDom = document.createElement('input');
const muteDom_label = document.createElement('label');
muteDom.id = toCssClass(`audio_mute${propTitle}`);
muteDom.name = toCssClass(`audio_mute${propTitle}`);
muteDom.type = 'checkbox';
muteDom.checked = true;
muteDom_label.htmlFor = toCssClass(`audio_mute${propTitle}`);
muteDom_label.innerHTML = 'muted';
source_Dom_Cont.append(muteDom);
source_Dom_Cont.append(muteDom_label);
const min_max_Dom = document.createElement('div');
min_max_Dom.classList.add('audio_min_max');
const min_Cont = document.createElement('div');
min_Cont.classList.add('audio_min_Cont');
const min_inputDom_label = document.createElement('label');
min_inputDom_label.htmlFor = toCssClass(`audio_min${propTitle}`);
min_inputDom_label.innerHTML = 'min ';
const min_inputDom = document.createElement('input');
min_inputDom.type = 'number';
min_inputDom.name = toCssClass(`audio_min${propTitle}`);
min_inputDom.id = toCssClass(`audio_min${propTitle}`);
min_inputDom.value = `${mappingOptions.min_out}`;
const max_Cont = document.createElement('div');
max_Cont.classList.add('audio_max_Cont');
const max_inputDom_label = document.createElement('label');
max_inputDom_label.htmlFor = toCssClass(`audio_max${propTitle}`);
max_inputDom_label.innerHTML = 'max ';
const max_inputDom = document.createElement('input');
max_inputDom.type = 'number';
max_inputDom.name = toCssClass(`audio_max${propTitle}`);
max_inputDom.id = toCssClass(`audio_max${propTitle}`);
max_inputDom.value = `${mappingOptions.max_out}`;
const smoothing_inputDom_label = document.createElement('label');
smoothing_inputDom_label.htmlFor = toCssClass(`audio_smoothing${propTitle}`);
smoothing_inputDom_label.innerHTML = 'audio smoothing';
const smoothing_inputDom = document.createElement('input');
smoothing_inputDom.type = 'number';
smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`);
smoothing_inputDom.id = toCssClass(`audio_smoothing${propTitle}`);
smoothing_inputDom.value = mappingOptions.smoothing;
smoothing_inputDom.min = 0;
smoothing_inputDom.max = 1;
smoothing_inputDom.step = 0.01;
min_max_Dom.append(smoothing_inputDom_label);
min_max_Dom.append(smoothing_inputDom);
min_max_Dom.append(min_Cont);
min_Cont.append(min_inputDom_label);
min_Cont.append(min_inputDom);
min_max_Dom.append(max_Cont);
max_Cont.append(max_inputDom_label);
max_Cont.append(max_inputDom);
if (hasLetterDelay) {
const letterDelayCont = document.createElement('div');
letterDelayCont.classList.add("letterDelayCont");
const letterDelay_inputDom_label = document.createElement('label');
letterDelay_inputDom_label.htmlFor = toCssClass(`audio_letterDelay${propTitle}`);
letterDelay_inputDom_label.innerHTML = 'letterDelay';
const letterDelay_inputDom = document.createElement('input');
letterDelay_inputDom.type = 'number';
letterDelay_inputDom.name = toCssClass(`audio_letterDelay${propTitle}`);
letterDelay_inputDom.id = toCssClass(`audio_letterDelay${propTitle}`);
letterDelay_inputDom.value = 0;
letterDelay_inputDom.min = 0;
letterDelay_inputDom.step = 1;
letterDelayCont.append(letterDelay_inputDom_label);
letterDelayCont.append(letterDelay_inputDom);
min_max_Dom.append(letterDelayCont);
letterDelay_inputDom.addEventListener('change', updateMappingOptions);
}
audioOptions.append(min_max_Dom);
const sync_Dom = document.createElement('div');
sync_Dom.classList.add('sync_Dom');
const sync_titleDom = document.createElement('p');
const sync_titleDom_Cont = document.createElement('div');
sync_titleDom_Cont.classList.add('sync_titleDom_Cont');
sync_titleDom.innerHTML = 'sync with:';
sync_Dom.append(sync_titleDom);
audio_sync_options.forEach((o) => {
const sync_inputDom_Cont = document.createElement('div');
sync_inputDom_Cont.classList.add('sync_inputDom_Cont');
const sync_inputDom_label = document.createElement('label');
sync_inputDom_label.htmlFor = toCssClass(`audio_sync${propTitle}${o}`);
sync_inputDom_label.innerHTML = o;
const sync_inputDom = document.createElement('input');
sync_inputDom.type = 'radio';
sync_inputDom.name = toCssClass(`audio_sync${propTitle}`);
sync_inputDom.id = toCssClass(`audio_sync${propTitle}${o}`);
sync_inputDom.value = o;
// pre-select the option that matches the current sync mode
if (o === mappingOptions.sync) {
sync_inputDom.checked = '1';
}
sync_inputDom_Cont.append(sync_inputDom_label);
sync_inputDom_Cont.append(sync_inputDom);
sync_titleDom_Cont.append(sync_inputDom_Cont);
sync_Dom.append(sync_titleDom_Cont);
// sync_Dom.append(sync_inputDom);
sync_inputDom.addEventListener('change', updateMappingOptions);
});
audioOptions.append(sync_Dom);
const fft_Dom = document.createElement('div');
const fft_imgDom = document.createElement('canvas');
const fft_selectDom = document.createElement('div');
fft_Dom.style.position = 'relative';
fft_Dom.style.top = '0px';
fft_Dom.style.left = '0px';
fft_imgDom.classList.add('audio_fft');
fft_imgDom.classList.add(toCssClass(`audio_fft${propTitle}`));
fft_imgDom.style.width = '100%';
fft_imgDom.style.userDrag = 'none';
fft_imgDom.style.userSelect = 'none';
fft_imgDom.style.pointerEvents = 'none';
fft_imgDom.setAttribute('width', config.audio.fftBandsUsed);
fft_imgDom.setAttribute('height', config.audio.fftHeight);
fft_selectDom.style.position = 'absolute';
fft_selectDom.style.top = '0px';
fft_selectDom.style.left = '0px';
fft_selectDom.style.width = '100%';
fft_selectDom.style.height = '100%';
fft_selectDom.style.pointerEvents = 'none';
fft_selectDom.style.backgroundColor = 'rgba(28, 186, 148,0.4)'; // AUDIO COLOR
fft_selectDom.style.border = 'none'; // AUDIO COLOR
fft_Dom.append(fft_imgDom);
fft_Dom.append(fft_selectDom);
audioOptions.append(fft_Dom);
source_Dom.addEventListener('change', updateMappingOptions);
muteDom.addEventListener('change', updateMappingOptions);
min_inputDom.addEventListener('change', updateMappingOptions);
max_inputDom.addEventListener('change', updateMappingOptions);
smoothing_inputDom.addEventListener('change', updateMappingOptions);
let setFrequency = false;
let freq_down = 0;
let freq_up = 0;
let xy_start;
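// Dragging on the FFT canvas selects the analysis window: x maps to FFT
// bins (min_freq/max_freq), y maps to byte amplitude 0..255 (min_in/max_in),
// with y inverted because the canvas origin is at the top.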
fft_Dom.addEventListener('mousedown', (e) => {
setFrequency = true;
const bb = fft_imgDom.getBoundingClientRect();
const x = e.clientX - bb.x;
const y = e.clientY - bb.y;
xy_start = {x, y};
});
fft_Dom.addEventListener('mousemove', (e) => {
if (setFrequency) {
const bb = fft_imgDom.getBoundingClientRect();
const x_factor = config.audio.fftBandsUsed / bb.width;
const y_factor = 256.0 / bb.height;
const x = e.clientX - bb.x;
const y = e.clientY - bb.y;
let min_x, max_x, min_y, max_y;
if (x > xy_start.x) {
min_x = xy_start.x;
max_x = x;
} else {
min_x = x;
max_x = xy_start.x;
}
if (y > xy_start.y) {
min_y = xy_start.y;
max_y = y;
} else {
min_y = y;
max_y = xy_start.y;
}
mappingOptions.min_freq = min_x * x_factor;
mappingOptions.max_freq = max_x * x_factor;
mappingOptions.min_in = (bb.height - max_y) * y_factor;
mappingOptions.max_in = (bb.height - min_y) * y_factor;
}
});
const unset = (e) => {
setFrequency = false;
};
const unsetFromOutside = (e) => {
document.removeEventListener('mouseup', unsetFromOutside);
unset(e);
};
fft_Dom.addEventListener('mouseup', unset);
fft_Dom.addEventListener('mouseleave', (e) => {
if (setFrequency) {
document.addEventListener('mouseup', unsetFromOutside);
}
});
fft_Dom.addEventListener('mouseenter', (e) => {
if (setFrequency) {
document.removeEventListener('mouseup', unsetFromOutside);
}
});
container.after(audioOptions);
//canvass.push(fft_imgDom);
canvasCombos[propTitle] = [fft_imgDom, fft_imgDom.getContext("2d"), layer.id()];
updateMappingOptions();
return audioOptions;
};
const addAudioOptions = (layer, propTitle) => {
if (!started) {
// the audio options UI requires audio to have been initialized
init();
}
const panelPropTitle = tp.getPanelPropTitle(propTitle);
if (panelPropTitle === null) {
console.log('Audio::addAudioOptions::error',`cannot find panelPropTitle "${propTitle}"`);
return;
}
if (tp.getPanel().querySelector(toCssClass(`audioOptions${propTitle}`, '.')) !== null) {
//console.log('Audio::addAudioOptions::error',`audioOptions already exist for "${propTitle}"`);
return;
}
const container = panelPropTitle.parentNode.parentNode;
if (propTitle === 'color' && config.audio.colorSeparateRGBA) {
// NOTE: attach reversed, because container.after(audioOptions)
createAudioOptions(layer, `${propTitle}.a`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
createAudioOptions(layer, `${propTitle}.b`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
createAudioOptions(layer, `${propTitle}.g`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
createAudioOptions(layer, `${propTitle}.r`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
} else {
createAudioOptions(layer, propTitle, container);
}
const audioButton = container.querySelector('.audioButton');
audioButton.classList.add('active');
};
const removeAudioOptions = (layer = false, propTitle = false) => {
const panel = tp.getPanel();
if (!layer && !propTitle) {
const allAudioOptions = panel.querySelectorAll('.audioOptions');
if (allAudioOptions !== null) {
for (let i = 0; i < allAudioOptions.length; i++) {
allAudioOptions[i].remove();
}
}
canvasCombos = {};
panel.querySelectorAll('.audioButton').forEach((button) => {
button.classList.remove('active');
});
} else {
// only selected layers have options
// otherwise the ui is not there
if (layer.isSelected()) {
if (config.audio.colorSeparateRGBA && propTitle === 'color') {
delete canvasCombos['color.r'];
delete canvasCombos['color.g'];
delete canvasCombos['color.b'];
delete canvasCombos['color.a'];
} else {
delete canvasCombos[propTitle];
}
const audioOptions = panel.querySelectorAll(toCssClass(`audioOptions${propTitle}`,'.'));
if (audioOptions.length > 0) {
audioOptions.forEach((e) => { e.remove(); });
}
const audioButton = panel.querySelector(toCssClass(`audioButton${propTitle}`,'.'));
if (audioButton !== null) {
audioButton.classList.remove('active');
}
}
}
};
const addAudioButton = (layer, propTitle, isActive) => {
const panel = tp.getPanel();
const panelPropTitle = tp.getPanelPropTitle(propTitle);
if (panelPropTitle !== null) {
//const container = tp.getPanelPropContainer(panelPropTitle);
const container = panelPropTitle.parentNode.parentNode;
if (container === null) {
console.log("Audio::addAudioButton",
`impossible! cannot find panelPropContainer for ${propTitle}`);
} else if (container.querySelector('.audioButton') !== null) {
// this is super verbose, let's not log by default
//console.log("Audio::addAudioButton",
//`already added an audio button for ${propTitle}`);
} else {
const button = document.createElement('div');
button.classList.add('audioButton');
button.classList.add(toCssClass(`audioButton${propTitle}`));
button.innerHTML = `<img src="/web/assets/sound.svg" alt="audio" />`;
container.append(button);
button.addEventListener('click', () => {
if (!started) {
init();
}
if (!isMapped(layer, propTitle)) {
addAudioMapping(layer, propTitle);
addAudioOptions(layer, propTitle);
layer.updateValuesViaTheatre(false);
} else {
removeAudioMapping(layer, propTitle);
removeAudioOptions(layer, propTitle);
layer.updateValuesViaTheatre(true);
}
});
if (isActive) {
addAudioMapping(layer, propTitle);
addAudioOptions(layer, propTitle);
}
}
} else {
console.log("Audio::addAudioButton",
`cannot find panelPropTitle for ${propTitle}`);
}
};
const injectPanel = (layer) => {
console.log('injecting panel');
const flatValues = clone(layer.theatreObject.value);
flattenObject(flatValues, ['color']);
const layerType = layer.id().split('-')[0];
const props = Object.keys(flatValues);
props.forEach((propTitle) => {
console.log('injecting prop', propTitle);
if (config.audio.ignoreProps[layerType].indexOf(propTitle) < 0) {
let isActive = false;
if (mapping.hasOwnProperty(layer.id())) {
if (mapping[layer.id()].hasOwnProperty(propTitle)) {
isActive = true;
}
}
addAudioButton(layer, propTitle, isActive);
}
});
};
const audioSourceCombos = {};
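// Reads every audio file from the IDBFS audio directory, creates a hidden
// looping <audio> element for it and wires it into the AudioContext:
// element -> MediaElementSource -> gain (0 = inaudible) -> destination,
// plus a parallel AnalyserNode used purely for the FFT data.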
const readAudioFiles = () => {
FS.readdir(config.fs.idbfsAudioDir).forEach((file) => {
if (file.indexOf('.') !== 0 && !audioSourceCombos.hasOwnProperty(file)) {
const audioElement = document.createElement('audio');
audioElement.classList.add('invisible');
audioElement.classList.add('audio_file');
audioElement.classList.add(toCssClass(`audio_file${file}`));
document.querySelector('body').append(audioElement);
const arr = FS.readFile(`${config.fs.idbfsAudioDir}/${file}`);
let type = 'audio/wav';
const filesplit = file.split('.');
const extension = filesplit[filesplit.length - 1];
if (extension === 'wav') {
type = 'audio/wav';
} else if (extension === 'mp3') {
type = 'audio/mpeg';
} else if (extension === 'ogg') {
type = 'audio/ogg';
}
const src = URL.createObjectURL(
new Blob([arr], {
type
})
);
audioElement.src = src;
audioElement.loop = true;
const source = audioCtx.createMediaElementSource(audioElement);
const gain = audioCtx.createGain();
gain.gain.value = 0;
source.connect(gain);
gain.connect(audioCtx.destination);
//source.connect(audioCtx.destination);
const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
const bufferLength = analyser.frequencyBinCount / 2;
const dataArray = new Uint8Array(bufferLength);
source.connect(analyser);
audioElement.play();
audioSourceCombos[file] = {
gain,
source,
dataArray,
analyser,
audioElement,
};
}
});
};
const init = () => {
if (!started) {
started = true;
if (audioCtx !== false && audioCtx.state === 'suspended') {
if (confirm('It looks as if your project has audio. '
+ 'Should we start audio now? '
+ 'It is possible that you will get a request that Variable Time may use your microphone. '
+ 'Note: all data / the microphone stream stays on your device. If you don\'t believe us, you can disconnect from the internet and it will still work. :-)')) {
audioCtx.resume();
} else {
return;
}
}
heading.textContent = "Voice-change-O-matic";
//document.body.removeEventListener("click", init);
// Older browsers might not implement mediaDevices at all, so we set an empty object first
if (navigator.mediaDevices === undefined) {
navigator.mediaDevices = {};
}
// Some browsers partially implement mediaDevices. We can't assign an object
// with getUserMedia as it would overwrite existing properties.
// Add the getUserMedia property if it's missing.
if (navigator.mediaDevices.getUserMedia === undefined) {
navigator.mediaDevices.getUserMedia = function(constraints) {
// First get ahold of the legacy getUserMedia, if present
const getUserMedia =
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
// Some browsers just don't implement it - return a rejected promise with an error
// to keep a consistent interface
if (!getUserMedia) {
return Promise.reject(
new Error("getUserMedia is not implemented in this browser")
);
}
// Otherwise, wrap the call to the old navigator.getUserMedia with a Promise
return new Promise(function(resolve, reject) {
getUserMedia.call(navigator, constraints, resolve, reject);
});
};
}
// Set up forked web audio context, for multiple browsers
// window. is needed otherwise Safari explodes
audioCtx = new(window.AudioContext || window.webkitAudioContext)();
const voiceSelect = audioDom.querySelector("#voice");
// Grab the mute button to use below
const mute = audioDom.querySelector(".mute");
// Set up the different audio nodes we will use for the app
{
const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
const bufferLength = analyser.frequencyBinCount / 2;
audioSourceCombos['microphone'] = {
// source: see below when we actually get the microphone
analyser,
dataArray: new Uint8Array(bufferLength),
audioElement: null,
};
}
readAudioFiles();
//const distortion = audioCtx.createWaveShaper();
//const gainNode = audioCtx.createGain();
//const biquadFilter = audioCtx.createBiquadFilter();
//const convolver = audioCtx.createConvolver();
//const echoDelay = createEchoDelayEffect(audioCtx);
// Distortion curve for the waveshaper, thanks to Kevin Ennis
// http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
const makeDistortionCurve = (amount) => {
let k = typeof amount === "number" ? amount : 50,
n_samples = 44100,
curve = new Float32Array(n_samples),
deg = Math.PI / 180,
i = 0,
x;
for (; i < n_samples; ++i) {
x = (i * 2) / n_samples - 1;
curve[i] = ((3 + k) * x * 20 * deg) / (Math.PI + k * Math.abs(x));
}
return curve;
}
// Set up canvas context for visualizer
const canvas = audioDom.querySelector(".visualizer");
const canvasCtx = canvas.getContext("2d");
const intendedWidth = audioDom.clientWidth;
canvas.setAttribute("width", config.audio.fftBandsUsed);
const visualSelect = audioDom.querySelector("#visual");
let drawVisual;
let previousPosition = -1;
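// previousPosition tracks the sequence position between frames so a loop
// rollover (the position jumping backwards) can be detected and the audio
// files resynced to the timeline.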
// Main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
console.log("getUserMedia supported.");
const constraints = {
audio: true
};
navigator.mediaDevices
.getUserMedia(constraints)
.then(function(stream) {
const source = audioCtx.createMediaStreamSource(stream);
const gain = audioCtx.createGain();
gain.gain.value = 0;
source.connect(gain);
gain.connect(audioCtx.destination);
source.connect(audioSourceCombos['microphone'].analyser);
audioSourceCombos['microphone'].source = source;
audioSourceCombos['microphone'].gain = gain;
visualize();
})
.catch(function(err) {
console.log("The following gUM error occured: " + err);
});
} else {
console.log("getUserMedia not supported on your browser!");
}
const visualize = () => {
//analyser.fftSize = config.audio.fftBandsAnalysed;
const w = config.audio.fftBandsUsed;
const h = config.audio.fftHeight;
const verticalFactor = h / 256.0;
// See comment above for Float32Array()
let canvasKeys = Object.keys(canvasCombos);
for (let i = 0; i < canvasKeys.length; i++) {
canvasCombos[canvasKeys[i]][1].clearRect(0, 0, w, h);
}
let frameCount = 0;
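// drawAlt runs once per animation frame: clear and repaint the per-property
// FFT canvases, pull frequency data for every source that is mapped,
// accumulate per-mapping peaks/averages, then convert them into property
// values that are either pushed live or written into the recording.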
const drawAlt = function() {
const position = tp.sheet.sequence.position;
let positionRollover = false;
if (config.audio.rolloverResetLoop && position < previousPosition) {
positionRollover = true;
}
previousPosition = position;
canvasKeys = Object.keys(canvasCombos);
drawVisual = requestAnimationFrame(drawAlt);
canvasKeys.forEach((k) => {
canvasCombos[k][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
canvasCombos[k][1].fillRect(0, 0, w, h);
const layerID = canvasCombos[k][2];
const m = mapping[layerID][k];
if (m.sync === 'volume') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = h - (m.max_in * verticalFactor);
const sh = (m.max_in - m.min_in) * verticalFactor;
canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
canvasCombos[k][1].fillRect(sx, sy, sw, sh);
} else if (m.sync === 'pitch' || m.sync === 'clarity') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = 0;
const sh = h;
canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
canvasCombos[k][1].fillRect(sx, sy, sw, sh);
}
});
const usedSourceCombos = [];
const analysedResults = {};
const unmuted = [];
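// First pass over the mapping: note which sources are in use (and which of
// them should be audible) and reset the per-mapping accumulators.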
Object.keys(mapping).forEach((layerID) => {
Object.keys(mapping[layerID]).forEach((propTitle) => {
const m = mapping[layerID][propTitle];
const source = m.source;
if (!m.muted) {
if (unmuted.indexOf(source) < 0) {
unmuted.push(source);
}
}
if (usedSourceCombos.indexOf(source) < 0) {
usedSourceCombos.push(source);
analysedResults[source] = {
max_i: 0,
max_ri: 0,
max_v: 0,
total_v: 0,
mappings: [],
};
}
m.max_v = 0;
m.max_i = 0;
m.max_ri = 0;
m.total_v = 0;
analysedResults[source].mappings.push(m);
});
});
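// Keep file playback in sync with usage: play (and resync on loop rollover)
// sources that are mapped, pause the rest, and open the gain only for
// sources that are unmuted somewhere.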
Object.keys(audioSourceCombos).forEach((k) => {
const asc = audioSourceCombos[k];
if (asc.audioElement !== null) {
if (usedSourceCombos.indexOf(k) >= 0) {
if (positionRollover || asc.audioElement.paused) {
asc.audioElement.currentTime = position % asc.audioElement.duration;
asc.audioElement.play();
}
} else if (!asc.audioElement.paused) {
asc.audioElement.pause();
}
}
if (unmuted.indexOf(k) < 0) {
asc.gain.gain.value = 0;
} else {
asc.gain.gain.value = 1;
}
});
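// For every source in use: read the FFT, draw the spectrum into each canvas
// mapped to that source, and accumulate per-mapping maxima plus the
// amplitude-weighted average bin (max_ri) used for pitch detection.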
usedSourceCombos.forEach((source) => {
const afs = audioSourceCombos[source];
const r = analysedResults[source];
afs.analyser.getByteFrequencyData(afs.dataArray);
for (let f = 0; f < w; f++) {
const v = afs.dataArray[f];
r.total_v += v;
if (r.max_v < v) {
r.max_v = v;
r.max_i = f; // index of the loudest bin, not its value
}
r.max_ri += v * f;
let fillStyle = 'rgb(200,200,200)';
for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
// NOTE: this is not the most efficient way to do it
const k = canvasKeys[k_i];
const layerID = canvasCombos[k][2];
if (mapping[layerID][k].source === source) {
canvasCombos[k][1].fillStyle = fillStyle;
canvasCombos[k][1].fillRect(
f,
h - (v * verticalFactor),
1,
(v * verticalFactor)
);
}
}
analysedResults[source].mappings.forEach((m) => {
if (m.min_freq <= f && m.max_freq >= f) {
m.total_v += v;
if (m.max_v < v) {
m.max_v = v;
m.max_i = f;
}
m.max_ri += v * f;
}
});
}
r.max_ri /= r.total_v;
analysedResults[source].mappings.forEach((m) => {
m.max_ri /= m.total_v;
});
});
for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
const k = canvasKeys[k_i];
const layerID = canvasCombos[k][2];
const m = mapping[layerID][k];
if (m.sync === 'volume') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = h - (m.max_in * verticalFactor);
const sh = (m.max_in - m.min_in) * verticalFactor;
canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
} else if (m.sync === 'pitch' || m.sync === 'clarity') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = 0;
const sh = h;
canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
}
}
const propsToSet = [];
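// Second pass: turn the analysed values into property values.
// mapValue(v, inMin, inMax, outMin, outMax, clamp) from utils.js is assumed
// to rescale v linearly from [inMin, inMax] to [outMin, outMax]; the result
// is then exponentially smoothed:
//   value = value * smoothing + (1 - smoothing) * target
// 'volume' uses the peak amplitude inside the selected window, 'pitch' the
// (weighted or peak) bin index, 'clarity' the peak-to-total ratio.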
Object.keys(mapping).forEach((layerID) => {
Object.keys(mapping[layerID]).forEach((propTitle) => {
const m = mapping[layerID][propTitle];
switch (m.sync) {
case 'volume': {
let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
propsToSet.push({
id: layerID,
title: propTitle,
value: m.value,
});
break;
}
case 'pitch': {
const r = analysedResults[m.source];
const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : r.max_i;
const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : r.max_ri;
const fi = config.audio.pitchCombineFrequencies ? ri : mi;
let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
if (!isNaN(a)) {
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
propsToSet.push({
id: layerID,
title: propTitle,
value: m.value,
});
}
break;
}
case 'clarity': {
const clarity = m.max_v / m.total_v;
const a = mapValue(clarity, 0.01, 0.05, m.min_out, m.max_out, true);
if (!isNaN(a)) {
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
propsToSet.push({
id: layerID,
title: propTitle,
value: m.value,
});
}
break;
}
default:
break;
}
if (m.letterDelay) {
const pt = `letterDelays.${propTitle}`;
propsToSet.push({
id: layerID,
title: pt,
value: m.letterDelay,
});
}
});
});
if (propsToSet.length > 0 && frameCount % 2 === 0) {
// not recording: push values straight to the layer for live monitoring
if (!record.isRecording()) {
//if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
let values = {};
propsToSet.forEach((p) => {
const newValues = {
[p.title]: p.value
};
if (!values.hasOwnProperty(p.id)) {
values[p.id] = {};
}
values[p.id] = {
...values[p.id],
...newValues,
};
});
Object.keys(values).forEach((layerID) => {
deFlattenObject(values[layerID]);
record.liveUpdater.immediateUpdate(getLayer(layerID), values[layerID]);
});
//}
} else {
const position = tp.sheet.sequence.position;
propsToSet.forEach((p) => {
const title = tp
.getPanelPropTitle(p.title);
const layer = getLayer(p.id);
if (title !== null) {
const inputElement = title
.parentNode.parentNode
.querySelector('input.recording');
if (inputElement !== null) {
inputElement.value = p.value;
inputElement.dispatchEvent(new Event('change'));
}
}
record.addValue(p.id, p.title, p.value, position);
if (p.title.indexOf('color') === 0) {
if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
record.liveUpdate(layer, position);
}
} else {
record.liveUpdate(layer, position);
}
});
}
}
//const panel = tp.getPanel();
//const fft_images = panel.querySelectorAll('.audio_fft');
//if (fft_images !== null) {
//const src = canvas.toDataURL();
//fft_images.forEach((e) => {
//e.src = src;
//});
//}
frameCount++;
};
drawAlt();
}
if (audioCtx === false || audioCtx.state === 'suspended') {
const notice = document.querySelector('#notice');
const button = notice.querySelector('.button');
const buttonP = button.querySelector('p');
const whatP = notice.querySelector('.what p');
const detailsP = notice.querySelector('.details p');
button.classList.add('visible');
notice.classList.add('visible');
whatP.innerHTML = 'Start AudioContext';
detailsP.innerHTML = 'This project has audio. For audio to be allowed to start, we need a user interaction.<br>You are the user. If you click the button below, you interacted with Variable Time, and we can start audio.<br>Also, if you have not previously allowed Variable Time to use the microphone, there might be another notification asking you for permission.<br>Sounds good?';
buttonP.innerHTML = 'Yeah, absolutely, I love audio!';
const alright = () => {
audioCtx.resume();
button.classList.remove('visible');
notice.classList.remove('visible');
detailsP.innerHTML = '';
whatP.innerHTML = '';
buttonP.innerHTML = 'OK';
button.removeEventListener('click', alright);
};
button.addEventListener('click', alright);
}
}
}
const deinit = () => {
if (started) {
if (audioCtx !== false) {
audioCtx.suspend();
}
started = false;
}
};
this.getContext = () => {
return audioCtx;
};
this.init = init;
this.deinit = deinit;
this.injectPanel = injectPanel;
this.getMapping = () => { return mapping; };
this.getSavedMapping = () => { return savedMapping };
this.setMapping = (m) => { mapping = m; };
this.setSavedMapping = (m) => { savedMapping = m; };
this.addAudioMapping = addAudioMapping;
this.removeAudioMapping = removeAudioMapping;
this.addAudioOptions = addAudioOptions;
this.removeAudioOptions = removeAudioOptions;
this.AudioMappingOptions = AudioMappingOptions;
this.readAudioFiles = readAudioFiles;
// debug
this.canvasCombos = canvasCombos;
this.audioSourceCombos = audioSourceCombos;
};
export {
Audio
}