// (file-viewer metadata captured by extraction — kept as a comment: 1203 lines, 55 KiB, JavaScript)
import {
|
|
mapValue,
|
|
mix,
|
|
toCssClass,
|
|
flattenObject,
|
|
deFlattenObject,
|
|
clone,
|
|
} from './utils.js';
|
|
|
|
// Expose mapValue on window for non-module consumers and console debugging.
window.mapValue = mapValue;
|
|
|
|
|
|
/**
 * Per-property audio mapping settings.
 *
 * Holds the frequency window (min/max_freq), the analysed input range
 * (min/max_in, byte scale 0..255), the mapped output range
 * (min/max_out), smoothing, the sync metric and audio source selection
 * used to drive one animated property from audio analysis.
 * Frequency-band count and smoothing defaults are read from the global
 * `config` at construction time.
 */
const AudioMappingOptions = function() {
  Object.assign(this, {
    min_freq: 0.0,
    max_freq: config.audio.fftBandsUsed,
    min_in: 0.0,
    max_in: 255.0,
    min_out: 0.0,
    max_out: 1.0,
    smoothing: config.audio.defaultSmoothing,
    sync: 'volume',
    source: 'microphone',
    value: 0.0,
    muted: true,
  });
};
|
|
|
|
const Audio = function(tp, record) {
|
|
|
|
// Container element for the audio UI.
const audioDom = document.querySelector('.audioWrapper');
// Lazily created AudioContext; stays `false` until init() runs
// (browsers require a user gesture before audio may start).
let audioCtx = false;
// Prompt the user to click so init() can run with a gesture available.
const heading = audioDom.querySelector("h1");
heading.textContent = "CLICK HERE TO START";
|
|
|
|
// An array of possible sync options: which analysis metric drives a mapping.
const audio_sync_options = ['volume', 'pitch', 'clarity'];
// could also be an enum
// like that
//const AudioSyncOptions = Object.freeze({
//RED: Symbol("volume"),
//BLUE: Symbol("pitch"),
//GREEN: Symbol("frequency"),
//toString: (e) => {
//return e.toString.match(/\(([\S\s]*)\)/)[1]
//},
//});
//document.body.addEventListener("click", init);
// True once init() has run (audio graph built, consent prompt handled).
let started = false;

// layerID -> propTitle -> AudioMappingOptions for all active mappings.
let mapping = {};
// Persisted copy of mapping options, keyed the same way; survives UI teardown.
let savedMapping = {};
//const canvass = [];
// propTitle -> [canvas, 2d context, layerID] for the FFT preview canvases.
let canvasCombos = {};
|
|
// Watches the theatre panel for removed DOM nodes so FFT preview
// canvases belonging to removed audio-option rows are dropped from
// `canvasCombos` (otherwise the draw loop keeps painting into detached
// canvases).
const mutationObserver = new MutationObserver(function(mutations) {
  // FIX: inspect every MutationRecord, not just the first one.
  mutations.forEach((mutation) => {
    mutation.removedNodes.forEach((n) => {
      // FIX: the original tested `n.hasOwnProperty('hasAttribute')`,
      // which is never true — DOM methods live on the prototype, not as
      // own properties — so stale canvases were never cleaned up.
      // Feature-detect element capability instead.
      if (typeof n === 'object' &&
        typeof n.hasAttribute === 'function' &&
        typeof n.querySelectorAll === 'function') {
        if (n.hasAttribute('data-propTitle')) {
          delete canvasCombos[n.getAttribute('data-propTitle')];
        } else {
          // The removed node may wrap several audio-option rows.
          n.querySelectorAll('[data-propTitle]').forEach((sp) => {
            delete canvasCombos[sp.getAttribute('data-propTitle')];
          });
        }
      }
    });
  });
});
// Observation starts lazily in createAudioOptions(); only once.
let areMutationsObserved = false;
|
|
|
|
// Reports whether `propTitle` of `layer` is currently audio-mapped.
// A plain 'color' query also counts as mapped when all four separate
// RGBA channel mappings exist (colorSeparateRGBA mode).
const isMapped = (layer, propTitle) => {
  const layerMap = mapping[layer.id()];
  if (layerMap === undefined) {
    return false;
  }
  if (layerMap.hasOwnProperty(propTitle)) {
    return true;
  }
  const rgbaComplete = ['color.r', 'color.g', 'color.b', 'color.a']
    .every((channel) => layerMap.hasOwnProperty(channel));
  return propTitle === 'color' &&
    config.audio.colorSeparateRGBA &&
    rgbaComplete;
};
|
|
|
|
// Returns the [min_out, max_out] default output range for a property.
// Precedence: explicit config entry, then prefix heuristics relative to
// the artboard size (width / y / x), then letterDelay defaults, then the
// declared range of a font-variation axis. Returns undefined for
// properties with no known default (callers guard with Array.isArray).
// FIX: a second, unreachable duplicate `indexOf('y') === 0` branch was
// removed (it repeated the first one verbatim).
const getDefaultRange = (layer, propTitle) => {
  if (config.audio.defaultRange.hasOwnProperty(propTitle)) {
    return config.audio.defaultRange[propTitle];
  } else if (propTitle.indexOf('width') === 0) {
    return [
      getArtboard().theatreObject.value.width / 2,
      getArtboard().theatreObject.value.width
    ];
  } else if (propTitle.indexOf('y') === 0) {
    return [
      0,
      getArtboard().theatreObject.value.height / 2
    ];
  } else if (propTitle.indexOf('x') === 0) {
    return [
      0,
      getArtboard().theatreObject.value.width / 2
    ];
  } else if (propTitle.indexOf('letterDelay') === 0) {
    return [
      config.audio.defaultRange.letterDelays[0],
      config.audio.defaultRange.letterDelays[1]
    ];
  } else if (propTitle.split('.')[0] === 'fontVariationAxes') {
    return layer.props.fontVariationAxes
      .props[propTitle.split('.')[1]].range;
  }
};
|
|
|
|
// Builds (or restores) the AudioMappingOptions for a layer/property.
// Saved options win; otherwise fresh options are created with the
// property's default output range. For 'color' in separate-RGBA mode an
// array of four single-channel wrappers [{r}, {g}, {b}, {a}] is
// returned instead, with alpha pinned to 1.0.
const getAudioMappingOptions = (layer, propTitle) => {
  const saved = savedMapping[layer.id()];
  if (saved !== undefined && saved.hasOwnProperty(propTitle)) {
    return saved[propTitle];
  }
  if (propTitle === 'color') {
    const mm = getDefaultRange(layer, 'color');
    if (config.audio.colorSeparateRGBA) {
      // One option object per channel, wrapped as {channel: options}.
      return ['r', 'g', 'b', 'a'].map((key) => {
        const opts = new AudioMappingOptions();
        if (key === 'a') {
          opts.min_out = 1.0; // NOTE: dirty, dirty
          opts.max_out = 1.0; // hardcoded value, you
        } else {
          opts.min_out = mm[0];
          opts.max_out = mm[1];
        }
        return { [key]: opts };
      });
    }
    // Combined mode: one option object whose range spans all channels.
    const combined = new AudioMappingOptions();
    combined.min_out = {r: mm[0], b: mm[0], g: mm[0], a: mm[0]};
    combined.max_out = {r: mm[1], b: mm[1], g: mm[1], a: mm[1]};
    return combined;
  }
  const opts = new AudioMappingOptions();
  const mm = getDefaultRange(layer, propTitle);
  opts.min_out = mm[0];
  opts.max_out = mm[1];
  return opts;
};
|
|
|
|
// potentially recursive
|
|
// Registers an audio mapping for layer/propTitle. When no options are
// given they are resolved from saved state / defaults; a 'color'
// default in separate-RGBA mode expands recursively into four channel
// mappings (potentially recursive). Returns false when (any of) the
// mapping(s) already existed.
const addAudioMapping = (layer, propTitle, options = false) => {
  if (!options) {
    options = getAudioMappingOptions(layer, propTitle);
    if (Array.isArray(options)) {
      // Expand [{r}, {g}, {b}, {a}] into color.r … color.a mappings.
      let allAdded = true;
      for (const entry of options) {
        const channel = Object.keys(entry)[0];
        if (!addAudioMapping(layer, `${propTitle}.${channel}`, entry[channel])) {
          allAdded = false;
        }
      }
      return allAdded;
    }
  }
  const layerID = layer.id();
  if (!mapping.hasOwnProperty(layerID)) {
    mapping[layerID] = {};
  }
  if (mapping[layerID].hasOwnProperty(propTitle)) {
    // already there
    return false;
  }
  mapping[layerID][propTitle] = options;
  return true;
};
|
|
|
|
// Removes audio mapping(s). With no arguments the whole mapping table
// is cleared (returns true). For a specific layer/property the entry is
// deleted and the layer bucket dropped when it becomes empty; a 'color'
// request in separate-RGBA mode removes all four channel mappings.
// Returns false when nothing matched.
const removeAudioMapping = (layer = false, propTitle = false) => {
  if (!layer && !propTitle) {
    Object.keys(mapping).forEach((layerID) => {
      Object.keys(mapping[layerID]).forEach((prop) => {
        delete mapping[layerID][prop];
      });
      delete mapping[layerID];
    });
    return true;
  }
  if (!mapping.hasOwnProperty(layer.id())) {
    // no layer
    return false;
  }
  if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
    // no propTitle — perhaps a combined color request?
    if (config.audio.colorSeparateRGBA && propTitle === 'color') {
      // FIX: the original overwrote isGood with each call, so only the
      // result of removing 'color.a' was ever reported; AND them
      // instead (mirrors addAudioMapping's accumulation).
      let isGood = removeAudioMapping(layer, 'color.r');
      isGood = removeAudioMapping(layer, 'color.g') && isGood;
      isGood = removeAudioMapping(layer, 'color.b') && isGood;
      isGood = removeAudioMapping(layer, 'color.a') && isGood;
      return isGood;
    }
    return false;
  }
  delete mapping[layer.id()][propTitle];
  if (Object.keys(mapping[layer.id()]).length === 0) {
    delete mapping[layer.id()];
  }
  return true;
};
|
|
|
|
// Builds the expanded per-property audio UI: a source <select> with a
// mute toggle, min/max output inputs, audio smoothing, an optional
// letterDelay input, "sync with" radio buttons and an FFT preview
// canvas with drag-to-select frequency/level window. The widget is
// inserted directly after `container` and its canvas is registered in
// `canvasCombos` for the visualizer loop.
// Returns the root `.audioOptions` element.
// FIXES: `label.for` -> `label.htmlFor` (`.for` is a dead expando; the
// DOM property is htmlFor), explicit radix for parseInt, removed unused
// locals freq_down/freq_up.
const createAudioOptions = (layer, propTitle, container) => {
  const mappingOptions = mapping[layer.id()][propTitle];
  // letterDelay applies only to whitelisted, non-color, non-sequenced props.
  const hasLetterDelay =
    config.layer.letterDelayProps.indexOf(propTitle.split('.')[0]) >= 0 &&
    propTitle.indexOf('color') < 0 &&
    !tp.isSequenced(propTitle);
  const panel = tp.getPanel();
  // Start watching the panel (once) so removed rows are pruned from
  // canvasCombos by the mutation observer.
  if (!areMutationsObserved) {
    mutationObserver.observe(panel, { childList: true, subtree: true });
    areMutationsObserved = true;
  }
  const audioOptions = document.createElement('div');
  audioOptions.setAttribute('data-propTitle', propTitle);
  audioOptions.classList.add('audioOptions');
  audioOptions.classList.add('audioOptionsTypeDefault');
  audioOptions.classList.add(toCssClass(`audioOptions${propTitle}`));
  audioOptions.style.position = 'relative';
  audioOptions.style.width = '100%';
  // Tint per RGBA sub-channel; neutral grey otherwise.
  if (propTitle.split('.')[0] === 'color' && propTitle.split('.').length > 1) {
    audioOptions.classList.add(toCssClass('audioOptionscolor'));
    const channelTints = {
      r: 'rgba(255,0,0,0.2)', // AUDIO COLOR
      g: 'rgba(0,255,0,0.2)', // AUDIO COLOR
      b: 'rgba(0,0,255,0.2)', // AUDIO COLOR
      a: 'rgba(255,255,255,0.2)', // AUDIO COLOR
    };
    const tint = channelTints[propTitle.split('.')[1]];
    if (tint !== undefined) {
      audioOptions.style.background = tint;
    }
  } else {
    audioOptions.style.background = 'rgba(163, 163, 163, 0.2)'; // AUDIO COLOR
  }
  audioOptions.style.order = parseInt(container.style.order, 10) + 1;

  // Reads every control back into mappingOptions and persists the
  // result in savedMapping so re-opening the panel restores it.
  const updateMappingOptions = () => {
    mappingOptions.min_out = parseFloat(panel.querySelector(toCssClass(`audio_min${propTitle}`, '#')).value);
    mappingOptions.max_out = parseFloat(panel.querySelector(toCssClass(`audio_max${propTitle}`, '#')).value);
    mappingOptions.sync =
      panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value;
    const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`, '#')).value;
    mappingOptions.smoothing = parseFloat(s);
    if (hasLetterDelay) {
      const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`, '#'));
      mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value, 10);
    }
    mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`, '#')).value;
    mappingOptions.muted = panel.querySelector(toCssClass(`audio_mute${propTitle}`, '#')).checked;

    if (!savedMapping.hasOwnProperty(layer.id())) {
      savedMapping[layer.id()] = {};
    }
    savedMapping[layer.id()][propTitle] = mappingOptions;
  };

  // --- audio source selector (microphone + files found in IDBFS) ---
  const source_Dom_Cont = document.createElement('div');
  source_Dom_Cont.classList.add('source_Dom_Cont');
  const source_Dom = document.createElement('select');
  source_Dom.id = toCssClass(`audio_source${propTitle}`);
  const source_mic = document.createElement('option');
  source_mic.value = 'microphone';
  source_mic.innerHTML = 'microphone';
  source_Dom.append(source_mic);
  let selectedSource = 'microphone';
  if (typeof audio.getSavedMapping()[layer.id()] === 'object') {
    if (typeof audio.getSavedMapping()[layer.id()][propTitle] === 'object') {
      selectedSource = audio.getSavedMapping()[layer.id()][propTitle].source;
    }
  }
  FS.readdir(config.fs.idbfsAudioDir)
    .forEach((file) => {
      if (file[0] !== '.') {
        const source_file = document.createElement('option');
        if (file === selectedSource) {
          source_file.setAttribute('selected', 'true');
        }
        source_file.value = file;
        // Ellipsize long filenames in the dropdown.
        if (file.length > config.audio.maxFilenameLength) {
          source_file.innerHTML = file.substr(0, 6) + '..' + file.substr(file.length - 6, 6);
        } else {
          source_file.innerHTML = file;
        }
        source_Dom.append(source_file);
      }
    });
  source_Dom_Cont.append(source_Dom);
  audioOptions.append(source_Dom_Cont);

  // --- mute checkbox (starts muted) ---
  const muteDom = document.createElement('input');
  const muteDom_label = document.createElement('label');
  muteDom.id = toCssClass(`audio_mute${propTitle}`);
  muteDom.name = toCssClass(`audio_mute${propTitle}`);
  muteDom.type = 'checkbox';
  muteDom.checked = true;
  muteDom_label.htmlFor = toCssClass(`audio_mute${propTitle}`);
  muteDom_label.innerHTML = 'muted';
  source_Dom_Cont.append(muteDom);
  source_Dom_Cont.append(muteDom_label);

  // --- min/max output range + smoothing ---
  const defaultRange = getDefaultRange(layer, propTitle);
  const min_max_Dom = document.createElement('div');
  min_max_Dom.classList.add('audio_min_max');
  const min_Cont = document.createElement('div');
  min_Cont.classList.add('audio_min_Cont');
  const min_inputDom_label = document.createElement('label');
  min_inputDom_label.htmlFor = 'audio_min';
  min_inputDom_label.innerHTML = 'min ';
  const min_inputDom = document.createElement('input');
  min_inputDom.type = 'number';
  min_inputDom.name = toCssClass(`audio_min${propTitle}`);
  min_inputDom.id = toCssClass(`audio_min${propTitle}`);
  if (Array.isArray(defaultRange) && defaultRange.length >= 2) {
    min_inputDom.title = `default: ${defaultRange[0]}`;
  }
  min_inputDom.value = `${mappingOptions.min_out}`;
  const max_Cont = document.createElement('div');
  max_Cont.classList.add('audio_max_Cont');
  const max_inputDom_label = document.createElement('label');
  max_inputDom_label.htmlFor = 'audio_max';
  max_inputDom_label.innerHTML = 'max ';
  const max_inputDom = document.createElement('input');
  max_inputDom.type = 'number';
  max_inputDom.name = toCssClass(`audio_max${propTitle}`);
  max_inputDom.id = toCssClass(`audio_max${propTitle}`);
  if (Array.isArray(defaultRange) && defaultRange.length >= 2) {
    max_inputDom.title = `default: ${defaultRange[1]}`;
  }
  max_inputDom.value = `${mappingOptions.max_out}`;
  const smoothing_inputDom_label = document.createElement('label');
  smoothing_inputDom_label.htmlFor = 'audio_smoothing';
  smoothing_inputDom_label.innerHTML = 'audio smoothing';
  const smoothing_inputDom = document.createElement('input');
  smoothing_inputDom.type = 'number';
  smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`);
  smoothing_inputDom.id = toCssClass(`audio_smoothing${propTitle}`);
  smoothing_inputDom.value = mappingOptions.smoothing;
  smoothing_inputDom.min = 0;
  smoothing_inputDom.max = 1;
  smoothing_inputDom.step = 0.01;
  min_max_Dom.append(smoothing_inputDom_label);
  min_max_Dom.append(smoothing_inputDom);
  min_max_Dom.append(min_Cont);
  min_Cont.append(min_inputDom_label);
  min_Cont.append(min_inputDom);
  min_max_Dom.append(max_Cont);
  max_Cont.append(max_inputDom_label);
  max_Cont.append(max_inputDom);
  // --- optional per-letter delay ---
  if (hasLetterDelay) {
    const letterDelayCont = document.createElement('div');
    letterDelayCont.classList.add("letterDelayCont");
    const letterDelay_inputDom_label = document.createElement('label');
    letterDelay_inputDom_label.htmlFor = 'audio_letterDelay';
    letterDelay_inputDom_label.innerHTML = 'letterDelay';
    const letterDelay_inputDom = document.createElement('input');
    letterDelay_inputDom.type = 'number';
    letterDelay_inputDom.name = toCssClass(`audio_letterDelay${propTitle}`);
    letterDelay_inputDom.id = toCssClass(`audio_letterDelay${propTitle}`);
    letterDelay_inputDom.value = mappingOptions.letterDelay ? mappingOptions.letterDelay : 0;
    letterDelay_inputDom.min = 0;
    letterDelay_inputDom.step = 1;
    letterDelayCont.append(letterDelay_inputDom_label);
    letterDelayCont.append(letterDelay_inputDom);
    min_max_Dom.append(letterDelayCont);
    letterDelay_inputDom.addEventListener('change', updateMappingOptions);
  }
  audioOptions.append(min_max_Dom);

  // --- "sync with" radio group (volume / pitch / clarity) ---
  const sync_Dom = document.createElement('div');
  sync_Dom.classList.add('sync_Dom');
  const sync_titleDom = document.createElement('p');
  const sync_titleDom_Cont = document.createElement('div');
  sync_titleDom_Cont.classList.add('sync_titleDom_Cont');
  sync_titleDom.innerHTML = 'sync with:';
  sync_Dom.append(sync_titleDom);

  audio_sync_options.forEach((o) => {
    const sync_inputDom_Cont = document.createElement('div');
    sync_inputDom_Cont.classList.add('sync_inputDom_Cont');
    const sync_inputDom_label = document.createElement('label');
    sync_inputDom_label.htmlFor = `audio_sync${o}`;
    sync_inputDom_label.innerHTML = o;
    const sync_inputDom = document.createElement('input');
    sync_inputDom.type = 'radio';
    sync_inputDom.name = toCssClass(`audio_sync${propTitle}`);
    sync_inputDom.id = toCssClass(`audio_sync${propTitle}${o}`);
    sync_inputDom.value = o;
    // pre-check the option stored in the mapping
    if (o === mappingOptions.sync) {
      sync_inputDom.checked = '1';
    }
    sync_inputDom_Cont.append(sync_inputDom_label);
    sync_inputDom_Cont.append(sync_inputDom);
    sync_titleDom_Cont.append(sync_inputDom_Cont);
    sync_Dom.append(sync_titleDom_Cont);
    // sync_Dom.append(sync_inputDom);
    sync_inputDom.addEventListener('change', updateMappingOptions);
  });

  audioOptions.append(sync_Dom);

  // --- FFT preview canvas with drag-select overlay ---
  const fft_Dom = document.createElement('div');
  const fft_imgDom = document.createElement('canvas');
  const fft_selectDom = document.createElement('div');
  fft_Dom.style.position = 'relative';
  fft_Dom.style.top = '0px';
  fft_Dom.style.left = '0px';
  fft_imgDom.classList.add('audio_fft');
  fft_imgDom.classList.add(toCssClass(`audio_fft${propTitle}`));
  fft_imgDom.style.width = '100%';
  fft_imgDom.style.userDrag = 'none';
  fft_imgDom.style.userSelect = 'none';
  fft_imgDom.style.pointerEvents = 'none';
  fft_imgDom.setAttribute('width', config.audio.fftBandsUsed);
  fft_imgDom.setAttribute('height', config.audio.fftHeight);
  fft_selectDom.style.position = 'absolute';
  fft_selectDom.style.top = '0px';
  fft_selectDom.style.left = '0px';
  fft_selectDom.style.width = '100%';
  fft_selectDom.style.height = '100%';
  fft_selectDom.style.pointerEvents = 'none';
  fft_selectDom.style.backgroundColor = 'rgba(28, 186, 148,0.4)'; // AUDIO COLOR
  fft_selectDom.style.border = 'none'; // AUDIO COLOR

  fft_Dom.append(fft_imgDom);
  fft_Dom.append(fft_selectDom);
  audioOptions.append(fft_Dom);
  source_Dom.addEventListener('change', updateMappingOptions);
  muteDom.addEventListener('change', updateMappingOptions);
  min_inputDom.addEventListener('change', updateMappingOptions);
  max_inputDom.addEventListener('change', updateMappingOptions);
  smoothing_inputDom.addEventListener('change', updateMappingOptions);
  // Dragging on the FFT view selects the frequency window (x axis) and
  // the input level window (y axis); a plain click resets to full view.
  let setFrequency = false;
  let wasMoved = false;
  let xy_start;
  fft_Dom.addEventListener('mousedown', (e) => {
    setFrequency = true;
    wasMoved = false;
    const bb = fft_imgDom.getBoundingClientRect();
    const x = e.clientX - bb.x;
    const y = e.clientY - bb.y;
    xy_start = {
      x,
      y
    };
  });
  fft_Dom.addEventListener('mousemove', (e) => {
    if (setFrequency) {
      wasMoved = true;
      const bb = fft_imgDom.getBoundingClientRect();
      // Canvas-space factors: x → FFT band index, y → 0..255 level.
      const x_factor = config.audio.fftBandsUsed / bb.width;
      const y_factor = 256.0 / bb.height;
      const x = e.clientX - bb.x;
      const y = e.clientY - bb.y;
      let min_x, max_x, min_y, max_y;
      if (x > xy_start.x) {
        min_x = xy_start.x;
        max_x = x;
      } else {
        min_x = x;
        max_x = xy_start.x;
      }
      if (y > xy_start.y) {
        min_y = xy_start.y;
        max_y = y;
      } else {
        min_y = y;
        max_y = xy_start.y;
      }
      mappingOptions.min_freq = min_x * x_factor;
      mappingOptions.max_freq = max_x * x_factor;
      // Screen y grows downwards, levels upwards — invert against height.
      mappingOptions.min_in = (bb.height - max_y) * y_factor;
      mappingOptions.max_in = (bb.height - min_y) * y_factor;
    }
  });
  const unset = (e) => {
    setFrequency = false;
    if (!wasMoved) {
      // Plain click: reset selection to the full frequency/level window.
      const bb = fft_imgDom.getBoundingClientRect();
      const x_factor = config.audio.fftBandsUsed / bb.width;
      const y_factor = 256.0 / bb.height;
      let min_x, max_x, min_y, max_y;
      min_x = 0;
      min_y = 0;
      max_x = bb.width;
      max_y = bb.height;
      mappingOptions.min_freq = min_x * x_factor;
      mappingOptions.max_freq = max_x * x_factor;
      mappingOptions.min_in = (bb.height - max_y) * y_factor;
      mappingOptions.max_in = (bb.height - min_y) * y_factor;
    }
  };
  // Releasing the button outside the element must still end the drag.
  const unsetFromOutside = (e) => {
    document.removeEventListener('mouseup', unsetFromOutside);
    unset(e);
  };
  fft_Dom.addEventListener('mouseup', unset);
  fft_Dom.addEventListener('mouseleave', (e) => {
    if (setFrequency) {
      document.addEventListener('mouseup', unsetFromOutside);
    }
  });
  fft_Dom.addEventListener('mouseenter', (e) => {
    if (setFrequency) {
      document.removeEventListener('mouseup', unsetFromOutside);
    }
  });

  container.after(audioOptions);

  //canvass.push(fft_imgDom);
  canvasCombos[propTitle] = [fft_imgDom, fft_imgDom.getContext("2d"), layer.id()];
  updateMappingOptions();
  return audioOptions;
};
|
|
|
|
// Ensures the audio-options UI exists for layer/propTitle, initialising
// the audio engine on first use, then marks the row's audio button as
// active. No-op when the options already exist or the panel row cannot
// be found. A 'color' property in separate-RGBA mode gets four channel
// widgets instead of one.
const addAudioOptions = (layer, propTitle) => {
  if (!started) {
    // audioOptions need a started init
    init();
  }
  const panelPropTitle = tp.getPanelPropTitle(propTitle);
  if (panelPropTitle === null) {
    console.log('Audio::addAudioOptions::error',`cannot find panelPropTitle "${propTitle}"`);
    return;
  }
  if (tp.getPanel().querySelector(toCssClass(`audioOptions${propTitle}`, '.')) !== null) {
    // already built for this property
    return;
  }
  const container = panelPropTitle.parentNode.parentNode;

  if (propTitle === 'color' && config.audio.colorSeparateRGBA) {
    // NOTE: attach reversed, because container.after(audioOptions)
    ['a', 'b', 'g', 'r'].forEach((channel) => {
      createAudioOptions(layer, `${propTitle}.${channel}`, container)
        .classList.add(toCssClass(`audioOptions${propTitle}`));
    });
  } else {
    createAudioOptions(layer, propTitle, container);
  }

  container.querySelector('.audioButton').classList.add('active');
};
|
|
|
|
// Tears down audio-options UI. Without arguments every .audioOptions
// element is removed, all preview canvases are forgotten and every
// audio button is deactivated. With layer+propTitle only that
// property's widgets are removed ('color' also drops the four RGBA
// channel canvases) — and only when the layer is selected, since
// unselected layers have no UI.
const removeAudioOptions = (layer = false, propTitle = false) => {
  const panel = tp.getPanel();
  if (!layer && !propTitle) {
    panel.querySelectorAll('.audioOptions').forEach((el) => {
      el.remove();
    });
    canvasCombos = {};
    panel.querySelectorAll('.audioButton').forEach((button) => {
      button.classList.remove('active');
    });
    return;
  }
  // only selected layers have options
  // otherwise the ui is not there
  if (!layer.isSelected()) {
    return;
  }
  const staleKeys = (config.audio.colorSeparateRGBA && propTitle === 'color')
    ? ['color.r', 'color.g', 'color.b', 'color.a']
    : [propTitle];
  staleKeys.forEach((key) => {
    delete canvasCombos[key];
  });
  panel.querySelectorAll(toCssClass(`audioOptions${propTitle}`, '.'))
    .forEach((el) => {
      el.remove();
    });
  const audioButton = panel.querySelector(toCssClass(`audioButton${propTitle}`, '.'));
  if (audioButton !== null) {
    audioButton.classList.remove('active');
  }
};
|
|
|
|
// Adds the speaker toggle button to a property's panel row (at most
// once). Clicking toggles the audio mapping + options UI for that
// property and tells the layer to re-read its theatre-driven values.
// When `isActive`, the mapping and UI are created immediately.
const addAudioButton = (layer, propTitle, isActive) => {
  const panel = tp.getPanel();
  const panelPropTitle = tp.getPanelPropTitle(propTitle);
  if (panelPropTitle === null) {
    console.log("Audio::addAudioButton",
      `cannot find panelPropTitle for ${propTitle}`);
    return;
  }
  //const container = tp.getPanelPropContainer(panelPropTitle);
  const container = panelPropTitle.parentNode.parentNode;
  if (container === null) {
    console.log("Audio::addAudioButton",
      `impossible! cannot find panelPropContainer for ${propTitle}`);
    return;
  }
  if (container.querySelector('.audioButton') !== null) {
    // this is super verbose, let's not log by default
    //console.log("Audio::addAudioButton",
    //`already added an audio button for ${propTitle}`);
    return;
  }
  const button = document.createElement('div');
  button.classList.add('audioButton');
  button.classList.add(toCssClass(`audioButton${propTitle}`));
  button.innerHTML = `<img src="/web/assets/sound.svg" alt="audio" />`;
  container.append(button);
  button.addEventListener('click', () => {
    if (!started) {
      init();
    }
    if (isMapped(layer, propTitle)) {
      // toggle off: drop mapping + UI, restore theatre-driven values
      removeAudioMapping(layer, propTitle);
      removeAudioOptions(layer, propTitle);
      layer.updateValuesViaTheatre(true);
    } else {
      // toggle on: create mapping + UI, values now come from audio
      addAudioMapping(layer, propTitle);
      addAudioOptions(layer, propTitle);
      layer.updateValuesViaTheatre(false);
    }
  });
  if (isActive) {
    addAudioMapping(layer, propTitle);
    addAudioOptions(layer, propTitle);
  }
};
|
|
|
|
// (Re)creates audio toggle buttons for every mappable property of a
// layer after its theatre panel is rendered. Properties listed in
// config.audio.ignoreProps for the layer's type are skipped; buttons
// for already-mapped properties come up active.
const injectPanel = (layer) => {
  console.log('injecting panel');
  const flatValues = clone(layer.theatreObject.value);
  flattenObject(flatValues, ['color']);
  const layerType = layer.id().split('-')[0];
  Object.keys(flatValues)
    .filter((propTitle) =>
      config.audio.ignoreProps[layerType].indexOf(propTitle) < 0)
    .forEach((propTitle) => {
      const layerMap = mapping[layer.id()];
      const isActive = layerMap !== undefined &&
        layerMap.hasOwnProperty(propTitle);
      addAudioButton(layer, propTitle, isActive);
    });
};
|
|
// source name ('microphone' or a filename) -> {source, gain, analyser,
// dataArray, audioElement} for every audio input built so far.
const audioSourceCombos = {};
|
|
// Scans the IDBFS audio directory and builds a playback + analysis
// chain (media element -> gain(0) -> destination, plus an analyser tap)
// for every file not yet present in `audioSourceCombos`. Files loop and
// start silenced; dotfiles are ignored.
const readAudioFiles = () => {
  // Recognised extensions -> MIME type; anything else falls back to wav.
  const mimeTypes = {
    wav: 'audio/wav',
    mp3: 'audio/mpeg',
    ogg: 'audio/ogg',
  };
  FS.readdir(config.fs.idbfsAudioDir).forEach((file) => {
    if (file.indexOf('.') !== 0 && !audioSourceCombos.hasOwnProperty(file)) {
      const audioElement = document.createElement('audio');
      audioElement.classList.add('invisible');
      audioElement.classList.add('audio_file');
      audioElement.classList.add(toCssClass(`audio_file${file}`));
      document.querySelector('body').append(audioElement);

      const arr = FS.readFile(`${config.fs.idbfsAudioDir}/${file}`);
      const filesplit = file.split('.');
      const extension = filesplit[filesplit.length - 1];
      const type = mimeTypes[extension] || 'audio/wav';

      // Serve the in-memory file to the media element via a blob URL.
      audioElement.src = URL.createObjectURL(
        new Blob([arr], {
          type
        })
      );
      audioElement.loop = true;

      // element -> muted gain -> speakers; the analyser taps the source
      // directly so analysis keeps working while the output is silent.
      const source = audioCtx.createMediaElementSource(audioElement);
      const gain = audioCtx.createGain();
      gain.gain.value = 0;
      source.connect(gain);
      gain.connect(audioCtx.destination);
      //source.connect(audioCtx.destination);
      const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
      const bufferLength = analyser.frequencyBinCount / 2;
      const dataArray = new Uint8Array(bufferLength);

      source.connect(analyser);

      // FIX: play() returns a promise that rejects under autoplay
      // restrictions; log instead of leaving an unhandled rejection.
      audioElement.play().catch((err) => {
        console.log('Audio::readAudioFiles', `could not start "${file}": ${err}`);
      });

      audioSourceCombos[file] = {
        gain,
        source,
        dataArray,
        analyser,
        audioElement,
      };
    }
  });
};
|
|
|
|
|
|
const init = () => {
|
|
if (!started) {
|
|
started = true;
|
|
if (audioCtx !== false && audioCtx.state === 'suspended') {
|
|
if (confirm('It looks as if your project has audio.'
|
|
+ 'Should we start audio now?'
|
|
+ 'It is possible that you get a request that Variable Time may use your microphone.'
|
|
+ 'Note: all data / microphone stream will stay on your device. If you don\'t believe us you can disconnect from the internet and it will still work. :-)')) {
|
|
audioCtx.resume();
|
|
} else {
|
|
return;
|
|
}
|
|
}
|
|
heading.textContent = "Voice-change-O-matic";
|
|
//document.body.removeEventListener("click", init);
|
|
|
|
// Older browsers might not implement mediaDevices at all, so we set an empty object first
|
|
if (navigator.mediaDevices === undefined) {
|
|
navigator.mediaDevices = {};
|
|
}
|
|
|
|
// Some browsers partially implement mediaDevices. We can't assign an object
|
|
// with getUserMedia as it would overwrite existing properties.
|
|
// Add the getUserMedia property if it's missing.
|
|
if (navigator.mediaDevices.getUserMedia === undefined) {
|
|
navigator.mediaDevices.getUserMedia = function(constraints) {
|
|
// First get ahold of the legacy getUserMedia, if present
|
|
const getUserMedia =
|
|
navigator.webkitGetUserMedia ||
|
|
navigator.mozGetUserMedia ||
|
|
navigator.msGetUserMedia;
|
|
|
|
// Some browsers just don't implement it - return a rejected promise with an error
|
|
// to keep a consistent interface
|
|
if (!getUserMedia) {
|
|
return Promise.reject(
|
|
new Error("getUserMedia is not implemented in this browser")
|
|
);
|
|
}
|
|
|
|
// Otherwise, wrap the call to the old navigator.getUserMedia with a Promise
|
|
return new Promise(function(resolve, reject) {
|
|
getUserMedia.call(navigator, constraints, resolve, reject);
|
|
});
|
|
};
|
|
}
|
|
|
|
// Set up forked web audio context, for multiple browsers
|
|
// window. is needed otherwise Safari explodes
|
|
audioCtx = new(window.AudioContext || window.webkitAudioContext)();
|
|
const voiceSelect = audioDom.querySelector("#voice");
|
|
|
|
// Grab the mute button to use below
|
|
const mute = audioDom.querySelector(".mute");
|
|
|
|
// Set up the different audio nodes we will use for the app
|
|
{
|
|
const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
|
|
const bufferLength = analyser.frequencyBinCount / 2;
|
|
|
|
audioSourceCombos['microphone'] = {
|
|
// source: see below when we actually get the microphone
|
|
analyser,
|
|
dataArray: new Uint8Array(bufferLength),
|
|
audioElement: null,
|
|
};
|
|
}
|
|
|
|
readAudioFiles();
|
|
|
|
//const distortion = audioCtx.createWaveShaper();
|
|
//const gainNode = audioCtx.createGain();
|
|
//const biquadFilter = audioCtx.createBiquadFilter();
|
|
//const convolver = audioCtx.createConvolver();
|
|
|
|
//const echoDelay = createEchoDelayEffect(audioCtx);
|
|
|
|
// Distortion curve for the waveshaper, thanks to Kevin Ennis
|
|
// http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
|
|
const makeDistortionCurve = (amount) => {
|
|
let k = typeof amount === "number" ? amount : 50,
|
|
n_samples = 44100,
|
|
curve = new Float32Array(n_samples),
|
|
deg = Math.PI / 180,
|
|
i = 0,
|
|
x;
|
|
for (; i < n_samples; ++i) {
|
|
x = (i * 2) / n_samples - 1;
|
|
curve[i] = ((3 + k) * x * 20 * deg) / (Math.PI + k * Math.abs(x));
|
|
}
|
|
return curve;
|
|
}
|
|
|
|
// Set up canvas context for visualizer
|
|
const canvas = audioDom.querySelector(".visualizer");
|
|
const canvasCtx = canvas.getContext("2d");
|
|
|
|
const intendedWidth = audioDom.clientWidth;
|
|
canvas.setAttribute("width", config.audio.fftBandsUsed);
|
|
const visualSelect = audioDom.querySelector("#visual");
|
|
let drawVisual;
|
|
let previousPosition = -1;
|
|
|
|
// Main block for doing the audio recording
|
|
if (navigator.mediaDevices.getUserMedia) {
|
|
console.log("getUserMedia supported.");
|
|
const constraints = {
|
|
audio: true
|
|
};
|
|
navigator.mediaDevices
|
|
.getUserMedia(constraints)
|
|
.then(function(stream) {
|
|
const source = audioCtx.createMediaStreamSource(stream);
|
|
const gain = audioCtx.createGain();
|
|
gain.gain.value = 0;
|
|
source.connect(gain);
|
|
gain.connect(audioCtx.destination);
|
|
source.connect(audioSourceCombos['microphone'].analyser);
|
|
audioSourceCombos['microphone'].source = source;
|
|
audioSourceCombos['microphone'].gain = gain;
|
|
|
|
visualize();
|
|
})
|
|
.catch(function(err) {
|
|
console.log("The following gUM error occured: " + err);
|
|
});
|
|
} else {
|
|
console.log("getUserMedia not supported on your browser!");
|
|
}
|
|
|
|
// Starts the per-frame audio analysis/visualisation loop.
//
// Each animation frame it:
//   1. draws the FFT spectrum of every used audio source into the canvases
//      registered in canvasCombos (one canvas per mapped property),
//   2. derives per-mapping values (volume / pitch / clarity) from the
//      analyser data,
//   3. pushes those values into the timeline — either as a live monitor
//      update or, while recording, as recorded keyframe values.
const visualize = () => {

  //analyser.fftSize = config.audio.fftBandsAnalysed;
  const w = config.audio.fftBandsUsed;   // number of frequency bins drawn
  const h = config.audio.fftHeight;      // canvas height in pixels
  const verticalFactor = h / 256.0;      // byte value (0..255) -> pixels

  // See comment above for Float32Array()
  // canvasCombos[propTitle] holds [<canvas?>, 2d-context, layerID]
  // (index 1 is used as a CanvasRenderingContext2D, index 2 indexes into
  // `mapping`; index 0 presumably the canvas element — TODO confirm).
  let canvasKeys = Object.keys(canvasCombos);

  for (let i = 0; i < canvasKeys.length; i++) {
    canvasCombos[canvasKeys[i]][1].clearRect(0, 0, w, h);
  }

  let frameCount = 0;
  const drawAlt = function() {
    const position = tp.sheet.sequence.position;

    // Detect the sequence looping back to the start so looping audio
    // files can be resynchronised below.
    let positionRollover = false;
    if (config.audio.rolloverResetLoop && position < previousPosition) {
      positionRollover = true;
    }
    previousPosition = position;

    canvasKeys = Object.keys(canvasCombos);
    drawVisual = requestAnimationFrame(drawAlt);

    // Background + grey "selection" area showing each mapping's active
    // frequency/amplitude window.
    canvasKeys.forEach((k) => {
      canvasCombos[k][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
      canvasCombos[k][1].fillRect(0, 0, w, h);
      const layerID = canvasCombos[k][2];
      const m = mapping[layerID][k];
      if (m.sync === 'volume') {
        const sx = m.min_freq;
        const sw = m.max_freq - m.min_freq;
        const sy = h - (m.max_in * verticalFactor);
        const sh = (m.max_in - m.min_in) * verticalFactor;
        canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
        canvasCombos[k][1].fillRect(sx, sy, sw, sh);
      } else if (m.sync === 'pitch' || m.sync === 'clarity') {
        const sx = m.min_freq;
        const sw = m.max_freq - m.min_freq;
        const sy = 0;
        const sh = h;
        canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
        canvasCombos[k][1].fillRect(sx, sy, sw, sh);
      }
    });

    // Collect which sources are referenced by any mapping, which of them
    // are audible (unmuted), and reset the per-frame analysis accumulators.
    const usedSourceCombos = [];
    const analysedResults = {};
    const unmuted = [];
    Object.keys(mapping).forEach((layerID) => {
      Object.keys(mapping[layerID]).forEach((propTitle) => {
        const m = mapping[layerID][propTitle];
        const source = m.source;
        if (!m.muted) {
          if (unmuted.indexOf(source) < 0) {
            unmuted.push(source);
          }
        }
        if (usedSourceCombos.indexOf(source) < 0) {
          usedSourceCombos.push(source);
          analysedResults[source] = {
            max_i: 0,    // bin index of the loudest frequency
            max_ri: 0,   // amplitude-weighted mean bin index
            max_v: 0,    // loudest amplitude (0..255)
            total_v: 0,  // sum of all bin amplitudes
            mappings: [],
          };
        }
        m.max_v = 0;
        m.max_i = 0;
        m.max_ri = 0;
        m.total_v = 0;
        analysedResults[source].mappings.push(m);
      });
    });

    // Start/stop/seek file-backed sources and mute/unmute their gains.
    Object.keys(audioSourceCombos).forEach((k) => {
      const asc = audioSourceCombos[k];
      if (asc.audioElement !== null) {
        if (usedSourceCombos.indexOf(k) >= 0) {
          if (positionRollover || asc.audioElement.paused) {
            // Keep the file in sync with the timeline, wrapping at its
            // duration (assumes metadata is loaded — TODO confirm).
            asc.audioElement.currentTime = position % asc.audioElement.duration;
            asc.audioElement.play();
          }
        } else if (!asc.audioElement.paused) {
          asc.audioElement.pause();
        }
      }
      if (unmuted.indexOf(k) < 0) {
        asc.gain.gain.value = 0;
      } else {
        asc.gain.gain.value = 1;
      }
    });

    // Analyse each used source: accumulate totals/maxima globally and per
    // mapping, and draw the spectrum bars into the matching canvases.
    usedSourceCombos.forEach((source) => {
      const afs = audioSourceCombos[source];
      const r = analysedResults[source];
      afs.analyser.getByteFrequencyData(afs.dataArray);
      for (let f = 0; f < w; f++) {
        const v = afs.dataArray[f];
        r.total_v += v;
        if (r.max_v < v) {
          r.max_v = v;
          // BUGFIX: was `r.max_i = v;` — max_i must hold the bin index f
          // (it is consumed as a frequency index in the 'pitch' case and
          // mirrors the per-mapping `m.max_i = f;` below).
          r.max_i = f;
        }
        r.max_ri += v * f;
        let fillStyle = 'rgb(200,200,200)';
        for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
          // NOTE: this is not the most efficient way to do it
          const k = canvasKeys[k_i];
          const layerID = canvasCombos[k][2];
          if (mapping[layerID][k].source === source) {
            canvasCombos[k][1].fillStyle = fillStyle;
            canvasCombos[k][1].fillRect(
              f,
              h - (v * verticalFactor),
              1,
              (v * verticalFactor)
            );
          }
        }
        analysedResults[source].mappings.forEach((m) => {
          if (m.min_freq <= f && m.max_freq >= f) {
            m.total_v += v;
            if (m.max_v < v) {
              m.max_v = v;
              m.max_i = f;
            }
            m.max_ri += v * f;
          }
        });
      }
      // Normalise the weighted sums into mean bin indices
      // (NaN when total_v is 0 — the 'pitch' case guards with isNaN).
      r.max_ri /= r.total_v;
      analysedResults[source].mappings.forEach((m) => {
        m.max_ri /= m.total_v;
      });
    });

    // White outline of each mapping's active window, on top of the bars.
    for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
      const k = canvasKeys[k_i];
      const layerID = canvasCombos[k][2];
      const m = mapping[layerID][k];
      if (m.sync === 'volume') {
        const sx = m.min_freq;
        const sw = m.max_freq - m.min_freq;
        const sy = h - (m.max_in * verticalFactor);
        const sh = (m.max_in - m.min_in) * verticalFactor;
        canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
        canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
        canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
      } else if (m.sync === 'pitch' || m.sync === 'clarity') {
        const sx = m.min_freq;
        const sw = m.max_freq - m.min_freq;
        const sy = 0;
        const sh = h;
        canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
        canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
        canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
      }
    }

    // Map the analysis results to property values, smoothed exponentially
    // with m.smoothing (1.0 = frozen, 0.0 = instant).
    const propsToSet = [];
    Object.keys(mapping).forEach((layerID) => {
      Object.keys(mapping[layerID]).forEach((propTitle) => {
        const m = mapping[layerID][propTitle];
        switch (m.sync) {
          case 'volume': {
            let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
            m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
            propsToSet.push({
              id: layerID,
              title: propTitle,
              value: m.value,
            });
            break;
          }
          case 'pitch': {
            const r = analysedResults[m.source];
            // Either restrict to this mapping's frequency window or use
            // the whole spectrum; either the peak bin or the weighted mean.
            const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : r.max_i;
            const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : r.max_ri;
            const fi = config.audio.pitchCombineFrequencies ? ri : mi;
            let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
            if (!isNaN(a)) {
              m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
              propsToSet.push({
                id: layerID,
                title: propTitle,
                value: m.value,
              });
            }
            break;
          }
          case 'clarity': {
            // Peak-to-total ratio as a crude tonality measure; the 0.01
            // and 0.05 bounds are empirical input limits.
            const clarity = m.max_v / m.total_v;
            const a = mapValue(clarity, 0.01, 0.05, m.min_out, m.max_out, true);
            if (!isNaN(a)) {
              m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
              propsToSet.push({
                id: layerID,
                title: propTitle,
                value: m.value,
              });
            }
            // FIX: break was missing (fell through to default, which was
            // harmless but fragile).
            break;
          }
          default:
            break;
        }
        if (m.letterDelay) {
          const pt = `letterDelays.${propTitle}`;
          propsToSet.push({
            id: layerID,
            title: pt,
            value: m.letterDelay,
          });
        }
      });
    });

    // Apply the values at half the frame rate.
    if (propsToSet.length > 0 && frameCount % 2 === 0) {
      // this is when to monitor live
      if (!record.isRecording()) {
        //if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
        // Group flat "a.b.c" titles per layer, unflatten, and push one
        // immediate update per layer.
        let values = {};
        propsToSet.forEach((p) => {
          const newValues = {
            [p.title]: p.value
          };
          if (!values.hasOwnProperty(p.id)) {
            values[p.id] = {};
          }
          values[p.id] = {
            ...values[p.id],
            ...newValues,
          };
        });
        Object.keys(values).forEach((layerID) => {
          deFlattenObject(values[layerID]);
          record.liveUpdater.immediateUpdate(getLayer(layerID), values[layerID]);
        });
        //}
      } else {
        // Recording: mirror the value into the panel's recording input
        // (if present) and store it as a keyframe value.
        const position = tp.sheet.sequence.position;
        propsToSet.forEach((p) => {
          const title = tp
            .getPanelPropTitle(p.title);
          const layer = getLayer(p.id);

          if (title !== null) {
            const inputElement = title
              .parentNode.parentNode
              .querySelector('input.recording');

            if (inputElement !== null) {
              inputElement.value = p.value;
              inputElement.dispatchEvent(new Event('change'));
            }
          }
          record.addValue(p.id, p.title, p.value, position);
          if (p.title.indexOf('color') === 0) {
            // For colors, only trigger the live update once per color
            // (on the alpha component) unless RGBA channels are separate.
            if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
              record.liveUpdate(layer, position);
            }
          } else {
            record.liveUpdate(layer, position);
          }
        });
      }
    }
    frameCount++;
  };
  drawAlt();
};
|
|
// Browsers refuse to start/resume an AudioContext without a user gesture
// (autoplay policy). If the context is missing or suspended, show the
// consent notice and resume audio on the button click.
if (audioCtx === false || audioCtx.state === 'suspended') {
  const notice = document.querySelector('#notice');
  const button = notice.querySelector('.button');
  const buttonP = button.querySelector('p');
  const whatP = notice.querySelector('.what p');
  const detailsP = notice.querySelector('.details p');

  whatP.innerHTML = 'Start AudioContext';
  detailsP.innerHTML = 'This project has audio. For audio to be allowed to start, we need a user interaction.<br>You are the user. If you click the button below, you interacted with Variable Time, and we can start audio.<br>Also, if you have not previously allowed Variable Time to use the microphone, there might be another notification asking you for permission.<br>Sounds good?';
  buttonP.innerHTML = 'Yeah, absolutely, I love audio!';

  button.classList.add('visible');
  notice.classList.add('visible');

  // One-shot click handler: resume audio, then restore the notice to its
  // hidden default state and detach itself.
  const onConsentClick = () => {
    audioCtx.resume();
    button.classList.remove('visible');
    notice.classList.remove('visible');
    detailsP.innerHTML = '';
    whatP.innerHTML = '';
    buttonP.innerHTML = 'OK';
    button.removeEventListener('click', onConsentClick);
  };
  button.addEventListener('click', onConsentClick);
}
|
|
}
|
|
}
|
|
// Stop the audio engine: suspend the AudioContext (when one exists) and
// clear the started flag. No-op when not started.
const deinit = () => {
  if (!started) {
    return;
  }
  if (audioCtx !== false) {
    audioCtx.suspend();
  }
  started = false;
};
|
|
|
|
// Expose the AudioContext (still `false` before init has created one).
this.getContext = () => audioCtx;
|
|
// --- public API ---------------------------------------------------------
this.init = init;
this.deinit = deinit;
this.injectPanel = injectPanel;

// Mapping accessors (live and saved-to-project variants).
this.getMapping = () => mapping;
this.getSavedMapping = () => savedMapping;
this.setMapping = (m) => { mapping = m; };
this.setSavedMapping = (m) => { savedMapping = m; };

this.addAudioMapping = addAudioMapping;
this.removeAudioMapping = removeAudioMapping;
this.addAudioOptions = addAudioOptions;
this.removeAudioOptions = removeAudioOptions;
this.AudioMappingOptions = AudioMappingOptions;
this.readAudioFiles = readAudioFiles;

// debug
this.canvasCombos = canvasCombos;
this.audioSourceCombos = audioSourceCombos;
|
|
};
|
|
|
|
export {
|
|
Audio
|
|
}
|