// NOTE (from the commit message): hardcoded filepath, watch out; audio also
// starts playing automatically, and the FFT visualisation only shows one file.
import {
  mapValue,
  mix,
  toCssClass,
  flattenObject,
  deFlattenObject,
  clone,
} from './utils.js';

window.mapValue = mapValue;

const AudioMappingOptions = function() {
  this.min_freq = 0.0;
  this.max_freq = config.audio.fftBandsUsed;
  this.min_in = 0.0;
  this.max_in = 255.0;
  this.min_out = 0.0;
  this.max_out = 1.0;
  this.smoothing = config.audio.defaultSmoothing;
  this.sync = 'volume';
  this.source = 'microphone';
  this.value = 0.0;
};
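// Example of how these options are consumed in drawAlt() below (assuming
// mapValue's last argument clamps): a raw FFT byte v in
// [min_in, max_in] = [0, 255] is mapped into [min_out, max_out], e.g.
// mapValue(128, 0, 255, 0, 1, true) ≈ 0.5, and then exponentially smoothed:
//   value = value * smoothing + (1 - smoothing) * mapped;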

const Audio = function(tp, record) {

  const audioDom = document.querySelector('.audioWrapper');
  let audioCtx = false;
  const heading = audioDom.querySelector("h1");
  heading.textContent = "CLICK HERE TO START";

  // an array of possible sync options
  const audio_sync_options = ['volume', 'pitch', 'frequency'];
  // could also be an enum, like so:
  //const AudioSyncOptions = Object.freeze({
  //RED: Symbol("volume"),
  //BLUE: Symbol("pitch"),
  //GREEN: Symbol("frequency"),
  //toString: (e) => {
  //return e.toString.match(/\(([\S\s]*)\)/)[1]
  //},
  //});
  //document.body.addEventListener("click", init);
  let started = false;

  const mapping = {};
  //const canvass = [];
  let canvasCombos = {};
  const mutationObserver = new MutationObserver(function(e) {
    if (e[0].removedNodes) {
      e[0].removedNodes.forEach((n) => {
        // hasAttribute/querySelectorAll are inherited methods, so
        // hasOwnProperty() would never find them; test them directly instead.
        if (typeof n === 'object' &&
          typeof n.hasAttribute === 'function' &&
          typeof n.querySelectorAll === 'function') {
          if (n.hasAttribute('data-propTitle')) {
            const propTitle = n.getAttribute('data-propTitle');
            delete canvasCombos[propTitle];
          } else {
            const subProps = n.querySelectorAll('[data-propTitle]');
            if (subProps.length > 0) {
              subProps.forEach((sp) => {
                const propTitle = sp.getAttribute('data-propTitle');
                delete canvasCombos[propTitle];
              });
            }
          }
        }
      });
    }
  });
  let areMutationsObserved = false;

  const isMapped = (layer, propTitle) => {
    if (!mapping.hasOwnProperty(layer.id())) {
      return false;
    }
    if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
      if (propTitle === 'color' &&
        config.audio.colorSeparateRGBA &&
        mapping[layer.id()].hasOwnProperty('color.r') &&
        mapping[layer.id()].hasOwnProperty('color.g') &&
        mapping[layer.id()].hasOwnProperty('color.b') &&
        mapping[layer.id()].hasOwnProperty('color.a')) {
        return true;
      }
      return false;
    }
    return true;
  };

  const getDefaultRange = (layer, propTitle) => {
    if (config.audio.defaultRange.hasOwnProperty(propTitle)) {
      return config.audio.defaultRange[propTitle];
    } else if (propTitle.indexOf('width') === 0) {
      return [
        getArtboard().theatreObject.value.width / 2,
        getArtboard().theatreObject.value.width
      ];
    } else if (propTitle.indexOf('y') === 0) {
      return [
        0,
        getArtboard().theatreObject.value.height / 2
      ];
    } else if (propTitle.indexOf('x') === 0) {
      return [
        0,
        getArtboard().theatreObject.value.width / 2
      ];
    } else if (propTitle.indexOf('letterDelay') === 0) {
      return [
        config.audio.defaultRange.letterDelay[0],
        config.audio.defaultRange.letterDelay[1]
      ];
    } else if (propTitle.split('.')[0] === 'fontVariationAxes') {
      return layer.props.fontVariationAxes
        .props[propTitle.split('.')[1]].range;
    }
  };

  const getAudioMappingOptions = (layer, propTitle) => {
    if (propTitle === 'color') {
      const mm = getDefaultRange(layer, 'color');
      if (config.audio.colorSeparateRGBA) {
        const r = new AudioMappingOptions();
        r.min_out = mm[0];
        r.max_out = mm[1];
        const g = new AudioMappingOptions();
        g.min_out = mm[0];
        g.max_out = mm[1];
        const b = new AudioMappingOptions();
        b.min_out = mm[0];
        b.max_out = mm[1];
        const a = new AudioMappingOptions();
        a.min_out = mm[0];
        a.max_out = mm[1];
        return [{r}, {g}, {b}, {a}];
      } else {
        const o = new AudioMappingOptions();
        o.min_out = {r: mm[0], g: mm[0], b: mm[0], a: mm[0]};
        o.max_out = {r: mm[1], g: mm[1], b: mm[1], a: mm[1]};
        return o;
      }
    } else {
      const o = new AudioMappingOptions();
      const mm = getDefaultRange(layer, propTitle);
      o.min_out = mm[0];
      o.max_out = mm[1];
      return o;
    }
  };

  // potentially recursive
  const addAudioMapping = (layer, propTitle, options = false) => {
    if (!options) {
      options = getAudioMappingOptions(layer, propTitle);
      if (Array.isArray(options)) {
        let isGood = true;
        options.forEach((o) => {
          const subPropKey = Object.keys(o)[0];
          const subPropTitle = `${propTitle}.${subPropKey}`;
          isGood = addAudioMapping(layer, subPropTitle, o[subPropKey]) ? isGood : false;
        });
        return isGood;
      }
    }
    if (!mapping.hasOwnProperty(layer.id())) {
      mapping[layer.id()] = {};
    }
    if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
      mapping[layer.id()][propTitle] = options;
      return true;
    } else {
      // already there
      return false;
    }
  };

  const removeAudioMapping = (layer = false, propTitle = false) => {
    if (!layer && !propTitle) {
      Object.keys(mapping).forEach((layerID) => {
        Object.keys(mapping[layerID]).forEach((propTitle) => {
          delete mapping[layerID][propTitle];
        });
        delete mapping[layerID];
      });
      return true;
    }
    if (!mapping.hasOwnProperty(layer.id())) {
      // no layer
      return false;
    }
    if (!mapping[layer.id()].hasOwnProperty(propTitle)) {
      // no propTitle; perhaps a separated color?
      if (config.audio.colorSeparateRGBA && propTitle === 'color') {
        // accumulate the results, mirroring addAudioMapping above
        let isGood = true;
        isGood = removeAudioMapping(layer, 'color.r') && isGood;
        isGood = removeAudioMapping(layer, 'color.g') && isGood;
        isGood = removeAudioMapping(layer, 'color.b') && isGood;
        isGood = removeAudioMapping(layer, 'color.a') && isGood;
        return isGood;
      }
      return false;
    }
    delete mapping[layer.id()][propTitle];
    if (Object.keys(mapping[layer.id()]).length === 0) {
      delete mapping[layer.id()];
    }
    return true;
  };

  const createAudioOptions = (layer, propTitle, container) => {
    const mappingOptions = mapping[layer.id()][propTitle];
    let hasLetterDelay =
      config.layer.letterDelayProps.indexOf(propTitle.split('.')[0]) >= 0 &&
      tp.isSequenced([layer.id(), ...propTitle.split('.')]);
    const panel = tp.getPanel();
    if (!areMutationsObserved) {
      mutationObserver.observe(panel, { childList: true, subtree: true });
      areMutationsObserved = true;
    }
    const audioOptions = document.createElement('div');
    audioOptions.setAttribute('data-propTitle', propTitle);
    audioOptions.classList.add('audioOptions');
    audioOptions.classList.add('audioOptionsTypeDefault');
    audioOptions.classList.add(toCssClass(`audioOptions${propTitle}`));
    audioOptions.style.position = 'relative';
    audioOptions.style.width = '100%';
    if (propTitle.split('.')[0] === 'color' && propTitle.split('.').length > 1) {
      audioOptions.classList.add(toCssClass('audioOptionscolor'));
      switch (propTitle.split('.')[1]) {
        case 'r': {
          audioOptions.style.background = 'rgba(255,0,0,0.2)'; // AUDIO COLOR
          break;
        }
        case 'g': {
          audioOptions.style.background = 'rgba(0,255,0,0.2)'; // AUDIO COLOR
          break;
        }
        case 'b': {
          audioOptions.style.background = 'rgba(0,0,255,0.2)'; // AUDIO COLOR
          break;
        }
        case 'a': {
          audioOptions.style.background = 'rgba(255,255,255,0.2)'; // AUDIO COLOR
          break;
        }
      }
    } else {
      audioOptions.style.background = 'rgba(163, 163, 163, 0.2)'; // AUDIO COLOR
    }
    audioOptions.style.order = parseInt(container.style.order) + 1;

    const updateMappingOptions = () => {
      mappingOptions.min_out = parseFloat(panel.querySelector(toCssClass(`audio_min${propTitle}`, '#')).value);
      mappingOptions.max_out = parseFloat(panel.querySelector(toCssClass(`audio_max${propTitle}`, '#')).value);
      mappingOptions.sync =
        panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value;
      const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`, '#')).value;
      mappingOptions.smoothing = parseFloat(s);
      if (hasLetterDelay) {
        const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`, '#'));
        // input values are always strings, so parse
        mappingOptions.letterDelay = parseInt(ld.value, 10);
      }
      mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`, '#')).value;
    };

    const source_Dom = document.createElement('select');
    source_Dom.id = toCssClass(`audio_source${propTitle}`);
    const source_mic = document.createElement('option');
    source_mic.value = 'microphone';
    source_mic.innerHTML = 'microphone';
    source_Dom.append(source_mic);
    FS.readdir(config.fs.idbfsAudioDir)
      .forEach((file) => {
        if (file[0] !== '.') {
          const source_file = document.createElement('option');
          source_file.value = file;
          source_file.innerHTML = file;
          source_Dom.append(source_file);
        }
      });
    audioOptions.append(source_Dom);

    const min_max_Dom = document.createElement('div');
    min_max_Dom.classList.add('audio_min_max');
    const min_Cont = document.createElement('div');
    min_Cont.classList.add('audio_min_Cont');
    const min_inputDom_label = document.createElement('label');
    // NOTE: the DOM property is htmlFor (label.for does not exist),
    // and it must reference the input's id
    min_inputDom_label.htmlFor = toCssClass(`audio_min${propTitle}`);
    min_inputDom_label.innerHTML = 'min ';
    const min_inputDom = document.createElement('input');
    min_inputDom.type = 'number';
    min_inputDom.name = toCssClass(`audio_min${propTitle}`);
    min_inputDom.id = toCssClass(`audio_min${propTitle}`);
    min_inputDom.value = `${mappingOptions.min_out}`;
    const max_Cont = document.createElement('div');
    max_Cont.classList.add('audio_max_Cont');
    const max_inputDom_label = document.createElement('label');
    max_inputDom_label.htmlFor = toCssClass(`audio_max${propTitle}`);
    max_inputDom_label.innerHTML = 'max ';
    const max_inputDom = document.createElement('input');
    max_inputDom.type = 'number';
    max_inputDom.name = toCssClass(`audio_max${propTitle}`);
    max_inputDom.id = toCssClass(`audio_max${propTitle}`);
    max_inputDom.value = `${mappingOptions.max_out}`;
    const smoothing_inputDom_label = document.createElement('label');
    smoothing_inputDom_label.htmlFor = toCssClass(`audio_smoothing${propTitle}`);
    smoothing_inputDom_label.innerHTML = 'audio smoothing';
    const smoothing_inputDom = document.createElement('input');
    smoothing_inputDom.type = 'number';
    smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`);
    smoothing_inputDom.id = toCssClass(`audio_smoothing${propTitle}`);
    smoothing_inputDom.value = mappingOptions.smoothing;
    smoothing_inputDom.min = 0;
    smoothing_inputDom.max = 1;
    smoothing_inputDom.step = 0.01;
    min_max_Dom.append(smoothing_inputDom_label);
    min_max_Dom.append(smoothing_inputDom);
    min_max_Dom.append(min_Cont);
    min_Cont.append(min_inputDom_label);
    min_Cont.append(min_inputDom);
    min_max_Dom.append(max_Cont);
    max_Cont.append(max_inputDom_label);
    max_Cont.append(max_inputDom);
    if (hasLetterDelay) {
      const letterDelayCont = document.createElement('div');
      const letterDelay_inputDom_label = document.createElement('label');
      letterDelay_inputDom_label.htmlFor = toCssClass(`audio_letterDelay${propTitle}`);
      letterDelay_inputDom_label.innerHTML = 'letterDelay';
      const letterDelay_inputDom = document.createElement('input');
      letterDelay_inputDom.type = 'number';
      letterDelay_inputDom.name = toCssClass(`audio_letterDelay${propTitle}`);
      letterDelay_inputDom.id = toCssClass(`audio_letterDelay${propTitle}`);
      letterDelay_inputDom.value = 0;
      letterDelay_inputDom.min = 0;
      letterDelay_inputDom.step = 1;
      letterDelayCont.append(letterDelay_inputDom_label);
      letterDelayCont.append(letterDelay_inputDom);
      min_max_Dom.append(letterDelayCont);
      letterDelay_inputDom.addEventListener('change', updateMappingOptions);
    }
    audioOptions.append(min_max_Dom);

    const sync_Dom = document.createElement('div');
    sync_Dom.classList.add('sync_Dom');
    const sync_titleDom = document.createElement('p');
    const sync_titleDom_Cont = document.createElement('div');
    sync_titleDom_Cont.classList.add('sync_titleDom_Cont');
    sync_titleDom.innerHTML = 'sync with:';
    sync_Dom.append(sync_titleDom);

    audio_sync_options.forEach((o) => {
      const sync_inputDom_Cont = document.createElement('div');
      sync_inputDom_Cont.classList.add('sync_inputDom_Cont');
      const sync_inputDom_label = document.createElement('label');
      sync_inputDom_label.htmlFor = toCssClass(`audio_sync${propTitle}${o}`);
      sync_inputDom_label.innerHTML = o;
      const sync_inputDom = document.createElement('input');
      sync_inputDom.type = 'radio';
      sync_inputDom.name = toCssClass(`audio_sync${propTitle}`);
      sync_inputDom.id = toCssClass(`audio_sync${propTitle}${o}`);
      sync_inputDom.value = o;
      // check the option that matches the current mapping
      if (o === mappingOptions.sync) {
        sync_inputDom.checked = true;
      }
      sync_inputDom_Cont.append(sync_inputDom_label);
      sync_inputDom_Cont.append(sync_inputDom);
      sync_titleDom_Cont.append(sync_inputDom_Cont);
      sync_Dom.append(sync_titleDom_Cont);
      // sync_Dom.append(sync_inputDom);
      sync_inputDom.addEventListener('change', updateMappingOptions);
    });

    audioOptions.append(sync_Dom);

    const fft_Dom = document.createElement('div');
    const fft_imgDom = document.createElement('canvas');
    const fft_selectDom = document.createElement('div');
    fft_Dom.style.position = 'relative';
    fft_Dom.style.top = '0px';
    fft_Dom.style.left = '0px';
    fft_imgDom.classList.add('audio_fft');
    fft_imgDom.classList.add(toCssClass(`audio_fft${propTitle}`));
    fft_imgDom.style.width = '100%';
    fft_imgDom.style.userDrag = 'none';
    fft_imgDom.style.userSelect = 'none';
    fft_imgDom.style.pointerEvents = 'none';
    fft_imgDom.setAttribute('width', config.audio.fftBandsUsed);
    fft_imgDom.setAttribute('height', config.audio.fftHeight);
    fft_selectDom.style.position = 'absolute';
    fft_selectDom.style.top = '0px';
    fft_selectDom.style.left = '0px';
    fft_selectDom.style.width = '100%';
    fft_selectDom.style.height = '100%';
    fft_selectDom.style.pointerEvents = 'none';
    fft_selectDom.style.backgroundColor = 'rgba(28, 186, 148,0.4)'; // AUDIO COLOR
    fft_selectDom.style.border = 'none'; // AUDIO COLOR

    fft_Dom.append(fft_imgDom);
    fft_Dom.append(fft_selectDom);
    audioOptions.append(fft_Dom);
    source_Dom.addEventListener('change', updateMappingOptions);
    min_inputDom.addEventListener('change', updateMappingOptions);
    max_inputDom.addEventListener('change', updateMappingOptions);
    smoothing_inputDom.addEventListener('change', updateMappingOptions);
    let setFrequency = false;
    let freq_down = 0;
    let freq_up = 0;
    let xy_start;
    fft_Dom.addEventListener('mousedown', (e) => {
      setFrequency = true;
      const bb = fft_imgDom.getBoundingClientRect();
      const x = e.clientX - bb.x;
      const y = e.clientY - bb.y;
      xy_start = {x, y};
    });
    fft_Dom.addEventListener('mousemove', (e) => {
      if (setFrequency) {
        const bb = fft_imgDom.getBoundingClientRect();
        const x_factor = config.audio.fftBandsUsed / bb.width;
        const y_factor = 256.0 / bb.height;
        const x = e.clientX - bb.x;
        const y = e.clientY - bb.y;
        let min_x, max_x, min_y, max_y;
        if (x > xy_start.x) {
          min_x = xy_start.x;
          max_x = x;
        } else {
          min_x = x;
          max_x = xy_start.x;
        }
        if (y > xy_start.y) {
          min_y = xy_start.y;
          max_y = y;
        } else {
          min_y = y;
          max_y = xy_start.y;
        }
        mappingOptions.min_freq = min_x * x_factor;
        mappingOptions.max_freq = max_x * x_factor;
        mappingOptions.min_in = (bb.height - max_y) * y_factor;
        mappingOptions.max_in = (bb.height - min_y) * y_factor;
      }
    });
    const unset = (e) => {
      setFrequency = false;
    };
    const unsetFromOutside = (e) => {
      document.removeEventListener('mouseup', unsetFromOutside);
      unset(e);
    };
    fft_Dom.addEventListener('mouseup', unset);
    fft_Dom.addEventListener('mouseleave', (e) => {
      if (setFrequency) {
        document.addEventListener('mouseup', unsetFromOutside);
      }
    });
    fft_Dom.addEventListener('mouseenter', (e) => {
      if (setFrequency) {
        document.removeEventListener('mouseup', unsetFromOutside);
      }
    });

    container.after(audioOptions);

    //canvass.push(fft_imgDom);
    canvasCombos[propTitle] = [fft_imgDom, fft_imgDom.getContext("2d"), layer.id()];
    updateMappingOptions();
    return audioOptions;
  };

  const addAudioOptions = (layer, propTitle) => {
    if (!started) {
      // audioOptions need a started init
      init();
    }
    const panelPropTitle = tp.getPanelPropTitle(propTitle);
    if (panelPropTitle === null) {
      console.log('Audio::addAudioOptions::error', `cannot find panelPropTitle "${propTitle}"`);
      return;
    }
    if (tp.getPanel().querySelector(toCssClass(`audioOptions${propTitle}`, '.')) !== null) {
      //console.log('Audio::addAudioOptions::error', `audioOptions already exist for "${propTitle}"`);
      return;
    }
    const container = panelPropTitle.parentNode.parentNode;

    if (propTitle === 'color' && config.audio.colorSeparateRGBA) {
      // NOTE: attach reversed, because container.after(audioOptions)
      createAudioOptions(layer, `${propTitle}.a`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
      createAudioOptions(layer, `${propTitle}.b`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
      createAudioOptions(layer, `${propTitle}.g`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
      createAudioOptions(layer, `${propTitle}.r`, container).classList.add(toCssClass(`audioOptions${propTitle}`));
    } else {
      createAudioOptions(layer, propTitle, container);
    }

    const audioButton = container.querySelector('.audioButton');
    audioButton.classList.add('active');
  };

  const removeAudioOptions = (layer = false, propTitle = false) => {
    const panel = tp.getPanel();
    if (!layer && !propTitle) {
      const allAudioOptions = panel.querySelectorAll('.audioOptions');
      if (allAudioOptions !== null) {
        for (let i = 0; i < allAudioOptions.length; i++) {
          allAudioOptions[i].remove();
        }
      }
      canvasCombos = {};
      panel.querySelectorAll('.audioButton').forEach((button) => {
        button.classList.remove('active');
      });
    } else {
      // only selected layers have options,
      // otherwise the ui is not there
      if (layer.isSelected()) {
        if (config.audio.colorSeparateRGBA && propTitle === 'color') {
          delete canvasCombos['color.r'];
          delete canvasCombos['color.g'];
          delete canvasCombos['color.b'];
          delete canvasCombos['color.a'];
        } else {
          delete canvasCombos[propTitle];
        }
        const audioOptions = panel.querySelectorAll(toCssClass(`audioOptions${propTitle}`, '.'));
        if (audioOptions.length > 0) {
          audioOptions.forEach((e) => { e.remove(); });
        }
        const audioButton = panel.querySelector(toCssClass(`audioButton${propTitle}`, '.'));
        if (audioButton !== null) {
          audioButton.classList.remove('active');
        }
      }
    }
  };

  const addAudioButton = (layer, propTitle, isActive) => {
    const panel = tp.getPanel();
    const panelPropTitle = tp.getPanelPropTitle(propTitle);
    if (panelPropTitle !== null) {
      //const container = tp.getPanelPropContainer(panelPropTitle);
      const container = panelPropTitle.parentNode.parentNode;

      if (container === null) {
        console.log("Audio::addAudioButton",
          `impossible! cannot find panelPropContainer for ${propTitle}`);
      } else if (container.querySelector('.audioButton') !== null) {
        // this is super verbose, let's not log by default
        //console.log("Audio::addAudioButton",
        //`already added an audio button for ${propTitle}`);
      } else {
        const button = document.createElement('div');
        button.classList.add('audioButton');
        button.classList.add(toCssClass(`audioButton${propTitle}`));
        button.innerHTML = `<img src="/web/assets/sound.svg" alt="audio" />`;
        container.append(button);
        button.addEventListener('click', () => {
          if (!started) {
            init();
          }
          if (!isMapped(layer, propTitle)) {
            addAudioMapping(layer, propTitle);
            addAudioOptions(layer, propTitle);
            layer.updateValuesViaTheatre(false);
          } else {
            removeAudioMapping(layer, propTitle);
            removeAudioOptions(layer, propTitle);
            layer.updateValuesViaTheatre(true);
          }
        });
        if (isActive) {
          addAudioMapping(layer, propTitle);
          addAudioOptions(layer, propTitle);
        }
      }
    } else {
      console.log("Audio::addAudioButton",
        `cannot find panelPropTitle for ${propTitle}`);
    }
  };

  const injectPanel = (layer) => {
    const flatValues = clone(layer.theatreObject.value);
    flattenObject(flatValues, ['color']);
    const props = Object.keys(flatValues);
    props.forEach((propTitle) => {
      if (config.audio.ignoreProps.indexOf(propTitle) < 0) {
        let isActive = false;
        if (mapping.hasOwnProperty(layer.id())) {
          if (mapping[layer.id()].hasOwnProperty(propTitle)) {
            isActive = true;
          }
        }
        addAudioButton(layer, propTitle, isActive);
      }
    });
  };
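  // Hedged example of the flattening above: flattenObject turns nested values
  // into dotted keys, e.g. { fontVariationAxes: { weight: 400 } } becomes
  // { 'fontVariationAxes.weight': 400 }; keys in the second argument (here
  // 'color') are assumed to be kept whole, so color gets a single button.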

  const audioFileStuff = {};
  const readAudioFiles = () => {
    FS.readdir(config.fs.idbfsAudioDir).forEach((file) => {
      if (file.indexOf('.') !== 0 && !audioFileStuff.hasOwnProperty(file)) {
        const audioElement = document.createElement('audio');
        audioElement.classList.add('invisible');
        audioElement.classList.add('audio_file');
        audioElement.classList.add(toCssClass(`audio_file${file}`));
        document.querySelector('body').append(audioElement);

        const arr = FS.readFile(`${config.fs.idbfsAudioDir}/${file}`);
        let type = 'audio/wav';
        const filesplit = file.split('.');
        const extension = filesplit[filesplit.length - 1];
        if (extension === 'wav') {
          type = 'audio/wav';
        } else if (extension === 'mp3') {
          type = 'audio/mpeg';
        } else if (extension === 'ogg') {
          type = 'audio/ogg';
        }

        const src = URL.createObjectURL(
          new Blob([arr], {
            type
          })
        );

        audioElement.src = src;

        const source = audioCtx.createMediaElementSource(audioElement);
        source.connect(audioCtx.destination);
        const analyser = audioCtx.createAnalyser();
        analyser.fftSize = config.audio.fftBandsAnalysed;
        const bufferLength = analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);

        source.connect(analyser);

        // NOTE: playback starts automatically here
        // (see the note at the top of this file)
        audioElement.play();

        audioFileStuff[file] = {
          dataArray,
          analyser,
          audioElement,
        };
      }
    });
  };
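  // Per file, the graph built above is:
  //   MediaElementSource -> destination (audible playback)
  //   MediaElementSource -> AnalyserNode (FFT data for the visualisation)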

  const init = () => {
    if (!started) {
      started = true;
      if (audioCtx !== false && audioCtx.state === 'suspended') {
        audioCtx.resume();
        return;
      }
      heading.textContent = "Voice-change-O-matic";
      //document.body.removeEventListener("click", init);

      // Older browsers might not implement mediaDevices at all, so we set an empty object first
      if (navigator.mediaDevices === undefined) {
        navigator.mediaDevices = {};
      }

      // Some browsers partially implement mediaDevices. We can't assign an object
      // with getUserMedia as it would overwrite existing properties.
      // Add the getUserMedia property if it's missing.
      if (navigator.mediaDevices.getUserMedia === undefined) {
        navigator.mediaDevices.getUserMedia = function(constraints) {
          // First get ahold of the legacy getUserMedia, if present
          const getUserMedia =
            navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia ||
            navigator.msGetUserMedia;

          // Some browsers just don't implement it - return a rejected promise with an error
          // to keep a consistent interface
          if (!getUserMedia) {
            return Promise.reject(
              new Error("getUserMedia is not implemented in this browser")
            );
          }

          // Otherwise, wrap the call to the old navigator.getUserMedia with a Promise
          return new Promise(function(resolve, reject) {
            getUserMedia.call(navigator, constraints, resolve, reject);
          });
        };
      }

      // Set up forked web audio context, for multiple browsers
      // window. is needed otherwise Safari explodes
      audioCtx = new (window.AudioContext || window.webkitAudioContext)();
      const voiceSelect = audioDom.querySelector("#voice");
      let source;
      let stream;

      // Grab the mute button to use below
      const mute = audioDom.querySelector(".mute");

      // Set up the different audio nodes we will use for the app
      const analyser = audioCtx.createAnalyser();
      analyser.minDecibels = -90;
      analyser.maxDecibels = -10;
      analyser.smoothingTimeConstant = 0.85;
      window.analyser = analyser;

      readAudioFiles();

      //const distortion = audioCtx.createWaveShaper();
      //const gainNode = audioCtx.createGain();
      //const biquadFilter = audioCtx.createBiquadFilter();
      //const convolver = audioCtx.createConvolver();

      //const echoDelay = createEchoDelayEffect(audioCtx);

      // Distortion curve for the waveshaper, thanks to Kevin Ennis
      // http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
      const makeDistortionCurve = (amount) => {
        let k = typeof amount === "number" ? amount : 50,
          n_samples = 44100,
          curve = new Float32Array(n_samples),
          deg = Math.PI / 180,
          i = 0,
          x;
        for (; i < n_samples; ++i) {
          x = (i * 2) / n_samples - 1;
          curve[i] = ((3 + k) * x * 20 * deg) / (Math.PI + k * Math.abs(x));
        }
        return curve;
      };
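      // The curve above is an odd, soft-clipping shape, intended to be used
      // as e.g. distortion.curve = makeDistortionCurve(400); (see voiceChange
      // below, currently inert while the distortion node is commented out).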

      // Grab audio track via XHR for convolver node
      let soundSource;
      const ajaxRequest = new XMLHttpRequest();

      ajaxRequest.open(
        "GET",
        "https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg",
        true
      );

      ajaxRequest.responseType = "arraybuffer";

      ajaxRequest.onload = function() {
        const audioData = ajaxRequest.response;

        audioCtx.decodeAudioData(
          audioData,
          function(buffer) {
            // NOTE: the decoded buffer is currently unused; the buffer source
            // is created but never connected or started
            soundSource = audioCtx.createBufferSource();
          },
          function(e) {
            console.log("Audio::audioCtx.decodeAudioData", "Error with decoding audio data" + e.err);
          }
        );
      };

      ajaxRequest.send();

      // Set up canvas context for visualizer
      const canvas = audioDom.querySelector(".visualizer");
      const canvasCtx = canvas.getContext("2d");

      const intendedWidth = audioDom.clientWidth;
      canvas.setAttribute("width", config.audio.fftBandsUsed);
      const visualSelect = audioDom.querySelector("#visual");
      let drawVisual;

      // Main block for doing the audio recording
      if (navigator.mediaDevices.getUserMedia) {
        console.log("getUserMedia supported.");
        const constraints = {
          audio: true
        };
        navigator.mediaDevices
          .getUserMedia(constraints)
          .then(function(stream) {
            source = audioCtx.createMediaStreamSource(stream);
            source.connect(analyser);

            visualize();
            voiceChange();
          })
          .catch(function(err) {
            console.log("The following gUM error occurred: " + err);
          });
      } else {
        console.log("getUserMedia not supported on your browser!");
      }

      const visualize = () => {
        const WIDTH = canvas.width;
        const HEIGHT = canvas.height;

        const visualSetting = visualSelect.value;

        if (visualSetting === "sinewave") {
          //analyser.fftSize = 2048;
          //const bufferLength = analyser.fftSize;

          //// We can use Float32Array instead of Uint8Array if we want higher precision
          //// const dataArray = new Float32Array(bufferLength);
          //const dataArray = new Uint8Array(bufferLength);

          //canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);

          //const draw = function() {
          //drawVisual = requestAnimationFrame(draw);

          //analyser.getByteTimeDomainData(dataArray);

          //canvasCtx.fillStyle = "rgb(200, 200, 200)";
          //canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);

          //canvasCtx.lineWidth = 2;
          //canvasCtx.strokeStyle = "rgb(0, 0, 0)";

          //canvasCtx.beginPath();

          //const sliceWidth = (WIDTH * 1.0) / bufferLength;
          //let x = 0;

          //for (let i = 0; i < bufferLength; i++) {
          //let v = dataArray[i] / 128.0;
          //let y = (v * HEIGHT) / 2;

          //if (i === 0) {
          //canvasCtx.moveTo(x, y);
          //} else {
          //canvasCtx.lineTo(x, y);
          //}

          //x += sliceWidth;
          //}

          //canvasCtx.lineTo(canvas.width, canvas.height / 2);
          //canvasCtx.stroke();
          //};

          //draw();
        } else if (visualSetting == "frequencybars") {
          analyser.fftSize = config.audio.fftBandsAnalysed;
          const w = config.audio.fftBandsUsed;
          const h = config.audio.fftHeight;
          const verticalFactor = h / 256.0;
          const bufferLengthAlt = analyser.frequencyBinCount / 2;

          // See comment above for Float32Array()
          const dataArrayAlt = new Uint8Array(bufferLengthAlt);

          let canvasKeys = Object.keys(canvasCombos);

          for (let i = 0; i < canvasKeys.length; i++) {
            canvasCombos[canvasKeys[i]][1].clearRect(0, 0, w, h);
          }

          let frameCount = 0;
          const drawAlt = function() {
            canvasKeys = Object.keys(canvasCombos);
            drawVisual = requestAnimationFrame(drawAlt);

            //analyser.getByteFrequencyData(dataArrayAlt);
            //Object.keys(audioFileStuff).forEach((afs) => {
            //afs.analyser.ByteFrequencyData(afs.dataArray);
            //});
            // FIXME: hardcoded file path (see the note at the top of this
            // file); only this one file drives the FFT visualisation, and we
            // guard against it being absent.
            const audioFile = audioFileStuff['hito_steyerl_about_suicide_cameras.ogg'];
            if (audioFile) {
              audioFile.analyser.getByteFrequencyData(dataArrayAlt);
            }

            for (let i = 0; i < canvasKeys.length; i++) {
              canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
              canvasCombos[canvasKeys[i]][1].fillRect(0, 0, w, h);
              const layerID = canvasCombos[canvasKeys[i]][2];
              const m = mapping[layerID][canvasKeys[i]];
              if (m.sync === 'volume') {
                const sx = m.min_freq;
                const sw = m.max_freq - m.min_freq;
                const sy = h - (m.max_in * verticalFactor);
                const sh = (m.max_in - m.min_in) * verticalFactor;
                canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
                canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
              } else if (m.sync === 'pitch') {
                const sx = m.min_freq;
                const sw = m.max_freq - m.min_freq;
                const sy = 0;
                const sh = h;
                canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
                canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
              }
            }

            const barWidth = 1; //(w / bufferLengthAlt) * 2.5;
            let barHeight;
            let x = 0;

            let max_i = 0;
            let max_ri = 0;
            let total_v = 0;
            let max_v = 0;
            // reset the per-mapping accumulators before scanning the bins
            for (let k = 0; k < canvasKeys.length; k++) {
              const layerID = canvasCombos[canvasKeys[k]][2];
              const m = mapping[layerID][canvasKeys[k]];
              m.max_v = max_v;
              m.max_i = max_i;
              m.max_ri = max_ri;
              m.total_v = total_v;
            }

            for (let i = 0; i < w; i++) {
              barHeight = dataArrayAlt[i];
              total_v += barHeight;

              if (barHeight > max_v) {
                max_v = barHeight;
                max_i = i;
                // track the weighted index of the loudest bin, mirroring the
                // per-mapping max_ri bookkeeping below
                max_ri = barHeight * i;
              }
              for (let k = 0; k < canvasKeys.length; k++) {
                const layerID = canvasCombos[canvasKeys[k]][2];
                const m = mapping[layerID][canvasKeys[k]];
                let fillStyle = "rgb(200,200,200)"; // AUDIO COLOR
                if (m.min_freq <= i && m.max_freq >= i) {
                  m.total_v += barHeight;
                  if (barHeight > m.max_v) {
                    m.max_v = barHeight;
                    m.max_i = i;
                    m.max_ri = barHeight * i;
                  }
                  fillStyle = "rgb(255,255,255)"; // AUDIO COLOR
                }
                canvasCombos[canvasKeys[k]][1].fillStyle = fillStyle;
                canvasCombos[canvasKeys[k]][1].fillRect(
                  x,
                  h - (barHeight * verticalFactor),
                  barWidth,
                  (barHeight * verticalFactor)
                );
              }

              x += barWidth;
            }
            max_ri /= total_v;

            for (let k = 0; k < canvasKeys.length; k++) {
              const layerID = canvasCombos[canvasKeys[k]][2];
              const m = mapping[layerID][canvasKeys[k]];
              m.max_ri /= m.total_v;
              if (m.sync === 'volume') {
                const sx = m.min_freq;
                const sw = m.max_freq - m.min_freq;
                const sy = h - (m.max_in * verticalFactor);
                const sh = (m.max_in - m.min_in) * verticalFactor;
                canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
                canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
                canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
              } else if (m.sync === 'pitch') {
                const sx = m.min_freq;
                const sw = m.max_freq - m.min_freq;
                const sy = 0;
                const sh = h;
                canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
                canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
                canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
              }
            }
            const propsToSet = [];
            getLayers().forEach((layer) => {
              if (mapping.hasOwnProperty(layer.id())) {
                Object.keys(mapping[layer.id()]).forEach((propTitle) => {
                  const m = mapping[layer.id()][propTitle];
                  switch (m.sync) {
                    case 'volume': {
                      let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
                      m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
                      propsToSet.push({
                        layer,
                        id: layer.id(),
                        title: propTitle,
                        value: m.value,
                      });
                      break;
                    }
                    case 'pitch': {
                      const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : max_i;
                      const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : max_ri;
                      const fi = config.audio.pitchCombineFrequencies ? ri : mi;
                      let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
                      m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
                      propsToSet.push({
                        layer,
                        id: layer.id(),
                        title: propTitle,
                        value: m.value,
                      });
                      break;
                    }
                    default:
                      break;
                  }
                  if (m.letterDelay) {
                    const pt = `letterDelays.${propTitle}`;
                    propsToSet.push({
                      layer,
                      id: layer.id(),
                      title: pt,
                      value: m.letterDelay,
                    });
                  }
                });
              }
            });
            if (propsToSet.length > 0 && frameCount % 2 === 0) {
              // this is when to monitor live
              if (!record.isRecording()) {
                //if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
                let values = {};
                propsToSet.forEach((p) => {
                  const newValues = {
                    [p.title]: p.value
                  };
                  if (!values.hasOwnProperty(p.id)) {
                    values[p.id] = {};
                  }
                  values[p.id] = {
                    ...values[p.id],
                    ...newValues,
                  };
                });
                Object.keys(values).forEach((layerID) => {
                  deFlattenObject(values[layerID]);
                  record.liveUpdater.immediateUpdate(getLayer(layerID), values[layerID]);
                });
                //}
              } else {
                const position = tp.sheet.sequence.position;
                propsToSet.forEach((p) => {
                  const title = tp
                    .getPanelPropTitle(p.title);

                  if (title !== null) {
                    const inputElement = title
                      .parentNode.parentNode
                      .querySelector('input.recording');

                    if (inputElement !== null) {
                      inputElement.value = p.value;
                      inputElement.dispatchEvent(new Event('change'));
                    }
                  }
                  record.addValue(p.id, p.title, p.value, position);
                  if (p.title.indexOf('color') === 0) {
                    if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
                      record.liveUpdate(p.layer, position);
                    }
                  } else {
                    record.liveUpdate(p.layer, position);
                  }
                });
              }
            }
            //const panel = tp.getPanel();
            //const fft_images = panel.querySelectorAll('.audio_fft');
            //if (fft_images !== null) {
            //const src = canvas.toDataURL();
            //fft_images.forEach((e) => {
            //e.src = src;
            //});
            //}
            frameCount++;
          };
          drawAlt();
        } else if (visualSetting == "off") {
          canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
          canvasCtx.fillStyle = "red";
          canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
        }
      };

      const voiceChange = () => {
        // NOTE: the distortion/gain/biquad/convolver nodes and echoDelay are
        // commented out above, so bail out instead of throwing a
        // ReferenceError when this is called.
        if (typeof distortion === 'undefined') {
          return;
        }
        distortion.oversample = "4x";
        biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0);

        const voiceSetting = voiceSelect.value;

        if (echoDelay.isApplied()) {
          echoDelay.discard();
        }

        // When convolver is selected it is connected back into the audio path
        if (voiceSetting == "convolver") {
          biquadFilter.disconnect(0);
          biquadFilter.connect(convolver);
        } else {
          biquadFilter.disconnect(0);
          biquadFilter.connect(gainNode);

          if (voiceSetting == "distortion") {
            distortion.curve = makeDistortionCurve(400);
          } else if (voiceSetting == "biquad") {
            biquadFilter.type = "lowshelf";
            biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0);
            biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0);
          } else if (voiceSetting == "delay") {
            echoDelay.apply();
          } else if (voiceSetting == "off") {
            console.log("Voice settings turned off");
          }
        }
      };

      function createEchoDelayEffect(audioContext) {
        const delay = audioContext.createDelay(1);
        const dryNode = audioContext.createGain();
        const wetNode = audioContext.createGain();
        const mixer = audioContext.createGain();
        const filter = audioContext.createBiquadFilter();

        delay.delayTime.value = 0.75;
        dryNode.gain.value = 1;
        wetNode.gain.value = 0;
        filter.frequency.value = 1100;
        filter.type = "highpass";

        return {
          apply: function() {
            wetNode.gain.setValueAtTime(0.75, audioContext.currentTime);
          },
          discard: function() {
            wetNode.gain.setValueAtTime(0, audioContext.currentTime);
          },
          isApplied: function() {
            return wetNode.gain.value > 0;
          },
          placeBetween: function(inputNode, outputNode) {
            inputNode.connect(delay);
            delay.connect(wetNode);
            wetNode.connect(filter);
            filter.connect(delay);

            inputNode.connect(dryNode);
            dryNode.connect(mixer);
            wetNode.connect(mixer);
            mixer.connect(outputNode);
          },
        };
      }
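      // Usage sketch for the effect above (input/output are hypothetical
      // nodes on the same context):
      //   const echoDelay = createEchoDelayEffect(audioCtx);
      //   echoDelay.placeBetween(input, output);
      //   echoDelay.apply();   // wet gain up: echo audible
      //   echoDelay.discard(); // wet gain to 0: echo off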

      // Event listeners to change visualize and voice settings
      visualSelect.onchange = function() {
        window.cancelAnimationFrame(drawVisual);
        visualize();
      };

      voiceSelect.onchange = function() {
        voiceChange();
      };

      mute.onclick = voiceMute;

      let previousGain;

      function voiceMute() {
        // NOTE: gainNode is commented out above (see voiceChange), so muting
        // is currently a no-op; bail out instead of throwing.
        if (typeof gainNode === 'undefined') {
          return;
        }
        if (mute.id === "") {
          previousGain = gainNode.gain.value;
          gainNode.gain.value = 0;
          mute.id = "activated";
          mute.innerHTML = "Unmute";
        } else {
          gainNode.gain.value = previousGain;
          mute.id = "";
          mute.innerHTML = "Mute";
        }
      }
    }
  };

  const deinit = () => {
    if (started) {
      if (audioCtx !== false) {
        audioCtx.suspend();
      }
      started = false;
    }
  };

  this.getContext = () => {
    return audioCtx;
  };
  this.init = init;
  this.deinit = deinit;
  this.injectPanel = injectPanel;
  this.mapping = mapping;
  this.addAudioMapping = addAudioMapping;
  this.removeAudioMapping = removeAudioMapping;
  this.addAudioOptions = addAudioOptions;
  this.removeAudioOptions = removeAudioOptions;
  this.AudioMappingOptions = AudioMappingOptions;

  // debug
  this.canvasCombos = canvasCombos;
};

export {
  Audio
};
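// Hedged usage sketch (module path and the `tp` theatre wrapper / `record`
// instance are assumptions from the constructor signature above):
//   import { Audio } from './audio.js';
//   const audio = new Audio(tp, record);
//   audio.init();             // builds the AudioContext (call from a click)
//   audio.injectPanel(layer); // adds an audio button per mappable property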