Compare commits: 523eb8c7f8 ... 0ca73bca05

9 commits:

0ca73bca05
748af243fa
16124c755d
e6e705f86f
5860343f70
b46d6bb5d4
6aba91b6ca
64af8d49d1
62f03862d6

9 changed files with 609 additions and 367 deletions
@@ -290,7 +290,9 @@
 <!--<div class="move">move</div>-->
 <!--</div>-->
 <button id="midi_open">midi</button>
+<button id="hide_ui">hide ui</button>
 <button id="exporter_open">export</button>
+<button id="upload_audio">upload audiofile</button>
 <button id="save_project" onclick="window.tp.downloadProject()">save project</button>
 <button id="open_project" onclick="window.tp.uploadProject(true)">open project</button>
 <button id="start_new_project" onclick="window.tp.startNewProject()">start new project</button>

@@ -359,6 +361,12 @@
 <div class="details"><p></p></div>
 </div>
 </div>
+<div id="notice_recording">
+<div class="content">
+<div class="what"><p>recording</p></div>
+<div class="details"><p>please wait</p></div>
+</div>
+</div>
 <!-- MIDI BEGIN -->
 <div id="midiController">
 <div class="midiMessages"></div>
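The two new buttons ship without inline handlers; the initPanels hunk further down in this compare wires them up at runtime. A minimal sketch of that wiring, using the button IDs above and the ui() helper added later in this compare:

// sketch only: mirrors the initPanels wiring shown further down
const hideUi = document.querySelector('#hide_ui');
if (hideUi !== null) {
  hideUi.addEventListener('click', () => {
    ui(false); // hides the Theatre.js studio; pressing 'q' restores it
  });
}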
@@ -223,6 +223,29 @@ body.debug div:not(.centerLine) {
 #notice .content .details p {
   color: black;
 }
+#notice_recording {
+  position: fixed;
+  top: 0px;
+  left: 0px;
+  width: 100%;
+  height: 100%;
+  background-color: rgba(0,0,0,0.1);
+  z-index: 2000;
+  display: none;
+  justify-content: center;
+  align-items: center;
+  font-family: "Tonka";
+  font-variation-settings: 'wght' 500;
+  font-size: 0.8em;
+  pointer-events: none;
+}
+#notice_recording.visible {
+  display: flex;
+}
+#notice_recording.impenetrable {
+  pointer-events: all;
+  background-color: rgba(0,0,0,0.5);
+}
 
 .exporterChild * {
   font-family: "Tonka";

@@ -941,3 +964,6 @@ h4{
   margin-bottom: -3px;
   box-shadow: 0 -2px 4px rgba(0, 0, 0, 0.7), 0 3px 4px rgba(0, 0, 0, 0.7);
 }
+.invisible {
+  display: none;
+}
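The overlay is inert by default (display: none, pointer-events: none); the recording code toggles it purely through these two classes. A minimal sketch of the intended toggling, assuming the #notice_recording markup added above:

// sketch: show the overlay while recording, optionally blocking input
const notice = document.querySelector('#notice_recording');
notice.classList.add('visible');        // display: flex
notice.classList.add('impenetrable');   // darker backdrop, swallows pointer events
// ...later, when recording stops:
notice.classList.remove('visible');
notice.classList.remove('impenetrable');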
@@ -14,12 +14,14 @@ const AudioMappingOptions = function() {
   this.min_freq = 0.0;
   this.max_freq = config.audio.fftBandsUsed;
   this.min_in = 0.0;
-  this.max_in = 255.0 / 2;
+  this.max_in = 255.0;
   this.min_out = 0.0;
   this.max_out = 1.0;
   this.smoothing = config.audio.defaultSmoothing;
   this.sync = 'volume';
+  this.source = 'microphone';
   this.value = 0.0;
+  this.muted = true;
 };
 
 const Audio = function(tp, record) {

@@ -30,7 +32,7 @@ const Audio = function(tp, record) {
   heading.textContent = "CLICK HERE TO START";
 
   // an array of possible sync options.
-  const audio_sync_options = ['volume', 'pitch', 'frequency'];
+  const audio_sync_options = ['volume', 'pitch', 'clarity'];
   // could also be an enum
   // like that
   //const AudioSyncOptions = Object.freeze({

@@ -50,16 +52,20 @@ const Audio = function(tp, record) {
   const mutationObserver = new MutationObserver(function(e) {
     if (e[0].removedNodes) {
       e[0].removedNodes.forEach((n) => {
-        if (n.hasAttribute('data-propTitle')) {
-          const propTitle = n.getAttribute('data-propTitle');
-          delete canvasCombos[propTitle];
-        } else {
-          const subProps = n.querySelectorAll('[data-propTitle]');
-          if (subProps.length > 0) {
-            subProps.forEach((sp) => {
-              const propTitle = sp.getAttribute('data-propTitle');
-              delete canvasCombos[propTitle];
-            });
+        if (typeof n === 'object' &&
+          n.hasOwnProperty('hasAttribute') &&
+          n.hasOwnProperty('querySelectorAll')) {
+          if (n.hasAttribute('data-propTitle')) {
+            const propTitle = n.getAttribute('data-propTitle');
+            delete canvasCombos[propTitle];
+          } else {
+            const subProps = n.querySelectorAll('[data-propTitle]');
+            if (subProps.length > 0) {
+              subProps.forEach((sp) => {
+                const propTitle = sp.getAttribute('data-propTitle');
+                delete canvasCombos[propTitle];
+              });
+            }
           }
         }
       });

@@ -85,24 +91,68 @@ const Audio = function(tp, record) {
     return true;
   };
 
+  const getDefaultRange = (layer, propTitle) => {
+    if (config.audio.defaultRange.hasOwnProperty(propTitle)) {
+      return config.audio.defaultRange[propTitle];
+    } else if (propTitle.indexOf('width') === 0) {
+      return [
+        getArtboard().theatreObject.value.width / 2,
+        getArtboard().theatreObject.value.width
+      ];
+    } else if (propTitle.indexOf('y') === 0) {
+      return [
+        0,
+        getArtboard().theatreObject.value.height / 2
+      ];
+    } else if (propTitle.indexOf('x') === 0) {
+      return [
+        0,
+        getArtboard().theatreObject.value.width / 2
+      ];
+    } else if (propTitle.indexOf('y') === 0) {
+      return [
+        0,
+        getArtboard().theatreObject.value.height / 2
+      ];
+    } else if (propTitle.indexOf('letterDelay') === 0) {
+      return [
+        config.audio.defaultRange.letterDelay[0],
+        config.audio.defaultRange.letterDelay[1]
+      ];
+    } else if (propTitle.split('.')[0] === 'fontVariationAxes') {
+      return layer.props.fontVariationAxes
+        .props[propTitle.split('.')[1]].range;
+    }
+  };
+
   const getAudioMappingOptions = (layer, propTitle) => {
     if (propTitle === 'color') {
+      const mm = getDefaultRange(layer, 'color');
       if (config.audio.colorSeparateRGBA) {
         const r = new AudioMappingOptions();
+        r.min_out = mm[0];
+        r.max_out = mm[1];
         const g = new AudioMappingOptions();
+        g.min_out = mm[0];
+        g.max_out = mm[1];
         const b = new AudioMappingOptions();
+        b.min_out = mm[0];
+        b.max_out = mm[1];
         const a = new AudioMappingOptions();
+        a.min_out = 1.0; // NOTE: dirty, dirty
+        a.max_out = 1.0; // hardcoded value, you
         return [{r}, {g}, {b}, {a}];
       } else {
-        const rgba = new AudioMappingOptions();
-        rgba.min_out = {r: 0, b: 0, g: 0, a: 0};
-        rgba.max_out = {r: 1, b: 1, g: 1, a: 1};
-        return rgba;
+        const o = new AudioMappingOptions();
+        o.min_out = {r: mm[0], b: mm[0], g: mm[0], a: mm[0]};
+        o.max_out = {r: mm[1], b: mm[1], g: mm[1], a: mm[1]};
+        return o;
       }
     } else {
       const o = new AudioMappingOptions();
-      // TODO: get min_out, max_out from layer.props
-      // check for typeof layer.props[propTitle.split('.')[0]] blabla
+      const mm = getDefaultRange(layer, propTitle);
+      o.min_out = mm[0];
+      o.max_out = mm[1];
       return o;
     }
   };
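getDefaultRange only picks the min_out/max_out pair; the per-frame code elsewhere in this compare smooths a mapValue() result into mappingOptions.value. A minimal sketch of that mapping with a stand-in mapValue (the project's own helper is not shown in this diff, so its body here is an assumption):

// sketch: stand-in for the project's mapValue(value, inMin, inMax, outMin, outMax, clamp)
const mapValue = (v, inMin, inMax, outMin, outMax, clamp) => {
  let t = (v - inMin) / (inMax - inMin);
  if (clamp) { t = Math.min(1, Math.max(0, t)); }
  return outMin + t * (outMax - outMin);
};

// e.g. drive fontSize_px (default range [42, 100]) from an analysed byte value of 180
const o = new AudioMappingOptions();        // min_in 0, max_in 255, smoothing 0.7
const [min_out, max_out] = [42, 100];
const target = mapValue(180, o.min_in, o.max_in, min_out, max_out, true);
o.value = o.value * o.smoothing + (1.0 - o.smoothing) * target;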
@ -169,6 +219,11 @@ const Audio = function(tp, record) {
|
||||||
|
|
||||||
const createAudioOptions = (layer, propTitle, container) => {
|
const createAudioOptions = (layer, propTitle, container) => {
|
||||||
const mappingOptions = mapping[layer.id()][propTitle];
|
const mappingOptions = mapping[layer.id()][propTitle];
|
||||||
|
let hasLetterDelay = //false;
|
||||||
|
config
|
||||||
|
.layer.letterDelayProps
|
||||||
|
.indexOf(propTitle.split('.')[0]) >= 0 && propTitle.indexOf('color') < 0;
|
||||||
|
//&& tp.isSequenced([...[layer.id()], ...propTitle.split('.')]);
|
||||||
const panel = tp.getPanel();
|
const panel = tp.getPanel();
|
||||||
if (!areMutationsObserved) {
|
if (!areMutationsObserved) {
|
||||||
mutationObserver.observe(panel, { childList: true, subtree: true });
|
mutationObserver.observe(panel, { childList: true, subtree: true });
|
||||||
|
@ -214,8 +269,46 @@ const Audio = function(tp, record) {
|
||||||
panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value;
|
panel.querySelector(`input[name="${toCssClass('audio_sync' + propTitle)}"]:checked`).value;
|
||||||
const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`,'#')).value;
|
const s = panel.querySelector(toCssClass(`audio_smoothing${propTitle}`,'#')).value;
|
||||||
mappingOptions.smoothing = parseFloat(s);
|
mappingOptions.smoothing = parseFloat(s);
|
||||||
|
if (hasLetterDelay) {
|
||||||
|
const ld = panel.querySelector(toCssClass(`audio_letterDelay${propTitle}`,'#'));
|
||||||
|
mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value);
|
||||||
|
}
|
||||||
|
mappingOptions.source = panel.querySelector(toCssClass(`audio_source${propTitle}`,'#')).value;
|
||||||
|
mappingOptions.muted = panel.querySelector(toCssClass(`audio_mute${propTitle}`,'#')).checked;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const source_Dom = document.createElement('select');
|
||||||
|
source_Dom.id = toCssClass(`audio_source${propTitle}`);
|
||||||
|
const source_mic = document.createElement('option');
|
||||||
|
source_mic.value = 'microphone';
|
||||||
|
source_mic.innerHTML = 'microphone';
|
||||||
|
source_Dom.append(source_mic);
|
||||||
|
FS.readdir(config.fs.idbfsAudioDir)
|
||||||
|
.forEach((file) => {
|
||||||
|
if (file[0] !== '.') {
|
||||||
|
const source_file = document.createElement('option');
|
||||||
|
source_file.value = file;
|
||||||
|
if (file.length > config.audio.maxFilenameLength) {
|
||||||
|
source_file.innerHTML = file.substr(0,6) + '..' + file.substr(file.length - 6, 6);
|
||||||
|
} else {
|
||||||
|
source_file.innerHTML = file;
|
||||||
|
}
|
||||||
|
source_Dom.append(source_file);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
audioOptions.append(source_Dom);
|
||||||
|
|
||||||
|
const muteDom = document.createElement('input');
|
||||||
|
const muteDom_label = document.createElement('label');
|
||||||
|
muteDom.id = toCssClass(`audio_mute${propTitle}`);
|
||||||
|
muteDom.name = toCssClass(`audio_mute${propTitle}`);
|
||||||
|
muteDom.type = 'checkbox';
|
||||||
|
muteDom.checked = true;
|
||||||
|
muteDom_label.for = toCssClass(`audio_mute${propTitle}`);
|
||||||
|
muteDom_label.innerHTML = 'muted';
|
||||||
|
audioOptions.append(muteDom);
|
||||||
|
audioOptions.append(muteDom_label);
|
||||||
|
|
||||||
const min_max_Dom = document.createElement('div');
|
const min_max_Dom = document.createElement('div');
|
||||||
min_max_Dom.classList.add('audio_min_max');
|
min_max_Dom.classList.add('audio_min_max');
|
||||||
const min_Cont = document.createElement('div');
|
const min_Cont = document.createElement('div');
|
||||||
|
@ -240,7 +333,7 @@ const Audio = function(tp, record) {
|
||||||
max_inputDom.value = `${mappingOptions.max_out}`;
|
max_inputDom.value = `${mappingOptions.max_out}`;
|
||||||
const smoothing_inputDom_label = document.createElement('label');
|
const smoothing_inputDom_label = document.createElement('label');
|
||||||
smoothing_inputDom_label.for = 'audio_smoothing';
|
smoothing_inputDom_label.for = 'audio_smoothing';
|
||||||
smoothing_inputDom_label.innerHTML = 'audio smoothing ';
|
smoothing_inputDom_label.innerHTML = 'audio smoothing';
|
||||||
const smoothing_inputDom = document.createElement('input');
|
const smoothing_inputDom = document.createElement('input');
|
||||||
smoothing_inputDom.type = 'number';
|
smoothing_inputDom.type = 'number';
|
||||||
smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`);
|
smoothing_inputDom.name = toCssClass(`audio_smoothing${propTitle}`);
|
||||||
|
@ -257,6 +350,23 @@ const Audio = function(tp, record) {
|
||||||
min_max_Dom.append(max_Cont);
|
min_max_Dom.append(max_Cont);
|
||||||
max_Cont.append(max_inputDom_label);
|
max_Cont.append(max_inputDom_label);
|
||||||
max_Cont.append(max_inputDom);
|
max_Cont.append(max_inputDom);
|
||||||
|
if (hasLetterDelay) {
|
||||||
|
const letterDelayCont = document.createElement('div');
|
||||||
|
const letterDelay_inputDom_label = document.createElement('label');
|
||||||
|
letterDelay_inputDom_label.for = 'audio_letterDelay';
|
||||||
|
letterDelay_inputDom_label.innerHTML = 'letterDelay';
|
||||||
|
const letterDelay_inputDom = document.createElement('input');
|
||||||
|
letterDelay_inputDom.type = 'number';
|
||||||
|
letterDelay_inputDom.name = toCssClass(`audio_letterDelay${propTitle}`);
|
||||||
|
letterDelay_inputDom.id = toCssClass(`audio_letterDelay${propTitle}`);
|
||||||
|
letterDelay_inputDom.value = 0;
|
||||||
|
letterDelay_inputDom.min = 0;
|
||||||
|
letterDelay_inputDom.step = 1;
|
||||||
|
letterDelayCont.append(letterDelay_inputDom_label);
|
||||||
|
letterDelayCont.append(letterDelay_inputDom);
|
||||||
|
min_max_Dom.append(letterDelayCont);
|
||||||
|
letterDelay_inputDom.addEventListener('change', updateMappingOptions);
|
||||||
|
}
|
||||||
audioOptions.append(min_max_Dom);
|
audioOptions.append(min_max_Dom);
|
||||||
|
|
||||||
const sync_Dom = document.createElement('div');
|
const sync_Dom = document.createElement('div');
|
||||||
|
@ -266,7 +376,6 @@ const Audio = function(tp, record) {
|
||||||
sync_titleDom_Cont.classList.add('sync_titleDom_Cont');
|
sync_titleDom_Cont.classList.add('sync_titleDom_Cont');
|
||||||
sync_titleDom.innerHTML = 'sync with:';
|
sync_titleDom.innerHTML = 'sync with:';
|
||||||
sync_Dom.append(sync_titleDom);
|
sync_Dom.append(sync_titleDom);
|
||||||
|
|
||||||
|
|
||||||
audio_sync_options.forEach((o) => {
|
audio_sync_options.forEach((o) => {
|
||||||
const sync_inputDom_Cont = document.createElement('div');
|
const sync_inputDom_Cont = document.createElement('div');
|
||||||
|
@ -319,6 +428,8 @@ const Audio = function(tp, record) {
|
||||||
fft_Dom.append(fft_imgDom);
|
fft_Dom.append(fft_imgDom);
|
||||||
fft_Dom.append(fft_selectDom);
|
fft_Dom.append(fft_selectDom);
|
||||||
audioOptions.append(fft_Dom);
|
audioOptions.append(fft_Dom);
|
||||||
|
source_Dom.addEventListener('change', updateMappingOptions);
|
||||||
|
muteDom.addEventListener('change', updateMappingOptions);
|
||||||
min_inputDom.addEventListener('change', updateMappingOptions);
|
min_inputDom.addEventListener('change', updateMappingOptions);
|
||||||
max_inputDom.addEventListener('change', updateMappingOptions);
|
max_inputDom.addEventListener('change', updateMappingOptions);
|
||||||
smoothing_inputDom.addEventListener('change', updateMappingOptions);
|
smoothing_inputDom.addEventListener('change', updateMappingOptions);
|
||||||
|
@ -361,12 +472,24 @@ const Audio = function(tp, record) {
|
||||||
mappingOptions.max_in = (bb.height - min_y) * y_factor;
|
mappingOptions.max_in = (bb.height - min_y) * y_factor;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
fft_Dom.addEventListener('mouseup', (e) => {
|
const unset = (e) => {
|
||||||
setFrequency = false;
|
setFrequency = false;
|
||||||
|
};
|
||||||
|
const unsetFromOutside = (e) => {
|
||||||
|
document.removeEventListener('mouseup', unsetFromOutside);
|
||||||
|
unset(e);
|
||||||
|
};
|
||||||
|
fft_Dom.addEventListener('mouseup', unset);
|
||||||
|
fft_Dom.addEventListener('mouseleave', (e) => {
|
||||||
|
if (setFrequency) {
|
||||||
|
document.addEventListener('mouseup', unsetFromOutside);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
fft_Dom.addEventListener('mouseenter', (e) => {
|
||||||
|
if (setFrequency) {
|
||||||
|
document.removeEventListener('mouseup', unsetFromOutside);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
//fft_Dom.addEventListener('mouseout', (e) => {
|
|
||||||
//setFrequency = false;
|
|
||||||
//});
|
|
||||||
|
|
||||||
container.after(audioOptions);
|
container.after(audioOptions);
|
||||||
|
|
||||||
|
@@ -470,9 +593,11 @@ const Audio = function(tp, record) {
       if (!isMapped(layer, propTitle)) {
         addAudioMapping(layer, propTitle);
         addAudioOptions(layer, propTitle);
+        layer.updateValuesViaTheatre(false);
       } else {
         removeAudioMapping(layer, propTitle);
         removeAudioOptions(layer, propTitle);
+        layer.updateValuesViaTheatre(true);
       }
     });
     if (isActive) {
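The new updateValuesViaTheatre(false/true) calls suggest the layer stops applying Theatre-driven values while a prop is audio-mapped and resumes once the mapping is removed. A rough sketch of such a switch (the Layer internals are not shown in this compare, so the flag and helper names are assumptions):

// sketch only: hypothetical flag inside Layer, not taken from this diff
this.updateValuesViaTheatre = (enabled) => {
  this.theatreDriven = enabled;            // assumed flag name
};
this.onTheatreValuesChanged = (values) => {
  if (!this.theatreDriven) { return; }     // the audio mapping owns the prop for now
  this.applyValues(values);                // assumed helper
};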
@ -502,6 +627,62 @@ const Audio = function(tp, record) {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
const audioSourceCombos = {};
|
||||||
|
const readAudioFiles = () => {
|
||||||
|
FS.readdir(config.fs.idbfsAudioDir).forEach((file) => {
|
||||||
|
if (file.indexOf('.') !== 0 && !audioSourceCombos.hasOwnProperty(file)) {
|
||||||
|
const audioElement = document.createElement('audio');
|
||||||
|
audioElement.classList.add('invisible');
|
||||||
|
audioElement.classList.add('audio_file');
|
||||||
|
audioElement.classList.add(toCssClass(`audio_file${file}`));
|
||||||
|
document.querySelector('body').append(audioElement);
|
||||||
|
|
||||||
|
const arr = FS.readFile(`${config.fs.idbfsAudioDir}/${file}`);
|
||||||
|
let type = 'audio/wav';
|
||||||
|
const filesplit = file.split('.');
|
||||||
|
const extension = filesplit[filesplit.length - 1];
|
||||||
|
if (extension === 'wav') {
|
||||||
|
type = 'audio/wav';
|
||||||
|
} else if (extension === 'mp3') {
|
||||||
|
type = 'audio/mpeg';
|
||||||
|
} else if (extension === 'ogg') {
|
||||||
|
type = 'audio/ogg';
|
||||||
|
}
|
||||||
|
|
||||||
|
const src = URL.createObjectURL(
|
||||||
|
new Blob([arr], {
|
||||||
|
type
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
audioElement.src = src;
|
||||||
|
audioElement.loop = true;
|
||||||
|
|
||||||
|
const source = audioCtx.createMediaElementSource(audioElement);
|
||||||
|
const gain = audioCtx.createGain();
|
||||||
|
gain.gain.value = 0;
|
||||||
|
source.connect(gain);
|
||||||
|
gain.connect(audioCtx.destination);
|
||||||
|
//source.connect(audioCtx.destination);
|
||||||
|
const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
|
||||||
|
const bufferLength = analyser.frequencyBinCount / 2;
|
||||||
|
const dataArray = new Uint8Array(bufferLength);
|
||||||
|
|
||||||
|
source.connect(analyser);
|
||||||
|
|
||||||
|
audioElement.play();
|
||||||
|
|
||||||
|
audioSourceCombos[file] = {
|
||||||
|
gain,
|
||||||
|
source,
|
||||||
|
dataArray,
|
||||||
|
analyser,
|
||||||
|
audioElement,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
const init = () => {
|
const init = () => {
|
||||||
if (!started) {
|
if (!started) {
|
||||||
|
@ -548,25 +729,31 @@ const Audio = function(tp, record) {
|
||||||
// window. is needed otherwise Safari explodes
|
// window. is needed otherwise Safari explodes
|
||||||
audioCtx = new(window.AudioContext || window.webkitAudioContext)();
|
audioCtx = new(window.AudioContext || window.webkitAudioContext)();
|
||||||
const voiceSelect = audioDom.querySelector("#voice");
|
const voiceSelect = audioDom.querySelector("#voice");
|
||||||
let source;
|
|
||||||
let stream;
|
|
||||||
|
|
||||||
// Grab the mute button to use below
|
// Grab the mute button to use below
|
||||||
const mute = audioDom.querySelector(".mute");
|
const mute = audioDom.querySelector(".mute");
|
||||||
|
|
||||||
// Set up the different audio nodes we will use for the app
|
// Set up the different audio nodes we will use for the app
|
||||||
const analyser = audioCtx.createAnalyser();
|
{
|
||||||
analyser.minDecibels = -90;
|
const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
|
||||||
analyser.maxDecibels = -10;
|
const bufferLength = analyser.frequencyBinCount / 2;
|
||||||
analyser.smoothingTimeConstant = 0.85;
|
|
||||||
window.analyser = analyser;
|
|
||||||
|
|
||||||
const distortion = audioCtx.createWaveShaper();
|
audioSourceCombos['microphone'] = {
|
||||||
const gainNode = audioCtx.createGain();
|
// source: see below when we actually get the microphone
|
||||||
const biquadFilter = audioCtx.createBiquadFilter();
|
analyser,
|
||||||
const convolver = audioCtx.createConvolver();
|
dataArray: new Uint8Array(bufferLength),
|
||||||
|
audioElement: null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
const echoDelay = createEchoDelayEffect(audioCtx);
|
readAudioFiles();
|
||||||
|
|
||||||
|
//const distortion = audioCtx.createWaveShaper();
|
||||||
|
//const gainNode = audioCtx.createGain();
|
||||||
|
//const biquadFilter = audioCtx.createBiquadFilter();
|
||||||
|
//const convolver = audioCtx.createConvolver();
|
||||||
|
|
||||||
|
//const echoDelay = createEchoDelayEffect(audioCtx);
|
||||||
|
|
||||||
// Distortion curve for the waveshaper, thanks to Kevin Ennis
|
// Distortion curve for the waveshaper, thanks to Kevin Ennis
|
||||||
// http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
|
// http://stackoverflow.com/questions/22312841/waveshaper-node-in-webaudio-how-to-emulate-distortion
|
||||||
|
@ -584,35 +771,6 @@ const Audio = function(tp, record) {
|
||||||
return curve;
|
return curve;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Grab audio track via XHR for convolver node
|
|
||||||
let soundSource;
|
|
||||||
const ajaxRequest = new XMLHttpRequest();
|
|
||||||
|
|
||||||
ajaxRequest.open(
|
|
||||||
"GET",
|
|
||||||
"https://mdn.github.io/voice-change-o-matic/audio/concert-crowd.ogg",
|
|
||||||
true
|
|
||||||
);
|
|
||||||
|
|
||||||
ajaxRequest.responseType = "arraybuffer";
|
|
||||||
|
|
||||||
ajaxRequest.onload = function() {
|
|
||||||
const audioData = ajaxRequest.response;
|
|
||||||
|
|
||||||
audioCtx.decodeAudioData(
|
|
||||||
audioData,
|
|
||||||
function(buffer) {
|
|
||||||
soundSource = audioCtx.createBufferSource();
|
|
||||||
convolver.buffer = buffer;
|
|
||||||
},
|
|
||||||
function(e) {
|
|
||||||
console.log("Audio::audioCtx.decodeAudioData", "Error with decoding audio data" + e.err);
|
|
||||||
}
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
ajaxRequest.send();
|
|
||||||
|
|
||||||
// Set up canvas context for visualizer
|
// Set up canvas context for visualizer
|
||||||
const canvas = audioDom.querySelector(".visualizer");
|
const canvas = audioDom.querySelector(".visualizer");
|
||||||
const canvasCtx = canvas.getContext("2d");
|
const canvasCtx = canvas.getContext("2d");
|
||||||
|
@ -621,6 +779,7 @@ const Audio = function(tp, record) {
|
||||||
canvas.setAttribute("width", config.audio.fftBandsUsed);
|
canvas.setAttribute("width", config.audio.fftBandsUsed);
|
||||||
const visualSelect = audioDom.querySelector("#visual");
|
const visualSelect = audioDom.querySelector("#visual");
|
||||||
let drawVisual;
|
let drawVisual;
|
||||||
|
let previousPosition = -1;
|
||||||
|
|
||||||
// Main block for doing the audio recording
|
// Main block for doing the audio recording
|
||||||
if (navigator.mediaDevices.getUserMedia) {
|
if (navigator.mediaDevices.getUserMedia) {
|
||||||
|
@ -631,16 +790,16 @@ const Audio = function(tp, record) {
|
||||||
navigator.mediaDevices
|
navigator.mediaDevices
|
||||||
.getUserMedia(constraints)
|
.getUserMedia(constraints)
|
||||||
.then(function(stream) {
|
.then(function(stream) {
|
||||||
source = audioCtx.createMediaStreamSource(stream);
|
const source = audioCtx.createMediaStreamSource(stream);
|
||||||
source.connect(distortion);
|
const gain = audioCtx.createGain();
|
||||||
distortion.connect(biquadFilter);
|
gain.gain.value = 0;
|
||||||
biquadFilter.connect(gainNode);
|
source.connect(gain);
|
||||||
convolver.connect(gainNode);
|
gain.connect(audioCtx.destination);
|
||||||
echoDelay.placeBetween(gainNode, analyser);
|
source.connect(audioSourceCombos['microphone'].analyser);
|
||||||
analyser.connect(audioCtx.destination);
|
audioSourceCombos['microphone'].source = source;
|
||||||
|
audioSourceCombos['microphone'].gain = gain;
|
||||||
|
|
||||||
visualize();
|
visualize();
|
||||||
voiceChange();
|
|
||||||
})
|
})
|
||||||
.catch(function(err) {
|
.catch(function(err) {
|
||||||
console.log("The following gUM error occured: " + err);
|
console.log("The following gUM error occured: " + err);
|
||||||
|
@ -650,65 +809,13 @@ const Audio = function(tp, record) {
|
||||||
}
|
}
|
||||||
|
|
||||||
const visualize = () => {
|
const visualize = () => {
|
||||||
const WIDTH = canvas.width;
|
|
||||||
const HEIGHT = canvas.height;
|
|
||||||
|
|
||||||
const visualSetting = visualSelect.value;
|
//analyser.fftSize = config.audio.fftBandsAnalysed;
|
||||||
|
|
||||||
if (visualSetting === "sinewave") {
|
|
||||||
//analyser.fftSize = 2048;
|
|
||||||
//const bufferLength = analyser.fftSize;
|
|
||||||
|
|
||||||
//// We can use Float32Array instead of Uint8Array if we want higher precision
|
|
||||||
//// const dataArray = new Float32Array(bufferLength);
|
|
||||||
//const dataArray = new Uint8Array(bufferLength);
|
|
||||||
|
|
||||||
//canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
|
|
||||||
|
|
||||||
//const draw = function() {
|
|
||||||
//drawVisual = requestAnimationFrame(draw);
|
|
||||||
|
|
||||||
//analyser.getByteTimeDomainData(dataArray);
|
|
||||||
|
|
||||||
//canvasCtx.fillStyle = "rgb(200, 200, 200)";
|
|
||||||
//canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
|
|
||||||
|
|
||||||
//canvasCtx.lineWidth = 2;
|
|
||||||
//canvasCtx.strokeStyle = "rgb(0, 0, 0)";
|
|
||||||
|
|
||||||
//canvasCtx.beginPath();
|
|
||||||
|
|
||||||
//const sliceWidth = (WIDTH * 1.0) / bufferLength;
|
|
||||||
//let x = 0;
|
|
||||||
|
|
||||||
//for (let i = 0; i < bufferLength; i++) {
|
|
||||||
//let v = dataArray[i] / 128.0;
|
|
||||||
//let y = (v * HEIGHT) / 2;
|
|
||||||
|
|
||||||
//if (i === 0) {
|
|
||||||
//canvasCtx.moveTo(x, y);
|
|
||||||
//} else {
|
|
||||||
//canvasCtx.lineTo(x, y);
|
|
||||||
//}
|
|
||||||
|
|
||||||
//x += sliceWidth;
|
|
||||||
//}
|
|
||||||
|
|
||||||
//canvasCtx.lineTo(canvas.width, canvas.height / 2);
|
|
||||||
//canvasCtx.stroke();
|
|
||||||
//};
|
|
||||||
|
|
||||||
//draw();
|
|
||||||
} else if (visualSetting == "frequencybars") {
|
|
||||||
analyser.fftSize = config.audio.fftBandsAnalysed;
|
|
||||||
const w = config.audio.fftBandsUsed;
|
const w = config.audio.fftBandsUsed;
|
||||||
const h = config.audio.fftHeight;
|
const h = config.audio.fftHeight;
|
||||||
const verticalFactor = h / 256.0;
|
const verticalFactor = h / 256.0;
|
||||||
const bufferLengthAlt = analyser.frequencyBinCount / 2;
|
|
||||||
|
|
||||||
// See comment above for Float32Array()
|
// See comment above for Float32Array()
|
||||||
const dataArrayAlt = new Uint8Array(bufferLengthAlt);
|
|
||||||
|
|
||||||
let canvasKeys = Object.keys(canvasCombos);
|
let canvasKeys = Object.keys(canvasCombos);
|
||||||
|
|
||||||
for (let i = 0; i < canvasKeys.length; i++) {
|
for (let i = 0; i < canvasKeys.length; i++) {
|
||||||
|
@ -717,149 +824,210 @@ const Audio = function(tp, record) {
|
||||||
|
|
||||||
let frameCount = 0;
|
let frameCount = 0;
|
||||||
const drawAlt = function() {
|
const drawAlt = function() {
|
||||||
|
const position = tp.sheet.sequence.position;
|
||||||
|
let positionRollover = false;
|
||||||
|
if (config.audio.rolloverResetLoop && position < previousPosition) {
|
||||||
|
positionRollover = true;
|
||||||
|
}
|
||||||
|
previousPosition = position;
|
||||||
canvasKeys = Object.keys(canvasCombos);
|
canvasKeys = Object.keys(canvasCombos);
|
||||||
drawVisual = requestAnimationFrame(drawAlt);
|
drawVisual = requestAnimationFrame(drawAlt);
|
||||||
|
|
||||||
analyser.getByteFrequencyData(dataArrayAlt);
|
canvasKeys.forEach((k) => {
|
||||||
|
canvasCombos[k][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
|
||||||
for (let i = 0; i < canvasKeys.length; i++) {
|
canvasCombos[k][1].fillRect(0, 0, w, h);
|
||||||
canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
|
const layerID = canvasCombos[k][2];
|
||||||
canvasCombos[canvasKeys[i]][1].fillRect(0, 0, w, h);
|
const m = mapping[layerID][k];
|
||||||
const layerID = canvasCombos[canvasKeys[i]][2];
|
|
||||||
const m = mapping[layerID][canvasKeys[i]];
|
|
||||||
if (m.sync === 'volume') {
|
if (m.sync === 'volume') {
|
||||||
const sx = m.min_freq;
|
const sx = m.min_freq;
|
||||||
const sw = m.max_freq - m.min_freq;
|
const sw = m.max_freq - m.min_freq;
|
||||||
const sy = h - (m.max_in * verticalFactor);
|
const sy = h - (m.max_in * verticalFactor);
|
||||||
const sh = (m.max_in - m.min_in) * verticalFactor;
|
const sh = (m.max_in - m.min_in) * verticalFactor;
|
||||||
canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
|
canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
|
||||||
canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
|
canvasCombos[k][1].fillRect(sx, sy, sw, sh);
|
||||||
} else if (m.sync === 'pitch') {
|
} else if (m.sync === 'pitch' || m.sync === 'clarity') {
|
||||||
const sx = m.min_freq;
|
const sx = m.min_freq;
|
||||||
const sw = m.max_freq - m.min_freq;
|
const sw = m.max_freq - m.min_freq;
|
||||||
const sy = 0;
|
const sy = 0;
|
||||||
const sh = h;
|
const sh = h;
|
||||||
canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
|
canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
|
||||||
canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
|
canvasCombos[k][1].fillRect(sx, sy, sw, sh);
|
||||||
}
|
}
|
||||||
}
|
});
|
||||||
|
|
||||||
const barWidth = 1;//(w / bufferLengthAlt) * 2.5;
|
const usedSourceCombos = [];
|
||||||
let barHeight;
|
const analysedResults = {};
|
||||||
let x = 0;
|
const unmuted = [];
|
||||||
|
Object.keys(mapping).forEach((layerID) => {
|
||||||
let max_i = 0;
|
Object.keys(mapping[layerID]).forEach((propTitle) => {
|
||||||
let max_ri = 0;
|
const m = mapping[layerID][propTitle];
|
||||||
let total_v = 0;
|
const source = m.source;
|
||||||
let max_v = 0;
|
if (!m.muted) {
|
||||||
for (let k = 0; k < canvasKeys.length; k++) {
|
if (unmuted.indexOf(source) < 0) {
|
||||||
const layerID = canvasCombos[canvasKeys[k]][2];
|
unmuted.push(source);
|
||||||
const m = mapping[layerID][canvasKeys[k]];
|
|
||||||
m.max_v = max_v;
|
|
||||||
m.max_i = max_i;
|
|
||||||
m.max_ri = max_ri;
|
|
||||||
m.total_v = total_v;
|
|
||||||
}
|
|
||||||
for (let i = 0; i < w; i++) {
|
|
||||||
barHeight = dataArrayAlt[i];
|
|
||||||
total_v += barHeight;
|
|
||||||
max_ri = barHeight * i;
|
|
||||||
|
|
||||||
if (barHeight > max_v) {
|
|
||||||
max_v = barHeight;
|
|
||||||
max_i = i;
|
|
||||||
}
|
|
||||||
for (let k = 0; k < canvasKeys.length; k++) {
|
|
||||||
const layerID = canvasCombos[canvasKeys[k]][2];
|
|
||||||
const m = mapping[layerID][canvasKeys[k]];
|
|
||||||
let fillStyle = "rgb(200,200,200)"; // AUDIO COLOR
|
|
||||||
if (m.min_freq <= i && m.max_freq >= i) {
|
|
||||||
m.total_v += barHeight;
|
|
||||||
if (barHeight > m.max_v) {
|
|
||||||
m.max_v = barHeight;
|
|
||||||
m.max_i = i;
|
|
||||||
m.max_ri = barHeight * i;
|
|
||||||
}
|
}
|
||||||
fillStyle = "rgb(255,255,255)"; // AUDIO COLOR
|
|
||||||
}
|
}
|
||||||
canvasCombos[canvasKeys[k]][1].fillStyle = fillStyle;
|
if (usedSourceCombos.indexOf(source) < 0) {
|
||||||
canvasCombos[canvasKeys[k]][1].fillRect(
|
usedSourceCombos.push(source);
|
||||||
x,
|
analysedResults[source] = {
|
||||||
h - (barHeight * verticalFactor),
|
max_i: 0,
|
||||||
barWidth,
|
max_ri: 0,
|
||||||
(barHeight * verticalFactor)
|
max_v: 0,
|
||||||
);
|
total_v: 0,
|
||||||
|
mappings: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
m.max_v = 0;
|
||||||
|
m.max_i = 0;
|
||||||
|
m.max_ri = 0;
|
||||||
|
m.total_v = 0;
|
||||||
|
analysedResults[source].mappings.push(m);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
Object.keys(audioSourceCombos).forEach((k) => {
|
||||||
|
const asc = audioSourceCombos[k];
|
||||||
|
if (asc.audioElement !== null) {
|
||||||
|
if (usedSourceCombos.indexOf(k) >= 0) {
|
||||||
|
if (positionRollover || asc.audioElement.paused) {
|
||||||
|
asc.audioElement.currentTime = position % asc.audioElement.duration;
|
||||||
|
asc.audioElement.play();
|
||||||
|
}
|
||||||
|
} else if (!asc.audioElement.paused) {
|
||||||
|
asc.audioElement.pause();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
if (unmuted.indexOf(k) < 0) {
|
||||||
x += barWidth;
|
asc.gain.gain.value = 0;
|
||||||
}
|
} else {
|
||||||
max_ri /= total_v;
|
asc.gain.gain.value = 1;
|
||||||
for (let k = 0; k < canvasKeys.length; k++) {
|
|
||||||
const layerID = canvasCombos[canvasKeys[k]][2];
|
|
||||||
const m = mapping[layerID][canvasKeys[k]];
|
|
||||||
m.max_ri /= m.total_v;
|
|
||||||
if (m.sync === 'volume') {
|
|
||||||
const sx = m.min_freq;
|
|
||||||
const sw = m.max_freq - m.min_freq;
|
|
||||||
const sy = h - (m.max_in * verticalFactor);
|
|
||||||
const sh = (m.max_in - m.min_in) * verticalFactor;
|
|
||||||
canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
|
|
||||||
canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
|
|
||||||
canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
|
|
||||||
} else if (m.sync === 'pitch') {
|
|
||||||
const m = mapping[layerID][canvasKeys[k]];
|
|
||||||
const sx = m.min_freq;
|
|
||||||
const sw = m.max_freq - m.min_freq;
|
|
||||||
const sy = 0;
|
|
||||||
const sh = h;
|
|
||||||
canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
|
|
||||||
canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
|
|
||||||
canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
|
|
||||||
}
|
}
|
||||||
}
|
});
|
||||||
const propsToSet = [];
|
usedSourceCombos.forEach((source) => {
|
||||||
getLayers().forEach((layer) => {
|
const afs = audioSourceCombos[source];
|
||||||
if (mapping.hasOwnProperty(layer.id())) {
|
const r = analysedResults[source];
|
||||||
Object.keys(mapping[layer.id()]).forEach((propTitle) => {
|
afs.analyser.getByteFrequencyData(afs.dataArray);
|
||||||
const m = mapping[layer.id()][propTitle];
|
for (let f = 0; f < w; f++) {
|
||||||
switch (m.sync) {
|
const v = afs.dataArray[f];
|
||||||
case 'volume': {
|
r.total_v += v;
|
||||||
let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
|
if (r.max_v < v) {
|
||||||
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
|
r.max_v = v;
|
||||||
propsToSet.push({
|
r.max_i = v;
|
||||||
layer,
|
}
|
||||||
id: layer.id(),
|
r.max_ri += v * f;
|
||||||
title: propTitle,
|
let fillStyle = 'rgb(200,200,200)';
|
||||||
prop: layer.theatreObject.props[propTitle],
|
for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
|
||||||
value: m.value,
|
// NOTE: this is not the most efficient way to do it
|
||||||
});
|
const k = canvasKeys[k_i];
|
||||||
break;
|
const layerID = canvasCombos[k][2];
|
||||||
|
if (mapping[layerID][k].source === source) {
|
||||||
|
canvasCombos[k][1].fillStyle = fillStyle;
|
||||||
|
canvasCombos[k][1].fillRect(
|
||||||
|
f,
|
||||||
|
h - (v * verticalFactor),
|
||||||
|
1,
|
||||||
|
(v * verticalFactor)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
analysedResults[source].mappings.forEach((m) => {
|
||||||
|
if (m.min_freq <= f && m.max_freq >= f) {
|
||||||
|
m.total_v += v;
|
||||||
|
if (m.max_v < v) {
|
||||||
|
m.max_v = v;
|
||||||
|
m.max_i = f;
|
||||||
}
|
}
|
||||||
case 'pitch': {
|
m.max_ri += v * f;
|
||||||
const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : max_i;
|
|
||||||
const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : max_ri;
|
|
||||||
const fi = config.audio.pitchCombineFrequencies ? ri : mi;
|
|
||||||
let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
|
|
||||||
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
|
|
||||||
propsToSet.push({
|
|
||||||
layer,
|
|
||||||
id: layer.id(),
|
|
||||||
title: propTitle,
|
|
||||||
prop: layer.theatreObject.props[propTitle],
|
|
||||||
value: m.value,
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
r.max_ri /= r.total_v;
|
||||||
|
analysedResults[source].mappings.forEach((m) => {
|
||||||
|
m.max_ri /= m.total_v;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
|
||||||
|
const k = canvasKeys[k_i];
|
||||||
|
const layerID = canvasCombos[k][2];
|
||||||
|
const m = mapping[layerID][k];
|
||||||
|
if (m.sync === 'volume') {
|
||||||
|
const sx = m.min_freq;
|
||||||
|
const sw = m.max_freq - m.min_freq;
|
||||||
|
const sy = h - (m.max_in * verticalFactor);
|
||||||
|
const sh = (m.max_in - m.min_in) * verticalFactor;
|
||||||
|
canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
|
||||||
|
canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
|
||||||
|
canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
|
||||||
|
} else if (m.sync === 'pitch' || m.sync === 'clarity') {
|
||||||
|
const sx = m.min_freq;
|
||||||
|
const sw = m.max_freq - m.min_freq;
|
||||||
|
const sy = 0;
|
||||||
|
const sh = h;
|
||||||
|
canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
|
||||||
|
canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
|
||||||
|
canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const propsToSet = [];
|
||||||
|
Object.keys(mapping).forEach((layerID) => {
|
||||||
|
Object.keys(mapping[layerID]).forEach((propTitle) => {
|
||||||
|
const m = mapping[layerID][propTitle];
|
||||||
|
switch (m.sync) {
|
||||||
|
case 'volume': {
|
||||||
|
let a = mapValue(m.max_v, m.min_in, m.max_in, m.min_out, m.max_out, true);
|
||||||
|
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
|
||||||
|
propsToSet.push({
|
||||||
|
id: layerID,
|
||||||
|
title: propTitle,
|
||||||
|
value: m.value,
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case 'pitch': {
|
||||||
|
const r = analysedResults[m.source];
|
||||||
|
const mi = config.audio.ignoreOutboundFrequencies ? m.max_i : r.max_i;
|
||||||
|
const ri = config.audio.ignoreOutboundFrequencies ? m.max_ri : r.max_ri;
|
||||||
|
const fi = config.audio.pitchCombineFrequencies ? ri : mi;
|
||||||
|
let a = mapValue(fi, m.min_freq, m.max_freq, m.min_out, m.max_out, true);
|
||||||
|
if (!isNaN(a)) {
|
||||||
|
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
|
||||||
|
propsToSet.push({
|
||||||
|
id: layerID,
|
||||||
|
title: propTitle,
|
||||||
|
value: m.value,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case 'clarity': {
|
||||||
|
const clarity = m.max_v / m.total_v;
|
||||||
|
const a = mapValue(clarity, 0.01, 0.05, m.min_out, m.max_out, true);
|
||||||
|
if (!isNaN(a)) {
|
||||||
|
m.value = m.value * m.smoothing + (1.0 - m.smoothing) * a;
|
||||||
|
propsToSet.push({
|
||||||
|
id: layerID,
|
||||||
|
title: propTitle,
|
||||||
|
value: m.value,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if (m.letterDelay) {
|
||||||
|
const pt = `letterDelays.${propTitle}`;
|
||||||
|
propsToSet.push({
|
||||||
|
id: layerID,
|
||||||
|
title: pt,
|
||||||
|
value: m.letterDelay,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
});
|
});
|
||||||
if (propsToSet.length > 0 && frameCount % 2 === 0) {
|
if (propsToSet.length > 0 && frameCount % 2 === 0) {
|
||||||
// this is when to monitor live
|
// this is when to monitor live
|
||||||
if (!record.isRecording()) {
|
if (!record.isRecording()) {
|
||||||
if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
|
//if (!tp.core.val(tp.sheet.sequence.pointer.playing)) {
|
||||||
let values = {};
|
let values = {};
|
||||||
propsToSet.forEach((p) => {
|
propsToSet.forEach((p) => {
|
||||||
const newValues = {
|
const newValues = {
|
||||||
|
@@ -874,17 +1042,16 @@ const Audio = function(tp, record) {
             };
           });
           Object.keys(values).forEach((layerID) => {
-            window.debugPreValues = clone(values[layerID]);
             deFlattenObject(values[layerID]);
-            window.debugValues = clone(values[layerID]);
             record.liveUpdater.immediateUpdate(getLayer(layerID), values[layerID]);
           });
-        }
+        //}
       } else {
         const position = tp.sheet.sequence.position;
         propsToSet.forEach((p) => {
           const title = tp
             .getPanelPropTitle(p.title);
+          const layer = getLayer(p.id);
 
           if (title !== null) {
             const inputElement = title

@@ -897,8 +1064,12 @@ const Audio = function(tp, record) {
             }
           }
           record.addValue(p.id, p.title, p.value, position);
-          if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
-            record.liveUpdate(p.layer, position);
+          if (p.title.indexOf('color') === 0) {
+            if (!config.audio.colorSeparateRGBA || p.title === 'color.a') {
+              record.liveUpdate(layer, position);
+            }
+          } else {
+            record.liveUpdate(layer, position);
           }
         });
       }
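When colorSeparateRGBA is on, one audio frame produces four separate color.* entries; deferring liveUpdate to the color.a entry means the layer redraws once per frame instead of four times, while non-color props still update immediately. A condensed restatement of that branch, assuming the same p.title strings:

// sketch: one redraw per frame for a separated RGBA group
const shouldRedraw = (title) =>
  title.indexOf('color') !== 0            // non-color props: always redraw
  || !config.audio.colorSeparateRGBA      // combined color object: always redraw
  || title === 'color.a';                 // separated RGBA: only on the last channel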
@ -914,107 +1085,6 @@ const Audio = function(tp, record) {
|
||||||
frameCount++;
|
frameCount++;
|
||||||
};
|
};
|
||||||
drawAlt();
|
drawAlt();
|
||||||
} else if (visualSetting == "off") {
|
|
||||||
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
|
|
||||||
canvasCtx.fillStyle = "red";
|
|
||||||
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const voiceChange = () => {
|
|
||||||
distortion.oversample = "4x";
|
|
||||||
biquadFilter.gain.setTargetAtTime(0, audioCtx.currentTime, 0);
|
|
||||||
|
|
||||||
const voiceSetting = voiceSelect.value;
|
|
||||||
|
|
||||||
if (echoDelay.isApplied()) {
|
|
||||||
echoDelay.discard();
|
|
||||||
}
|
|
||||||
|
|
||||||
// When convolver is selected it is connected back into the audio path
|
|
||||||
if (voiceSetting == "convolver") {
|
|
||||||
biquadFilter.disconnect(0);
|
|
||||||
biquadFilter.connect(convolver);
|
|
||||||
} else {
|
|
||||||
biquadFilter.disconnect(0);
|
|
||||||
biquadFilter.connect(gainNode);
|
|
||||||
|
|
||||||
if (voiceSetting == "distortion") {
|
|
||||||
distortion.curve = makeDistortionCurve(400);
|
|
||||||
} else if (voiceSetting == "biquad") {
|
|
||||||
biquadFilter.type = "lowshelf";
|
|
||||||
biquadFilter.frequency.setTargetAtTime(1000, audioCtx.currentTime, 0);
|
|
||||||
biquadFilter.gain.setTargetAtTime(25, audioCtx.currentTime, 0);
|
|
||||||
} else if (voiceSetting == "delay") {
|
|
||||||
echoDelay.apply();
|
|
||||||
} else if (voiceSetting == "off") {
|
|
||||||
console.log("Voice settings turned off");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function createEchoDelayEffect(audioContext) {
|
|
||||||
const delay = audioContext.createDelay(1);
|
|
||||||
const dryNode = audioContext.createGain();
|
|
||||||
const wetNode = audioContext.createGain();
|
|
||||||
const mixer = audioContext.createGain();
|
|
||||||
const filter = audioContext.createBiquadFilter();
|
|
||||||
|
|
||||||
delay.delayTime.value = 0.75;
|
|
||||||
dryNode.gain.value = 1;
|
|
||||||
wetNode.gain.value = 0;
|
|
||||||
filter.frequency.value = 1100;
|
|
||||||
filter.type = "highpass";
|
|
||||||
|
|
||||||
return {
|
|
||||||
apply: function() {
|
|
||||||
wetNode.gain.setValueAtTime(0.75, audioContext.currentTime);
|
|
||||||
},
|
|
||||||
discard: function() {
|
|
||||||
wetNode.gain.setValueAtTime(0, audioContext.currentTime);
|
|
||||||
},
|
|
||||||
isApplied: function() {
|
|
||||||
return wetNode.gain.value > 0;
|
|
||||||
},
|
|
||||||
placeBetween: function(inputNode, outputNode) {
|
|
||||||
inputNode.connect(delay);
|
|
||||||
delay.connect(wetNode);
|
|
||||||
wetNode.connect(filter);
|
|
||||||
filter.connect(delay);
|
|
||||||
|
|
||||||
inputNode.connect(dryNode);
|
|
||||||
dryNode.connect(mixer);
|
|
||||||
wetNode.connect(mixer);
|
|
||||||
mixer.connect(outputNode);
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Event listeners to change visualize and voice settings
|
|
||||||
visualSelect.onchange = function() {
|
|
||||||
window.cancelAnimationFrame(drawVisual);
|
|
||||||
visualize();
|
|
||||||
};
|
|
||||||
|
|
||||||
voiceSelect.onchange = function() {
|
|
||||||
voiceChange();
|
|
||||||
};
|
|
||||||
|
|
||||||
mute.onclick = voiceMute;
|
|
||||||
|
|
||||||
let previousGain;
|
|
||||||
|
|
||||||
function voiceMute() {
|
|
||||||
if (mute.id === "") {
|
|
||||||
previousGain = gainNode.gain.value;
|
|
||||||
gainNode.gain.value = 0;
|
|
||||||
mute.id = "activated";
|
|
||||||
mute.innerHTML = "Unmute";
|
|
||||||
} else {
|
|
||||||
gainNode.gain.value = previousGain;
|
|
||||||
mute.id = "";
|
|
||||||
mute.innerHTML = "Mute";
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -1039,9 +1109,11 @@ const Audio = function(tp, record) {
   this.addAudioOptions = addAudioOptions;
   this.removeAudioOptions = removeAudioOptions;
   this.AudioMappingOptions = AudioMappingOptions;
+  this.readAudioFiles = readAudioFiles;
 
   // debug
   this.canvasCombos = canvasCombos;
+  this.audioSourceCombos = audioSourceCombos;
 };
 
 export {
@@ -83,14 +83,33 @@ const config = {
     zoomDynamicMax: 42,
   },
   audio: {
-    ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'],
+    defaultRange: { // check audio.getDefaultRange for dynamic defaults
+      'textAlignment': [0, 1],
+      'fontSize_px': [42, 100],
+      'letterSpacing': [0, 1],
+      'lineHeight': [0, 1],
+      'rotation': [0, 180],
+      'mirror_x_distance': [0, 200],
+      'mirror_y_distance': [0, 70],
+      'color': [0, 1],
+      'letterDelays': [0, 1000],
+    },
+    ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy', 'height'],
+    maxFilenameLength: 24,
     defaultSmoothing: 0.7,
+    analyser: {
+      fftSize: 256 * 8,
+      minDecibels: -90,
+      maxDecibels: -10,
+      smoothingTimeConstant: 0.85,
+    },
     fftBandsAnalysed: 256 * 8,
     fftBandsUsed: 256 / 2,
     fftHeight: 256 / 4,
     colorSeparateRGBA: true,
     ignoreOutboundFrequencies: true,
     pitchCombineFrequencies: false,
+    rolloverResetLoop: true,
   },
   record: {
     ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'],

@@ -103,6 +122,7 @@ const config = {
   fs: {
     idbfsDir: '/idbfs',
     idbfsFontDir: '/idbfs/fonts',
+    idbfsAudioDir: '/idbfs/audio',
     idbfsTmpDir: '/idbfs/tmp',
   },
   timeline: {
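The new config.audio.analyser block is passed straight to the AnalyserNode constructor elsewhere in this compare (new AnalyserNode(audioCtx, config.audio.analyser)), so its keys are standard AnalyserOptions. A minimal sketch of that use, assuming an existing AudioContext:

// sketch: the options object maps 1:1 onto AnalyserNode settings
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const analyser = new AnalyserNode(audioCtx, config.audio.analyser);
// equivalent to setting analyser.fftSize, .minDecibels, .maxDecibels, .smoothingTimeConstant
const bins = new Uint8Array(analyser.frequencyBinCount); // frequencyBinCount === fftSize / 2
analyser.getByteFrequencyData(bins);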
@@ -151,6 +151,16 @@ const findInjectPanel = () => {
     bottomButtonsContainer.classList.add("bottomButtonsContainer");
     panel.append(bottomButtonsContainer);
   }
+  const hideuiButton = document.querySelector('#hide_ui');
+  if (hideuiButton !== null) {
+    bottomButtonsContainer.append(hideuiButton);
+    hideuiButton.classList.add("main_panel_button");
+  }
+  const audiofileButton = document.querySelector('#upload_audio');
+  if (audiofileButton !== null) {
+    bottomButtonsContainer.append(audiofileButton);
+    audiofileButton.classList.add("main_panel_button");
+  }
   const exportButton = document.querySelector('#exporter_open');
   if (exportButton !== null) {
     bottomButtonsContainer.append(exportButton);

@@ -406,12 +416,20 @@ const listAvailableFontsAndAxes = () => {
 window.listAvailableFontsAndAxes = listAvailableFontsAndAxes;
 window.getFontsAndAxes = getFontsAndAxes;
 
+window.getArtboard = () => {
+  return artboard;
+};
+
 window.getLayers = () => {
   return layers;
 };
 
 window.getLayer = (layerID) => {
-  return layers.find((layer) => layer.id() === layerID);
+  if (layerID === 'artboard') {
+    return artboard;
+  } else {
+    return layers.find((layer) => layer.id() === layerID);
+  }
 };
 
 window.moveLayerUp = (layerID) => {

@@ -422,10 +440,6 @@ window.moveLayerDown = (layerID) => {
   layerOrder.moveDown(layerID);
 };
 
-window.getArtboard = () => {
-  return artboard;
-};
-
 const addLayer = (autoInit = true) => {
   const layerID = Module.addNewLayer();
   const layer = new Layer(tp, layerID, fontsAndAxes, autoInit);

@@ -531,6 +545,48 @@ window.renderFrames = exporter.renderFrames;
 const layer_panel = document.querySelector('#layer_panel');
 
-const initPanels = () => {
-  //makeDraggable(layer_panel);
+const ui = (show) => {
+  if (show && tp.studio.ui.isHidden) {
+    tp.studio.ui.restore();
+  } else if (!show && !tp.studio.ui.isHidden) {
+    tp.studio.ui.hide();
+  }
+};
+
+const handleUiKeypress = (e) => {
+  if (e.key.toLowerCase() === 'q') {
+    document.removeEventListener('keypress', handleUiKeypress);
+    ui(true);
+  }
+};
+
+const initPanels = () => {
+  let hideuiButton = document.querySelector('#hide_ui');
+  if (hideuiButton === null) {
+    hideuiButton = tp.getPanel().querySelector('#hide_ui');
+  }
+  if (hideuiButton !== null) {
+    hideuiButton.addEventListener('click', () => {
+      ui(false);
+      document.addEventListener('keypress', handleUiKeypress);
+    });
+  }
+  let audiofileButton = document.querySelector('#upload_audio');
+  if (audiofileButton === null) {
+    audiofileButton = tp.getPanel().querySelector('#upload_audio');
+  }
+  if (audiofileButton !== null) {
+    audiofileButton.addEventListener('click', () => {
+      uploadFile('audio')
+        .then((file) => {
+          moduleFS
+            .save(file)
+            .then(() => {
+              console.log('ermh... done uploading?', file);
+              audio.readAudioFiles();
+            });
+        });
+    });
+  }
+
 };
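For readability, the same upload flow expressed with async/await (illustrative only; the diff keeps the promise chain):

// sketch: upload an audio file, persist it into /idbfs/audio, then register it as a source
audiofileButton.addEventListener('click', async () => {
  const file = await uploadFile('audio');  // prompts for an audio file
  await moduleFS.save(file);               // writes it into the persistent IDBFS audio dir
  audio.readAudioFiles();                  // makes it selectable as an audio source
});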
@@ -13,6 +13,9 @@ const ModuleFS = function() {
   if (!FS.analyzePath(config.fs.idbfsFontDir).exists) {
     FS.mkdir(config.fs.idbfsFontDir);
   }
+  if (!FS.analyzePath(config.fs.idbfsAudioDir).exists) {
+    FS.mkdir(config.fs.idbfsAudioDir);
+  }
   if (!FS.analyzePath(config.fs.idbfsTmpDir).exists) {
     FS.mkdir(config.fs.idbfsTmpDir);
   }

@@ -59,6 +62,19 @@ const ModuleFS = function() {
           .then(() => {
             resolve(filePath);
           });
+      } else if (file.type.indexOf('audio') === 0) {
+        var uint8View = new Uint8Array(file.arrayBuffer);
+        console.log('trying to save the audio file, file, uint8View', file, uint8View);
+        if (!FS.analyzePath(`${config.fs.idbfsAudioDir}/${file.name}`).exists) {
+          FS.createDataFile(config.fs.idbfsAudioDir, file.name, uint8View, true, true);
+          this.syncfs(MODE_WRITE_TO_PERSISTENT)
+            .then(() => {
+              resolve(true);
+            });
+        } else {
+          alert(`It seems as if an audiofile with the name "${file.name}" already exists. Please rename your file and upload again, thanks <3`);
+          resolve(false);
+        }
       } else {
         resolve(false);
       }
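Files saved this way are read back by readAudioFiles in audio.js (shown earlier in this compare): the bytes come out of the Emscripten FS and get wrapped in an object URL for an <audio> element. A condensed sketch of that readback path, assuming the same config.fs.idbfsAudioDir:

// sketch: turn a file stored in the Emscripten IDBFS into a playable <audio> source
FS.readdir(config.fs.idbfsAudioDir)
  .filter((name) => name[0] !== '.')
  .forEach((name) => {
    const bytes = FS.readFile(`${config.fs.idbfsAudioDir}/${name}`); // Uint8Array
    const type = name.endsWith('.mp3') ? 'audio/mpeg'
               : name.endsWith('.ogg') ? 'audio/ogg'
               : 'audio/wav';
    const audioElement = document.createElement('audio');
    audioElement.src = URL.createObjectURL(new Blob([bytes], { type }));
    audioElement.loop = true;
  });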
@ -117,6 +117,7 @@ const LiveUpdater = function(tp, buffy) {
|
||||||
};
|
};
|
||||||
this.immediateUpdate = (layer, values) => {
|
this.immediateUpdate = (layer, values) => {
|
||||||
const cv = clone(values);
|
const cv = clone(values);
|
||||||
|
const ctv = clone(layer.theatreObject.value);
|
||||||
if (cv.hasOwnProperty('color.r')) {
|
if (cv.hasOwnProperty('color.r')) {
|
||||||
cv['color'] = {
|
cv['color'] = {
|
||||||
r: cv['color.r'],
|
r: cv['color.r'],
|
||||||
|
@@ -129,7 +130,10 @@ const LiveUpdater = function(tp, buffy) {
       delete cv['color.b'];
       delete cv['color.a'];
     }
-    const v = {...layer.theatreObject.value, ...cv};
+    flattenObject(cv, ['color']);
+    flattenObject(ctv, ['color']);
+    const v = {...ctv, ...cv};
+    deFlattenObject(v, ['color']);
     const p = layer.values2cppProps(v);
     if (p !== false) {
       const id = layer.id();
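`flattenObject` and `deFlattenObject` are not shown in this diff; the change only makes sense if they convert between a nested `color` object and dotted keys (`color.r`, `color.g`, …) and back, so the current Theatre values (`ctv`) and the incoming values (`cv`) can be merged key by key. A rough sketch of that assumed, in-place behaviour (the real implementations live elsewhere in the repo):

// Assumed behaviour, for illustration only:
// flattenObject({color: {r: 1, g: 0}}, ['color']) mutates the object into {'color.r': 1, 'color.g': 0};
// deFlattenObject reverses that.
const flattenObject = (obj, keys) => {
  keys.forEach((key) => {
    if (obj.hasOwnProperty(key) && typeof obj[key] === 'object') {
      Object.keys(obj[key]).forEach((sub) => {
        obj[`${key}.${sub}`] = obj[key][sub];
      });
      delete obj[key];
    }
  });
};
const deFlattenObject = (obj, keys) => {
  keys.forEach((key) => {
    Object.keys(obj)
      .filter((k) => k.indexOf(`${key}.`) === 0)
      .forEach((k) => {
        obj[key] = obj[key] || {};
        obj[key][k.slice(key.length + 1)] = obj[k];
        delete obj[k];
      });
  });
};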
@@ -144,10 +148,27 @@ const LiveUpdater = function(tp, buffy) {
 
 const Record = function(tp) {
 
+  const NOT_RECORDING = 0;
+  const STARTING_RECORDING = 1;
+  const RECORDING = 2;
+  const STOPPING_RECORDING = 3;
+
   const hot = {};
-  let isRecording = false;
+  let isRecording = NOT_RECORDING;
   const buffy = new LiveBuffer();
   const liveUpdater = new LiveUpdater(tp, buffy);
+  let isInitialized = false;
+
+  const init = () => {
+    if (!isInitialized) {
+      tp.core.onChange(tp.sheet.sequence.pointer.playing, (playing) => {
+        if (isRecording === RECORDING && !playing) {
+          stopRecording();
+        }
+      });
+      isInitialized = true;
+    }
+  };
 
   const isHot = (layerID, propTitle) => {
     return hot.hasOwnProperty(layerID)
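Replacing the boolean with four numeric constants turns the recorder into a small state machine: idle, arming, recording, digesting. The `onChange` subscription registered in `init()` only calls `stopRecording()` while the state is `RECORDING`, so a stop triggered by playback ending cannot fire again while buffers are still being flushed. A compact illustration of the implied lifecycle (the helper below is purely illustrative, not part of the changeset):

// NOT_RECORDING -> startRecording() -> STARTING_RECORDING -> sequence plays -> RECORDING
// RECORDING -> stopRecording() (button or playback ended) -> STOPPING_RECORDING -> buffers written -> NOT_RECORDING
const describeRecordingState = (state) => {
  switch (state) {
    case NOT_RECORDING: return 'idle';
    case STARTING_RECORDING: return 'arming recorder';
    case RECORDING: return 'recording';
    case STOPPING_RECORDING: return 'digesting recording';
    default: return 'unknown state';
  }
};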
@@ -231,19 +252,19 @@ const Record = function(tp) {
     button.innerHTML = `<img src="/web/assets/record.svg" alt="record" />`;
     container.append(button);
     button.addEventListener('click', () => {
-      if(isRecording) {
+      if(isRecording === RECORDING) {
         stopRecording();
       } else {
         if (config.record.recordMapped) {
           // make all mapped props hot and
           Object.keys(audio.mapping)
             .forEach((layerID) => {
-              if (getLayer(layerID).isSelected()) {
+              //if (getLayer(layerID).isSelected()) { // NOTE: multilayer recording
                 Object.keys(audio.mapping[layerID])
                   .forEach((propTitle) => {
                     addHot(layerID, propTitle);
                   });
-              }
+              //}
             });
         } else {
           // only make this propTitle hot and
@@ -339,6 +360,10 @@ const Record = function(tp) {
     value,
     position = tp.sheet.sequence.position,
     lastPosition = buffy.NO_TIME) => {
+    // NOTE: multilayer recording
+    if (!hot.hasOwnProperty(layerID) || !hot[layerID].hasOwnProperty(propTitle)) {
+      return;
+    }
     hot[layerID][propTitle].recording.push({
       position,
       value,
@@ -369,7 +394,17 @@ const Record = function(tp) {
   };
 
   const startRecording = () => {
+    isRecording = STARTING_RECORDING;
     console.log('Record::startRecording');
+    document.querySelector('#notice_recording')
+      .classList.add('visible');
+    document.querySelector('#notice_recording')
+      .classList.remove('impenetrable');
+    document.querySelector('#notice_recording .what p').innerHTML = 'recording';
+    document.querySelector('#notice_recording .details p').innerHTML = '';
+    if (!isInitialized) {
+      init();
+    }
     lastPositions = {};
     tp.sheet.sequence.pause();
     const layerKeys = Object.keys(hot);
@@ -398,9 +433,16 @@ const Record = function(tp) {
       tp.sheet.sequence.position = 0;
       tp.sheet.sequence.play();
     });
-    isRecording = true;
+    isRecording = RECORDING;
   };
   const stopRecording = () => {
+    document.querySelector('#notice_recording')
+      .classList.add('visible');
+    document.querySelector('#notice_recording')
+      .classList.add('impenetrable');
+    document.querySelector('#notice_recording .what p').innerHTML = 'digesting recording';
+    document.querySelector('#notice_recording .details p').innerHTML = 'please wait';
+    isRecording = STOPPING_RECORDING;
     return new Promise((resolve) => {
       const layerKeys = Object.keys(hot);
       const promises = [];
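`startRecording` and `stopRecording` now drive the `#notice_recording` overlay directly by toggling its `visible` and `impenetrable` classes and rewriting the `.what` / `.details` text. A small hypothetical helper that centralises those DOM updates (not part of this changeset) could look like this:

// Hypothetical helper: one place to show, hide and harden the recording notice overlay.
// 'impenetrable' makes the overlay swallow pointer events while a recording is digested.
const setRecordingNotice = (visible, blocking = false, what = 'recording', details = '') => {
  const notice = document.querySelector('#notice_recording');
  notice.classList.toggle('visible', visible);
  notice.classList.toggle('impenetrable', blocking);
  notice.querySelector('.what p').innerHTML = what;
  notice.querySelector('.details p').innerHTML = details;
};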
@@ -474,8 +516,10 @@ const Record = function(tp) {
           });
           buffy.deregister(layerID);
         });
+        document.querySelector('#notice_recording')
+          .classList.remove('visible');
         console.log('Record::stopRecording', 'stopped recording');
-        isRecording = false;
+        isRecording = NOT_RECORDING;
         resolve();
       });
     });
@@ -493,7 +537,7 @@ const Record = function(tp) {
     return hot;
   };
   this.isRecording = () => {
-    return isRecording;
+    return isRecording != NOT_RECORDING;
   };
   this.injectPanel = injectPanel;
   this.startRecording = startRecording;
@@ -97,7 +97,7 @@ const TheatrePlay = function(autoInit = false) {
       if (typeof value === 'undefined') {
         return false;
       }
-    };
+    }
     return this.sheet.sequence.__experimental_getKeyframes(prop);
   };
   // wtf, this function was being written in one go
@@ -119,7 +119,7 @@ function uploadFile(expectedType = 'application/json') {
 
     let reader = new FileReader();
 
-    if (expectedType === 'application/zip' || file.type === 'application/zip') {
+    if (expectedType === 'application/zip' || file.type === 'application/zip' || file.type.indexOf('audio') === 0) {
       reader.onload = (e) => {
        const f = e.target.result;
        console.log(e, file.name, file.size, file.type, f);
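Routing audio MIME types through the same branch as zip uploads means the reader presumably hands back an object carrying `name`, `type` and `arrayBuffer`, which is exactly the shape `ModuleFS.save()` consumes above. A self-contained sketch of that assumed read path (names are illustrative, not taken from the repo):

// Hypothetical standalone version of the binary read path used for zip and audio uploads.
const readFileAsRecord = (file) => {
  return new Promise((resolve) => {
    const reader = new FileReader();
    reader.onload = (e) => {
      resolve({
        name: file.name,
        type: file.type,
        arrayBuffer: e.target.result, // later wrapped in a Uint8Array by ModuleFS.save()
      });
    };
    reader.readAsArrayBuffer(file);
  });
};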