individual audio files <-> prop mapping

This commit is contained in:
jrkb 2023-10-13 11:29:18 +02:00
parent 16124c755d
commit 748af243fa
3 changed files with 136 additions and 147 deletions
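
In short: each audio file (and the microphone) now gets its own analyser entry in `audioSourceCombo`, and every per-prop mapping picks one of those entries through its `source` field. A minimal sketch of that shape, using the names from the diff below (the file name, layer id and prop name are hypothetical examples):

// one entry per source: an uploaded file gets its own element + analyser ...
const audioCtx = new AudioContext();
const audioElement = new Audio('example.ogg'); // hypothetical file
const analyser = audioCtx.createAnalyser();
audioCtx.createMediaElementSource(audioElement).connect(analyser);
const audioSourceCombo = {
  'example.ogg': {
    analyser,
    dataArray: new Uint8Array(analyser.frequencyBinCount),
    audioElement, // stays null for the 'microphone' entry
  },
};
// ... and a prop mapping selects its source by name ('microphone' is the default).
const mapping = {
  layer1: {
    rotation: { sync: 'volume', source: 'example.ogg', value: 0.0, muted: true },
  },
};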

View file

@@ -21,6 +21,7 @@ const AudioMappingOptions = function() {
this.sync = 'volume';
this.source = 'microphone';
this.value = 0.0;
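// new default: a freshly created mapping starts out muted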
this.muted = true;
};
const Audio = function(tp, record) {
@@ -608,10 +609,10 @@ const Audio = function(tp, record) {
}
});
};
const audioFileStuff = {};
const audioSourceCombo = {};
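// keyed by source name (an uploaded audio file name or 'microphone');
// each entry holds { analyser, dataArray, audioElement } for that source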
const readAudioFiles = () => {
FS.readdir(config.fs.idbfsAudioDir).forEach((file) => {
if (file.indexOf('.') !== 0 && !audioFileStuff.hasOwnProperty(file)) {
if (file.indexOf('.') !== 0 && !audioSourceCombo.hasOwnProperty(file)) {
const audioElement = document.createElement('audio');
audioElement.classList.add('invisible');
audioElement.classList.add('audio_file');
@@ -637,19 +638,23 @@ const Audio = function(tp, record) {
);
audioElement.src = src;
audioElement.loop = true;
const source = audioCtx.createMediaElementSource(audioElement);
source.connect(audioCtx.destination);
const analyser = audioCtx.createAnalyser();
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.smoothingTimeConstant = 0.85;
analyser.fftSize = config.audio.fftBandsAnalysed;
const bufferLength = analyser.frequencyBinCount;
const bufferLength = analyser.frequencyBinCount / 2;
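// frequencyBinCount is already fftSize / 2, so halving again keeps only the lower half of the analysed bins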
const dataArray = new Uint8Array(bufferLength);
source.connect(analyser);
audioElement.play();
audioFileStuff[file] = {
audioSourceCombo[file] = {
dataArray,
analyser,
audioElement,
@@ -715,7 +720,14 @@ const Audio = function(tp, record) {
analyser.minDecibels = -90;
analyser.maxDecibels = -10;
analyser.smoothingTimeConstant = 0.85;
window.analyser = analyser;
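// exposes the microphone analyser globally (presumably just a debug handle)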
analyser.fftSize = config.audio.fftBandsAnalysed;
const bufferLength = analyser.frequencyBinCount / 2;
audioSourceCombo['microphone'] = {
analyser,
dataArray: new Uint8Array(bufferLength),
audioElement: null,
};
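// the microphone entry has no audio element, so the playback sync in the draw loop skips it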
readAudioFiles();
@@ -778,6 +790,7 @@ const Audio = function(tp, record) {
canvas.setAttribute("width", config.audio.fftBandsUsed);
const visualSelect = audioDom.querySelector("#visual");
let drawVisual;
let previousPosition = -1;
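// remembers the previous sequence position so a jump backwards (loop rollover) can be detected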
// Main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
@@ -792,7 +805,6 @@ const Audio = function(tp, record) {
source.connect(analyser);
visualize();
voiceChange();
})
.catch(function(err) {
console.log("The following gUM error occured: " + err);
@@ -802,57 +814,8 @@ const Audio = function(tp, record) {
}
const visualize = () => {
const WIDTH = canvas.width;
const HEIGHT = canvas.height;
const visualSetting = visualSelect.value;
if (visualSetting === "sinewave") {
//analyser.fftSize = 2048;
//const bufferLength = analyser.fftSize;
//// We can use Float32Array instead of Uint8Array if we want higher precision
//// const dataArray = new Float32Array(bufferLength);
//const dataArray = new Uint8Array(bufferLength);
//canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
//const draw = function() {
//drawVisual = requestAnimationFrame(draw);
//analyser.getByteTimeDomainData(dataArray);
//canvasCtx.fillStyle = "rgb(200, 200, 200)";
//canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
//canvasCtx.lineWidth = 2;
//canvasCtx.strokeStyle = "rgb(0, 0, 0)";
//canvasCtx.beginPath();
//const sliceWidth = (WIDTH * 1.0) / bufferLength;
//let x = 0;
//for (let i = 0; i < bufferLength; i++) {
//let v = dataArray[i] / 128.0;
//let y = (v * HEIGHT) / 2;
//if (i === 0) {
//canvasCtx.moveTo(x, y);
//} else {
//canvasCtx.lineTo(x, y);
//}
//x += sliceWidth;
//}
//canvasCtx.lineTo(canvas.width, canvas.height / 2);
//canvasCtx.stroke();
//};
//draw();
} else if (visualSetting == "frequencybars") {
analyser.fftSize = config.audio.fftBandsAnalysed;
//analyser.fftSize = config.audio.fftBandsAnalysed;
const w = config.audio.fftBandsUsed;
const h = config.audio.fftHeight;
const verticalFactor = h / 256.0;
@@ -869,110 +832,137 @@ const Audio = function(tp, record) {
let frameCount = 0;
const drawAlt = function() {
const position = tp.sheet.sequence.position;
let positionRollover = false;
if (config.audio.rolloverResetLoop && position < previousPosition) {
positionRollover = true;
}
previousPosition = position;
canvasKeys = Object.keys(canvasCombos);
drawVisual = requestAnimationFrame(drawAlt);
//analyser.getByteFrequencyData(dataArrayAlt);
//Object.keys(audioFileStuff).forEach((afs) => {
//afs.analyser.ByteFrequencyData(afs.dataArray);
//});
audioFileStuff['hito_steyerl_about_suicide_cameras.ogg'].analyser.getByteFrequencyData(dataArrayAlt);
for (let i = 0; i < canvasKeys.length; i++) {
canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
canvasCombos[canvasKeys[i]][1].fillRect(0, 0, w, h);
const layerID = canvasCombos[canvasKeys[i]][2];
const m = mapping[layerID][canvasKeys[i]];
canvasKeys.forEach((k) => {
canvasCombos[k][1].fillStyle = "rgb(0, 0, 0)"; // AUDIO COLOR
canvasCombos[k][1].fillRect(0, 0, w, h);
const layerID = canvasCombos[k][2];
const m = mapping[layerID][k];
if (m.sync === 'volume') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = h - (m.max_in * verticalFactor);
const sh = (m.max_in - m.min_in) * verticalFactor;
canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
canvasCombos[k][1].fillRect(sx, sy, sw, sh);
} else if (m.sync === 'pitch') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = 0;
const sh = h;
canvasCombos[canvasKeys[i]][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
canvasCombos[canvasKeys[i]][1].fillRect(sx, sy, sw, sh);
}
canvasCombos[k][1].fillStyle = "rgb(80, 80, 80)"; // AUDIO COLOR
canvasCombos[k][1].fillRect(sx, sy, sw, sh);
}
});
const barWidth = 1;//(w / bufferLengthAlt) * 2.5;
let barHeight;
let x = 0;
let max_i = 0;
let max_ri = 0;
let total_v = 0;
let max_v = 0;
for (let k = 0; k < canvasKeys.length; k++) {
const layerID = canvasCombos[canvasKeys[k]][2];
const m = mapping[layerID][canvasKeys[k]];
m.max_v = max_v;
m.max_i = max_i;
m.max_ri = max_ri;
m.total_v = total_v;
//analyser.getByteFrequencyData(dataArrayAlt);
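// group the prop mappings by their audio source and reset the per-frame accumulators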
const usedSourceCombos = [];
const analysedResults = {};
Object.keys(mapping).forEach((layerID) => {
Object.keys(mapping[layerID]).forEach((propTitle) => {
const m = mapping[layerID][propTitle];
const source = m.source;
if (usedSourceCombos.indexOf(source) < 0) {
usedSourceCombos.push(source);
analysedResults[source] = {
max_i: 0,
max_ri: 0,
max_v: 0,
total_v: 0,
mappings: [],
};
}
for (let i = 0; i < w; i++) {
barHeight = dataArrayAlt[i];
total_v += barHeight;
max_ri = barHeight * i;
if (barHeight > max_v) {
max_v = barHeight;
max_i = i;
m.max_v = 0;
m.max_i = 0;
m.max_ri = 0;
m.total_v = 0;
analysedResults[source].mappings.push(m);
});
});
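// keep file playback in sync with the timeline: (re)start files whose source is mapped, pause the rest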
Object.keys(audioSourceCombo).forEach((k) => {
const asc = audioSourceCombo[k];
if (asc.audioElement !== null) {
if (usedSourceCombos.indexOf(k) >= 0) {
if (positionRollover || asc.audioElement.paused) {
asc.audioElement.currentTime = position % asc.audioElement.duration;
asc.audioElement.play();
}
for (let k = 0; k < canvasKeys.length; k++) {
const layerID = canvasCombos[canvasKeys[k]][2];
const m = mapping[layerID][canvasKeys[k]];
let fillStyle = "rgb(200,200,200)"; // AUDIO COLOR
if (m.min_freq <= i && m.max_freq >= i) {
m.total_v += barHeight;
if (barHeight > m.max_v) {
m.max_v = barHeight;
m.max_i = i;
m.max_ri = barHeight * i;
} else if (!asc.audioElement.paused) {
asc.audioElement.pause();
}
fillStyle = "rgb(255,255,255)"; // AUDIO COLOR
}
canvasCombos[canvasKeys[k]][1].fillStyle = fillStyle;
canvasCombos[canvasKeys[k]][1].fillRect(
});
usedSourceCombos.forEach((source) => {
const afs = audioSourceCombo[source];
const r = analysedResults[source];
afs.analyser.getByteFrequencyData(afs.dataArray);
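// one FFT read per source and frame: accumulate totals, track the loudest bin, and draw the bars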
for (let f = 0; f < w; f++) {
const v = afs.dataArray[f];
r.total_v += v;
if (r.max_v < v) {
r.max_v = v;
r.max_i = f; // index of the loudest bin
}
r.max_ri += v * f;
let fillStyle = 'rgb(200,200,200)';
for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
const k = canvasKeys[k_i];
const x = f;
canvasCombos[k][1].fillStyle = fillStyle;
canvasCombos[k][1].fillRect(
x,
h - (barHeight * verticalFactor),
barWidth,
(barHeight * verticalFactor)
h - (v * verticalFactor),
1,
(v * verticalFactor)
);
}
x += barWidth;
analysedResults[source].mappings.forEach((m) => {
if (m.min_freq <= f && m.max_freq >= f) {
m.total_v += v;
if (m.max_v < v) {
m.max_v = v;
m.max_i = f;
}
max_ri /= total_v;
for (let k = 0; k < canvasKeys.length; k++) {
const layerID = canvasCombos[canvasKeys[k]][2];
const m = mapping[layerID][canvasKeys[k]];
m.max_ri += v * f;
}
});
}
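// normalise: max_ri becomes the energy-weighted mean bin index (the spectrum's centre of mass)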
r.max_ri /= r.total_v;
analysedResults[source].mappings.forEach((m) => {
m.max_ri /= m.total_v;
});
});
for (let k_i = 0; k_i < canvasKeys.length; k_i++) {
const k = canvasKeys[k_i];
const layerID = canvasCombos[k][2];
const m = mapping[layerID][k];
if (m.sync === 'volume') {
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = h - (m.max_in * verticalFactor);
const sh = (m.max_in - m.min_in) * verticalFactor;
canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
} else if (m.sync === 'pitch') {
const m = mapping[layerID][canvasKeys[k]];
const sx = m.min_freq;
const sw = m.max_freq - m.min_freq;
const sy = 0;
const sh = h;
canvasCombos[canvasKeys[k]][1].lineWidth = 1; // AUDIO COLOR
canvasCombos[canvasKeys[k]][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
canvasCombos[canvasKeys[k]][1].strokeRect(sx, sy, sw, sh);
canvasCombos[k][1].lineWidth = 1; // AUDIO COLOR
canvasCombos[k][1].strokeStyle = "rgb(255,255,255)"; // AUDIO COLOR
canvasCombos[k][1].strokeRect(sx, sy, sw, sh);
}
}
const propsToSet = [];
getLayers().forEach((layer) => {
if (mapping.hasOwnProperty(layer.id())) {
@@ -1079,11 +1069,6 @@ const Audio = function(tp, record) {
frameCount++;
};
drawAlt();
} else if (visualSetting == "off") {
canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
canvasCtx.fillStyle = "red";
canvasCtx.fillRect(0, 0, WIDTH, HEIGHT);
}
}
const voiceChange = () => {
@@ -1204,9 +1189,11 @@ const Audio = function(tp, record) {
this.addAudioOptions = addAudioOptions;
this.removeAudioOptions = removeAudioOptions;
this.AudioMappingOptions = AudioMappingOptions;
this.readAudioFiles = readAudioFiles;
// debug
this.canvasCombos = canvasCombos;
this.audioSourceCombo = audioSourceCombo;
};
export {

View file

@@ -102,6 +102,7 @@ const config = {
colorSeparateRGBA: true,
ignoreOutboundFrequencies: true,
pitchCombineFrequencies: false,
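// when the timeline position jumps backwards (the sequence looped), re-sync mapped audio files to it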
rolloverResetLoop: true,
},
record: {
ignoreProps: ['transformOrigin', 'fontFamily', 'text', 'mirror_x', 'mirror_y', 'mirror_xy'],

View file

@@ -579,6 +579,7 @@ const initPanels = () => {
.save(file)
.then(() => {
console.log('ermh... done uploading?', file);
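// re-scan the audio directory so the newly uploaded file shows up as a mapping source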
audio.readAudioFiles();
});
});
});