record microphone to audiofile and add to timeline
dirty dirty in-between dependency hashes: openFrameworks d78075f4bca6be2a2533c6e51a75cc1f18404501 ofxMsdfgen e14da13d02c4dff04fb69d7923469f606924e6c3 ofxGPUFont d482bb7cbdf6b296fa4ab5abcf73fb5ff8c8b239 ofxVariableLab 0b5f9bdebc1e5550621957e73c040c258ec6317b ofxProfiler a868e34fa1a79189dd4fbdede2938e308535e5e8 theatre 86d3e07f6f2c75fd6e08fca8c97e3617c9e23b18
This commit is contained in:
parent 5ff6ee3905
commit c832982d46
4 changed files with 294 additions and 6 deletions
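The pieces added below work together roughly as follows: recording state changes are broadcast as 'record' events, the MicrophoneRecorder captures the microphone into an audio file while recording, and the AudioPlayer places the finished file on the timeline and plays it back in sync with the sequence. Here is a minimal sketch of that flow, for orientation only: it assumes the constructors and the record/tp/audio objects from this commit are in scope and that microphone access is granted via getUserMedia; in the commit itself this wiring happens inside the Audio constructor and the record-solo button handler.

// illustrative sketch, not part of the commit
const microphoneRecorder = new MicrophoneRecorder();
const audioPlayer = new AudioPlayer();

navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
  // hand the MediaStream to the recorder so it can create its MediaRecorder
  microphoneRecorder.init(stream);

  // react to the 'record' state events dispatched by Record's setIsRecording
  window.addEventListener('record', audioPlayer.listener);             // remembers hot props + timeline position
  window.addEventListener('record', microphoneRecorder.startListener); // starts capture on RECORDING
  window.addEventListener('record', microphoneRecorder.stopListener);  // stops capture on STOPPING_RECORDING

  // once the recorded file is saved and re-read, a 'microphoneRecorder' event
  // lets AudioPlayer.add() place it on the timeline; AudioPlayer.update() then
  // seeks and plays it in sync during playback
  audioPlayer.init();
});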
@@ -9,6 +9,8 @@ import {
  clone,
  rgbaToHexa,
  hexaToRgba,
  getTimestamp,
  getFileExtensionFromMimeType,
} from './utils.js';

window.mapValue = mapValue;

@@ -26,10 +28,202 @@ const AudioMappingOptions = function() {
  this.source = 'microphone';
  this.value = 0.0;
  this.muted = true;
  this.addToTimeline = false;
};

window.playAudioFile = (file) => {
  const audioElement = document.createElement('audio');
  audioElement.classList.add('invisible');
  audioElement.classList.add('audio_file');
  audioElement.src = file; // `file` is assumed to be a playable source (URL or object URL)
  document.querySelector('body').append(audioElement);
  return audioElement;
};

const AudioPlayer = function() {
  const audioElements = [];
  let updateInterval = false;
  let updateInterval_ms = 10;
  this.add = (layer, propTitle, time, file) => {
    const layerID = typeof layer === 'string' ? layer : layer.id();
    console.log('AudioPlayer::add', {layerID, propTitle, time, file});
    const index = audioElements.findIndex((e) => e.layerID === layerID && e.propTitle === propTitle);
    if (index === -1) {
      const audioElement = document.createElement('audio');
      audioElement.classList.add('invisible');
      audioElement.classList.add('audio_file');
      audioElement.src = audio.audioSourceCombos[file].audioElement.src;
      audioElements.push({
        layerID, propTitle, audioElement, time, file
      });
    } else {
      audioElements[index].audioElement.src = audio.audioSourceCombos[file].audioElement.src;
      audioElements[index].time = time;
    }
  };
  this.update = () => {
    audioElements.forEach((audioElement, i) => {
      if (tp.isPlaying() && !record.isRecording()) {
        const diff = tp.sheet.sequence.position - audioElement.time;
        if (diff >= 0) {
          if (audioElement.audioElement.paused) {
            audioElement.audioElement.currentTime = diff;
            audioElement.audioElement.play();
            console.log('play audioElement ', audioElement.file, audioElement.propTitle, i);
          }
        } else if (!audioElement.audioElement.paused) {
          audioElement.audioElement.pause();
          audioElement.audioElement.currentTime = 0;
          console.log('pause audioElement ', audioElement.file, audioElement.propTitle, i);
        }
      } else if (!audioElement.audioElement.paused) {
        audioElement.audioElement.pause();
        audioElement.audioElement.currentTime = 0;
        console.log('pause audioElement ', audioElement.file, audioElement.propTitle, i);
      }
    });
  };
  this.audioElements = audioElements;
  this.init = () => {
    clearInterval(updateInterval);
    updateInterval = setInterval(() => {
      this.update();
    }, updateInterval_ms);
  };
  this.listener = (event) => {
    console.log('AudioPlayer::listener received', event);
    // hot and time are only populated while handling the RECORDING state
    let hot = false;
    let time = false;
    if (event.detail === record.possibleStates.RECORDING) {
      hot = clone(record.getHot());
      time = tp.sheet.sequence.position;
      const layerIDs = Object.keys(hot);
      layerIDs.forEach((layerID) => {
        const propTitles = Object.keys(hot[layerID]);
        propTitles.forEach((propTitle) => {
          const m = audio.getMapping()[layerID][propTitle];
          if (m.addToTimeline) {
            if (m.source === 'microphone') {
              const waitForMicrophoneListener = (event) => {
                if (event.detail.fileIsRead) {
                  this.add(layerID, propTitle, time, event.detail.filename);
                  window.removeEventListener('microphoneRecorder', waitForMicrophoneListener);
                }
              };
              window.addEventListener('microphoneRecorder', waitForMicrophoneListener);
            }
          }
        });
      });
    }
    if (event.detail === record.possibleStates.NOT_RECORDING) {
      const layerIDs = Object.keys(hot);
      layerIDs.forEach((layerID) => {
        const propTitles = Object.keys(hot[layerID]);
        propTitles.forEach((propTitle) => {
          const m = audio.getMapping()[layerID][propTitle];
          if (m.addToTimeline) {
            if (m.source === 'microphone') {
              // already handled above via the microphoneRecorder listener
            } else {
              this.add(layerID, propTitle, time, m.source);
            }
          }
        });
      });
      window.removeEventListener('record', this.listener);
    }
  };
};

const MicrophoneRecorder = function() {
  let recorder = false;
  let buffy = [];
  let filenameWithoutExtension;
  let fileExtension;

  this.getLastFilename = () => {
    return `${filenameWithoutExtension}.${fileExtension}`;
  };

  this.init = (stream) => {
    recorder = new MediaRecorder(stream);
    recorder.addEventListener('dataavailable', (event) => {
      buffy.push(event.data);
    });
  };

  this.start = (name = "") => {
    if (name === "") {
      filenameWithoutExtension = getTimestamp();
    } else {
      filenameWithoutExtension = name;
    }
    buffy = [];
    recorder.start();
  };

  this.stop = () => {
    return new Promise((resolve) => {
      // save the audio MIME type so it can be used as the Blob type
      let mimeType = recorder.mimeType;

      // listen to the stop event in order to create & resolve a single Blob object
      recorder.addEventListener("stop", () => {
        // create a single Blob, as we may have gathered several Blobs that need to be joined into one
        let blob = new Blob(buffy, { type: mimeType });
        var arrayBuffer;
        var fileReader = new FileReader();
        fileReader.onload = function(event) {
          arrayBuffer = event.target.result;
          fileExtension = getFileExtensionFromMimeType(mimeType);
          if (fileExtension !== false) {
            const file = {
              name: `${filenameWithoutExtension}.${fileExtension}`,
              type: 'audio',
              arrayBuffer
            };
            moduleFS.save(file);
            window.dispatchEvent(new CustomEvent('microphoneRecorder', {detail: {fileIsRead: false, filename: file.name}}));
            resolve(file.name);
          } else {
            resolve(false);
          }
        };
        fileReader.readAsArrayBuffer(blob);

        // the promise resolves with the filename of the recorded audio once the file has been read
      });

      recorder.stop();
    });
  };

  this.startListener = (event) => {
    console.log('microphoneRecorder startListener received', event);
    if (event.detail === record.possibleStates.RECORDING) {
      console.log('microphoneRecorder starting');
      this.start();
      window.removeEventListener('record', this.startListener);
    }
  };

  this.stopListener = (event) => {
    console.log('microphoneRecorder stopListener received', event);
    if (event.detail === record.possibleStates.STOPPING_RECORDING) {
      console.log('microphoneRecorder stopping');
      this.stop().then((filename) => {
        // be happy
      });
      window.removeEventListener('record', this.stopListener);
    }
  };
};

const Audio = function(tp, record) {

  const audioPlayer = new AudioPlayer();
  this.audioPlayer = audioPlayer; // DEBUG TODO: remove me
  const audioDom = document.querySelector('.audioWrapper');
  let audioCtx = false;
  const heading = audioDom.querySelector("h1");

@@ -49,6 +243,8 @@ const Audio = function(tp, record) {
  //});
  let started = false;
  let allowedMicrophone = true; // TODO: actually use this
  const microphoneRecorder = new MicrophoneRecorder();
  this.microphoneRecorder = microphoneRecorder; // DEBUG TODO: remove me

  let mapping = {};
  let savedMapping = {};

@@ -288,8 +484,6 @@ const Audio = function(tp, record) {
  mappingOptions.letterDelay = typeof ld.value === 'number' ? ld.value : parseInt(ld.value);
  if (isSequenced) {
    const prop = getNestedProperty(layer.theatreObject.props.letterDelays, propTitle.split('.'));
    console.log('updateMappingOptions', prop, propTitle.split('.'));
    alert('supposed to update for', propTitle.split('.'));
    tp.studio.transaction(({
      set
    }) => {

@@ -335,7 +529,31 @@ const Audio = function(tp, record) {
  record_Dom_Cont.append(recordSoloButton);
  record_Dom_Cont.append(recordAllButton);

  {
    const cssClass = toCssClass(`audio_addToTimeline${propTitle}`);
    const checkboxDom = document.createElement('input');
    checkboxDom.type = 'checkbox';
    checkboxDom.checked = false;
    mappingOptions.addToTimeline = checkboxDom.checked;
    checkboxDom.addEventListener('change', (event) => {
      mappingOptions.addToTimeline = event.currentTarget.checked;
    });
    record_Dom_Cont.append(checkboxDom);
  }

  recordSoloButton.addEventListener('click', () => {
    if (!record.isRecording()) {
      console.log('SHOULD AT LEAST ADD THE AUDIOPLAYER');
      // will be recording
      window.addEventListener('record', audioPlayer.listener);
      if (mappingOptions.source === 'microphone') {
        console.log('SHOULD ALSO ADD THE MICROPHONERECORDER');
        window.addEventListener('record', microphoneRecorder.startListener);
        window.addEventListener('record', microphoneRecorder.stopListener);
      } else {
        audioSourceCombos[mappingOptions.source].audioElement.currentTime = 0;
      }
    }
    record.toggleRecording([[layer.id()].concat(propTitle.split('.'))]);
  });
  recordAllButton.addEventListener('click', () => {

@@ -870,6 +1088,8 @@ const Audio = function(tp, record) {
    type = 'audio/mpeg';
  } else if (extension === 'ogg') {
    type = 'audio/ogg';
  } else if (extension === 'webm') {
    type = 'audio/webm';
  }

  const src = URL.createObjectURL(

@@ -904,6 +1124,14 @@ const Audio = function(tp, record) {
      }
    });
  };
  window.addEventListener('microphoneRecorder', (event) => {
    if (!event.detail.fileIsRead) {
      readAudioFiles();
      const newDetails = event.detail;
      newDetails.fileIsRead = true;
      window.dispatchEvent(new CustomEvent('microphoneRecorder', {detail: newDetails}));
    }
  });


  const init = () => {

@@ -919,6 +1147,7 @@ const Audio = function(tp, record) {
      return;
    }
  }
  audioPlayer.init();
  heading.textContent = "Voice-change-O-matic";
  //document.body.removeEventListener("click", init);


@@ -1026,6 +1255,8 @@ const Audio = function(tp, record) {
    audioSourceCombos['microphone'].source = source;
    audioSourceCombos['microphone'].gain = gain;

    microphoneRecorder.init(stream);

    visualize();
  })
  .catch(function(err) {

@@ -154,6 +154,28 @@ const Record = function(tp) {
  const RECORDING = 2;
  const STOPPING_RECORDING = 3;

  this.possibleStates = {
    NOT_RECORDING,
    STARTING_RECORDING,
    RECORDING,
    STOPPING_RECORDING,
  };

  this.friendlyState = (state) => {
    switch(state) {
      case NOT_RECORDING: return 'NOT_RECORDING';
      case STARTING_RECORDING: return 'STARTING_RECORDING';
      case RECORDING: return 'RECORDING';
      case STOPPING_RECORDING: return 'STOPPING_RECORDING';
    }
  };

  const setIsRecording = (status) => {
    isRecording = status;
    window.dispatchEvent(new CustomEvent("record", {detail: status}));
    console.log('setIsRecording', this.friendlyState(status));
  };

  const hot = {};
  let isRecording = NOT_RECORDING;
  const buffy = new LiveBuffer();

@@ -290,6 +312,7 @@ const Record = function(tp) {
  if (isRecording === RECORDING) {
    stopRecording();
  } else {
    // set microphone recording to false by default
    if (!propPaths) {
      // make all mapped props hot and
      Object.keys(audio.getMapping())

@@ -466,7 +489,7 @@ const Record = function(tp) {
  };

  const startRecording = () => {
    isRecording = STARTING_RECORDING;
    setIsRecording(STARTING_RECORDING);
    console.log('Record::startRecording');
    document.querySelector('#notice_recording')
      .classList.add('visible');

@@ -511,7 +534,7 @@ const Record = function(tp) {
      //tp.sheet.sequence.position = 0;
      tp.sheet.sequence.play();
    });
    isRecording = RECORDING;
    setIsRecording(RECORDING);
  };
  const stopRecording = () => {
    document.querySelector('#notice_recording')

@@ -520,7 +543,7 @@ const Record = function(tp) {
      .classList.add('imprenetrable');
    document.querySelector('#notice_recording .what p').innerHTML = 'digesting recording';
    document.querySelector('#notice_recording .details p').innerHTML = 'please wait';
    isRecording = STOPPING_RECORDING;
    setIsRecording(STOPPING_RECORDING);
    return new Promise((resolve) => {
      const layerKeys = Object.keys(hot);
      const promises = [];

@@ -584,7 +607,7 @@ const Record = function(tp) {
    document.querySelector('#notice_recording')
      .classList.remove('visible');
    console.log('Record::stopRecording', 'stopped recording');
    isRecording = NOT_RECORDING;
    setIsRecording(NOT_RECORDING);

    if (remember.isPlaying) {
      tp.sheet.sequence.play();

@@ -812,6 +812,9 @@ const TheatrePlay = function(autoInit = false) {
      Module.setPlaying(playing);
    });
  };
  this.isPlaying = () => {
    return this.core.val(this.sheet.sequence.pointer.playing);
  };
  this.studio = studio;
  this.core = core;

@@ -34,6 +34,19 @@ const getUuid = () => {
  return uuid.getUuid();
}

const getTimestamp = () => {
  const now = new Date();
  const dd = String(now.getDate()).padStart(2, '0');
  const mm = String(now.getMonth() + 1).padStart(2, '0'); // January is 0!
  const yyyy = now.getFullYear();
  const hours = String(now.getHours()).padStart(2, '0');
  const minutes = String(now.getMinutes()).padStart(2, '0');
  const seconds = String(now.getSeconds()).padStart(2, '0');

  const timestamp = `${yyyy}.${mm}.${dd}.${hours}:${minutes}:${seconds}`;
  return timestamp;
}

const makeEven = (n) => {
  const nr = Math.round(n);
  return nr - nr % 2;

@@ -527,6 +540,22 @@ const hexaToRgba = (hex_a) => {
  }
};

const getFileExtensionFromMimeType = (mimeType) => {
  if (mimeType.toLowerCase().indexOf('audio/webm') >= 0) {
    return "webm";
  }
  if (mimeType.toLowerCase().indexOf('audio/mpeg') >= 0) {
    return "mp3";
  }
  if (mimeType.toLowerCase().indexOf('audio/ogg') >= 0) {
    return "ogg";
  }
  if (mimeType.toLowerCase().indexOf('audio/wav') >= 0) {
    return "wav";
  }
  return false;
};


/////////////////////////////////////
// you can test these functions in

@@ -542,6 +571,7 @@ const hexaToRgba = (hex_a) => {

export {
  getUuid,
  getTimestamp,
  htmlToElement,
  downloadFile,
  uploadFile,

@@ -567,4 +597,5 @@ export {
  getNestedProperty,
  rgbaToHexa,
  hexaToRgba,
  getFileExtensionFromMimeType,
}