sequence.attachAudio() now handles autoplay blocking

Aria Minaei 2021-09-22 13:32:59 +02:00
parent 1b573fb766
commit 6b99022220


@@ -106,8 +106,31 @@ export interface ISequence {
    *
    * await sheet.sequence.attachAudio({source: audioBuffer, audioContext, destinationNode})
    * ```
+   *
+   * Note: It's better to provide the `audioContext` rather than allow Theatre to create it.
+   * That's because some browsers [suspend the audioContext](https://developer.chrome.com/blog/autoplay/#webaudio)
+   * unless it's initiated by a user gesture, like a click. If that happens, Theatre will
+   * wait for a user gesture to resume the audioContext. But that's probably not an
+   * optimal user experience. It is better to provide a button or some other UI element
+   * to communicate to the user that they have to initiate the animation.
+   *
+   * Example:
+   * ```ts
+   * // html: <button id="start">start</button>
+   * const button = document.getElementById('start')
+   *
+   * button.addEventListener('click', async () => {
+   *   const audioContext = ...
+   *   await sheet.sequence.attachAudio({audioContext, source: '...'})
+   *   sheet.sequence.play()
+   * })
+   * ```
    */
-  attachAudio(args: IAttachAudioArgs): Promise<void>
+  attachAudio(args: IAttachAudioArgs): Promise<{
+    decodedBuffer: AudioBuffer
+    audioContext: AudioContext
+    destinationNode: AudioDestinationNode
+  }>
 }

 export default class TheatreSequence implements ISequence {
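Since attachAudio() now resolves to the decoded buffer and the audio graph instead of Promise<void>, a caller can await a fully usable result. A minimal caller-side sketch (the button markup, the '/music.mp3' URL, and the `sheet` in scope are illustrative assumptions, not part of this commit):

```ts
// Hypothetical usage of the new signature. Creating the AudioContext
// inside the click handler keeps it in the 'running' state, so Theatre
// never has to fall back to waiting for a gesture.
const startButton = document.getElementById('start') as HTMLButtonElement

startButton.addEventListener('click', async () => {
  const audioContext = new AudioContext()
  const {decodedBuffer} = await sheet.sequence.attachAudio({
    audioContext,
    source: '/music.mp3',
  })
  console.log(`Attached ${decodedBuffer.duration.toFixed(2)}s of audio`)
  sheet.sequence.play()
})
```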
@@ -164,7 +187,11 @@ export default class TheatreSequence implements ISequence {
     privateAPI(this).position = position
   }

-  async attachAudio(args: IAttachAudioArgs): Promise<void> {
+  async attachAudio(args: IAttachAudioArgs): Promise<{
+    decodedBuffer: AudioBuffer
+    audioContext: AudioContext
+    destinationNode: AudioDestinationNode
+  }> {
     const {audioContext, destinationNode, decodedBuffer} =
       await resolveAudioBuffer(args)
@@ -176,6 +203,8 @@ export default class TheatreSequence implements ISequence {
     )

     privateAPI(this).replacePlaybackController(playbackController)
+
+    return {audioContext, destinationNode, decodedBuffer}
   }
 }
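A side benefit of the return value added above: the same AudioContext and destination can be reused for audio outside the sequence. A sketch, assuming a `sheet` in scope, a hypothetical effect URL, and an enclosing async function:

```ts
// Reuse the audio graph that attachAudio() resolves with for one-off
// sound effects outside the Theatre sequence. Sketch only.
const {audioContext, destinationNode} = await sheet.sequence.attachAudio({
  source: '/music.mp3',
})

async function playEffect(url: string): Promise<void> {
  const response = await fetch(url)
  const buffer = await audioContext.decodeAudioData(
    await response.arrayBuffer(),
  )
  const sourceNode = audioContext.createBufferSource()
  sourceNode.buffer = buffer
  sourceNode.connect(destinationNode)
  sourceNode.start()
}
```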
@@ -184,17 +213,53 @@ async function resolveAudioBuffer(args: IAttachAudioArgs): Promise<{
   audioContext: AudioContext
   destinationNode: AudioDestinationNode
 }> {
-  const audioContext = args.audioContext || new AudioContext()
+  function getAudioContext(): Promise<AudioContext> {
+    if (args.audioContext) return Promise.resolve(args.audioContext)
+    const ctx = new AudioContext()
+    if (ctx.state === 'running') return Promise.resolve(ctx)
+    // The AudioContext is suspended, probably because the browser
+    // has blocked it, since it was not initiated by a user gesture.
+    return new Promise<AudioContext>((resolve) => {
+      const listener = () => {
+        ctx.resume()
+      }
+      const eventsToHookInto: Array<keyof WindowEventMap> = [
+        'mousedown',
+        'keydown',
+        'touchstart',
+      ]
+      const eventListenerOpts = {capture: true, passive: false}
+      eventsToHookInto.forEach((eventName) => {
+        window.addEventListener(eventName, listener, eventListenerOpts)
+      })
+      ctx.addEventListener('statechange', () => {
+        if (ctx.state === 'running') {
+          eventsToHookInto.forEach((eventName) => {
+            window.removeEventListener(eventName, listener, eventListenerOpts)
+          })
+          resolve(ctx)
+        }
+      })
+    })
+  }
+
+  async function getAudioBuffer(): Promise<AudioBuffer> {
+    if (args.source instanceof AudioBuffer) {
+      return args.source
+    }
+
     const decodedBufferDeferred = defer<AudioBuffer>()
-  if (args.source instanceof AudioBuffer) {
-    decodedBufferDeferred.resolve(args.source)
-  } else if (typeof args.source !== 'string') {
+    if (typeof args.source !== 'string') {
       throw new Error(
         `Error validating arguments to sequence.attachAudio(). ` +
           `args.source must either be a string or an instance of AudioBuffer.`,
       )
-  } else {
+    }
+
     let fetchResponse
     try {
       fetchResponse = await fetch(args.source)
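The doc comment added in this commit recommends explicit UI over the silent wait-for-gesture fallback implemented in getAudioContext() above. One way a caller could do that is to probe the context state up front. A sketch, where `showPlayButton` is a hypothetical UI helper and the code is assumed to run inside an async function with a `sheet` in scope:

```ts
// Sketch: surface a play button instead of relying on the silent
// wait-for-gesture fallback. `showPlayButton` is hypothetical.
const audioContext = new AudioContext()

async function attachAndPlay(): Promise<void> {
  await sheet.sequence.attachAudio({audioContext, source: '/music.mp3'})
  sheet.sequence.play()
}

if (audioContext.state === 'suspended') {
  // Blocked by the browser's autoplay policy; ask for a gesture.
  showPlayButton(async () => {
    await audioContext.resume() // allowed: we're inside a user gesture
    await attachAndPlay()
  })
} else {
  await attachAndPlay()
}
```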
@@ -205,20 +270,21 @@ async function resolveAudioBuffer(args: IAttachAudioArgs): Promise<{
       )
     }

-    let buffer
+    let arrayBuffer
     try {
-      buffer = await fetchResponse.arrayBuffer()
+      arrayBuffer = await fetchResponse.arrayBuffer()
     } catch (e) {
       console.error(e)
       throw new Error(`Could not read '${args.source}' as an arrayBuffer.`)
     }

+    const audioContext = await audioContextPromise
     audioContext.decodeAudioData(
-      buffer,
+      arrayBuffer,
       decodedBufferDeferred.resolve,
       decodedBufferDeferred.reject,
     )
-  }

     let decodedBuffer
     try {
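The `defer<AudioBuffer>()` call above bridges decodeAudioData's callback form to async/await. The helper's implementation is not part of this diff; a typical shape (an assumption about the repo's internal utility, not its verbatim code) is:

```ts
// Assumed shape of the defer<T>() utility used above; the real
// implementation lives elsewhere in the repo and is not shown here.
interface Deferred<T> {
  promise: Promise<T>
  resolve: (value: T) => void
  reject: (reason?: unknown) => void
}

function defer<T>(): Deferred<T> {
  let resolve!: (value: T) => void
  let reject!: (reason?: unknown) => void
  const promise = new Promise<T>((res, rej) => {
    resolve = res
    reject = rej
  })
  return {promise, resolve, reject}
}
```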
@@ -228,6 +294,17 @@ async function resolveAudioBuffer(args: IAttachAudioArgs): Promise<{
       throw new Error(`Could not decode ${args.source} as an audio file.`)
     }

+    return decodedBuffer
+  }
+
+  const audioContextPromise = getAudioContext()
+  const audioBufferPromise = getAudioBuffer()
+
+  const [audioContext, decodedBuffer] = await Promise.all([
+    audioContextPromise,
+    audioBufferPromise,
+  ])
+
   const destinationNode = args.destinationNode || audioContext.destination

   return {