95 changes: 56 additions & 39 deletions src/audio_worklet.js
@@ -251,7 +251,17 @@ function createWasmAudioWorkletProcessor() {
return WasmAudioWorkletProcessor;
}

var messagePort;
#if MIN_FIREFOX_VERSION < 138 || MIN_CHROME_VERSION != TARGET_NOT_SUPPORTED || MIN_SAFARI_VERSION != TARGET_NOT_SUPPORTED
// If this browser does not support the up-to-date AudioWorklet standard
// that exposes a MessagePort on the AudioWorkletGlobalScope, then polyfill
// it with a hacky AudioWorkletProcessor that provides the MessagePort.
// Firefox added support in https://hg-edge.mozilla.org/integration/autoland/rev/ab38a1796126f2b3fc06475ffc5a625059af59c1
// Chrome ticket: https://issues.chromium.org/issues/446920095
// Safari ticket: https://bugs.webkit.org/show_bug.cgi?id=299386
/**
* @suppress {duplicate, checkTypes}
*/
var port = globalThis.port || {};

// Specify a worklet processor that will be used to receive messages to this
// AudioWorkletGlobalScope. We never connect this initial AudioWorkletProcessor
@@ -260,47 +270,13 @@ class BootstrapMessages extends AudioWorkletProcessor {
constructor(arg) {
super();
startWasmWorker(arg.processorOptions)
#if WEBAUDIO_DEBUG
console.log('AudioWorklet global scope looks like this:');
console.dir(globalThis);
#endif
// Listen to messages from the main thread. These messages will ask this
// scope to create the real AudioWorkletProcessors that call out to Wasm to
// do audio processing.
messagePort = this.port;
/** @suppress {checkTypes} */
messagePort.onmessage = async (msg) => {
#if MINIMAL_RUNTIME
// Wait for the module instantiation before processing messages.
await instantiatePromise;
#endif
let d = msg.data;
if (d['_wpn']) {
// '_wpn' is short for 'Worklet Processor Node', using an identifier
// that will never conflict with user messages
// Register a real AudioWorkletProcessor that will actually do audio processing.
#if AUDIO_WORKLET_SUPPORT_AUDIO_PARAMS
registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor(d.audioParams));
#else
registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor());
#endif
#if WEBAUDIO_DEBUG
console.log(`Registered a new WasmAudioWorkletProcessor "${d['_wpn']}" with AudioParams: ${d.audioParams}`);
#endif
// Post a Wasm Call message back telling that we have now registered the
// AudioWorkletProcessor, and should trigger the user onSuccess callback
// of the emscripten_create_wasm_audio_worklet_processor_async() call.
//
// '_wsc' is short for 'wasm call', using an identifier that will never
// conflict with user messages.
//
// Note: we convert the pointer arg manually here since the call site
// ($_EmAudioDispatchProcessorCallback) is used with various signatures
// and we do not know the types in advance.
messagePort.postMessage({'_wsc': d.callback, args: [d.contextHandle, 1/*EM_TRUE*/, {{{ to64('d.userData') }}}] });
} else if (d['_wsc']) {
getWasmTableEntry(d['_wsc'])(...d.args);
};
if (!(port instanceof MessagePort)) {
this.port.onmessage = port.onmessage;
/** @suppress {checkTypes} */
port = this.port;
}
}

@@ -317,5 +293,46 @@

// Register the dummy processor that will just receive messages.
registerProcessor('em-bootstrap', BootstrapMessages);
#endif

port.onmessage = async (msg) => {
#if MINIMAL_RUNTIME
// Wait for the module instantiation before processing messages.
await instantiatePromise;
#endif
let d = msg.data;
if (d['_boot']) {
startWasmWorker(d);
#if WEBAUDIO_DEBUG
console.log('AudioWorklet global scope looks like this:');
console.dir(globalThis);
#endif
} else if (d['_wpn']) {
// '_wpn' is short for 'Worklet Processor Node', using an identifier
// that will never conflict with user messages
// Register a real AudioWorkletProcessor that will actually do audio processing.
#if AUDIO_WORKLET_SUPPORT_AUDIO_PARAMS
registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor(d.audioParams));
#else
registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor());
#endif
#if WEBAUDIO_DEBUG
console.log(`Registered a new WasmAudioWorkletProcessor "${d['_wpn']}" with AudioParams: ${d.audioParams}`);
#endif
// Post a Wasm Call message back telling that we have now registered the
// AudioWorkletProcessor, and should trigger the user onSuccess callback
// of the emscripten_create_wasm_audio_worklet_processor_async() call.
//
// '_wsc' is short for 'wasm call', using an identifier that will never
// conflict with user messages.
//
// Note: we convert the pointer arg manually here since the call site
// ($_EmAudioDispatchProcessorCallback) is used with various signatures
// and we do not know the types in advance.
port.postMessage({'_wsc': d.callback, args: [d.contextHandle, 1/*EM_TRUE*/, {{{ to64('d.userData') }}}] });
} else if (d['_wsc']) {
getWasmTableEntry(d['_wsc'])(...d.args);
};
}

} // ENVIRONMENT_IS_AUDIO_WORKLET
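
For context (not part of the diff): a minimal sketch of the worklet-side pattern the change above implements. The standardized AudioWorkletGlobalScope-level port is used when the browser provides it; otherwise a bootstrap AudioWorkletProcessor promotes its own node port to act as the scope-wide port. The processor name 'port-bootstrap' and the logging handler are illustrative placeholders, not Emscripten identifiers.

// Runs inside an AudioWorkletGlobalScope. Prefer the standardized scope-level
// MessagePort; fall back to an empty placeholder object until the bootstrap
// node is constructed.
var scopePort = globalThis.port || {};

class PortBootstrap extends AudioWorkletProcessor {
  constructor() {
    super();
    if (!(scopePort instanceof MessagePort)) {
      // Carry over any handler that was installed on the placeholder, then
      // promote this processor's node port to be the scope-wide port.
      this.port.onmessage = scopePort.onmessage;
      scopePort = this.port;
    }
  }
  process() {
    // The bootstrap processor produces no audio; returning false signals it
    // does not need to be kept alive on its own.
    return false;
  }
}

registerProcessor('port-bootstrap', PortBootstrap);

// A single handler serves both paths: either it is assigned directly on the
// real scope port, or the constructor copies it onto the node port.
scopePort.onmessage = (msg) => console.log('worklet received', msg.data);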
11 changes: 11 additions & 0 deletions src/closure-externs/audio-worklet-externs.js
@@ -0,0 +1,11 @@
/*
* AudioWorkletGlobalScope globals
*/
var registerProcessor = function(name, obj) {};
var currentFrame;
var currentTime;
var sampleRate;
/**
* @suppress {duplicate, checkTypes}
*/
var port;
8 changes: 0 additions & 8 deletions src/closure-externs/closure-externs.js
@@ -222,14 +222,6 @@ var outerHeight;
var event;
var devicePixelRatio;

/*
* AudioWorkletGlobalScope globals
*/
var registerProcessor = function(name, obj) {};
var currentFrame;
var currentTime;
var sampleRate;

/*
* Avoid closure minifying anything to "id". See #13965
*/
64 changes: 45 additions & 19 deletions src/lib/libwebaudio.js
@@ -192,23 +192,49 @@ var LibraryWebAudio = {
#if WEBAUDIO_DEBUG
console.log(`emscripten_start_wasm_audio_worklet_thread_async() addModule() completed`);
#endif
audioWorklet.bootstrapMessage = new AudioWorkletNode(audioContext, 'em-bootstrap', {
processorOptions: {
// Assign the loaded AudioWorkletGlobalScope a Wasm Worker ID so that
// it can utilize its own TLS slots, and it is recognized to not be
// the main browser thread.
wwID: _wasmWorkersID++,

#if MIN_FIREFOX_VERSION < 138 || MIN_CHROME_VERSION != TARGET_NOT_SUPPORTED || MIN_SAFARI_VERSION != TARGET_NOT_SUPPORTED
// If this browser does not support the up-to-date AudioWorklet standard
// that exposes a MessagePort on the AudioWorklet, then polyfill it by
// instantiating a dummy AudioWorkletNode to obtain a MessagePort.
// Firefox added support in https://hg-edge.mozilla.org/integration/autoland/rev/ab38a1796126f2b3fc06475ffc5a625059af59c1
// Chrome ticket: https://issues.chromium.org/issues/446920095
// Safari ticket: https://bugs.webkit.org/show_bug.cgi?id=299386
if (!audioWorklet['port']) {
audioWorklet['port'] = {
postMessage: (msg) => {
if (msg['_boot']) {
audioWorklet.bootstrapMessage = new AudioWorkletNode(audioContext, 'em-bootstrap', {
processorOptions: msg
});
audioWorklet.bootstrapMessage['port'].onmessage = (msg) => {
audioWorklet['port'].onmessage(msg);
}
} else {
audioWorklet.bootstrapMessage['port'].postMessage(msg);
}
}
}
}
#endif

audioWorklet['port'].postMessage({
// This is the bootstrap message to the Audio Worklet.
'_boot': 1,
// Assign the loaded AudioWorkletGlobalScope a Wasm Worker ID so that
// it can utilize its own TLS slots, and it is recognized to not be
// the main browser thread.
wwID: _wasmWorkersID++,
#if MINIMAL_RUNTIME
wasm: Module['wasm'],
wasm: Module['wasm'],
#else
wasm: wasmModule,
wasm: wasmModule,
#endif
wasmMemory,
stackLowestAddress, // sb = stack base
stackSize, // sz = stack size
}
wasmMemory,
stackLowestAddress, // sb = stack base
stackSize, // sz = stack size
});
audioWorklet.bootstrapMessage.port.onmessage = _EmAudioDispatchProcessorCallback;
audioWorklet['port'].onmessage = _EmAudioDispatchProcessorCallback;
{{{ makeDynCall('viip', 'callback') }}}(contextHandle, 1/*EM_TRUE*/, userData);
}).catch(audioWorkletCreationFailed);
},
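
Read in isolation, the main-thread side of the polyfill above boils down to roughly the following sketch. Here `audioContext` is assumed to be an AudioContext whose worklet module (the one registering 'em-bootstrap') has already been loaded with addModule(), and the shim object simply mirrors the shape used in the diff rather than any public API.

const audioWorklet = audioContext.audioWorklet;
if (!audioWorklet['port']) {
  let bootstrapNode;
  audioWorklet['port'] = {
    onmessage: null,
    postMessage: (msg) => {
      if (msg['_boot']) {
        // First message: create the dummy node, handing the boot payload over
        // as processorOptions so the worklet scope can start its Wasm Worker.
        bootstrapNode = new AudioWorkletNode(audioContext, 'em-bootstrap', {
          processorOptions: msg
        });
        // Route replies from the node's real port into the shim's handler.
        bootstrapNode.port.onmessage = (e) => audioWorklet['port'].onmessage(e);
      } else {
        // Every later message is forwarded over the node's real MessagePort.
        bootstrapNode.port.postMessage(msg);
      }
    }
  };
}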
@@ -256,7 +282,7 @@ var LibraryWebAudio = {
console.log(`emscripten_create_wasm_audio_worklet_processor_async() creating a new AudioWorklet processor with name ${processorName}`);
#endif

EmAudio[contextHandle].audioWorklet.bootstrapMessage.port.postMessage({
EmAudio[contextHandle].audioWorklet['port'].postMessage({
// Deliberately mangled and short names used here ('_wpn', the 'Worklet
// Processor Name' used as a 'key' to verify the message type so as to
// not get accidentally mixed with user submitted messages, the remainder
@@ -334,11 +360,11 @@ var LibraryWebAudio = {
emscripten_current_thread_is_audio_worklet: () => ENVIRONMENT_IS_AUDIO_WORKLET,

emscripten_audio_worklet_post_function_v: (audioContext, funcPtr) => {
(audioContext ? EmAudio[audioContext].audioWorklet.bootstrapMessage.port : messagePort).postMessage({'_wsc': funcPtr, args: [] }); // "WaSm Call"
(audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [] }); // "WaSm Call"
},

$emscripten_audio_worklet_post_function_1: (audioContext, funcPtr, arg0) => {
(audioContext ? EmAudio[audioContext].audioWorklet.bootstrapMessage.port : messagePort).postMessage({'_wsc': funcPtr, args: [arg0] }); // "WaSm Call"
(audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0] }); // "WaSm Call"
},

emscripten_audio_worklet_post_function_vi__deps: ['$emscripten_audio_worklet_post_function_1'],
@@ -352,7 +378,7 @@ var LibraryWebAudio = {
},

$emscripten_audio_worklet_post_function_2: (audioContext, funcPtr, arg0, arg1) => {
(audioContext ? EmAudio[audioContext].audioWorklet.bootstrapMessage.port : messagePort).postMessage({'_wsc': funcPtr, args: [arg0, arg1] }); // "WaSm Call"
(audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0, arg1] }); // "WaSm Call"
},

emscripten_audio_worklet_post_function_vii__deps: ['$emscripten_audio_worklet_post_function_2'],
@@ -366,7 +392,7 @@ var LibraryWebAudio = {
},

$emscripten_audio_worklet_post_function_3: (audioContext, funcPtr, arg0, arg1, arg2) => {
(audioContext ? EmAudio[audioContext].audioWorklet.bootstrapMessage.port : messagePort).postMessage({'_wsc': funcPtr, args: [arg0, arg1, arg2] }); // "WaSm Call"
(audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: [arg0, arg1, arg2] }); // "WaSm Call"
},
emscripten_audio_worklet_post_function_viii__deps: ['$emscripten_audio_worklet_post_function_3'],
emscripten_audio_worklet_post_function_viii: (audioContext, funcPtr, arg0, arg1, arg2) => {
@@ -386,7 +412,7 @@ var LibraryWebAudio = {
assert(UTF8ToString(sigPtr)[0] != 'v', 'Do NOT specify the return argument in the signature string for a call to emscripten_audio_worklet_post_function_sig(), just pass the function arguments.');
assert(varargs);
#endif
(audioContext ? EmAudio[audioContext].audioWorklet.bootstrapMessage.port : messagePort).postMessage({'_wsc': funcPtr, args: readEmAsmArgs(sigPtr, varargs) });
(audioContext ? EmAudio[audioContext].audioWorklet['port'] : port).postMessage({'_wsc': funcPtr, args: readEmAsmArgs(sigPtr, varargs) });
}
};
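
The emscripten_audio_worklet_post_function_* helpers above all reduce to posting a '_wsc' ("WaSm Call") message to whichever port is in scope, and the receiving end in audio_worklet.js dispatches it through the Wasm function table. An illustrative sketch of that round trip, with targetPort, funcPtr, and wasmTable standing in for the real objects:

// Sender side: from the main thread this would be the context's
// audioWorklet['port']; from inside the worklet, the scope-global `port`.
function postWasmCall(targetPort, funcPtr, args) {
  targetPort.postMessage({'_wsc': funcPtr, args});
}

// Receiver side: equivalent of getWasmTableEntry(d['_wsc'])(...d.args) in the
// diff, expressed as a direct WebAssembly.Table lookup.
function onPortMessage(event, wasmTable) {
  const d = event.data;
  if (d['_wsc']) {
    wasmTable.get(d['_wsc'])(...d.args);
  }
}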
