diff --git a/src/audio_worklet.js b/src/audio_worklet.js
index 985f69d2ca083..ebd09517494b0 100644
--- a/src/audio_worklet.js
+++ b/src/audio_worklet.js
@@ -168,18 +168,22 @@ class BootstrapMessages extends AudioWorkletProcessor {
       }
 #endif
       // Register a real AudioWorkletProcessor that will actually do audio processing.
-      registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor(d['audioParams']));
+      // 'ap' being the audio params
+      registerProcessor(d['_wpn'], createWasmAudioWorkletProcessor(d['ap']));
 #if WEBAUDIO_DEBUG
-      console.log(`Registered a new WasmAudioWorkletProcessor "${d['_wpn']}" with AudioParams: ${d['audioParams']}`);
+      console.log(`Registered a new WasmAudioWorkletProcessor "${d['_wpn']}" with AudioParams: ${d['ap']}`);
 #endif
       // Post a Wasm Call message back telling that we have now registered the
-      // AudioWorkletProcessor class, and should trigger the user onSuccess
-      // callback of the
-      // emscripten_create_wasm_audio_worklet_processor_async() call.
-      p.postMessage({'_wsc': d['callback'], 'x': [d['contextHandle'], 1/*EM_TRUE*/, d['userData']] }); // "WaSm Call"
-    } else if (d['_wsc']) {
+      // AudioWorkletProcessor, and should trigger the user onSuccess callback
+      // of the emscripten_create_wasm_audio_worklet_processor_async() call.
+      //
+      // '_wsc' is short for 'wasm call', using an identifier that will never
+      // conflict with user messages
+      // 'cb' the callback function
+      // 'ch' the context handle
+      // 'ud' the passed user data
+      p.postMessage({'_wsc': d['cb'], 'x': [d['ch'], 1/*EM_TRUE*/, d['ud']] });
+    } else if (d['_wsc']) {
       Module['wasmTable'].get(d['_wsc'])(...d['x']);
     };
   }
diff --git a/src/library_webaudio.js b/src/library_webaudio.js
index 8d9491ff2436e..ad3db42b13b98 100644
--- a/src/library_webaudio.js
+++ b/src/library_webaudio.js
@@ -122,9 +122,12 @@ let LibraryWebAudio = {
   },

 #if AUDIO_WORKLET
+  // emscripten_start_wasm_audio_worklet_thread_async() doesn't use stackAlloc,
+  // etc., but the created worklet does.
   emscripten_start_wasm_audio_worklet_thread_async__deps: [
     '$_wasmWorkersID',
-    '$_EmAudioDispatchProcessorCallback'],
+    '$_EmAudioDispatchProcessorCallback',
+    '$stackAlloc', '$stackRestore', '$stackSave'],
   emscripten_start_wasm_audio_worklet_thread_async: (contextHandle, stackLowestAddress, stackSize, callback, userData) => {
 #if ASSERTIONS
@@ -249,14 +252,15 @@ let LibraryWebAudio = {
 #endif

     EmAudio[contextHandle].audioWorklet.bootstrapMessage.port.postMessage({
-      // '_wpn' == 'Worklet Processor Name', use a deliberately mangled name so
-      // that this field won't accidentally be mixed with user submitted
-      // messages.
-      _wpn: UTF8ToString(HEAPU32[options]),
-      audioParams,
-      contextHandle,
-      callback,
-      userData
+      // Deliberately mangled and short names used here ('_wpn', the 'Worklet
+      // Processor Name' used as a 'key' to verify the message type so as to
+      // not get accidentally mixed with user submitted messages, the remainder
+      // for space saving reasons, abbreviated from their variable names).
+      '_wpn': UTF8ToString(HEAPU32[options]),
+      'ap': audioParams,
+      'ch': contextHandle,
+      'cb': callback,
+      'ud': userData
     });
   },
diff --git a/src/runtime_shared.js b/src/runtime_shared.js
index 715211a67c6f4..7c23e943e64b8 100644
--- a/src/runtime_shared.js
+++ b/src/runtime_shared.js
@@ -13,7 +13,7 @@ if (!shouldExport) {
   if (MODULARIZE && EXPORT_ALL) {
     shouldExport = true;
-  } else if (AUDIO_WORKLET && (x == 'HEAP32' || x == 'HEAPU32')) {
+  } else if (AUDIO_WORKLET && (x == 'HEAPU32' || x == 'HEAPF32')) {
     // Export to the AudioWorkletGlobalScope the needed variables to access
     // the heap. AudioWorkletGlobalScope is unable to access global JS vars
     // in the compiled main JS file.
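
For orientation, a minimal sketch (not part of the patch) of the two bootstrap message shapes after this renaming. The variable names registerMsg/successMsg, the processor name 'my-processor', and the numeric handle/pointer values are placeholders for illustration only; the field names and semantics come from the diff above.

  // Main thread -> worklet scope (library_webaudio.js): request registration of a
  // named AudioWorkletProcessor. '_wpn' doubles as the message-type key, so bootstrap
  // messages can never collide with user-posted messages; the remaining keys are
  // abbreviated purely to shrink the postMessage payload.
  const registerMsg = {
    '_wpn': 'my-processor', // Worklet Processor Name (placeholder)
    'ap': [],               // audioParams: AudioParamDescriptor list
    'ch': 1,                // contextHandle of the target AudioContext
    'cb': 2,                // callback: a function pointer, i.e. an index into wasmTable
    'ud': 0                 // userData: opaque pointer handed back to the callback
  };

  // Worklet scope -> main thread (audio_worklet.js): once registerProcessor() has run,
  // a 'wasm call' message is posted back so the user's onSuccess callback fires with
  // (contextHandle, EM_TRUE, userData). The receiver of a '_wsc' message dispatches it
  // through the Wasm function table, as in the else-if branch of the first hunk.
  const successMsg = {
    '_wsc': registerMsg['cb'],
    'x': [registerMsg['ch'], 1 /* EM_TRUE */, registerMsg['ud']]
  };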