I have some good and bad news...
Changed
# Original openmpt Makefile branch for the wasm target (state before the change).
else ifeq ($(EMSCRIPTEN_TARGET),wasm)
# emits native wasm.
CPPFLAGS += -DMPT_BUILD_WASM
# -s WASM=1 selects WebAssembly output (as opposed to asm.js).
CXXFLAGS += -s WASM=1
CFLAGS += -s WASM=1
LDFLAGS += -s WASM=1
# Let the wasm heap grow at runtime instead of using a fixed-size memory.
LDFLAGS += -s ALLOW_MEMORY_GROWTH=1
into
# Modified wasm branch: synchronous compilation + single-file output so the
# generated JS can be imported from an AudioWorklet as an ES module.
else ifeq ($(EMSCRIPTEN_TARGET),wasm)
# emits native wasm.
CPPFLAGS += -DMPT_BUILD_WASM
# BINARYEN_ASYNC_COMPILATION=0 forces synchronous WebAssembly instantiation so
# the module is usable immediately after import (needed in a worklet, where
# async startup is impractical).
# NOTE(review): `-s=SINGLE_FILE` is an unusual spelling — emscripten's
# documented forms are `-s SINGLE_FILE=1` or `-sSINGLE_FILE`; confirm the
# setting is actually being applied and not silently ignored.
# NOTE(review): these -s options are linker settings; passing them in
# CFLAGS/CXXFLAGS is presumably harmless but may produce warnings — verify.
CXXFLAGS += -s WASM=1 -s BINARYEN_ASYNC_COMPILATION=0 -s=SINGLE_FILE
CFLAGS += -s WASM=1 -s BINARYEN_ASYNC_COMPILATION=0 -s=SINGLE_FILE
LDFLAGS += -s WASM=1 -s BINARYEN_ASYNC_COMPILATION=0 -s=SINGLE_FILE
LDFLAGS += -s ALLOW_MEMORY_GROWTH=1
It compiles and works; I can load it from the AudioWorkletProcessor. I also had to add an "export default Module;" at the end of the generated JS to make it compatible with ES6 modules.
Problem is... emscripten relies on performance.now() to get the high-resolution time. The worklet thread has no access to that function.
As a workaround, I aliased performance.now to Date.now, which has lower resolution (it seems to "work", but I have no idea what the impact is).
But I have no idea how to solve this next problem: it seems libopenmpt needs to generate random values (I don't know why), so emscripten uses the following code,
which relies on the "window.crypto" built-in, which again is unavailable in the Audio Worklet.
function getRandomDevice() {
if (typeof crypto === "object" && typeof crypto["getRandomValues"] === "function") {
var randomBuffer = new Uint8Array(1);
return function () {
crypto.getRandomValues(randomBuffer);
return randomBuffer[0]
}
} else if (ENVIRONMENT_IS_NODE) {
try {
var crypto_module = require("crypto");
return function () {
return crypto_module["randomBytes"](1)[0]
}
} catch (e) {
}
}
return function () {
abort("randomDevice")
}
}
At this point I have no idea how to proceed... This is my worklet code. I guess I could re-implement a random number generator in JS, if that is what emscripten needs.
import Module from './libopenmpt.js';
const libopenmpt = Module;
/**
 * AudioWorkletProcessor that loads a tracker module into libopenmpt's wasm
 * heap. File bytes arrive via the processor's message port; rendering in
 * process() is not implemented yet.
 */
class ModPlayer extends AudioWorkletProcessor {
  // Maximum number of frames rendered per chunk (buffer sizing below).
  maxFramesPerChunk = 4096;
  // Wasm-heap pointer to the raw module file bytes (0 = not allocated).
  ptrToFile = 0;
  // Handle returned by openmpt_module_create_from_memory (0 = no module).
  modulePtr = 0;
  // Per-channel float sample buffers in the wasm heap (4 bytes per sample).
  leftBufferPtr = 0;
  rightBufferPtr = 0;

  constructor(options) {
    super();
    // BUG FIX: `window` does not exist in AudioWorkletGlobalScope; referencing
    // it throws a ReferenceError and kills the constructor. Use globalThis.
    console.log(globalThis);
    console.log(libopenmpt);
    console.log(libopenmpt._openmpt_get_library_version());
    this.port.onmessage = (event) => {
      console.log(event.data);
      // Free resources from any previously loaded module so posting a second
      // file does not leak wasm heap memory.
      this.#releaseModule();
      // HEAPU8.set needs an array-like; accept either a Uint8Array or a raw
      // ArrayBuffer from the main thread.
      const bytes =
        event.data instanceof Uint8Array ? event.data : new Uint8Array(event.data);
      this.ptrToFile = libopenmpt._malloc(bytes.byteLength);
      libopenmpt.HEAPU8.set(bytes, this.ptrToFile);
      this.modulePtr = libopenmpt._openmpt_module_create_from_memory(
        this.ptrToFile, bytes.byteLength, 0, 0, 0);
      this.leftBufferPtr = libopenmpt._malloc(4 * this.maxFramesPerChunk);
      this.rightBufferPtr = libopenmpt._malloc(4 * this.maxFramesPerChunk);
    };
  }

  // Destroy the current openmpt module and free wasm-side allocations, if any.
  // NOTE(review): assumes the emscripten build exports _openmpt_module_destroy
  // and _free — confirm against the generated libopenmpt.js.
  #releaseModule() {
    if (this.modulePtr) {
      libopenmpt._openmpt_module_destroy(this.modulePtr);
      this.modulePtr = 0;
    }
    for (const ptr of [this.ptrToFile, this.leftBufferPtr, this.rightBufferPtr]) {
      if (ptr) libopenmpt._free(ptr);
    }
    this.ptrToFile = 0;
    this.leftBufferPtr = 0;
    this.rightBufferPtr = 0;
  }

  /**
   * Render callback. Returning true keeps the processor alive; audio
   * generation from the loaded module is not implemented yet.
   */
  process(inputs, outputs, parameters) {
    return true;
  }
}
// Register under the name used when constructing the corresponding
// AudioWorkletNode on the main thread ('mod-player').
registerProcessor('mod-player', ModPlayer);
Regarding your question, yeah, it works with the old API — "createScriptProcessor()" in the main UI thread, etc. — that's how I used to do it too. Now I'm trying to fill the audio buffers in a separate thread.