81 changes: 75 additions & 6 deletions Backends/HTML5/kha/audio2/Audio.hx
@@ -1,21 +1,36 @@
package kha.audio2;

import js.Browser.document;
import js.Browser.window;
import js.Browser;
import js.Syntax;
import js.html.MessagePort;
import js.html.URL;
import js.html.audio.AudioContext;
import js.html.audio.AudioDestinationNode;
import js.html.audio.AudioProcessingEvent;
import js.html.audio.ScriptProcessorNode;
import js.lib.Promise;
import kha.Sound;
import kha.audio2.AudioWorkletProcessorJs;
import kha.internal.IntBox;
import kha.js.AEAudioChannel;

// Minimal extern for the browser's AudioWorkletNode, declaring only the members used below.
@:native("AudioWorkletNode")
private extern class AudioWorkletNode {
var port: MessagePort;
function new(context: AudioContext, name: String, options: {outputChannelCount: Array<Int>});

function connect(arg: AudioDestinationNode): Void;
}

class Audio {
public static var disableGcInteractions = false;
static var intBox: IntBox = new IntBox(0);
static var buffer: Buffer;
@:noCompletion public static var _context: AudioContext;
static var processingNode: ScriptProcessorNode;
static var workletNode: AudioWorkletNode;

static function initContext(): Void {
try {
@@ -37,13 +52,26 @@ class Audio {
return false;

Audio.samplesPerSecond = Math.round(_context.sampleRate);
- var bufferSize = 1024 * 2;
+ final bufferSize = 1024 * 2;
buffer = new Buffer(bufferSize * 4, 2, Std.int(_context.sampleRate));

// AudioWorklet is only exposed in secure contexts; otherwise fall back to the ScriptProcessorNode path.
final useWorklet = window.isSecureContext && (_context : Dynamic).audioWorklet != null;
// final useWorklet = false;

if (useWorklet) {
initAudioWorklet(bufferSize);
}
else {
initOnAudioProcess(bufferSize);
}
return true;
}

// Fallback path: a (deprecated) ScriptProcessorNode pulls samples on the main thread via onaudioprocess.
static function initOnAudioProcess(bufferSize: Int): Void {
processingNode = _context.createScriptProcessor(bufferSize, 0, 2);
- processingNode.onaudioprocess = function(e: AudioProcessingEvent) {
- var output1 = e.outputBuffer.getChannelData(0);
- var output2 = e.outputBuffer.getChannelData(1);
+ processingNode.onaudioprocess = (e: AudioProcessingEvent) -> {
+ final output1 = e.outputBuffer.getChannelData(0);
+ final output2 = e.outputBuffer.getChannelData(1);
if (audioCallback != null) {
intBox.value = e.outputBuffer.length * 2;
audioCallback(intBox, buffer);
@@ -65,7 +93,48 @@
}
}
processingNode.connect(_context.destination);
- return true;
}

// Preferred path: audio output runs in an AudioWorklet, which requests interleaved stereo samples from the main thread over its MessagePort.
static function initAudioWorklet(bufferSize: Int): Void {
final workletName = "kha-audio-processor";
// Create a blob for the worklet processor code
final pjs = AudioWorkletProcessorJs.getProcessorJs(workletName);
final blob = new js.html.Blob([pjs], {type: "application/javascript"});
final url = URL.createObjectURL(blob);
final promise: Promise<Dynamic> = (_context : Dynamic).audioWorklet.addModule(url);
promise.then(_ -> {
workletNode = new AudioWorkletNode(_context, workletName, {outputChannelCount: [2]});
workletNode.connect(_context.destination);

// Two interleaved floats (left/right) per sample frame.
final bufferLength = bufferSize * 2;
final audioArray = new js.lib.Float32Array(bufferLength);

// Run the audio callback and send one buffer of interleaved samples to the worklet whenever it asks for more
final sendBuffer = () -> {
// hidden pages get throttled and can't keep the buffer filled, which makes strange sounds; mute instead
if (document.visibilityState != VISIBLE) {
workletNode.port.postMessage([]);
return;
}
if (audioCallback == null)
return;
intBox.value = bufferLength;
audioCallback(intBox, buffer);
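// Copy interleaved samples out of the ring buffer into the array posted to the worklet.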
for (i in 0...bufferLength) {
audioArray[i] = buffer.data.get(buffer.readLocation);
buffer.readLocation++;
if (buffer.readLocation >= buffer.size) {
buffer.readLocation = 0;
}
}
workletNode.port.postMessage(audioArray);
}
// worklet requests more data
workletNode.port.onmessage = (_) -> {
sendBuffer();
}
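// Prime the worklet with an initial buffer; it will request more as it drains.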
sendBuffer();
});
}

public static var samplesPerSecond: Int;
@@ -84,7 +153,7 @@
public static function stream(sound: Sound, loop: Bool = false): kha.audio1.AudioChannel {
// var source = _context.createMediaStreamSource(cast sound.compressedData.getData());
// source.connect(_context.destination);
- var element = Browser.document.createAudioElement();
+ var element = document.createAudioElement();
#if kha_debug_html5
var blob = new js.html.Blob([sound.compressedData.getData()], {type: "audio/ogg"});
#else
41 changes: 41 additions & 0 deletions Backends/HTML5/kha/audio2/AudioWorkletProcessorJs.hx
@@ -0,0 +1,41 @@
package kha.audio2;

@:noDoc
class AudioWorkletProcessorJs {
public static function getProcessorJs(name: String): String {
return '
class KhaAudioProcessor extends AudioWorkletProcessor {
needMoreData = 0 // value posted back to the main thread to request another buffer
constructor() {
super();
this.buffer = [];
this.nextBuffer = [];
this.port.onmessage = (event) => {
this.nextBuffer = event.data;
};
}
process(inputs, outputs, parameters) {
const output = outputs[0];
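// Running low: swap in the most recent buffer from the main thread and ask for the next one.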
if (this.buffer.length < output[0].length * 2) {
this.buffer = this.nextBuffer;
this.port.postMessage(this.needMoreData);
}
if (this.buffer.length >= output[0].length * 2) {
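// De-interleave [L, R, L, R, ...] into the two planar output channels.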
for (let i = 0; i < output[0].length; i++) {
output[0][i] = this.buffer[i * 2];
output[1][i] = this.buffer[i * 2 + 1];
}
this.buffer = this.buffer.slice(output[0].length * 2);
} else {
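// Not enough data yet: output silence until the main thread catches up.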
for (let i = 0; i < output[0].length; i++) {
output[0][i] = 0;
output[1][i] = 0;
}
}
return true;
}
}
registerProcessor("$name", KhaAudioProcessor);
';
}
}