From 7375134fb9a67de55701875d41cefcf61f359b90 Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Fri, 14 Nov 2025 16:58:36 -0500 Subject: [PATCH 01/14] Reimplement legacy `Sound` class with new audio engine --- .../src/Audio/Interfaces/ISoundOptions.ts | 2 +- packages/dev/core/src/Audio/audioEngine.ts | 22 +- packages/dev/core/src/Audio/sound.ts | 1185 ++++++----------- .../abstractAudio/abstractAudioOutNode.ts | 3 +- .../AudioV2/abstractAudio/abstractSound.ts | 4 +- .../abstractAudio/abstractSoundSource.ts | 45 +- .../src/AudioV2/abstractAudio/audioBus.ts | 32 +- .../src/AudioV2/abstractAudio/staticSound.ts | 4 +- .../AudioV2/abstractAudio/streamingSound.ts | 4 +- .../subNodes/abstractAudioSubGraph.ts | 6 +- .../subNodes/spatialAudioSubNode.ts | 2 + .../subProperties/abstractSpatialAudio.ts | 17 +- .../subProperties/spatialAudio.ts | 28 + .../subNodes/spatialWebAudioSubNode.ts | 33 +- .../subNodes/webAudioBusAndSoundSubGraph.ts | 1 + .../webAudio/subProperties/spatialWebAudio.ts | 4 +- .../core/src/AudioV2/webAudio/webAudioBus.ts | 37 +- .../src/AudioV2/webAudio/webAudioEngine.ts | 3 + .../AudioV2/webAudio/webAudioSoundSource.ts | 36 +- .../AudioV2/webAudio/webAudioStaticSound.ts | 48 +- .../webAudio/webAudioStreamingSound.ts | 35 +- .../core/test/unit/Audio/audioEngine.test.ts | 9 +- .../unit/Audio/helpers/audioTestHelper.ts | 6 - .../unit/Audio/helpers/audioTestSamples.ts | 33 +- .../unit/Audio/helpers/mockedAudioObjects.ts | 360 +++-- .../dev/core/test/unit/Audio/sound.test.ts | 455 +++---- 26 files changed, 1090 insertions(+), 1324 deletions(-) diff --git a/packages/dev/core/src/Audio/Interfaces/ISoundOptions.ts b/packages/dev/core/src/Audio/Interfaces/ISoundOptions.ts index b85662c6540..76ce768ac4f 100644 --- a/packages/dev/core/src/Audio/Interfaces/ISoundOptions.ts +++ b/packages/dev/core/src/Audio/Interfaces/ISoundOptions.ts @@ -40,7 +40,7 @@ export interface ISoundOptions { * Define the distance attenuation model the sound will follow. 
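+     * One of "linear", "inverse" or "exponential" (the values of the WebAudio DistanceModelType).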
* @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound */ - distanceModel?: string; + distanceModel?: "linear" | "inverse" | "exponential"; /** * Defines the playback speed (1 by default) */ diff --git a/packages/dev/core/src/Audio/audioEngine.ts b/packages/dev/core/src/Audio/audioEngine.ts index 6da6868f0f2..4e3b182487f 100644 --- a/packages/dev/core/src/Audio/audioEngine.ts +++ b/packages/dev/core/src/Audio/audioEngine.ts @@ -1,11 +1,10 @@ -import type { Analyser } from "./analyser"; - -import type { Nullable } from "../types"; -import { Observable } from "../Misc/observable"; -import { AbstractEngine } from "../Engines/abstractEngine"; -import type { IAudioEngine } from "./Interfaces/IAudioEngine"; import { _WebAudioEngine } from "../AudioV2/webAudio/webAudioEngine"; import type { _WebAudioMainBus } from "../AudioV2/webAudio/webAudioMainBus"; +import { AbstractEngine } from "../Engines/abstractEngine"; +import { Observable } from "../Misc/observable"; +import type { Nullable } from "../types"; +import type { Analyser } from "./analyser"; +import type { IAudioEngine } from "./Interfaces/IAudioEngine"; // Sets the default audio engine to Babylon.js AbstractEngine.AudioEngineFactory = ( @@ -22,7 +21,6 @@ AbstractEngine.AudioEngineFactory = ( * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic */ export class AudioEngine implements IAudioEngine { - private _audioContext: Nullable = null; private _masterGain: GainNode; private _tryToRun = false; private _useCustomUnlockedButton: boolean = false; @@ -176,7 +174,7 @@ export class AudioEngine implements IAudioEngine { * This is helpful to resume play once browser policies have been satisfied. */ public unlock() { - if (this._audioContext?.state === "running") { + if (this._v2._audioContext?.state === "running") { if (!this.unlocked) { // Notify users that the audio stack is unlocked/unmuted this.unlocked = true; @@ -192,10 +190,10 @@ export class AudioEngine implements IAudioEngine { /** @internal */ public _resumeAudioContextOnStateChange(): void { - this._audioContext?.addEventListener( + this._v2._audioContext?.addEventListener( "statechange", () => { - if (this.unlocked && this._audioContext?.state !== "running") { + if (this.unlocked && this._v2._audioContext?.state !== "running") { // eslint-disable-next-line @typescript-eslint/no-floating-promises this._resumeAudioContextAsync(); } @@ -214,6 +212,10 @@ export class AudioEngine implements IAudioEngine { return Promise.resolve(); } + if (this._v2._audioContext.state === "suspended" && !this._useCustomUnlockedButton) { + this._v2._unmuteUIEnabled = true; + } + return this._v2._audioContext.resume(); } diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index 750c2e6a454..0c0c79a121b 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -1,90 +1,155 @@ -import { Tools } from "../Misc/tools"; -import { Observable } from "../Misc/observable"; +import type { IStaticSoundOptions, IStaticSoundPlayOptions, IStaticSoundStopOptions } from "../AudioV2/abstractAudio/staticSound"; +import type { IStreamingSoundOptions } from "../AudioV2/abstractAudio/streamingSound"; +import { _HasSpatialAudioOptions, _SpatialAudioDefaults } from "../AudioV2/abstractAudio/subProperties/abstractSpatialAudio"; +import type { IAudioParameterRampOptions } from "../AudioV2/audioParameter"; +import { AudioParameterRampShape } from "../AudioV2/audioParameter"; 
+import { SoundState } from "../AudioV2/soundState"; +import { _WebAudioSoundSource } from "../AudioV2/webAudio/webAudioSoundSource"; +import { _WebAudioStaticSound } from "../AudioV2/webAudio/webAudioStaticSound"; +import { _WebAudioStreamingSound } from "../AudioV2/webAudio/webAudioStreamingSound"; +import { AbstractEngine } from "../Engines/abstractEngine"; +import { EngineStore } from "../Engines/engineStore"; import { Vector3 } from "../Maths/math.vector"; -import type { Nullable } from "../types"; -import type { Scene } from "../scene"; import type { AbstractMesh } from "../Meshes/abstractMesh"; import type { TransformNode } from "../Meshes/transformNode"; -import { Logger } from "../Misc/logger"; import { _WarnImport } from "../Misc/devTools"; -import type { ISoundOptions } from "./Interfaces/ISoundOptions"; -import { EngineStore } from "../Engines/engineStore"; -import type { IAudioEngine } from "./Interfaces/IAudioEngine"; -import type { Observer } from "../Misc/observable"; +import { Logger } from "../Misc/logger"; +import { Observable } from "../Misc/observable"; +import { _RetryWithInterval } from "../Misc/timingTools"; import { RegisterClass } from "../Misc/typeStore"; -import { AbstractEngine } from "core/Engines/abstractEngine"; -import { _RetryWithInterval } from "core/Misc/timingTools"; +import type { Scene } from "../scene"; +import type { Nullable } from "../types"; +import type { AudioEngine } from "./audioEngine"; +import type { ISoundOptions } from "./Interfaces/ISoundOptions"; + +const TmpRampOptions: IAudioParameterRampOptions = { + duration: 0, + shape: AudioParameterRampShape.Linear, +}; + +const TmpPlayOptions: IStaticSoundPlayOptions = { + duration: 0, + loop: false, + loopEnd: 0, + loopStart: 0, + startOffset: 0, + volume: 1, + waitTime: 0, +}; + +const TmpStopOptions: IStaticSoundStopOptions = { + waitTime: 0, +}; + +function D2r(degrees: number): number { + return (degrees * Math.PI) / 180; +} + +function R2d(radians: number): number { + return (radians * 180) / Math.PI; +} /** * Defines a sound that can be played in the application. * The sound can either be an ambient track or a simple sound played in reaction to a user action. - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic + * @see https://doc.babylonjs.com/legacy/audio */ export class Sound { /** * The name of the sound in the scene. */ - public name: string; + public get name(): string { + return this._soundV2.name; + } + + public set name(value: string) { + this._soundV2.name = value; + } + /** * Does the sound autoplay once loaded. */ - public autoplay: boolean = false; + public get autoplay(): boolean { + return this._soundV2 instanceof _WebAudioSoundSource ? true : this._soundV2.autoplay; + } + + public set autoplay(value: boolean) { + if (this._soundV2 instanceof _WebAudioSoundSource) { + return; + } + this._soundV2._getOptions().autoplay = value; + } - private _loop = false; /** * Does the sound loop after it finishes playing once. */ public get loop(): boolean { - return this._loop; + return this._soundV2 instanceof _WebAudioSoundSource ? true : this._soundV2.loop; } public set loop(value: boolean) { - if (value === this._loop) { + if (this._soundV2 instanceof _WebAudioSoundSource) { return; } - - this._loop = value; - this.updateOptions({ loop: value }); + this._soundV2.loop = value; } /** * Does the sound use a custom attenuation curve to simulate the falloff * happening when the source gets further away from the camera. 
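+     * When enabled, the callback supplied to setAttenuationFunction drives the volume instead of the WebAudio distance model.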
- * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-your-own-custom-attenuation-function + * @see https://doc.babylonjs.com/legacy/audio#creating-your-own-custom-attenuation-function */ public useCustomAttenuation: boolean = false; /** * The sound track id this sound belongs to. */ - public soundTrackId: number; + public soundTrackId: number = -1; /** * Is this sound currently played. */ - public isPlaying: boolean = false; + public get isPlaying(): boolean { + return this._soundV2 instanceof _WebAudioSoundSource ? true : this._soundV2.state === SoundState.Started || this._optionsV2.autoplay!; + } + /** * Is this sound currently paused. */ - public isPaused: boolean = false; + public get isPaused(): boolean { + return this._soundV2 instanceof _WebAudioSoundSource ? false : this._soundV2.state === SoundState.Paused; + } + /** * Define the reference distance the sound should be heard perfectly. - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound + * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ public refDistance: number = 1; /** * Define the roll off factor of spatial sounds. - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound + * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ public rolloffFactor: number = 1; /** * Define the max distance the sound should be heard (intensity just became 0 at this point). - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound + * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ - public maxDistance: number = 100; + public get maxDistance(): number { + return this._optionsV2.spatialMaxDistance || 100; + } + public set maxDistance(value: number) { + this._optionsV2.spatialMaxDistance = value; + this._soundV2.spatial.maxDistance = value; + } /** * Define the distance attenuation model the sound will follow. - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound + * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ - public distanceModel: string = "linear"; + public get distanceModel(): "linear" | "inverse" | "exponential" { + return this._optionsV2.spatialDistanceModel || "linear"; + } + public set distanceModel(value: "linear" | "inverse" | "exponential") { + this._optionsV2.spatialDistanceModel = value; + this._soundV2.spatial.distanceModel = value; + } /** * @internal * Back Compat @@ -104,88 +169,42 @@ export class Sound { * Gets the current time for the sound. */ public get currentTime(): number { - if (this._htmlAudioElement) { - return this._htmlAudioElement.currentTime; - } - - if (AbstractEngine.audioEngine?.audioContext && (this.isPlaying || this.isPaused)) { - // The `_currentTime` member is only updated when the sound is paused. Add the time since the last start - // to get the actual current time. - const timeSinceLastStart = this.isPaused ? 0 : AbstractEngine.audioEngine.audioContext.currentTime - this._startTime; - return this._currentTime + timeSinceLastStart; - } - - return 0; + return this._soundV2 instanceof _WebAudioSoundSource ? this._soundV2.engine.currentTime : this._soundV2.currentTime; } /** * Does this sound enables spatial sound. 
-     * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound
+     * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound
      */
     public get spatialSound(): boolean {
-        return this._spatialSound;
+        return this._soundV2._isSpatial;
     }
 
     /**
      * Does this sound enables spatial sound.
-     * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound
+     * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound
      */
     public set spatialSound(newValue: boolean) {
-        if (newValue == this._spatialSound) {
-            return;
-        }
-
-        const wasPlaying = this.isPlaying;
-        this.pause();
-
-        if (newValue) {
-            this._spatialSound = newValue;
-            this._updateSpatialParameters();
-        } else {
-            this._disableSpatialSound();
-        }
-
-        if (wasPlaying) {
-            this.play();
-        }
+        this._soundV2._isSpatial = newValue;
     }
 
-    private _spatialSound: boolean = false;
-    private _panningModel: string = "equalpower";
-    private _playbackRate: number = 1;
-    private _streaming: boolean = false;
-    private _startTime: number = 0;
-    private _currentTime: number = 0;
-    private _position: Vector3 = Vector3.Zero();
     private _localDirection: Vector3 = new Vector3(1, 0, 0);
     private _volume: number = 1;
     private _isReadyToPlay: boolean = false;
     private _isDirectional: boolean = false;
-    private _readyToPlayCallback: Nullable<() => any>;
-    private _audioBuffer: Nullable<AudioBuffer>;
-    private _soundSource: Nullable<AudioBufferSourceNode>;
-    private _streamingSource: Nullable<AudioNode>;
-    private _soundPanner: Nullable<PannerNode>;
+    private _readyToPlayCallback: () => any;
     private _soundGain: Nullable<GainNode>;
-    private _inputAudioNode: Nullable<AudioNode>;
-    private _outputAudioNode: Nullable<AudioNode>;
-    // Used if you'd like to create a directional sound.
-    // If not set, the sound will be omnidirectional
-    private _coneInnerAngle: number = 360;
-    private _coneOuterAngle: number = 360;
-    private _coneOuterGain: number = 0;
     private _scene: Scene;
     private _connectedTransformNode: Nullable<TransformNode>;
     private _customAttenuationFunction: (currentVolume: number, currentDistance: number, maxDistance: number, refDistance: number, rolloffFactor: number) => number;
     private _registerFunc: Nullable<(connectedMesh: TransformNode) => void>;
     private _isOutputConnected = false;
-    private _htmlAudioElement: Nullable<HTMLAudioElement>;
-    private _urlType: "Unknown" | "String" | "Array" | "ArrayBuffer" | "MediaStream" | "AudioBuffer" | "MediaElement" = "Unknown";
-    private _length?: number;
-    private _offset?: number;
-    private _tryToPlayTimeout: Nullable<ReturnType<typeof setTimeout>>;
-    private _audioUnlockedObserver?: Nullable<Observer<IAudioEngine>>;
-    private _url?: Nullable<string>;
+    private _url: Nullable<string> = null;
+
+    // private readonly _audioEngineV2: AudioEngineV2;
+    private readonly _optionsV2: Partial<IStaticSoundOptions & IStreamingSoundOptions>;
+    private readonly _soundV2: _WebAudioSoundSource | _WebAudioStaticSound | _WebAudioStreamingSound;
+
     /**
      * @internal
      */
      * @param options Objects to provide with the current available options: autoplay, loop, volume, spatialSound, maxDistance, rolloffFactor, refDistance, distanceModel, panningModel, streaming
      */
     constructor(name: string, urlOrArrayBuffer: any, scene?: Nullable<Scene>, readyToPlayCallback: Nullable<() => void> = null, options?: ISoundOptions) {
-        this.name = name;
         scene = scene || EngineStore.LastCreatedScene;
         if (!scene) {
             return;
         }
         this._scene = scene;
         Sound._SceneComponentInitialization(scene);
+        this._readyToPlayCallback = readyToPlayCallback || (() => {});
 
-        this._readyToPlayCallback = readyToPlayCallback;
         // Default custom attenuation function is a linear attenuation
         // eslint-disable-next-line @typescript-eslint/no-unused-vars
         this._customAttenuationFunction = (currentVolume: number, currentDistance: number, maxDistance: number, refDistance: number, rolloffFactor: number) => {
             if (currentDistance < maxDistance) {
                 return currentVolume * (1 - currentDistance / maxDistance);
             } else {
                 return 0;
             }
         };
-        if (options) {
-            this.autoplay = options.autoplay || false;
-            this._loop = options.loop || false;
-            // if volume === 0, we need another way to check this option
-            if (options.volume !== undefined) {
-                this._volume = options.volume;
-            }
-            this._spatialSound = options.spatialSound ?? false;
-            this.maxDistance = options.maxDistance ?? 100;
-            this.useCustomAttenuation = options.useCustomAttenuation ?? false;
-            this.rolloffFactor = options.rolloffFactor || 1;
-            this.refDistance = options.refDistance || 1;
-            this.distanceModel = options.distanceModel || "linear";
-            this._playbackRate = options.playbackRate || 1;
-            this._streaming = options.streaming ?? false;
-            this._length = options.length;
-            this._offset = options.offset;
+
+        options = options || {};
+
+        const optionsV2: Partial<IStaticSoundOptions & IStreamingSoundOptions> = {
+            analyzerEnabled: false,
+            autoplay: options.autoplay || false,
+            duration: options.length || 0,
+            loop: options.loop || false,
+            loopEnd: 0,
+            loopStart: 0,
+            maxInstances: 1,
+            outBus: null,
+            outBusAutoDefault: false,
+            playbackRate: options.playbackRate || 1,
+            pitch: 0,
+            skipCodecCheck: options.skipCodecCheck || false,
+            spatialDistanceModel: options.distanceModel,
+            spatialEnabled: options.spatialSound,
+            spatialMaxDistance: options.maxDistance,
+            spatialMinDistance: options.refDistance,
+            spatialRolloffFactor: options.rolloffFactor,
+            stereoEnabled: false,
+            stereoPan: 0,
+            startOffset: options.offset || 0,
+            volume: options.volume ?? 1,
+        };
+        this._volume = options.volume ?? 1;
+
+        if (_HasSpatialAudioOptions(optionsV2)) {
+            optionsV2.spatialAutoUpdate = false;
+            optionsV2.spatialConeInnerAngle = _SpatialAudioDefaults.coneInnerAngle;
+            optionsV2.spatialConeOuterAngle = _SpatialAudioDefaults.coneOuterAngle;
+            optionsV2.spatialConeOuterVolume = _SpatialAudioDefaults.coneOuterVolume;
+            optionsV2.spatialMinUpdateTime = 0;
+            optionsV2.spatialOrientation = _SpatialAudioDefaults.orientation;
+            optionsV2.spatialPanningModel = (this._scene.headphone ?
"HRTF" : "equalpower") as "equalpower" | "HRTF"; + optionsV2.spatialPosition = _SpatialAudioDefaults.position; + optionsV2.spatialRotation = _SpatialAudioDefaults.rotation; + optionsV2.spatialRotationQuaternion = _SpatialAudioDefaults.rotationQuaternion; } - if (AbstractEngine.audioEngine?.canUseWebAudio && AbstractEngine.audioEngine.audioContext) { - this._soundGain = AbstractEngine.audioEngine.audioContext.createGain(); - this._soundGain.gain.value = this._volume; - this._inputAudioNode = this._soundGain; - this._outputAudioNode = this._soundGain; - if (this._spatialSound) { - this._createSpatialParameters(); - } - this._scene.mainSoundTrack.addSound(this); - let validParameter = true; - - // if no parameter is passed, you need to call setAudioBuffer yourself to prepare the sound - if (urlOrArrayBuffer) { - try { - if (typeof urlOrArrayBuffer === "string") { - this._urlType = "String"; - this._url = urlOrArrayBuffer; - } else if (urlOrArrayBuffer instanceof ArrayBuffer) { - this._urlType = "ArrayBuffer"; - } else if (urlOrArrayBuffer instanceof HTMLMediaElement) { - this._urlType = "MediaElement"; - } else if (urlOrArrayBuffer instanceof MediaStream) { - this._urlType = "MediaStream"; - } else if (urlOrArrayBuffer instanceof AudioBuffer) { - this._urlType = "AudioBuffer"; - } else if (Array.isArray(urlOrArrayBuffer)) { - this._urlType = "Array"; - } - - let urls: string[] = []; - let codecSupportedFound = false; - - switch (this._urlType) { - case "MediaElement": - this._streaming = true; - this._isReadyToPlay = true; - this._streamingSource = AbstractEngine.audioEngine.audioContext.createMediaElementSource(urlOrArrayBuffer); - - if (this.autoplay) { - this.play(0, this._offset, this._length); - } - - if (this._readyToPlayCallback) { - this._readyToPlayCallback(); - } - break; - case "MediaStream": - this._streaming = true; - this._isReadyToPlay = true; - this._streamingSource = AbstractEngine.audioEngine.audioContext.createMediaStreamSource(urlOrArrayBuffer); - - if (this.autoplay) { - this.play(0, this._offset, this._length); - } - - if (this._readyToPlayCallback) { - this._readyToPlayCallback(); - } - break; - case "ArrayBuffer": - if ((urlOrArrayBuffer).byteLength > 0) { - codecSupportedFound = true; - this._soundLoaded(urlOrArrayBuffer); - } - break; - case "AudioBuffer": - this._audioBufferLoaded(urlOrArrayBuffer); - break; - case "String": - urls.push(urlOrArrayBuffer); - // eslint-disable-next-line no-fallthrough - case "Array": - if (urls.length === 0) { - urls = urlOrArrayBuffer; - } - // If we found a supported format, we load it immediately and stop the loop - for (let i = 0; i < urls.length; i++) { - const url = urls[i]; - codecSupportedFound = - (options && options.skipCodecCheck) || - (url.indexOf(".mp3", url.length - 4) !== -1 && AbstractEngine.audioEngine.isMP3supported) || - (url.indexOf(".ogg", url.length - 4) !== -1 && AbstractEngine.audioEngine.isOGGsupported) || - url.indexOf(".wav", url.length - 4) !== -1 || - url.indexOf(".m4a", url.length - 4) !== -1 || - url.indexOf(".mp4", url.length - 4) !== -1 || - url.indexOf("blob:") !== -1; - if (codecSupportedFound) { - // Loading sound - if (!this._streaming) { - this._scene._loadFile( - url, - (data) => { - this._soundLoaded(data as ArrayBuffer); - }, - undefined, - true, - true, - (exception) => { - if (exception) { - Logger.Error("XHR " + exception.status + " error on: " + url + "."); - } - Logger.Error("Sound creation aborted."); - this._scene.mainSoundTrack.removeSound(this); - } - ); - } - // Streaming sound using 
HTML5 Audio tag - else { - this._htmlAudioElement = new Audio(url); - this._htmlAudioElement.controls = false; - this._htmlAudioElement.loop = this.loop; - Tools.SetCorsBehavior(url, this._htmlAudioElement); - this._htmlAudioElement.preload = "auto"; - this._htmlAudioElement.addEventListener( - "canplaythrough", - () => { - this._isReadyToPlay = true; - if (this.autoplay) { - this.play(0, this._offset, this._length); - } - if (this._readyToPlayCallback) { - this._readyToPlayCallback(); - } - }, - { once: true } - ); - document.body.appendChild(this._htmlAudioElement); - this._htmlAudioElement.load(); - } - break; - } - } - break; - default: - validParameter = false; - break; - } - - if (!validParameter) { - Logger.Error("Parameter must be a URL to the sound, an Array of URLs (.mp3 & .ogg) or an ArrayBuffer of the sound."); - } else { - if (!codecSupportedFound) { - this._isReadyToPlay = true; - // Simulating a ready to play event to avoid breaking code path - if (this._readyToPlayCallback) { - setTimeout(() => { - if (this._readyToPlayCallback) { - this._readyToPlayCallback(); - } - }, 1000); - } - } - } - } catch (ex) { - Logger.Error("Unexpected error. Sound creation aborted."); - this._scene.mainSoundTrack.removeSound(this); - } - } - } else { - // Adding an empty sound to avoid breaking audio calls for non Web Audio browsers - this._scene.mainSoundTrack.addSound(this); - if (AbstractEngine.audioEngine && !AbstractEngine.audioEngine.WarnedWebAudioUnsupported) { - Logger.Error("Web Audio is not supported by your browser."); - AbstractEngine.audioEngine.WarnedWebAudioUnsupported = true; - } - // Simulating a ready to play event to avoid breaking code for non web audio browsers - if (this._readyToPlayCallback) { - setTimeout(() => { - if (this._readyToPlayCallback) { - this._readyToPlayCallback(); - } - }, 1000); + this.useCustomAttenuation = options.useCustomAttenuation ?? false; + + let streaming = options?.streaming || false; + + const audioEngineV2 = (AbstractEngine.audioEngine as AudioEngine)._v2; + + const createSoundV2 = () => { + if (streaming) { + const streamingOptionsV2: Partial = { + preloadCount: 0, + ...optionsV2, + }; + + const sound = new _WebAudioStreamingSound(name, audioEngineV2, streamingOptionsV2); + + // eslint-disable-next-line github/no-then + void sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => { + // eslint-disable-next-line github/no-then + void sound.preloadInstancesAsync(1).then(this._onReadyToPlay); + }); + + return sound; } + + const sound = new _WebAudioStaticSound(name, audioEngineV2, optionsV2); + + // eslint-disable-next-line github/no-then + void sound._initAsync(urlOrArrayBuffer, optionsV2).then(this._onReadyToPlay); + + return sound; + }; + + // If no parameter is passed then the setAudioBuffer should be called to prepare the sound. + if (!urlOrArrayBuffer) { + // Create the sound but don't call _initAsync on it, yet. Call it later when `setAudioBuffer` is called. 
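+            // Hypothetical usage sketch of this deferred path (names are illustrative):
+            //     const sound = new Sound("silent", null, scene);
+            //     sound.setAudioBuffer(decodedBuffer); // runs _initAsync and fires the ready callback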
+ this._soundV2 = new _WebAudioStaticSound(name, audioEngineV2, optionsV2); + } else if (typeof urlOrArrayBuffer === "string") { + this._url = urlOrArrayBuffer; + this._soundV2 = createSoundV2(); + } else if (urlOrArrayBuffer instanceof ArrayBuffer) { + streaming = false; + this._soundV2 = createSoundV2(); + } else if (urlOrArrayBuffer instanceof HTMLMediaElement) { + streaming = true; + this._soundV2 = createSoundV2(); + } else if (urlOrArrayBuffer instanceof MediaStream) { + const node = new MediaStreamAudioSourceNode(audioEngineV2._audioContext, { mediaStream: urlOrArrayBuffer }); + this._soundV2 = new _WebAudioSoundSource(name, node, audioEngineV2, optionsV2); + // eslint-disable-next-line github/no-then + void this._soundV2._initAsync(optionsV2).then(this._onReadyToPlay); + } else if (urlOrArrayBuffer instanceof AudioBuffer) { + streaming = false; + this._soundV2 = createSoundV2(); + } else if (Array.isArray(urlOrArrayBuffer)) { + this._soundV2 = createSoundV2(); + } + + this._optionsV2 = optionsV2; + + if (!this._soundV2) { + Logger.Error("Parameter must be a URL to the sound, an Array of URLs (.mp3 & .ogg) or an ArrayBuffer of the sound."); + return; + } + + if (!(this._soundV2 instanceof _WebAudioSoundSource)) { + this._soundV2.onEndedObservable.add(this._onended); } } + private _onReadyToPlay = () => { + this._scene.mainSoundTrack.addSound(this); + this._isReadyToPlay = true; + this._readyToPlayCallback(); + }; + /** * Release the sound and its associated resources */ public dispose() { - if (AbstractEngine.audioEngine?.canUseWebAudio) { - if (this.isPlaying) { - this.stop(); - } - this._isReadyToPlay = false; - if (this.soundTrackId === -1) { - this._scene.mainSoundTrack.removeSound(this); - } else if (this._scene.soundTracks) { - this._scene.soundTracks[this.soundTrackId].removeSound(this); - } - if (this._soundGain) { - this._soundGain.disconnect(); - this._soundGain = null; - } - if (this._soundPanner) { - this._soundPanner.disconnect(); - this._soundPanner = null; - } - if (this._soundSource) { - this._soundSource.disconnect(); - this._soundSource = null; - } - this._audioBuffer = null; - - if (this._htmlAudioElement) { - this._htmlAudioElement.pause(); - this._htmlAudioElement.src = ""; - document.body.removeChild(this._htmlAudioElement); - this._htmlAudioElement = null; - } - - if (this._streamingSource) { - this._streamingSource.disconnect(); - this._streamingSource = null; - } - - if (this._connectedTransformNode && this._registerFunc) { - this._connectedTransformNode.unregisterAfterWorldMatrixUpdate(this._registerFunc); - this._connectedTransformNode = null; - } + if (this.isPlaying) { + this.stop(); + } + this._isReadyToPlay = false; + if (this.soundTrackId === -1) { + this._scene.mainSoundTrack.removeSound(this); + } else if (this._scene.soundTracks) { + this._scene.soundTracks[this.soundTrackId].removeSound(this); + } + if (this._soundGain) { + this._soundGain.disconnect(); + this._soundGain = null; + } - this._clearTimeoutsAndObservers(); + if (this._connectedTransformNode && this._registerFunc) { + this._connectedTransformNode.unregisterAfterWorldMatrixUpdate(this._registerFunc); + this._connectedTransformNode = null; } + + this._soundV2.dispose(); } /** @@ -480,44 +396,18 @@ export class Sound { return "Sound"; } - private _audioBufferLoaded(buffer: AudioBuffer) { - if (!AbstractEngine.audioEngine?.audioContext) { - return; - } - this._audioBuffer = buffer; - this._isReadyToPlay = true; - if (this.autoplay) { - this.play(0, this._offset, this._length); - } - if 
(this._readyToPlayCallback) { - this._readyToPlayCallback(); - } - } - - private _soundLoaded(audioData: ArrayBuffer) { - if (!AbstractEngine.audioEngine?.audioContext) { - return; - } - // eslint-disable-next-line @typescript-eslint/no-floating-promises - AbstractEngine.audioEngine.audioContext.decodeAudioData( - audioData, - (buffer) => { - this._audioBufferLoaded(buffer); - }, - (err: any) => { - Logger.Error("Error while decoding audio data for: " + this.name + " / Error: " + err); - } - ); - } - /** * Sets the data of the sound from an audiobuffer * @param audioBuffer The audioBuffer containing the data */ public setAudioBuffer(audioBuffer: AudioBuffer): void { - if (AbstractEngine.audioEngine?.canUseWebAudio) { - this._audioBuffer = audioBuffer; - this._isReadyToPlay = true; + if (this._isReadyToPlay) { + return; + } + + if (this._soundV2 instanceof _WebAudioStaticSound) { + // eslint-disable-next-line @typescript-eslint/no-floating-promises, github/no-then + this._soundV2._initAsync(audioBuffer, this._optionsV2).then(this._onReadyToPlay); } } @@ -533,107 +423,76 @@ export class Sound { this.rolloffFactor = options.rolloffFactor ?? this.rolloffFactor; this.refDistance = options.refDistance ?? this.refDistance; this.distanceModel = options.distanceModel ?? this.distanceModel; - this._playbackRate = options.playbackRate ?? this._playbackRate; - this._length = options.length ?? undefined; - this.spatialSound = options.spatialSound ?? this._spatialSound; - this._setOffset(options.offset ?? undefined); - this.setVolume(options.volume ?? this._volume); - this._updateSpatialParameters(); - if (this.isPlaying) { - if (this._streaming && this._htmlAudioElement) { - this._htmlAudioElement.playbackRate = this._playbackRate; - if (this._htmlAudioElement.loop !== this.loop) { - this._htmlAudioElement.loop = this.loop; - } - } else { - if (this._soundSource) { - this._soundSource.playbackRate.value = this._playbackRate; - if (this._soundSource.loop !== this.loop) { - this._soundSource.loop = this.loop; - } - if (this._offset !== undefined && this._soundSource.loopStart !== this._offset) { - this._soundSource.loopStart = this._offset; - } - if (this._length !== undefined && this._length !== this._soundSource.loopEnd) { - this._soundSource.loopEnd = (this._offset! | 0) + this._length; - } - } - } + if (options.playbackRate !== undefined) { + this.setPlaybackRate(options.playbackRate); } - } - } - - private _createSpatialParameters() { - if (AbstractEngine.audioEngine?.canUseWebAudio && AbstractEngine.audioEngine.audioContext) { - if (this._scene.headphone) { - this._panningModel = "HRTF"; + if (options.spatialSound !== undefined) { + this.spatialSound = options.spatialSound; } - this._soundPanner = this._soundPanner ?? 
AbstractEngine.audioEngine.audioContext.createPanner(); - if (this._soundPanner && this._outputAudioNode) { - this._updateSpatialParameters(); - this._soundPanner.connect(this._outputAudioNode); - this._inputAudioNode = this._soundPanner; + if (options.volume !== undefined) { + this.setVolume(options.volume); + } + if (this._soundV2 instanceof _WebAudioStaticSound) { + let updated = false; + if (options.offset !== undefined) { + this._optionsV2.startOffset = options.offset; + updated = true; + } + if (options.length !== undefined) { + this._soundV2.duration = options.length; + updated = true; + } + if (updated && this.isPaused) { + this.stop(); + } } - } - } - private _disableSpatialSound() { - if (!this._spatialSound) { - return; + this._updateSpatialParameters(); } - this._inputAudioNode = this._soundGain; - this._soundPanner?.disconnect(); - this._soundPanner = null; - this._spatialSound = false; } private _updateSpatialParameters() { - if (!this._spatialSound) { + if (!this.spatialSound) { return; } - if (this._soundPanner) { - if (this.useCustomAttenuation) { - // Tricks to disable in a way embedded Web Audio attenuation - this._soundPanner.distanceModel = "linear"; - this._soundPanner.maxDistance = Number.MAX_VALUE; - this._soundPanner.refDistance = 1; - this._soundPanner.rolloffFactor = 1; - this._soundPanner.panningModel = this._panningModel as any; - } else { - this._soundPanner.distanceModel = this.distanceModel as any; - this._soundPanner.maxDistance = this.maxDistance; - this._soundPanner.refDistance = this.refDistance; - this._soundPanner.rolloffFactor = this.rolloffFactor; - this._soundPanner.panningModel = this._panningModel as any; - } + + const spatial = this._soundV2.spatial; + + if (this.useCustomAttenuation) { + // Disable WebAudio attenuation. + spatial.distanceModel = "linear"; + spatial.minDistance = 1; + spatial.maxDistance = Number.MAX_VALUE; + spatial.rolloffFactor = 1; + spatial.panningModel = "equalpower"; } else { - this._createSpatialParameters(); + spatial.distanceModel = this.distanceModel; + spatial.minDistance = this.refDistance; + spatial.maxDistance = this.maxDistance; + spatial.rolloffFactor = this.rolloffFactor; + spatial.panningModel = this._optionsV2.spatialPanningModel || "equalpower"; } } /** * Switch the panning model to HRTF: * Renders a stereo output of higher quality than equalpower — it uses a convolution with measured impulse responses from human subjects. - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound + * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ public switchPanningModelToHRTF() { - this._panningModel = "HRTF"; - this._switchPanningModel(); + if (this.spatialSound) { + this._soundV2.spatial.panningModel = "HRTF"; + } } /** * Switch the panning model to Equal Power: * Represents the equal-power panning algorithm, generally regarded as simple and efficient. equalpower is the default value. 
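+     * Note: with the new engine this call only takes effect while spatialSound is enabled.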
- * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-a-spatial-3d-sound + * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ public switchPanningModelToEqualPower() { - this._panningModel = "equalpower"; - this._switchPanningModel(); - } - - private _switchPanningModel() { - if (AbstractEngine.audioEngine?.canUseWebAudio && this._spatialSound && this._soundPanner) { - this._soundPanner.panningModel = this._panningModel as any; + if (this.spatialSound) { + this._soundV2.spatial.panningModel = "equalpower"; } } @@ -642,11 +501,12 @@ export class Sound { * @param soundTrackAudioNode the sound track audio node to connect to */ public connectToSoundTrackAudioNode(soundTrackAudioNode: AudioNode): void { - if (AbstractEngine.audioEngine?.canUseWebAudio && this._outputAudioNode) { + const outputNode = this._soundV2._outNode; + if (outputNode) { if (this._isOutputConnected) { - this._outputAudioNode.disconnect(); + outputNode.disconnect(); } - this._outputAudioNode.connect(soundTrackAudioNode); + outputNode.connect(soundTrackAudioNode); this._isOutputConnected = true; } } @@ -662,14 +522,20 @@ export class Sound { Logger.Error("setDirectionalCone(): outer angle of the cone must be superior or equal to the inner angle."); return; } - this._coneInnerAngle = coneInnerAngle; - this._coneOuterAngle = coneOuterAngle; - this._coneOuterGain = coneOuterGain; + + this._optionsV2.spatialConeInnerAngle = D2r(coneInnerAngle); + this._optionsV2.spatialConeOuterAngle = D2r(coneOuterAngle); + this._optionsV2.spatialConeOuterVolume = coneOuterGain; + + this._soundV2.spatial.coneInnerAngle = this._optionsV2.spatialConeInnerAngle; + this._soundV2.spatial.coneOuterAngle = this._optionsV2.spatialConeOuterAngle; + this._soundV2.spatial.coneOuterVolume = coneOuterGain; + this._isDirectional = true; if (this.isPlaying && this.loop) { this.stop(); - this.play(0, this._offset, this._length); + this.play(0, this._optionsV2.startOffset, this._optionsV2.duration); } } @@ -677,22 +543,23 @@ export class Sound { * Gets or sets the inner angle for the directional cone. */ public get directionalConeInnerAngle(): number { - return this._coneInnerAngle; + return R2d(typeof this._optionsV2.spatialConeInnerAngle === "number" ? this._optionsV2.spatialConeInnerAngle : _SpatialAudioDefaults.coneInnerAngle); } /** * Gets or sets the inner angle for the directional cone. */ public set directionalConeInnerAngle(value: number) { - if (value != this._coneInnerAngle) { - if (this._coneOuterAngle < value) { + value = D2r(value); + + if (value != this._optionsV2.spatialConeInnerAngle) { + if (this.directionalConeOuterAngle < value) { Logger.Error("directionalConeInnerAngle: outer angle of the cone must be superior or equal to the inner angle."); return; } - - this._coneInnerAngle = value; - if (AbstractEngine.audioEngine?.canUseWebAudio && this._spatialSound && this._soundPanner) { - this._soundPanner.coneInnerAngle = this._coneInnerAngle; + this._optionsV2.spatialConeInnerAngle = value; + if (this.spatialSound) { + this._soundV2.spatial.coneInnerAngle = value; } } } @@ -701,22 +568,23 @@ export class Sound { * Gets or sets the outer angle for the directional cone. */ public get directionalConeOuterAngle(): number { - return this._coneOuterAngle; + return R2d(typeof this._optionsV2.spatialConeOuterAngle === "number" ? this._optionsV2.spatialConeOuterAngle : _SpatialAudioDefaults.coneOuterAngle); } /** * Gets or sets the outer angle for the directional cone. 
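+     * The angle is given in degrees and stored internally in radians (see the D2r/R2d helpers above).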
*/ public set directionalConeOuterAngle(value: number) { - if (value != this._coneOuterAngle) { - if (value < this._coneInnerAngle) { + value = D2r(value); + + if (value != this._optionsV2.spatialConeOuterAngle) { + if (value < this.directionalConeInnerAngle) { Logger.Error("directionalConeOuterAngle: outer angle of the cone must be superior or equal to the inner angle."); return; } - - this._coneOuterAngle = value; - if (AbstractEngine.audioEngine?.canUseWebAudio && this._spatialSound && this._soundPanner) { - this._soundPanner.coneOuterAngle = this._coneOuterAngle; + this._optionsV2.spatialConeOuterAngle = value; + if (this.spatialSound) { + this._soundV2.spatial.coneOuterAngle = value; } } } @@ -726,22 +594,15 @@ export class Sound { * @param newPosition Defines the new position */ public setPosition(newPosition: Vector3): void { - if (newPosition.equals(this._position)) { + if (this._optionsV2.spatialPosition && newPosition.equals(this._optionsV2.spatialPosition)) { return; } - this._position.copyFrom(newPosition); - - if ( - AbstractEngine.audioEngine?.canUseWebAudio && - this._spatialSound && - this._soundPanner && - !isNaN(this._position.x) && - !isNaN(this._position.y) && - !isNaN(this._position.z) - ) { - this._soundPanner.positionX.value = this._position.x; - this._soundPanner.positionY.value = this._position.y; - this._soundPanner.positionZ.value = this._position.z; + if (!this._optionsV2.spatialPosition) { + this._optionsV2.spatialPosition = Vector3.Zero(); + } + this._optionsV2.spatialPosition.copyFrom(newPosition); + if (this.spatialSound && !isNaN(newPosition.x) && !isNaN(newPosition.y) && !isNaN(newPosition.z)) { + this._soundV2.spatial.position = newPosition; } } @@ -752,38 +613,37 @@ export class Sound { public setLocalDirectionToMesh(newLocalDirection: Vector3): void { this._localDirection = newLocalDirection; - if (AbstractEngine.audioEngine?.canUseWebAudio && this._connectedTransformNode && this.isPlaying) { + if (this._connectedTransformNode && this.isPlaying) { this._updateDirection(); } } private _updateDirection() { - if (!this._connectedTransformNode || !this._soundPanner) { + if (!this._connectedTransformNode || !this.spatialSound) { return; } const mat = this._connectedTransformNode.getWorldMatrix(); const direction = Vector3.TransformNormal(this._localDirection, mat); direction.normalize(); - this._soundPanner.orientationX.value = direction.x; - this._soundPanner.orientationY.value = direction.y; - this._soundPanner.orientationZ.value = direction.z; + + this._soundV2.spatial.orientation = direction; } /** @internal */ public updateDistanceFromListener() { - if (AbstractEngine.audioEngine?.canUseWebAudio && this._connectedTransformNode && this.useCustomAttenuation && this._soundGain && this._scene.activeCamera) { + if (this._soundV2._outNode && this._connectedTransformNode && this.useCustomAttenuation && this._soundGain && this._scene.activeCamera) { const distance = this._scene.audioListenerPositionProvider ? this._connectedTransformNode.position.subtract(this._scene.audioListenerPositionProvider()).length() : this._connectedTransformNode.getDistanceToCamera(this._scene.activeCamera); - this._soundGain.gain.value = this._customAttenuationFunction(this._volume, distance, this.maxDistance, this.refDistance, this.rolloffFactor); + this._soundV2.volume = this._customAttenuationFunction(this._volume, distance, this.maxDistance, this.refDistance, this.rolloffFactor); } } /** * Sets a new custom attenuation function for the sound. 
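+     * For example, a quadratic falloff could be supplied as:
+     *     (volume, distance, maxDistance) => volume * Math.max(0, 1 - distance / maxDistance) ** 2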
* @param callback Defines the function used for the attenuation - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#creating-your-own-custom-attenuation-function + * @see https://doc.babylonjs.com/legacy/audio#creating-your-own-custom-attenuation-function */ public setAttenuationFunction(callback: (currentVolume: number, currentDistance: number, maxDistance: number, refDistance: number, rolloffFactor: number) => number): void { this._customAttenuationFunction = callback; @@ -796,222 +656,64 @@ export class Sound { * @param length (optional) Sound duration (in seconds) */ public play(time?: number, offset?: number, length?: number): void { - if (this._isReadyToPlay && this._scene.audioEnabled && AbstractEngine.audioEngine?.audioContext) { + AbstractEngine.audioEngine?.unlock(); + + // WebAudio sound sources have no `play` function because they are always playing. + if (this._soundV2 instanceof _WebAudioSoundSource) { + return; + } + + if (this._isReadyToPlay && this._scene.audioEnabled) { + // The sound can only resume from pause when the `time`, `offset` and `length` args are not set. + if (this._soundV2.state === SoundState.Paused && (time !== undefined || offset !== undefined || length !== undefined)) { + this._soundV2.stop(); + } + try { - this._clearTimeoutsAndObservers(); - - let startTime = time ? AbstractEngine.audioEngine?.audioContext.currentTime + time : AbstractEngine.audioEngine?.audioContext.currentTime; - if (!this._soundSource || !this._streamingSource) { - if (this._spatialSound && this._soundPanner) { - if (!isNaN(this._position.x) && !isNaN(this._position.y) && !isNaN(this._position.z)) { - this._soundPanner.positionX.value = this._position.x; - this._soundPanner.positionY.value = this._position.y; - this._soundPanner.positionZ.value = this._position.z; - } - if (this._isDirectional) { - this._soundPanner.coneInnerAngle = this._coneInnerAngle; - this._soundPanner.coneOuterAngle = this._coneOuterAngle; - this._soundPanner.coneOuterGain = this._coneOuterGain; - if (this._connectedTransformNode) { - this._updateDirection(); - } else { - this._soundPanner.setOrientation(this._localDirection.x, this._localDirection.y, this._localDirection.z); - } - } - } - } - if (this._streaming) { - if (!this._streamingSource && this._htmlAudioElement) { - this._streamingSource = AbstractEngine.audioEngine.audioContext.createMediaElementSource(this._htmlAudioElement); - this._htmlAudioElement.onended = () => { - this._onended(); - }; - this._htmlAudioElement.playbackRate = this._playbackRate; - } - if (this._streamingSource) { - this._streamingSource.disconnect(); - if (this._inputAudioNode) { - this._streamingSource.connect(this._inputAudioNode); - } - } - if (this._htmlAudioElement) { - // required to manage properly the new suspended default state of Chrome - // When the option 'streaming: true' is used, we need first to wait for - // the audio engine to be unlocked by a user gesture before trying to play - // an HTML Audio element - const tryToPlay = () => { - if (AbstractEngine.audioEngine?.unlocked) { - if (!this._htmlAudioElement) { - return; - } - - this._htmlAudioElement.currentTime = offset ?? 0; - const playPromise = this._htmlAudioElement.play(); - - // In browsers that don’t yet support this functionality, - // playPromise won’t be defined. - if (playPromise !== undefined) { - // eslint-disable-next-line github/no-then - playPromise.catch(() => { - // Automatic playback failed. 
- // Waiting for the audio engine to be unlocked by user click on unmute - AbstractEngine.audioEngine?.lock(); - if (this.loop || this.autoplay) { - this._audioUnlockedObserver = AbstractEngine.audioEngine?.onAudioUnlockedObservable.addOnce(() => { - tryToPlay(); - }); - } - }); - } - } else { - if (this.loop || this.autoplay) { - this._audioUnlockedObserver = AbstractEngine.audioEngine?.onAudioUnlockedObservable.addOnce(() => { - tryToPlay(); - }); - } - } - }; - tryToPlay(); - } - } else { - const tryToPlay = () => { - if (AbstractEngine.audioEngine?.audioContext) { - length = length || this._length; - - if (offset !== undefined) { - this._setOffset(offset); - } - - if (this._soundSource) { - const oldSource = this._soundSource; - oldSource.onended = () => { - oldSource.disconnect(); - }; - } - this._soundSource = AbstractEngine.audioEngine?.audioContext.createBufferSource(); - if (this._soundSource && this._inputAudioNode) { - this._soundSource.buffer = this._audioBuffer; - this._soundSource.connect(this._inputAudioNode); - this._soundSource.loop = this.loop; - if (offset !== undefined) { - this._soundSource.loopStart = offset; - } - if (length !== undefined) { - this._soundSource.loopEnd = (offset! | 0) + length; - } - this._soundSource.playbackRate.value = this._playbackRate; - this._soundSource.onended = () => { - this._onended(); - }; - startTime = time ? AbstractEngine.audioEngine?.audioContext.currentTime + time : AbstractEngine.audioEngine.audioContext.currentTime; - const actualOffset = ((this.isPaused ? this.currentTime : 0) + (this._offset ?? 0)) % this._soundSource.buffer!.duration; - this._soundSource.start(startTime, actualOffset, this.loop ? undefined : length); - } - } - }; - - if (AbstractEngine.audioEngine?.audioContext.state === "suspended") { - // Wait a bit for FF as context seems late to be ready. - this._tryToPlayTimeout = setTimeout(() => { - if (AbstractEngine.audioEngine?.audioContext!.state === "suspended") { - // Automatic playback failed. - // Waiting for the audio engine to be unlocked by user click on unmute - AbstractEngine.audioEngine.lock(); - if (this.loop || this.autoplay) { - this._audioUnlockedObserver = AbstractEngine.audioEngine.onAudioUnlockedObservable.addOnce(() => { - tryToPlay(); - }); - } - } else { - tryToPlay(); - } - }, 500); - } else { - tryToPlay(); - } - } - this._startTime = startTime; - this.isPlaying = true; - this.isPaused = false; + TmpPlayOptions.duration = length || 0; + TmpPlayOptions.startOffset = offset !== undefined ? offset || this._optionsV2.startOffset! : this._optionsV2.startOffset!; + TmpPlayOptions.waitTime = time || 0; + this._soundV2.play(TmpPlayOptions); } catch (ex) { Logger.Error("Error while trying to play audio: " + this.name + ", " + ex.message); } } } - private _onended() { - this.isPlaying = false; - this._startTime = 0; - this._currentTime = 0; + private _onended = () => { if (this.onended) { this.onended(); } this.onEndedObservable.notifyObservers(this); - } + }; /** * Stop the sound * @param time (optional) Stop the sound after X seconds. Stop immediately (0) by default. 
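+     * The delay is forwarded to the new engine as the stop options' waitTime, e.g. sound.stop(2) stops two seconds from now.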
*/ public stop(time?: number): void { - if (this.isPlaying) { - this._clearTimeoutsAndObservers(); - if (this._streaming) { - if (this._htmlAudioElement) { - this._htmlAudioElement.pause(); - // Test needed for Firefox or it will generate an Invalid State Error - if (this._htmlAudioElement.currentTime > 0) { - this._htmlAudioElement.currentTime = 0; - } - } else { - this._streamingSource?.disconnect(); - } - this.isPlaying = false; - } else if (AbstractEngine.audioEngine?.audioContext && this._soundSource) { - const stopTime = time ? AbstractEngine.audioEngine.audioContext.currentTime + time : undefined; - this._soundSource.onended = () => { - this.isPlaying = false; - this.isPaused = false; - this._startTime = 0; - this._currentTime = 0; - if (this._soundSource) { - this._soundSource.onended = () => void 0; - } - this._onended(); - }; - this._soundSource.stop(stopTime); - } else { - this.isPlaying = false; - } - } else if (this.isPaused) { - this.isPaused = false; - this._startTime = 0; - this._currentTime = 0; + // WebAudio sound sources have no `stop` function because they are always playing. + if (this._soundV2 instanceof _WebAudioSoundSource) { + return; } + + TmpStopOptions.waitTime = time || 0; + this._soundV2.stop(TmpStopOptions); + + // Set autoplay to `false` so `isPlaying` correctly returns `false`. + this._optionsV2.autoplay = false; } /** * Put the sound in pause */ public pause(): void { - if (this.isPlaying) { - this._clearTimeoutsAndObservers(); - if (this._streaming) { - if (this._htmlAudioElement) { - this._htmlAudioElement.pause(); - } else { - this._streamingSource?.disconnect(); - } - this.isPlaying = false; - this.isPaused = true; - } else if (AbstractEngine.audioEngine?.audioContext && this._soundSource) { - this._soundSource.onended = () => void 0; - this._soundSource.stop(); - this.isPlaying = false; - this.isPaused = true; - this._currentTime += AbstractEngine.audioEngine.audioContext.currentTime - this._startTime; - } + // WebAudio sound sources have no `pause` function because they are always playing. 
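+        // (For example, the MediaStream-backed sources created for microphone input.)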
+ if (this._soundV2 instanceof _WebAudioSoundSource) { + return; } + + this._soundV2.pause(); } /** @@ -1020,15 +722,8 @@ export class Sound { * @param time Define time for gradual change to new volume */ public setVolume(newVolume: number, time?: number): void { - if (AbstractEngine.audioEngine?.canUseWebAudio && this._soundGain) { - if (time && AbstractEngine.audioEngine.audioContext) { - this._soundGain.gain.cancelScheduledValues(AbstractEngine.audioEngine.audioContext.currentTime); - this._soundGain.gain.setValueAtTime(this._soundGain.gain.value, AbstractEngine.audioEngine.audioContext.currentTime); - this._soundGain.gain.linearRampToValueAtTime(newVolume, AbstractEngine.audioEngine.audioContext.currentTime + time); - } else { - this._soundGain.gain.value = newVolume; - } - } + TmpRampOptions.duration = time || 0; + this._soundV2.setVolume(newVolume, TmpRampOptions); this._volume = newVolume; } @@ -1037,13 +732,8 @@ export class Sound { * @param newPlaybackRate Define the playback rate the sound should be played at */ public setPlaybackRate(newPlaybackRate: number): void { - this._playbackRate = newPlaybackRate; - if (this.isPlaying) { - if (this._streaming && this._htmlAudioElement) { - this._htmlAudioElement.playbackRate = this._playbackRate; - } else if (this._soundSource) { - this._soundSource.playbackRate.value = this._playbackRate; - } + if (this._soundV2 instanceof _WebAudioStaticSound) { + this._soundV2.playbackRate = newPlaybackRate; } } @@ -1052,7 +742,11 @@ export class Sound { * @returns the play back rate of the sound */ public getPlaybackRate(): number { - return this._playbackRate; + if (this._soundV2 instanceof _WebAudioStaticSound) { + return this._soundV2.playbackRate; + } + + return 1; } /** @@ -1066,7 +760,7 @@ export class Sound { /** * Attach the sound to a dedicated mesh * @param transformNode The transform node to connect the sound with - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#attaching-a-sound-to-a-mesh + * @see https://doc.babylonjs.com/legacy/audio#attaching-a-sound-to-a-mesh */ public attachToMesh(transformNode: TransformNode): void { if (this._connectedTransformNode && this._registerFunc) { @@ -1074,12 +768,11 @@ export class Sound { this._registerFunc = null; } this._connectedTransformNode = transformNode; - if (!this._spatialSound) { - this._spatialSound = true; - this._createSpatialParameters(); + if (!this.spatialSound) { + this.spatialSound = true; if (this.isPlaying && this.loop) { this.stop(); - this.play(0, this._offset, this._length); + this.play(0, this._optionsV2.startOffset, this._optionsV2.duration); } } this._onRegisterAfterWorldMatrixUpdate(this._connectedTransformNode); @@ -1089,7 +782,7 @@ export class Sound { /** * Detach the sound from the previously attached mesh - * @see https://doc.babylonjs.com/features/featuresDeepDive/audio/playingSoundsMusic#attaching-a-sound-to-a-mesh + * @see https://doc.babylonjs.com/legacy/audio#attaching-a-sound-to-a-mesh */ public detachFromMesh() { if (this._connectedTransformNode && this._registerFunc) { @@ -1107,7 +800,7 @@ export class Sound { const boundingInfo = mesh.getBoundingInfo(); this.setPosition(boundingInfo.boundingSphere.centerWorld); } - if (AbstractEngine.audioEngine?.canUseWebAudio && this._isDirectional && this.isPlaying) { + if (this._isDirectional && this.isPlaying) { this._updateDirection(); } } @@ -1117,48 +810,27 @@ export class Sound { * @returns the new sound clone */ public clone(): Nullable { - if (!this._streaming) { - const 
setBufferAndRun = () => { - _RetryWithInterval( - () => this._isReadyToPlay, - () => { - clonedSound._audioBuffer = this.getAudioBuffer(); - clonedSound._isReadyToPlay = true; - if (clonedSound.autoplay) { - clonedSound.play(0, this._offset, this._length); - } - }, - undefined, - 300 - ); - }; - - const currentOptions = { - autoplay: this.autoplay, - loop: this.loop, - volume: this._volume, - spatialSound: this._spatialSound, - maxDistance: this.maxDistance, - useCustomAttenuation: this.useCustomAttenuation, - rolloffFactor: this.rolloffFactor, - refDistance: this.refDistance, - distanceModel: this.distanceModel, - }; - - const clonedSound = new Sound(this.name + "_cloned", new ArrayBuffer(0), this._scene, null, currentOptions); - if (this.useCustomAttenuation) { - clonedSound.setAttenuationFunction(this._customAttenuationFunction); - } - clonedSound.setPosition(this._position); - clonedSound.setPlaybackRate(this._playbackRate); - setBufferAndRun(); - - return clonedSound; - } - // Can't clone a streaming sound - else { + if (!(this._soundV2 instanceof _WebAudioStaticSound)) { return null; } + + const currentOptions: ISoundOptions = { + autoplay: this.autoplay, + loop: this.loop, + volume: this._volume, + spatialSound: this.spatialSound, + maxDistance: this.maxDistance, + useCustomAttenuation: this.useCustomAttenuation, + rolloffFactor: this.rolloffFactor, + refDistance: this.refDistance, + distanceModel: this.distanceModel, + }; + const clonedSound = new Sound(this.name + "_cloned", this._soundV2.buffer, this._scene, null, currentOptions); + (clonedSound._optionsV2 as any) = this._optionsV2; + if (this.useCustomAttenuation) { + clonedSound.setAttenuationFunction(this._customAttenuationFunction); + } + return clonedSound; } /** @@ -1166,7 +838,10 @@ export class Sound { * @returns the audio buffer */ public getAudioBuffer(): Nullable { - return this._audioBuffer; + if (this._soundV2 instanceof _WebAudioStaticSound) { + return this._soundV2.buffer._audioBuffer; + } + return null; } /** @@ -1174,7 +849,8 @@ export class Sound { * @returns the source node */ public getSoundSource(): Nullable { - return this._soundSource; + // return this._soundSource; + return null; } /** @@ -1196,31 +872,31 @@ export class Sound { autoplay: this.autoplay, loop: this.loop, volume: this._volume, - spatialSound: this._spatialSound, + spatialSound: this.spatialSound, maxDistance: this.maxDistance, rolloffFactor: this.rolloffFactor, refDistance: this.refDistance, distanceModel: this.distanceModel, - playbackRate: this._playbackRate, - panningModel: this._panningModel, + playbackRate: this.getPlaybackRate(), + panningModel: this._soundV2.spatial.panningModel, soundTrackId: this.soundTrackId, metadata: this.metadata, }; - if (this._spatialSound) { + if (this.spatialSound) { if (this._connectedTransformNode) { serializationObject.connectedMeshId = this._connectedTransformNode.id; } - serializationObject.position = this._position.asArray(); + serializationObject.position = this._soundV2.spatial.position.asArray(); serializationObject.refDistance = this.refDistance; serializationObject.distanceModel = this.distanceModel; serializationObject.isDirectional = this._isDirectional; serializationObject.localDirectionToMesh = this._localDirection.asArray(); - serializationObject.coneInnerAngle = this._coneInnerAngle; - serializationObject.coneOuterAngle = this._coneOuterAngle; - serializationObject.coneOuterGain = this._coneOuterGain; + serializationObject.coneInnerAngle = this.directionalConeInnerAngle; + 
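+            // Cone angles are serialized in degrees; the getters convert back from the radians stored internally.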
serializationObject.coneOuterAngle = this.directionalConeOuterAngle; + serializationObject.coneOuterGain = this._soundV2.spatial.coneOuterVolume; } return serializationObject; @@ -1237,13 +913,11 @@ export class Sound { public static Parse(parsedSound: any, scene: Scene, rootUrl: string, sourceSound?: Sound): Sound { const soundName = parsedSound.name; let soundUrl; - if (parsedSound.url) { soundUrl = rootUrl + parsedSound.url; } else { soundUrl = rootUrl + soundName; } - const options = { autoplay: parsedSound.autoplay, loop: parsedSound.loop, @@ -1255,9 +929,7 @@ export class Sound { distanceModel: parsedSound.distanceModel, playbackRate: parsedSound.playbackRate, }; - let newSound: Sound; - if (!sourceSound) { newSound = new Sound( soundName, @@ -1274,21 +946,22 @@ export class Sound { _RetryWithInterval( () => sourceSound._isReadyToPlay, () => { - newSound._audioBuffer = sourceSound.getAudioBuffer(); + const audioBuffer = sourceSound.getAudioBuffer(); + if (audioBuffer) { + newSound.setAudioBuffer(audioBuffer); + } newSound._isReadyToPlay = true; if (newSound.autoplay) { - newSound.play(0, newSound._offset, newSound._length); + newSound.play(0, sourceSound._optionsV2.startOffset, sourceSound._optionsV2.duration); } }, undefined, 300 ); }; - newSound = new Sound(soundName, new ArrayBuffer(0), scene, null, options); setBufferAndRun(); } - if (parsedSound.position) { const soundPosition = Vector3.FromArray(parsedSound.position); newSound.setPosition(soundPosition); @@ -1306,35 +979,11 @@ export class Sound { newSound.attachToMesh(connectedMesh); } } - if (parsedSound.metadata) { newSound.metadata = parsedSound.metadata; } - return newSound; } - - private _setOffset(value?: number) { - if (this._offset === value) { - return; - } - if (this.isPaused) { - this.stop(); - this.isPaused = false; - } - this._offset = value; - } - - private _clearTimeoutsAndObservers() { - if (this._tryToPlayTimeout) { - clearTimeout(this._tryToPlayTimeout); - this._tryToPlayTimeout = null; - } - if (this._audioUnlockedObserver) { - AbstractEngine.audioEngine?.onAudioUnlockedObservable.remove(this._audioUnlockedObserver); - this._audioUnlockedObserver = null; - } - } } // Register Class Name diff --git a/packages/dev/core/src/AudioV2/abstractAudio/abstractAudioOutNode.ts b/packages/dev/core/src/AudioV2/abstractAudio/abstractAudioOutNode.ts index 49802508e35..4492163b765 100644 --- a/packages/dev/core/src/AudioV2/abstractAudio/abstractAudioOutNode.ts +++ b/packages/dev/core/src/AudioV2/abstractAudio/abstractAudioOutNode.ts @@ -25,7 +25,7 @@ export abstract class AbstractAudioOutNode extends AbstractNamedAudioNode { } /** - * The analyzer features of the bus. + * The audio analyzer features. */ public get analyzer(): AbstractAudioAnalyzer { return this._analyzer ?? (this._analyzer = new _AudioAnalyzer(this._subGraph)); @@ -34,7 +34,6 @@ export abstract class AbstractAudioOutNode extends AbstractNamedAudioNode { /** * The audio output volume. 
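+     *
+     * @example
+     * // Illustrative sketch only (not part of this change; assumes the AudioV2 ramp options
+     * // from audioParameter): fade the node to silence over two seconds.
+     * node.setVolume(0, { duration: 2, shape: AudioParameterRampShape.Linear });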
     */
-
     public get volume(): number {
         return _GetVolumeAudioProperty(this._subGraph, "volume");
     }
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/abstractSound.ts b/packages/dev/core/src/AudioV2/abstractAudio/abstractSound.ts
index e2143b5fdb3..d79241e6ffc 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/abstractSound.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/abstractSound.ts
@@ -63,8 +63,8 @@ export abstract class AbstractSound extends AbstractSoundSource {
      */
     public readonly onEndedObservable = new Observable<AbstractSound>();
 
-    protected constructor(name: string, engine: AudioEngineV2) {
-        super(name, engine, AudioNodeType.HAS_INPUTS_AND_OUTPUTS); // Inputs are for instances.
+    protected constructor(name: string, engine: AudioEngineV2, options: Partial<IAbstractSoundOptions>) {
+        super(name, engine, options, AudioNodeType.HAS_INPUTS_AND_OUTPUTS); // Inputs are for instances.
     }
 
     /**
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/abstractSoundSource.ts b/packages/dev/core/src/AudioV2/abstractAudio/abstractSoundSource.ts
index f7bb30ee596..f1cd127abe0 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/abstractSoundSource.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/abstractSoundSource.ts
@@ -28,10 +28,21 @@ export interface ISoundSourceOptions extends IAbstractAudioOutNodeOptions, ISpat
  * Abstract class representing a sound in the audio engine.
  */
 export abstract class AbstractSoundSource extends AbstractAudioOutNode {
+    private readonly _spatialAutoUpdate: boolean = true;
+    private readonly _spatialMinUpdateTime: number = 0;
     private _outBus: Nullable<PrimaryAudioBus> = null;
+    private _spatial: Nullable<AbstractSpatialAudio> = null;
 
-    protected constructor(name: string, engine: AudioEngineV2, nodeType: AudioNodeType = AudioNodeType.HAS_OUTPUTS) {
+    protected constructor(name: string, engine: AudioEngineV2, options: Partial<ISoundSourceOptions>, nodeType: AudioNodeType = AudioNodeType.HAS_OUTPUTS) {
         super(name, engine, nodeType);
+
+        if (typeof options.spatialAutoUpdate === "boolean") {
+            this._spatialAutoUpdate = options.spatialAutoUpdate;
+        }
+
+        if (typeof options.spatialMinUpdateTime === "number") {
+            this._spatialMinUpdateTime = options.spatialMinUpdateTime;
+        }
     }
 
     /**
@@ -65,9 +76,14 @@ export abstract class AbstractSoundSource extends AbstractAudioOutNode {
     }
 
     /**
-     * The spatial features of the sound.
+     * The spatial audio features.
      */
-    public abstract spatial: AbstractSpatialAudio;
+    public get spatial(): AbstractSpatialAudio {
+        if (this._spatial) {
+            return this._spatial;
+        }
+        return this._initSpatialProperty();
+    }
 
     /**
      * The stereo features of the sound.
@@ -80,10 +96,33 @@ export abstract class AbstractSoundSource extends AbstractAudioOutNode {
     public override dispose(): void {
         super.dispose();
 
+        this._spatial?.dispose();
+        this._spatial = null;
+
         this._outBus = null;
     }
 
+    protected abstract _createSpatialProperty(autoUpdate: boolean, minUpdateTime: number): AbstractSpatialAudio;
+
+    protected _initSpatialProperty(): AbstractSpatialAudio {
+        return (this._spatial = this._createSpatialProperty(this._spatialAutoUpdate, this._spatialMinUpdateTime));
+    }
+
     private _onOutBusDisposed = () => {
         this._outBus = null;
     };
+
+    /** @internal */
+    public get _isSpatial(): boolean {
+        return this._spatial !== null;
+    }
+
+    public set _isSpatial(value: boolean) {
+        if (value && !this._spatial) {
+            this._initSpatialProperty();
+        } else if (!value && this._spatial) {
+            this._spatial.dispose();
+            this._spatial = null;
+        }
+    }
 }
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/audioBus.ts b/packages/dev/core/src/AudioV2/abstractAudio/audioBus.ts
index 770bc5da6e9..5ea49b9838b 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/audioBus.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/audioBus.ts
@@ -28,10 +28,21 @@ export interface IAudioBusOptions extends IAbstractAudioBusOptions, ISpatialAudi
  * Audio buses are created by the {@link CreateAudioBusAsync} function.
  */
 export abstract class AudioBus extends AbstractAudioBus {
+    private readonly _spatialAutoUpdate: boolean = true;
+    private readonly _spatialMinUpdateTime: number = 0;
     private _outBus: Nullable<PrimaryAudioBus> = null;
+    private _spatial: Nullable<AbstractSpatialAudio> = null;
 
-    protected constructor(name: string, engine: AudioEngineV2) {
+    protected constructor(name: string, engine: AudioEngineV2, options: Partial<IAudioBusOptions>) {
         super(name, engine);
+
+        if (typeof options.spatialAutoUpdate === "boolean") {
+            this._spatialAutoUpdate = options.spatialAutoUpdate;
+        }
+
+        if (typeof options.spatialMinUpdateTime === "number") {
+            this._spatialMinUpdateTime = options.spatialMinUpdateTime;
+        }
     }
 
     /**
@@ -66,9 +77,14 @@ export abstract class AudioBus extends AbstractAudioBus {
     }
 
     /**
-     * The spatial features of the audio bus.
+     * The spatial audio features.
      */
-    public abstract readonly spatial: AbstractSpatialAudio;
+    public get spatial(): AbstractSpatialAudio {
+        if (this._spatial) {
+            return this._spatial;
+        }
+        return this._initSpatialProperty();
+    }
 
     /**
      * The stereo features of the audio bus.
@@ -80,9 +96,19 @@ export abstract class AudioBus extends AbstractAudioBus {
      */
     public override dispose(): void {
         super.dispose();
+
+        this._spatial?.dispose();
+        this._spatial = null;
+
         this._outBus = null;
     }
 
+    protected abstract _createSpatialProperty(autoUpdate: boolean, minUpdateTime: number): AbstractSpatialAudio;
+
+    protected _initSpatialProperty(): AbstractSpatialAudio {
+        return (this._spatial = this._createSpatialProperty(this._spatialAutoUpdate, this._spatialMinUpdateTime));
+    }
+
     private _onOutBusDisposed = () => {
         this.outBus = this.engine.defaultMainBus;
     };
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/staticSound.ts b/packages/dev/core/src/AudioV2/abstractAudio/staticSound.ts
index 6c9df434f8c..ce26d6d4f7b 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/staticSound.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/staticSound.ts
@@ -114,8 +114,8 @@ export abstract class StaticSound extends AbstractSound {
      */
     public abstract readonly buffer: StaticSoundBuffer;
 
-    protected constructor(name: string, engine: AudioEngineV2) {
-        super(name, engine);
+    protected constructor(name: string, engine: AudioEngineV2, options: Partial<IStaticSoundOptions>) {
+        super(name, engine, options);
     }
 
     /**
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/streamingSound.ts b/packages/dev/core/src/AudioV2/abstractAudio/streamingSound.ts
index 796f3496a6d..b0bb44b8433 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/streamingSound.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/streamingSound.ts
@@ -46,8 +46,8 @@ export abstract class StreamingSound extends AbstractSound {
 
     protected abstract override readonly _options: IStreamingSoundStoredOptions;
 
-    protected constructor(name: string, engine: AudioEngineV2) {
-        super(name, engine);
+    protected constructor(name: string, engine: AudioEngineV2, options: Partial<IStreamingSoundOptions>) {
+        super(name, engine, options);
     }
 
     /**
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/subNodes/abstractAudioSubGraph.ts b/packages/dev/core/src/AudioV2/abstractAudio/subNodes/abstractAudioSubGraph.ts
index 69d2dd32910..199ac84808b 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/subNodes/abstractAudioSubGraph.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/subNodes/abstractAudioSubGraph.ts
@@ -109,7 +109,11 @@ export abstract class _AbstractAudioSubGraph {
      *
      * @internal
      */
-    public async removeSubNodeAsync(subNode: _AbstractAudioSubNode): Promise<void> {
+    public async removeSubNodeAsync(subNode: Nullable<_AbstractAudioSubNode>): Promise<void> {
+        if (!subNode) {
+            return;
+        }
+
         await this._createSubNodePromisesResolvedAsync();
 
         const name = subNode.name;
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/subNodes/spatialAudioSubNode.ts b/packages/dev/core/src/AudioV2/abstractAudio/subNodes/spatialAudioSubNode.ts
index 7e7dea794f1..17e45795d6d 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/subNodes/spatialAudioSubNode.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/subNodes/spatialAudioSubNode.ts
@@ -24,6 +24,7 @@ export abstract class _SpatialAudioSubNode extends _AbstractAudioSubNode {
     public abstract distanceModel: DistanceModelType;
     public abstract maxDistance: number;
     public abstract minDistance: number;
+    public abstract orientation: Vector3;
     public abstract panningModel: PanningModelType;
     public abstract position: Vector3;
     public abstract rolloffFactor: number;
@@ -68,6 +69,7 @@ export abstract class _SpatialAudioSubNode extends _AbstractAudioSubNode {
         this.distanceModel = options.spatialDistanceModel ?? _SpatialAudioDefaults.distanceModel;
         this.maxDistance = options.spatialMaxDistance ?? _SpatialAudioDefaults.maxDistance;
         this.minDistance = options.spatialMinDistance ?? _SpatialAudioDefaults.minDistance;
+        this.orientation = options.spatialOrientation ?? _SpatialAudioDefaults.orientation;
         this.panningModel = options.spatialPanningModel ?? _SpatialAudioDefaults.panningModel;
         this.rolloffFactor = options.spatialRolloffFactor ?? _SpatialAudioDefaults.rolloffFactor;
 
diff --git a/packages/dev/core/src/AudioV2/abstractAudio/subProperties/abstractSpatialAudio.ts b/packages/dev/core/src/AudioV2/abstractAudio/subProperties/abstractSpatialAudio.ts
index 07bf9146dd5..7538ea71476 100644
--- a/packages/dev/core/src/AudioV2/abstractAudio/subProperties/abstractSpatialAudio.ts
+++ b/packages/dev/core/src/AudioV2/abstractAudio/subProperties/abstractSpatialAudio.ts
@@ -10,6 +10,7 @@ export const _SpatialAudioDefaults = {
     distanceModel: "linear" as DistanceModelType,
     maxDistance: 10000 as number,
     minDistance: 1 as number,
+    orientation: Vector3.Right(),
     panningModel: "equalpower" as PanningModelType,
     position: Vector3.Zero(),
     rolloffFactor: 1 as number,
@@ -76,6 +77,10 @@ export interface ISpatialAudioOptions {
      * - The spatialization's position and rotation will not update faster than this time, but they may update slower depending on the frame rate.
      */
    spatialMinUpdateTime: number;
+    /**
+     * The spatial orientation used to determine the direction of the audio source. Defaults to (1, 0, 0).
+     */
+    spatialOrientation: Vector3;
     /**
      * Possible values are:
      * - `"equalpower"`: Represents the equal-power panning algorithm, generally regarded as simple and efficient.
@@ -123,6 +128,7 @@ export function _HasSpatialAudioOptions(options: Partial<ISpatialAudioOptions>):
         options.spatialMaxDistance !== undefined ||
         options.spatialMinDistance !== undefined ||
         options.spatialMinUpdateTime !== undefined ||
+        options.spatialOrientation !== undefined ||
         options.spatialPanningModel !== undefined ||
         options.spatialPosition !== undefined ||
         options.spatialRolloffFactor !== undefined ||
@@ -194,6 +200,11 @@ export abstract class AbstractSpatialAudio {
      */
     public abstract minUpdateTime: number;
 
+    /**
+     * The spatial orientation used to determine the direction of the audio source. Defaults to (1, 0, 0).
+     */
+    public abstract orientation: Vector3;
+
     /**
      * The spatial panning model. Defaults to "equalpower".
      *
@@ -216,12 +227,12 @@ export abstract class AbstractSpatialAudio {
     public abstract rolloffFactor: number;
 
     /**
-     * The spatial rotation. Defaults to (0, 0, 0).
+     * The spatial rotation used to determine the direction of the audio source. Defaults to (0, 0, 0).
      */
     public abstract rotation: Vector3;
 
     /**
-     * The spatial rotation quaternion. Defaults to (0, 0, 0, 1).
+     * The spatial rotation quaternion used to determine the direction of the audio source. Defaults to (0, 0, 0, 1).
      */
     public abstract rotationQuaternion: Quaternion;
 
@@ -248,4 +259,6 @@ export abstract class AbstractSpatialAudio {
      * This is called automatically by default and only needs to be called manually if automatic updates are disabled.
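+     *
+     * @example
+     * // Illustrative sketch only (not part of this change): with { spatialAutoUpdate: false },
+     * // drive the spatial pose manually each frame. `sound` and `emitter` are assumed names.
+     * scene.onBeforeRenderObservable.add(() => {
+     *     sound.spatial.position.copyFrom(emitter.position);
+     *     sound.spatial.update();
+     * });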
*/ public abstract update(): void; + + public abstract dispose(): void; } diff --git a/packages/dev/core/src/AudioV2/abstractAudio/subProperties/spatialAudio.ts b/packages/dev/core/src/AudioV2/abstractAudio/subProperties/spatialAudio.ts index b4303ed0257..2defb8a7a20 100644 --- a/packages/dev/core/src/AudioV2/abstractAudio/subProperties/spatialAudio.ts +++ b/packages/dev/core/src/AudioV2/abstractAudio/subProperties/spatialAudio.ts @@ -16,6 +16,7 @@ export abstract class _SpatialAudio extends AbstractSpatialAudio { private _distanceModel: DistanceModelType = _SpatialAudioDefaults.distanceModel; private _maxDistance: number = _SpatialAudioDefaults.maxDistance; private _minDistance: number = _SpatialAudioDefaults.minDistance; + private _orientation: Vector3; private _panningModel: PanningModelType = _SpatialAudioDefaults.panningModel; private _position: Vector3; private _rolloffFactor: number = _SpatialAudioDefaults.rolloffFactor; @@ -29,10 +30,12 @@ export abstract class _SpatialAudio extends AbstractSpatialAudio { const subNode = _GetSpatialAudioSubNode(subGraph); if (subNode) { + this._orientation = subNode.orientation.clone(); this._position = subNode.position.clone(); this._rotation = subNode.rotation.clone(); this._rotationQuaternion = subNode.rotationQuaternion.clone(); } else { + this._orientation = _SpatialAudioDefaults.orientation.clone(); this._position = _SpatialAudioDefaults.position.clone(); this._rotation = _SpatialAudioDefaults.rotation.clone(); this._rotationQuaternion = _SpatialAudioDefaults.rotationQuaternion.clone(); @@ -44,6 +47,12 @@ export abstract class _SpatialAudio extends AbstractSpatialAudio { this._subGraph = subGraph; } + /** @internal */ + public dispose(): void { + // eslint-disable-next-line @typescript-eslint/no-floating-promises + this._subGraph.removeSubNodeAsync(_GetSpatialAudioSubNode(this._subGraph)); + } + /** @internal */ public get coneInnerAngle(): number { return this._coneInnerAngle; @@ -113,6 +122,16 @@ export abstract class _SpatialAudio extends AbstractSpatialAudio { _SetSpatialAudioProperty(this._subGraph, "minDistance", value); } + /** @internal */ + public get orientation(): Vector3 { + return this._orientation; + } + + public set orientation(value: Vector3) { + this._orientation = value; + this._updateRotation(); + } + /** @internal */ public get panningModel(): PanningModelType { return this._panningModel; @@ -228,9 +247,18 @@ export abstract class _SpatialAudio extends AbstractSpatialAudio { if (!subNode.rotationQuaternion.equalsWithEpsilon(this._rotationQuaternion)) { subNode.rotationQuaternion.copyFrom(this._rotationQuaternion); subNode._updateRotation(); + this._orientation.copyFrom(subNode.orientation); + this._rotation.copyFrom(subNode.rotation); } else if (!subNode.rotation.equalsWithEpsilon(this._rotation)) { subNode.rotation.copyFrom(this._rotation); subNode._updateRotation(); + this._orientation.copyFrom(subNode.orientation); + this._rotationQuaternion.copyFrom(subNode.rotationQuaternion); + } else if (!subNode.orientation.equalsWithEpsilon(this._orientation)) { + subNode.orientation.copyFrom(this._orientation); + subNode._updateRotation(); + this._rotation.copyFrom(subNode.rotation); + this._rotationQuaternion.copyFrom(subNode.rotationQuaternion); } } } diff --git a/packages/dev/core/src/AudioV2/webAudio/subNodes/spatialWebAudioSubNode.ts b/packages/dev/core/src/AudioV2/webAudio/subNodes/spatialWebAudioSubNode.ts index 5ffcfef411c..a8c69b7ec0a 100644 --- a/packages/dev/core/src/AudioV2/webAudio/subNodes/spatialWebAudioSubNode.ts 
+++ b/packages/dev/core/src/AudioV2/webAudio/subNodes/spatialWebAudioSubNode.ts @@ -7,7 +7,6 @@ import type { IWebAudioInNode } from "../webAudioNode"; const TmpMatrix = Matrix.Zero(); const TmpQuaternion = new Quaternion(); -const TmpVector = Vector3.Zero(); function D2r(degrees: number): number { return (degrees * Math.PI) / 180; @@ -25,6 +24,7 @@ export async function _CreateSpatialAudioSubNodeAsync(engine: _WebAudioEngine): /** @internal */ export class _SpatialWebAudioSubNode extends _SpatialAudioSubNode { + private _lastOrientation: Vector3 = Vector3.Zero(); private _lastPosition: Vector3 = Vector3.Zero(); private _lastRotation: Vector3 = Vector3.Zero(); private _lastRotationQuaternion: Quaternion = new Quaternion(); @@ -38,6 +38,8 @@ export class _SpatialWebAudioSubNode extends _SpatialAudioSubNode { /** @internal */ public override readonly engine: _WebAudioEngine; + /** @internal */ + public readonly orientation: Vector3 = _SpatialAudioDefaults.orientation.clone(); /** @internal */ public readonly position = _SpatialAudioDefaults.position.clone(); /** @internal */ @@ -170,12 +172,6 @@ export class _SpatialWebAudioSubNode extends _SpatialAudioSubNode { return; } - // If attached and there is a ramp in progress, we assume another update is coming soon that we can wait for. - // We don't do this for unattached nodes because there may not be another update coming. - if (this.isAttached && (this._positionX.isRamping || this._positionY.isRamping || this._positionZ.isRamping)) { - return; - } - this._positionX.targetValue = this.position.x; this._positionY.targetValue = this.position.y; this._positionZ.targetValue = this.position.z; @@ -185,28 +181,27 @@ export class _SpatialWebAudioSubNode extends _SpatialAudioSubNode { /** @internal */ public _updateRotation(): void { - // If attached and there is a ramp in progress, we assume another update is coming soon that we can wait for. - // We don't do this for unattached nodes because there may not be another update coming. 
-        if (this.isAttached && (this._orientationX.isRamping || this._orientationY.isRamping || this._orientationZ.isRamping)) {
-            return;
-        }
-
+        let rotated = false;
         if (!this._lastRotationQuaternion.equalsWithEpsilon(this.rotationQuaternion)) {
             TmpQuaternion.copyFrom(this.rotationQuaternion);
             this._lastRotationQuaternion.copyFrom(this.rotationQuaternion);
+            rotated = true;
         } else if (!this._lastRotation.equalsWithEpsilon(this.rotation)) {
             Quaternion.FromEulerAnglesToRef(this.rotation.x, this.rotation.y, this.rotation.z, TmpQuaternion);
             this._lastRotation.copyFrom(this.rotation);
-        } else {
+            rotated = true;
+        } else if (this._lastOrientation.equalsWithEpsilon(this.orientation)) {
             return;
         }
 
-        Matrix.FromQuaternionToRef(TmpQuaternion, TmpMatrix);
-        Vector3.TransformNormalToRef(Vector3.RightReadOnly, TmpMatrix, TmpVector);
+        if (rotated) {
+            Matrix.FromQuaternionToRef(TmpQuaternion, TmpMatrix);
+            Vector3.TransformNormalToRef(Vector3.RightReadOnly, TmpMatrix, this.orientation);
+        }
 
-        this._orientationX.targetValue = TmpVector.x;
-        this._orientationY.targetValue = TmpVector.y;
-        this._orientationZ.targetValue = TmpVector.z;
+        // Track the last applied orientation so the early-out above can detect unchanged values.
+        this._lastOrientation.copyFrom(this.orientation);
+
+        this._orientationX.targetValue = this.orientation.x;
+        this._orientationY.targetValue = this.orientation.y;
+        this._orientationZ.targetValue = this.orientation.z;
     }
 
     protected override _connect(node: IWebAudioInNode): boolean {
diff --git a/packages/dev/core/src/AudioV2/webAudio/subNodes/webAudioBusAndSoundSubGraph.ts b/packages/dev/core/src/AudioV2/webAudio/subNodes/webAudioBusAndSoundSubGraph.ts
index 44292def7a6..dd0418b3998 100644
--- a/packages/dev/core/src/AudioV2/webAudio/subNodes/webAudioBusAndSoundSubGraph.ts
+++ b/packages/dev/core/src/AudioV2/webAudio/subNodes/webAudioBusAndSoundSubGraph.ts
@@ -62,6 +62,7 @@ export abstract class _WebAudioBusAndSoundSubGraph extends _WebAudioBaseSubGraph
         try {
             const node = super._createSubNode(name);
             return node;
+            // eslint-disable-next-line @typescript-eslint/no-unused-vars
         } catch (e) {}
 
         switch (name) {
diff --git a/packages/dev/core/src/AudioV2/webAudio/subProperties/spatialWebAudio.ts b/packages/dev/core/src/AudioV2/webAudio/subProperties/spatialWebAudio.ts
index 9771fe686ce..a101eec9fd9 100644
--- a/packages/dev/core/src/AudioV2/webAudio/subProperties/spatialWebAudio.ts
+++ b/packages/dev/core/src/AudioV2/webAudio/subProperties/spatialWebAudio.ts
@@ -24,7 +24,9 @@ export class _SpatialWebAudio extends _SpatialAudio {
     }
 
     /** @internal */
-    public dispose(): void {
+    public override dispose(): void {
+        super.dispose();
+
         this._updaterComponent.dispose();
         this._updaterComponent = null!;
     }
diff --git a/packages/dev/core/src/AudioV2/webAudio/webAudioBus.ts b/packages/dev/core/src/AudioV2/webAudio/webAudioBus.ts
index b507d87b28e..f45bc931bfe 100644
--- a/packages/dev/core/src/AudioV2/webAudio/webAudioBus.ts
+++ b/packages/dev/core/src/AudioV2/webAudio/webAudioBus.ts
@@ -2,8 +2,7 @@ import type { Nullable } from "core/types";
 import type { AbstractAudioNode } from "../abstractAudio/abstractAudioNode";
 import type { IAudioBusOptions } from "../abstractAudio/audioBus";
 import { AudioBus } from "../abstractAudio/audioBus";
-import { _HasSpatialAudioOptions } from "../abstractAudio/subProperties/abstractSpatialAudio";
-import type { _SpatialAudio } from "../abstractAudio/subProperties/spatialAudio";
+import { _HasSpatialAudioOptions, type AbstractSpatialAudio } from "../abstractAudio/subProperties/abstractSpatialAudio";
 import { _StereoAudio } from "../abstractAudio/subProperties/stereoAudio";
 import { _WebAudioBusAndSoundSubGraph } from "./subNodes/webAudioBusAndSoundSubGraph";
"./subNodes/webAudioBusAndSoundSubGraph"; import { _SpatialWebAudio } from "./subProperties/spatialWebAudio"; @@ -12,9 +11,6 @@ import type { IWebAudioInNode, IWebAudioSuperNode } from "./webAudioNode"; /** @internal */ export class _WebAudioBus extends AudioBus implements IWebAudioSuperNode { - private _spatial: Nullable<_SpatialAudio> = null; - private readonly _spatialAutoUpdate: boolean = true; - private readonly _spatialMinUpdateTime: number = 0; private _stereo: Nullable<_StereoAudio> = null; protected _subGraph: _WebAudioBusAndSoundSubGraph; @@ -24,15 +20,7 @@ export class _WebAudioBus extends AudioBus implements IWebAudioSuperNode { /** @internal */ public constructor(name: string, engine: _WebAudioEngine, options: Partial) { - super(name, engine); - - if (typeof options.spatialAutoUpdate === "boolean") { - this._spatialAutoUpdate = options.spatialAutoUpdate; - } - - if (typeof options.spatialMinUpdateTime === "number") { - this._spatialMinUpdateTime = options.spatialMinUpdateTime; - } + super(name, engine, options); this._subGraph = new _WebAudioBus._SubGraph(this); } @@ -59,7 +47,6 @@ export class _WebAudioBus extends AudioBus implements IWebAudioSuperNode { public override dispose(): void { super.dispose(); - this._spatial = null; this._stereo = null; this.engine._removeNode(this); @@ -75,14 +62,6 @@ export class _WebAudioBus extends AudioBus implements IWebAudioSuperNode { return this._subGraph._outNode; } - /** @internal */ - public override get spatial(): _SpatialAudio { - if (this._spatial) { - return this._spatial; - } - return this._initSpatialProperty(); - } - /** @internal */ public override get stereo(): _StereoAudio { return this._stereo ?? (this._stereo = new _StereoAudio(this._subGraph)); @@ -93,6 +72,10 @@ export class _WebAudioBus extends AudioBus implements IWebAudioSuperNode { return "_WebAudioBus"; } + protected override _createSpatialProperty(autoUpdate: boolean, minUpdateTime: number): AbstractSpatialAudio { + return new _SpatialWebAudio(this._subGraph, autoUpdate, minUpdateTime); + } + protected override _connect(node: IWebAudioInNode): boolean { const connected = super._connect(node); @@ -121,14 +104,6 @@ export class _WebAudioBus extends AudioBus implements IWebAudioSuperNode { return true; } - private _initSpatialProperty(): _SpatialAudio { - if (!this._spatial) { - this._spatial = new _SpatialWebAudio(this._subGraph, this._spatialAutoUpdate, this._spatialMinUpdateTime); - } - - return this._spatial; - } - private static _SubGraph = class extends _WebAudioBusAndSoundSubGraph { protected override _owner: _WebAudioBus; diff --git a/packages/dev/core/src/AudioV2/webAudio/webAudioEngine.ts b/packages/dev/core/src/AudioV2/webAudio/webAudioEngine.ts index 85c85275327..e2c5e633604 100644 --- a/packages/dev/core/src/AudioV2/webAudio/webAudioEngine.ts +++ b/packages/dev/core/src/AudioV2/webAudio/webAudioEngine.ts @@ -382,6 +382,9 @@ export class _WebAudioEngine extends AudioEngineV2 { } this._resumePromise = this._audioContext.resume(); + + this.stateChangedObservable.notifyObservers(this.state); + return this._resumePromise; } diff --git a/packages/dev/core/src/AudioV2/webAudio/webAudioSoundSource.ts b/packages/dev/core/src/AudioV2/webAudio/webAudioSoundSource.ts index 05d018a2245..43474d5fe76 100644 --- a/packages/dev/core/src/AudioV2/webAudio/webAudioSoundSource.ts +++ b/packages/dev/core/src/AudioV2/webAudio/webAudioSoundSource.ts @@ -1,9 +1,9 @@ import type { Nullable } from "../../types"; -import type { AbstractAudioNode } from "../abstractAudio"; +import 
 import type { ISoundSourceOptions } from "../abstractAudio/abstractSoundSource";
 import { AbstractSoundSource } from "../abstractAudio/abstractSoundSource";
+import type { AbstractSpatialAudio } from "../abstractAudio/subProperties/abstractSpatialAudio";
 import { _HasSpatialAudioOptions } from "../abstractAudio/subProperties/abstractSpatialAudio";
-import type { _SpatialAudio } from "../abstractAudio/subProperties/spatialAudio";
 import { _StereoAudio } from "../abstractAudio/subProperties/stereoAudio";
 import { _WebAudioBusAndSoundSubGraph } from "./subNodes/webAudioBusAndSoundSubGraph";
 import { _SpatialWebAudio } from "./subProperties/spatialWebAudio";
@@ -12,9 +12,6 @@ import type { IWebAudioInNode } from "./webAudioNode";
 
 /** @internal */
 export class _WebAudioSoundSource extends AbstractSoundSource {
-    private _spatial: Nullable<_SpatialWebAudio> = null;
-    private readonly _spatialAutoUpdate: boolean = true;
-    private readonly _spatialMinUpdateTime: number = 0;
     private _stereo: Nullable<_StereoAudio> = null;
 
     protected _subGraph: _WebAudioBusAndSoundSubGraph;
@@ -28,15 +25,7 @@ export class _WebAudioSoundSource extends AbstractSoundSource {
 
     /** @internal */
     public constructor(name: string, webAudioNode: AudioNode, engine: _WebAudioEngine, options: Partial<ISoundSourceOptions>) {
-        super(name, engine);
-
-        if (typeof options.spatialAutoUpdate === "boolean") {
-            this._spatialAutoUpdate = options.spatialAutoUpdate;
-        }
-
-        if (typeof options.spatialMinUpdateTime === "number") {
-            this._spatialMinUpdateTime = options.spatialMinUpdateTime;
-        }
+        super(name, engine, options);
 
         this._audioContext = this.engine._audioContext;
         this._webAudioNode = webAudioNode;
@@ -72,14 +61,6 @@ export class _WebAudioSoundSource extends AbstractSoundSource {
         return this._subGraph._outNode;
     }
 
-    /** @internal */
-    public override get spatial(): _SpatialAudio {
-        if (this._spatial) {
-            return this._spatial;
-        }
-        return this._initSpatialProperty();
-    }
-
     /** @internal */
     public override get stereo(): _StereoAudio {
         return this._stereo ?? (this._stereo = new _StereoAudio(this._subGraph));
     }
 
@@ -89,9 +70,6 @@ export class _WebAudioSoundSource extends AbstractSoundSource {
     public override dispose(): void {
         super.dispose();
 
-        this._spatial?.dispose();
-        this._spatial = null;
-
         this._stereo = null;
 
         this._subGraph.dispose();
@@ -133,12 +111,8 @@ export class _WebAudioSoundSource extends AbstractSoundSource {
         return true;
     }
 
-    private _initSpatialProperty(): _SpatialAudio {
-        if (!this._spatial) {
-            this._spatial = new _SpatialWebAudio(this._subGraph, this._spatialAutoUpdate, this._spatialMinUpdateTime);
-        }
-
-        return this._spatial;
+    protected override _createSpatialProperty(autoUpdate: boolean, minUpdateTime: number): AbstractSpatialAudio {
+        return new _SpatialWebAudio(this._subGraph, autoUpdate, minUpdateTime);
     }
 
     private static _SubGraph = class extends _WebAudioBusAndSoundSubGraph {
diff --git a/packages/dev/core/src/AudioV2/webAudio/webAudioStaticSound.ts b/packages/dev/core/src/AudioV2/webAudio/webAudioStaticSound.ts
index 6ecceb906e3..5ad248e81e3 100644
--- a/packages/dev/core/src/AudioV2/webAudio/webAudioStaticSound.ts
+++ b/packages/dev/core/src/AudioV2/webAudio/webAudioStaticSound.ts
@@ -6,8 +6,7 @@ import type { IStaticSoundBufferCloneOptions, IStaticSoundBufferOptions } from "
 import { StaticSoundBuffer } from "../abstractAudio/staticSoundBuffer";
 import type { IStaticSoundInstanceOptions } from "../abstractAudio/staticSoundInstance";
 import { _StaticSoundInstance } from "../abstractAudio/staticSoundInstance";
-import { _HasSpatialAudioOptions } from "../abstractAudio/subProperties/abstractSpatialAudio";
-import type { _SpatialAudio } from "../abstractAudio/subProperties/spatialAudio";
+import { _HasSpatialAudioOptions, type AbstractSpatialAudio } from "../abstractAudio/subProperties/abstractSpatialAudio";
 import { _StereoAudio } from "../abstractAudio/subProperties/stereoAudio";
 import { _CleanUrl, _FileExtensionRegex } from "../audioUtils";
 import { SoundState } from "../soundState";
@@ -22,9 +21,6 @@ type StaticSoundSourceType = ArrayBuffer | AudioBuffer | StaticSoundBuffer | str
 
 /** @internal */
 export class _WebAudioStaticSound extends StaticSound implements IWebAudioSuperNode {
     private _buffer: _WebAudioStaticSoundBuffer;
-    private _spatial: Nullable<_SpatialWebAudio> = null;
-    private readonly _spatialAutoUpdate: boolean = true;
-    private readonly _spatialMinUpdateTime: number = 0;
     private _stereo: Nullable<_StereoAudio> = null;
 
     protected override readonly _options: IStaticSoundStoredOptions;
@@ -38,15 +34,7 @@ export class _WebAudioStaticSound extends StaticSound implements IWebAudioSuperN
 
     /** @internal */
     public constructor(name: string, engine: _WebAudioEngine, options: Partial<IStaticSoundOptions>) {
-        super(name, engine);
-
-        if (typeof options.spatialAutoUpdate === "boolean") {
-            this._spatialAutoUpdate = options.spatialAutoUpdate;
-        }
-
-        if (typeof options.spatialMinUpdateTime === "number") {
-            this._spatialMinUpdateTime = options.spatialMinUpdateTime;
-        }
+        super(name, engine, options);
 
         this._options = {
             autoplay: options.autoplay ?? false,
@@ -108,14 +96,6 @@ export class _WebAudioStaticSound extends StaticSound implements IWebAudioSuperN
         return this._subGraph._outNode;
     }
 
-    /** @internal */
-    public override get spatial(): _SpatialAudio {
-        if (this._spatial) {
-            return this._spatial;
-        }
-        return this._initSpatialProperty();
-    }
-
     /** @internal */
     public override get stereo(): _StereoAudio {
        return this._stereo ??
(this._stereo = new _StereoAudio(this._subGraph)); @@ -134,9 +114,6 @@ export class _WebAudioStaticSound extends StaticSound implements IWebAudioSuperN public override dispose(): void { super.dispose(); - this._spatial?.dispose(); - this._spatial = null; - this._stereo = null; this._subGraph.dispose(); @@ -182,12 +159,12 @@ export class _WebAudioStaticSound extends StaticSound implements IWebAudioSuperN return true; } - private _initSpatialProperty(): _SpatialAudio { - if (!this._spatial) { - this._spatial = new _SpatialWebAudio(this._subGraph, this._spatialAutoUpdate, this._spatialMinUpdateTime); - } + protected override _createSpatialProperty(autoUpdate: boolean, minUpdateTime: number): AbstractSpatialAudio { + return new _SpatialWebAudio(this._subGraph, autoUpdate, minUpdateTime); + } - return this._spatial; + public _getOptions(): IStaticSoundStoredOptions { + return this._options; } private static _SubGraph = class extends _WebAudioBusAndSoundSubGraph { @@ -365,6 +342,10 @@ class _WebAudioStaticSoundInstance extends _StaticSoundInstance implements IWebA this._state = SoundState.Stopped; } + if (this.state === SoundState.Paused) { + this._enginePauseTime = 0; + } + this._options.startOffset = value; if (restart) { @@ -425,7 +406,7 @@ class _WebAudioStaticSoundInstance extends _StaticSoundInstance implements IWebA let startOffset = this._options.startOffset; if (this._state === SoundState.Paused) { - startOffset += this.currentTime; + startOffset += this._enginePauseTime; startOffset %= this._sound.buffer.duration; } @@ -513,7 +494,10 @@ class _WebAudioStaticSoundInstance extends _StaticSoundInstance implements IWebA protected _onEnded = () => { this._enginePlayTime = 0; - this.onEndedObservable.notifyObservers(this); + if (this._state !== SoundState.Paused) { + this.onEndedObservable.notifyObservers(this); + } + this._deinitSourceNode(); }; diff --git a/packages/dev/core/src/AudioV2/webAudio/webAudioStreamingSound.ts b/packages/dev/core/src/AudioV2/webAudio/webAudioStreamingSound.ts index 21dffa94b8b..ebe5eb62218 100644 --- a/packages/dev/core/src/AudioV2/webAudio/webAudioStreamingSound.ts +++ b/packages/dev/core/src/AudioV2/webAudio/webAudioStreamingSound.ts @@ -6,8 +6,7 @@ import type {} from "../abstractAudio/abstractSound"; import type { IStreamingSoundOptions, IStreamingSoundPlayOptions, IStreamingSoundStoredOptions } from "../abstractAudio/streamingSound"; import { StreamingSound } from "../abstractAudio/streamingSound"; import { _StreamingSoundInstance } from "../abstractAudio/streamingSoundInstance"; -import { _HasSpatialAudioOptions } from "../abstractAudio/subProperties/abstractSpatialAudio"; -import type { _SpatialAudio } from "../abstractAudio/subProperties/spatialAudio"; +import { _HasSpatialAudioOptions, type AbstractSpatialAudio } from "../abstractAudio/subProperties/abstractSpatialAudio"; import { _StereoAudio } from "../abstractAudio/subProperties/stereoAudio"; import { _CleanUrl } from "../audioUtils"; import { SoundState } from "../soundState"; @@ -20,9 +19,6 @@ type StreamingSoundSourceType = HTMLMediaElement | string | string[]; /** @internal */ export class _WebAudioStreamingSound extends StreamingSound implements IWebAudioSuperNode { - private _spatial: Nullable<_SpatialAudio> = null; - private readonly _spatialAutoUpdate: boolean = true; - private readonly _spatialMinUpdateTime: number = 0; private _stereo: Nullable<_StereoAudio> = null; protected override readonly _options: IStreamingSoundStoredOptions; @@ -39,15 +35,7 @@ export class _WebAudioStreamingSound 
extends StreamingSound implements IWebAudio
 
     /** @internal */
     public constructor(name: string, engine: _WebAudioEngine, options: Partial<IStreamingSoundOptions>) {
-        super(name, engine);
-
-        if (typeof options.spatialAutoUpdate === "boolean") {
-            this._spatialAutoUpdate = options.spatialAutoUpdate;
-        }
-
-        if (typeof options.spatialMinUpdateTime === "number") {
-            this._spatialMinUpdateTime = options.spatialMinUpdateTime;
-        }
+        super(name, engine, options);
 
         this._options = {
             autoplay: options.autoplay ?? false,
@@ -105,14 +93,6 @@ export class _WebAudioStreamingSound extends StreamingSound implements IWebAudio
         return this._subGraph._outNode;
     }
 
-    /** @internal */
-    public override get spatial(): _SpatialAudio {
-        if (this._spatial) {
-            return this._spatial;
-        }
-        return this._initSpatialProperty();
-    }
-
     /** @internal */
     public override get stereo(): _StereoAudio {
         return this._stereo ?? (this._stereo = new _StereoAudio(this._subGraph));
     }
@@ -122,7 +102,6 @@ export class _WebAudioStreamingSound extends StreamingSound implements IWebAudio
     public override dispose(): void {
         super.dispose();
 
-        this._spatial = null;
         this._stereo = null;
 
         this._subGraph.dispose();
@@ -168,12 +147,12 @@ export class _WebAudioStreamingSound extends StreamingSound implements IWebAudio
         return true;
     }
 
-    private _initSpatialProperty(): _SpatialAudio {
-        if (!this._spatial) {
-            this._spatial = new _SpatialWebAudio(this._subGraph, this._spatialAutoUpdate, this._spatialMinUpdateTime);
-        }
+    protected override _createSpatialProperty(autoUpdate: boolean, minUpdateTime: number): AbstractSpatialAudio {
+        return new _SpatialWebAudio(this._subGraph, autoUpdate, minUpdateTime);
+    }
 
-        return this._spatial;
+    public _getOptions(): IStreamingSoundStoredOptions {
+        return this._options;
     }
 
     private static _SubGraph = class extends _WebAudioBusAndSoundSubGraph {
diff --git a/packages/dev/core/test/unit/Audio/audioEngine.test.ts b/packages/dev/core/test/unit/Audio/audioEngine.test.ts
index 16f3e8e4cf9..ac08565b80b 100644
--- a/packages/dev/core/test/unit/Audio/audioEngine.test.ts
+++ b/packages/dev/core/test/unit/Audio/audioEngine.test.ts
@@ -7,6 +7,8 @@ import { AbstractEngine, NullEngine } from "core/Engines";
 import { Scene } from "core/scene";
 import { Sound } from "core/Audio/sound";
 
+import type { AudioContextMock } from "./helpers/mockedAudioObjects";
+
 import { MockedAudioObjects } from "./helpers/mockedAudioObjects";
 import { AudioTestSamples } from "./helpers/audioTestSamples";
 import { AudioTestHelper } from "./helpers/audioTestHelper";
@@ -31,14 +33,14 @@ describe("AudioEngine", () => {
     });
 
     afterEach(() => {
+        mock.dispose();
+        (mock as any) = null;
+
         scene.dispose();
         (scene as any) = null;
 
         engine.dispose();
         (engine as any) = null;
-
-        mock.dispose();
-        (mock as any) = null;
     });
 
     it("unlocked is initialized to false when browser requires user interaction", () => {
@@ -68,6 +70,7 @@ describe("AudioEngine", () => {
         jest.useFakeTimers();
 
         const audioEngine = createAudioEngine("suspended");
+        (audioEngine._v2._audioContext as unknown as AudioContextMock).requireUserInteraction = true;
         const arrayBuffer = AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz");
         const sound = new Sound(expect.getState().currentTestName, arrayBuffer);
 
diff --git a/packages/dev/core/test/unit/Audio/helpers/audioTestHelper.ts b/packages/dev/core/test/unit/Audio/helpers/audioTestHelper.ts
index a9ffd929fe8..0c06db23f2f 100644
--- a/packages/dev/core/test/unit/Audio/helpers/audioTestHelper.ts
+++ b/packages/dev/core/test/unit/Audio/helpers/audioTestHelper.ts
@@
-1,12 +1,6 @@ import { MockedAudioObjects } from "./mockedAudioObjects"; export class AudioTestHelper { - static SoundWasStarted() { - // When a Sound object actually starts playing, it creates an audio buffer source. We use this here to know - // if a sound started playing or not. - return MockedAudioObjects.Instance.audioBufferSourceWasCreated; - } - /** * Advance timers by 500ms to trigger the Sound class's timeout used for double-checking the audio context state. * diff --git a/packages/dev/core/test/unit/Audio/helpers/audioTestSamples.ts b/packages/dev/core/test/unit/Audio/helpers/audioTestSamples.ts index f4a111abe37..ffd291b0c9a 100644 --- a/packages/dev/core/test/unit/Audio/helpers/audioTestSamples.ts +++ b/packages/dev/core/test/unit/Audio/helpers/audioTestSamples.ts @@ -1,4 +1,3 @@ - export class AudioTestSamples { static Initialize() { if (AudioTestSamples._Initialized) { @@ -33,16 +32,32 @@ export class AudioTestSamples { private static _Map = new Map(); } +export class AudioBuffer { + private _channelData: Float32Array; + + channels: number; + duration: number; + length: number; + sampleRate: number; + + constructor(channelCount: number, sampleRate: number, channelData: Float32Array) { + this._channelData = channelData; + + this.channels = channelCount; + this.duration = channelData.length / channelCount / sampleRate; + this.length = channelData.length / channelCount; + this.sampleRate = sampleRate; + } + + getChannelData(channel: number): Float32Array { + return this._channelData; + } +} + class AudioSample { constructor(channelCount: number, sampleRate: number, channelData: Float32Array) { this.arrayBuffer = AudioSample._CreateArrayBuffer(); - this.audioBuffer = { - channels: channelCount, - duration: (channelData.length / channelCount) / sampleRate, - length: channelData.length / channelCount, - sampleRate: sampleRate, - getChannelData: () => channelData - } as unknown as AudioBuffer; + this.audioBuffer = new AudioBuffer(channelCount, sampleRate, channelData); } arrayBuffer: ArrayBuffer; @@ -55,5 +70,5 @@ class AudioSample { const arrayBufferView = new Uint32Array(arrayBuffer); arrayBufferView[0] = AudioSample._CurrentArrayBufferIndex++; return arrayBuffer; - } + }; } diff --git a/packages/dev/core/test/unit/Audio/helpers/mockedAudioObjects.ts b/packages/dev/core/test/unit/Audio/helpers/mockedAudioObjects.ts index 9c70e14c748..3b5a105a5e6 100644 --- a/packages/dev/core/test/unit/Audio/helpers/mockedAudioObjects.ts +++ b/packages/dev/core/test/unit/Audio/helpers/mockedAudioObjects.ts @@ -1,6 +1,8 @@ import { Engine } from "core/Engines"; -import { AudioTestSamples } from "./audioTestSamples"; +import { AudioBuffer, AudioTestSamples } from "./audioTestSamples"; + +const realSetTimeout = jest.requireActual("timers").setTimeout; class AudioParamMock { public cancelScheduledValues = jest.fn().mockName("cancelScheduledValues"); @@ -13,24 +15,59 @@ class AudioParamMock { } class AudioNodeMock { + private readonly _connections = new Array(); + public connect(destination: any) { - this._destination = destination; + this._connections.push(destination); } public disconnect() { - this._destination = null; + this._connections.length = 0; } - public get destination() { - return this._destination; + public get connections() { + return this._connections; } +} - private _destination: any = null; +class AnalyserNodeMock extends AudioNodeMock { + fftSize = 2048; + frequencyBinCount = this.fftSize / 2; + minDecibels = -100; + maxDecibels = -30; + smoothingTimeConstant = 0.8; + + 
getFloatFrequencyData = jest.fn().mockName("getFloatFrequencyData"); + getByteFrequencyData = jest.fn().mockName("getByteFrequencyData"); + getFloatTimeDomainData = jest.fn().mockName("getFloatTimeDomainData"); + getByteTimeDomainData = jest.fn().mockName("getByteTimeDomainData"); } class AudioBufferSourceNodeMock extends AudioNodeMock { + private readonly _onEndedListeners = new Array<() => void>(); + + buffer = { + duration: 0, + }; + detune = new AudioParamMock(); + loop = false; + loopEnd = 0; + loopStart = 0; + playbackRate = new AudioParamMock(); + startTime = 0; + onended = () => void 0; + constructor(audioContext: AudioContextMock, options?: any) { + super(); + + audioContext.addAudioBufferSource(this); + + if (options && options.buffer) { + this.buffer = options.buffer; + } + } + start = jest .fn() .mockName("start") @@ -43,19 +80,32 @@ class AudioBufferSourceNodeMock extends AudioNodeMock { .mockName("stop") .mockImplementation(() => { this.onended(); + + for (const listener of this._onEndedListeners) { + listener(); + } }); - buffer = { - duration: 0, - }; - loop = false; - loopEnd = 0; - loopStart = 0; - playbackRate = { - value: 1, - }; + addEventListener = jest + .fn() + .mockName("addEventListener") + .mockImplementation((type: string, listener: () => void) => { + if (type === "ended") { + this._onEndedListeners.push(listener); + } + }); - startTime = 0; + removeEventListener = jest + .fn() + .mockName("removeEventListener") + .mockImplementation((type: string, listener: () => void) => { + if (type === "ended") { + const index = this._onEndedListeners.indexOf(listener); + if (index !== -1) { + this._onEndedListeners.splice(index, 1); + } + } + }); } class GainNodeMock extends AudioNodeMock { @@ -65,18 +115,141 @@ class GainNodeMock extends AudioNodeMock { class MediaElementAudioSourceNodeMock extends AudioNodeMock {} class PannerNodeMock extends AudioNodeMock { - positionX = new AudioParamMock(); - positionY = new AudioParamMock(); - positionZ = new AudioParamMock(); coneInnerAngle = new AudioParamMock(); coneOuterAngle = new AudioParamMock(); coneOuterGain = new AudioParamMock(); + orientationX = new AudioParamMock(); + orientationY = new AudioParamMock(); + orientationZ = new AudioParamMock(); + positionX = new AudioParamMock(); + positionY = new AudioParamMock(); + positionZ = new AudioParamMock(); setOrientation = jest.fn().mockName("setOrientation"); } +class StereoPannerNodeMock extends AudioNodeMock { + pan = new AudioParamMock(); +} + +export class AudioContextMock { + private _audioBufferSources = new Array(); + + currentTime = 0; + destination = new AudioNodeMock(); + state = "running"; + + requireUserInteraction = false; + + get audioBufferSource() { + return this._audioBufferSources[this._audioBufferSources.length - 1]; + } + + get audioBufferSourceWasCreated() { + return 0 < this._audioBufferSources.length; + } + + addAudioBufferSource(audioBufferSource: AudioBufferSourceNodeMock) { + this._audioBufferSources.push(audioBufferSource); + } + + dispose() { + this._audioBufferSources.length = 0; + } + + incrementCurrentTime(seconds: number) { + this.currentTime += seconds; + + for (const audioBufferSource of this._audioBufferSources) { + const currentTime = this.currentTime; + if (audioBufferSource.startTime + audioBufferSource.buffer.duration <= currentTime) { + audioBufferSource.stop(); + } + } + } + + close = jest + .fn() + .mockName("close") + .mockImplementation(() => { + this.state = "closed"; + return Promise.resolve(); + }); + + createBufferSource = jest + 
.fn()
+        .mockName("createBufferSource")
+        .mockImplementation(() => {
+            // The mock's constructor registers itself with this context, so no explicit add is needed here.
+            return new AudioBufferSourceNodeMock(this);
+        });
+
+    createGain = jest
+        .fn()
+        .mockName("createGain")
+        .mockImplementation(() => {
+            // Note that when creating a single Sound object, createGain() is called three times:
+            // 1) from AudioEngine._initializeAudioContext() to create the master gain.
+            // 2) from Sound constructor.
+            // 3) from main SoundTrack._initializeSoundTrackAudioGraph().
+            return new GainNodeMock();
+        });
+
+    createMediaElementSource = jest
+        .fn()
+        .mockName("createMediaElementSource")
+        .mockImplementation((mediaElement: HTMLMediaElement) => {
+            // Streaming sounds need to be able to create a media element source node.
+            return new MediaElementAudioSourceNodeMock();
+        });
+
+    createPanner = jest
+        .fn()
+        .mockName("createPanner")
+        .mockImplementation(() => {
+            return new PannerNodeMock();
+        });
+
+    decodeAudioData = jest
+        .fn()
+        .mockName("decodeAudioData")
+        .mockImplementation((data: ArrayBuffer, success?: (buffer: AudioBuffer) => void) => {
+            const buffer = AudioTestSamples.GetAudioBuffer(data);
+            if (success) {
+                success(buffer);
+            }
+            // The real API is promise-based, so also return the decoded buffer for callers that await it.
+            return Promise.resolve(buffer);
+        });
+
+    resume = jest
+        .fn()
+        .mockName("resume")
+        .mockImplementation(() => {
+            if (!this.requireUserInteraction) {
+                this.state = "running";
+            }
+
+            return Promise.resolve();
+        });
+
+    suspend = jest
+        .fn()
+        .mockName("suspend")
+        .mockImplementation(() => {
+            this.state = "suspended";
+            return Promise.resolve();
+        });
+
+    addEventListener = jest.fn().mockName("addEventListener");
+    removeEventListener = jest.fn().mockName("removeEventListener");
+}
+
+class OfflineAudioContextMock {}
+
+class AudioContext extends AudioContextMock {}
+class OfflineAudioContext extends OfflineAudioContextMock {}
+
 export class MockedAudioObjects {
-    static Instance: MockedAudioObjects;
+    private _previousAudioContext: any;
 
     constructor() {
         MockedAudioObjects.Instance = this;
@@ -84,135 +257,100 @@ export class MockedAudioObjects {
         document.body.appendChild = jest.fn().mockName("appendChild");
         document.body.removeChild = jest.fn().mockName("removeChild");
 
+        global.fetch = jest
+            .fn()
+            .mockName("fetch")
+            .mockResolvedValue({
+                arrayBuffer: () => Promise.resolve(new ArrayBuffer(8)),
+            } as Response);
+
         global.Audio = jest
             .fn()
             .mockName("Audio")
             .mockImplementation(() => {
+                let canPlayThroughListener: () => void = () => void 0;
+
                 return {
-                    addEventListener: jest.fn().mockName("addEventListener"),
+                    addEventListener: jest
+                        .fn()
+                        .mockName("addEventListener")
+                        .mockImplementation((type: string, listener: () => void) => {
+                            if (type === "canplaythrough") {
+                                canPlayThroughListener = listener;
+                            }
+                        }),
+                    removeEventListener: jest.fn().mockName("removeEventListener"),
                     canPlayType: jest.fn().mockName("canPlayType").mockReturnValue(""),
+                    children: [],
                     controls: true,
                     crossOrigin: null,
                     loop: false,
-                    load: jest.fn().mockName("load"),
+                    load: jest
+                        .fn()
+                        .mockName("load")
+                        .mockImplementation(() => {
+                            // Simulate that the audio is ready to play through after load() is called.
+                            realSetTimeout(() => {
+                                canPlayThroughListener();
+                            }, 0);
+                        }),
                     pause: jest.fn().mockName("pause"),
+                    play: jest.fn().mockName("play").mockReturnValue(Promise.resolve()),
                     preload: "none",
                 };
             });
 
-        global.AudioBuffer = jest.fn().mockName("AudioBuffer");
         global.MediaStream = jest.fn().mockName("MediaStream");
 
         // AudioContext mock.
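+        // Capture the real window.AudioContext so dispose() can restore it when the mock is torn down.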
this._previousAudioContext = window.AudioContext; - window.AudioContext = jest - .fn() - .mockName("AudioContext") - .mockImplementation(() => { - return { - currentTime: 0, - state: "running", - close: jest - .fn() - .mockName("close") - .mockImplementation(() => { - this.audioContext.state = "closed"; - return Promise.resolve(); - }), - createBufferSource: jest - .fn() - .mockName("createBufferSource") - .mockImplementation(() => { - const bufferSource = new AudioBufferSourceNodeMock(); - this._audioBufferSources.push(bufferSource); - return bufferSource; - }), - createGain: jest - .fn() - .mockName("createGain") - .mockImplementation(() => { - // Note that when creating a single Sound object, createGain() is called three times: - // 1) from AudioEngine._initializeAudioContext() to create the master gain. - // 2) from Sound constructor. - // 3) from main SoundTrack._initializeSoundTrackAudioGraph(). - return new GainNodeMock(); - }), - createMediaElementSource: jest - .fn() - .mockName("createMediaElementSource") - .mockImplementation((mediaElement: HTMLMediaElement) => { - // Streaming sounds need to be able to create a media element source node. - return new MediaElementAudioSourceNodeMock(); - }), - createPanner: jest - .fn() - .mockName("createPanner") - .mockImplementation(() => { - return new PannerNodeMock(); - }), - decodeAudioData: jest - .fn() - .mockName("decodeAudioData") - .mockImplementation((data: ArrayBuffer, success: (buffer: AudioBuffer) => void) => { - success(AudioTestSamples.GetAudioBuffer(data)); - }), - resume: jest - .fn() - .mockName("resume") - .mockImplementation(() => { - this.audioContext.state = "running"; - return Promise.resolve(); - }), - suspend: jest - .fn() - .mockName("suspend") - .mockImplementation(() => { - this.audioContext.state = "suspended"; - return Promise.resolve(); - }), - addEventListener: jest.fn().mockName("addEventListener"), - removeEventListener: jest.fn().mockName("removeEventListener"), - }; - }) as any; + window.AudioContext = AudioContext as any; + window.OfflineAudioContext = OfflineAudioContext as any; + + window.AudioBuffer = AudioBuffer as any; - window.OfflineAudioContext = window.AudioContext as any; + window.AnalyserNode = AnalyserNodeMock as any; + window.AudioBufferSourceNode = AudioBufferSourceNodeMock as any; window.GainNode = GainNodeMock as any; + window.MediaElementAudioSourceNode = MediaElementAudioSourceNodeMock as any; + window.PannerNode = PannerNodeMock as any; + window.StereoPannerNode = StereoPannerNodeMock as any; } get audioBufferSource() { - return this._audioBufferSources[this._audioBufferSources.length - 1]; + return this.audioContext.audioBufferSource; } get audioBufferSourceWasCreated() { - return 0 < this._audioBufferSources.length; + return this.audioContext.audioBufferSourceWasCreated; } get audioContext() { - // Return the audio context as `any` so its `state` property can be set. - return Engine.audioEngine!.audioContext! 
as any;
+        const audioEngine = Engine.audioEngine;
+        if (!audioEngine) {
+            throw new Error("No audio engine available");
+        }
+
+        return audioEngine.audioContext as unknown as AudioContextMock;
     }
 
     dispose() {
-        this._audioBufferSources.length = 0;
+        this.audioContext.dispose();
         window.AudioContext = this._previousAudioContext;
     }
 
     incrementCurrentTime(seconds: number) {
-        this.audioContext.currentTime += seconds;
-        this._audioBufferSources.forEach((audioBufferSource) => {
-            if (audioBufferSource.startTime + audioBufferSource.buffer.duration <= this.audioContext.currentTime) {
-                audioBufferSource.stop();
-            }
-        });
+        this.audioContext.incrementCurrentTime(seconds);
     }
 
-    nodeIsGainNode(node: AudioNode) {
-        return node instanceof GainNodeMock;
-    }
+    connectsToPannerNode(node: AudioNodeMock) {
+        for (const connection of node.connections) {
+            if (connection instanceof PannerNodeMock || this.connectsToPannerNode(connection)) {
+                return true;
+            }
+        }
 
-    nodeIsPannerNode(node: AudioNode) {
-        return node instanceof PannerNodeMock;
+        return false;
     }
 
-    private _previousAudioContext: any;
-    private _audioBufferSources = new Array();
+    static Instance: MockedAudioObjects;
 }
diff --git a/packages/dev/core/test/unit/Audio/sound.test.ts b/packages/dev/core/test/unit/Audio/sound.test.ts
index 92558defd42..d273aeba5d5 100644
--- a/packages/dev/core/test/unit/Audio/sound.test.ts
+++ b/packages/dev/core/test/unit/Audio/sound.test.ts
@@ -2,6 +2,9 @@
  * @jest-environment jsdom
  */
 
+import type { ISoundOptions } from "core/Audio";
+import type { Nullable } from "core/types";
+
 import { AudioEngine, Sound } from "core/Audio";
 import { AbstractEngine, NullEngine } from "core/Engines";
 import { Scene } from "core/scene";
@@ -9,16 +12,58 @@ import { Scene } from "core/scene";
 import { AudioTestHelper } from "./helpers/audioTestHelper";
 import { AudioTestSamples } from "./helpers/audioTestSamples";
 import { MockedAudioObjects } from "./helpers/mockedAudioObjects";
+import { SoundState } from "../../../src/AudioV2/soundState";
+import { StaticSound } from "../../../src/AudioV2/abstractAudio/staticSound";
+import { StreamingSound } from "../../../src/AudioV2/abstractAudio/streamingSound";
 
 // Required for timers (eg. setTimeout) to work.
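+// The real timer functions are captured below so the async helpers can still time out and
+// resolve while Jest's fake timers are installed.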
 jest.useFakeTimers();
 
+const realSetTimeout = jest.requireActual("timers").setTimeout;
+const realClearTimeout = jest.requireActual("timers").clearTimeout;
+
+async function CreateSoundAsync(
+    name: string,
+    urlOrArrayBuffer: any,
+    scene?: Nullable<Scene>,
+    readyToPlayCallback: Nullable<() => void> = null,
+    options?: ISoundOptions
+): Promise<Sound> {
+    const callstack = new Error().stack;
+
+    return new Promise((resolve, reject) => {
+        const timer = realSetTimeout(() => {
+            reject(new Error("Sound creation timed out.\n" + callstack));
+        }, 1000);
+
+        const sound = new Sound(
+            name,
+            urlOrArrayBuffer,
+            scene,
+            () => {
+                realClearTimeout(timer);
+                readyToPlayCallback?.();
+                resolve(sound);
+            },
+            options
+        );
+    });
+}
+
+async function ZeroTimeoutAsync(): Promise<void> {
+    return new Promise((resolve) => {
+        realSetTimeout(() => {
+            resolve();
+        }, 0);
+    });
+}
+
 describe("Sound with no scene", () => {
     it("constructor does not set scene if no scene is given", () => {
         const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz");
 
-        const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer) as any;
+        const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer);
 
-        expect(sound._scene).toBeUndefined();
+        expect((sound as any)._scene).toBeUndefined();
     });
 });
 
@@ -31,42 +76,42 @@ describe("Sound", () => {
     let scene: Scene;
 
     beforeEach(() => {
-        mock = new MockedAudioObjects;
-        engine = new NullEngine;
+        mock = new MockedAudioObjects();
+        engine = new NullEngine();
         scene = new Scene(engine);
-        audioEngine = AbstractEngine.audioEngine = new AudioEngine(null, new AudioContext, null);
+        audioEngine = AbstractEngine.audioEngine = new AudioEngine(null, new AudioContext(), null);
     });
 
     afterEach(() => {
+        mock.dispose();
+        (mock as any) = null;
+
         scene.dispose();
         (scene as any) = null;
 
         engine.dispose();
         (engine as any) = null;
 
         (audioEngine as any) = null;
-
-        mock.dispose();
-        (mock as any) = null;
     });
 
-    it("constructor initializes AudioSceneComponent", () => {
+    it("constructor initializes AudioSceneComponent", async () => {
         const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz");
 
-        new Sound(expect.getState().currentTestName, audioSample.arrayBuffer);
+        await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer);
 
         expect(scene!._getComponent("Audio")).not.toBeNull();
     });
 
-    it("constructor sets given readyToPlayCallback", () => {
+    it("constructor sets given readyToPlayCallback", async () => {
         const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz");
 
         const readyToPlayCallback = jest.fn();
-        new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, scene, readyToPlayCallback);
+        await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, scene, readyToPlayCallback);
 
-        expect(readyToPlayCallback).toBeCalled();
+        expect(readyToPlayCallback).toHaveBeenCalled();
     });
 
-    it("constructor sets up a linear custom attenuation function by default", () => {
+    it("constructor sets up a linear custom attenuation function by default", async () => {
         const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz");
 
-        const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer) as any;
+        const sound = (await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer)) as any;
 
         expect(sound._customAttenuationFunction(1, 0, 100, 0, 0)).toBeCloseTo(1);
         expect(sound._customAttenuationFunction(1, 10, 100,
0, 0)).toBeCloseTo(0.9); @@ -81,14 +126,14 @@ describe("Sound", () => { expect(sound._customAttenuationFunction(1, 100, 100, 0, 0)).toBeCloseTo(0); }); - it("constructor sets state correctly when given no options", () => { + it("constructor sets state correctly when given no options", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer) as any; + const sound = (await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer)) as any; expect(sound.autoplay).toBe(false); expect(sound.currentTime).toBe(0); - expect(sound.directionalConeInnerAngle).toBe(360); - expect(sound.directionalConeOuterAngle).toBe(360); + expect(sound.directionalConeInnerAngle).toBeCloseTo(360); + expect(sound.directionalConeOuterAngle).toBeCloseTo(360); expect(sound.distanceModel).toBe("linear"); expect(sound.isPaused).toBe(false); expect(sound.isPlaying).toBe(false); @@ -103,228 +148,88 @@ describe("Sound", () => { expect(sound.useCustomAttenuation).toBe(false); expect(sound.getAudioBuffer()).toBe(audioSample.audioBuffer); expect(sound.getPlaybackRate()).toBe(1); - expect(sound.getSoundGain()).toBe(mock.audioContext.createGain.mock.results[0].value); expect(sound.getVolume()).toBe(1); expect(sound._scene).toBe(scene); }); - it("constructor sets boolean options correctly when given false", () => { + it("constructor sets boolean options correctly when given false", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { + const sound = (await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer, null, null, { autoplay: false, loop: false, spatialSound: false, streaming: false, - useCustomAttenuation: false - }) as any; + useCustomAttenuation: false, + })) as any; expect(sound.autoplay).toBe(false); expect(sound.loop).toBe(false); expect(sound.spatialSound).toBe(false); - expect(sound._streaming).toBe(false); + expect(sound._soundV2 instanceof StaticSound).toBe(true); expect(sound.useCustomAttenuation).toBe(false); }); - it("constructor sets boolean options correctly when given true", () => { - const sound = new Sound(expect.getState().currentTestName, "https://example.com/any.mp3", null, null, { + it("constructor sets boolean options correctly when given true", async () => { + const sound = (await CreateSoundAsync(expect.getState().currentTestName, "https://example.com/any.mp3", null, null, { autoplay: true, loop: true, skipCodecCheck: true, spatialSound: true, streaming: true, - useCustomAttenuation: true - }) as any; + useCustomAttenuation: true, + })) as any; expect(sound.autoplay).toBe(true); expect(sound.loop).toBe(true); expect(sound.spatialSound).toBe(true); - expect(sound._streaming).toBe(true); + expect(sound._soundV2 instanceof StreamingSound).toBe(true); expect(sound.useCustomAttenuation).toBe(true); }); - it("constructor sets number options correctly", () => { + it("constructor sets number options correctly", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { + const sound = (await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { length: 1, maxDistance: 2, offset: 3, playbackRate: 4, refDistance: 5, 
rolloffFactor: 6, - volume: 7 - }) as any; + volume: 7, + })) as any; - expect(sound._length).toBe(1); + expect(sound._soundV2.duration).toBe(1); expect(sound.maxDistance).toBe(2); - expect(sound._offset).toBe(3); - expect(sound._playbackRate).toBe(4); - expect(sound.refDistance).toBe(5); - expect(sound.rolloffFactor).toBe(6); + expect(sound._optionsV2.startOffset).toBe(3); + expect(sound.getPlaybackRate()).toBe(4); + expect(sound._optionsV2.spatialMinDistance).toBe(5); + expect(sound._optionsV2.spatialRolloffFactor).toBe(6); expect(sound.getVolume()).toBe(7); }); - it("constructor sets string options correctly", () => { + it("constructor sets string options correctly", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound1 = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { distanceModel: "linear" }); - const sound2 = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { distanceModel: "inverse" }); - const sound3 = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { distanceModel: "exponential" }); + const sound1 = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { distanceModel: "linear" }); + const sound2 = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { distanceModel: "inverse" }); + const sound3 = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { distanceModel: "exponential" }); expect(sound1.distanceModel).toBe("linear"); expect(sound2.distanceModel).toBe("inverse"); expect(sound3.distanceModel).toBe("exponential"); }); - it("constructor does codec check when no options are given", () => { - expect(audioEngine.isMP3supported).toBe(false); - scene!._loadFile = jest.fn().mockName("scene._loadFile"); - - new Sound(expect.getState().currentTestName, "test.mp3"); - - expect(scene!._loadFile).toHaveBeenCalledTimes(0); - }); - - it("constructor does codec check when skipCodecCheck option is false", () => { - expect(audioEngine.isMP3supported).toBe(false); - scene!._loadFile = jest.fn().mockName("scene._loadFile"); - - new Sound(expect.getState().currentTestName, "test.mp3", null, null, { skipCodecCheck: false }); - - expect(scene!._loadFile).toHaveBeenCalledTimes(0); - }); - - it("constructor skips codec check when skipCodecCheck option is true", () => { - expect(audioEngine.isMP3supported).toBe(false); - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, "test.mp3", null, null, { skipCodecCheck: true }); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.mp3"); - }); - - it("constructor loads given .mp3 when supported", () => { - (AbstractEngine.audioEngine as any).isMP3supported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, "test.mp3"); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.mp3"); - }); - - it("constructor loads given .ogg when supported", () => { - (AbstractEngine.audioEngine as any).isOGGsupported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new 
Sound(expect.getState().currentTestName, "test.ogg"); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.ogg"); - }); - - it("constructor loads given .wav", () => { - (AbstractEngine.audioEngine as any).isOGGsupported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, "test.wav"); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.wav"); - }); - - it("constructor loads given .m4a", () => { - (AbstractEngine.audioEngine as any).isOGGsupported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, "test.m4a"); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.m4a"); - }); - - it("constructor loads given .mp4", () => { - (AbstractEngine.audioEngine as any).isOGGsupported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, "test.mp4"); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.mp4"); - }); - - it("constructor loads given blob", () => { - (AbstractEngine.audioEngine as any).isOGGsupported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, "blob:test"); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("blob:test"); - }); - - it("constructor skips given .ogg when not supported", () => { - (AbstractEngine.audioEngine as any).isMP3supported = true; - (AbstractEngine.audioEngine as any).isOGGsupported = false; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, [ "test.ogg", "test.mp3" ]); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.mp3"); - }); - - it("constructor skips given .mp3 when not supported", () => { - (AbstractEngine.audioEngine as any).isMP3supported = false; - (AbstractEngine.audioEngine as any).isOGGsupported = true; - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, [ "test.mp3", "test.ogg" ]); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.ogg"); - }); - - it("constructor loads first supported file", () => { - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, [ "test.jpg", "test.png", "test.wav" ]); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.wav"); - }); - - it("constructor loads only first supported file when given multiple supported files", () => { - const sceneLoadFileMock = jest.fn().mockName("scene._loadFile"); - scene!._loadFile = sceneLoadFileMock; - - new Sound(expect.getState().currentTestName, [ "test.mp4", "test.m4a" ]); - - expect(sceneLoadFileMock).toHaveBeenCalledTimes(1); - 
expect(sceneLoadFileMock.mock.calls[0][0]).toBe("test.mp4"); - }); - - it("sets isPlaying to true when play is called", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz")); + it("sets isPlaying to true when play is called", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz")); sound.play(); expect(sound.isPlaying).toBe(true); }); - it("updates currentTime when play is called and audio context time advances", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz")); - mock.audioContext.currentTime = 0.1 + it("updates currentTime when play is called and audio context time advances", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz").audioBuffer); + mock.audioContext.currentTime = 0.1; sound.play(); mock.incrementCurrentTime(0.2); @@ -332,25 +237,25 @@ describe("Sound", () => { expect(sound.currentTime).toBeCloseTo(0.2); }); - it("starts the buffer source at the constructor's given offset when play is called", () => { + it("starts the buffer source at the constructor's given offset when play is called", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); const options = { - offset: 0.1 + offset: 0.1, }; - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, options); + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, options); sound.play(); - expect(mock.audioBufferSource.start).toBeCalledWith(0, 0.1, undefined); + expect(mock.audioBufferSource.start).toHaveBeenCalledWith(0, 0.1, undefined); }); - it("resumes the buffer source node at the time it was paused at after playing from the constructor's given offset", () => { + it("resumes the buffer source node at the time it was paused at after playing from the constructor's given offset", async () => { const pausedAtTime = 0.2; const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); const options = { - offset: 0.1 + offset: 0.1, }; - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, options); + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer, null, null, options); mock.audioContext.currentTime = 0.1; sound.play(); @@ -363,9 +268,9 @@ describe("Sound", () => { expect(args[1]).toBeCloseTo(options.offset + pausedAtTime); }); - it("restarts the buffer source at the given positive offset when play, stop, play, pause, and play are called", () => { + it("restarts the buffer source at the given positive offset when play, stop, play, pause, and play are called", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer); mock.audioContext.currentTime = 0.1; sound.play(); @@ -378,12 +283,12 @@ describe("Sound", () => { mock.incrementCurrentTime(0.1); sound.play(0, 0.9); - expect(mock.audioBufferSource.start).toBeCalledWith(mock.audioContext.currentTime, 0.9, undefined); + 
expect(mock.audioBufferSource.start).toHaveBeenCalledWith(mock.audioContext.currentTime, 0.9, undefined); }); - it("restarts the buffer source at the given zero offset when play, stop, play, pause, and play are called", () => { + it("restarts the buffer source at the given zero offset when play, stop, play, pause, and play are called", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer); mock.audioContext.currentTime = 0.1; sound.play(); @@ -396,16 +301,16 @@ describe("Sound", () => { mock.incrementCurrentTime(0.1); sound.play(0, 0); - expect(mock.audioBufferSource.start).toBeCalledWith(mock.audioContext.currentTime, 0, undefined); + expect(mock.audioBufferSource.start).toHaveBeenCalledWith(mock.audioContext.currentTime, 0, undefined); }); - it("restarts the buffer source at the given offset when play, pause, updateOptions, and play are called", () => { + it("restarts the buffer source at the given offset when play, pause, updateOptions, and play are called", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); const options = { - offset: 0.1 + offset: 0.1, }; - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, options); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer, null, null, options); + mock.audioContext.currentTime = 0.1; sound.play(); mock.incrementCurrentTime(0.2); @@ -413,13 +318,13 @@ describe("Sound", () => { sound.updateOptions({ offset: 0.4 }); sound.play(); - expect(mock.audioBufferSource.start).toBeCalledWith(mock.audioContext.currentTime, 0.4, undefined); + expect(mock.audioBufferSource.start).toHaveBeenCalledWith(mock.audioContext.currentTime, 0.4, undefined); }); - it("resets current time to zero when stopped while playing", () => { + it("resets current time to zero when stopped while playing", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer); + mock.audioContext.currentTime = 0.1; sound.play(); mock.incrementCurrentTime(0.2); @@ -428,10 +333,10 @@ describe("Sound", () => { expect(sound.currentTime).toBe(0); }); - it("resets current time to zero when stopped while paused", () => { + it("resets current time to zero when stopped while paused", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer); + mock.audioContext.currentTime = 0.1; sound.play(); mock.incrementCurrentTime(0.2); @@ -441,10 +346,10 @@ describe("Sound", () => { expect(sound.currentTime).toBe(0); }); - it("sets current time to time it was paused at", () => { + it("sets current time to time it was paused at", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, 
audioSample.arrayBuffer); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer); + mock.audioContext.currentTime = 0.1; sound.play(); mock.incrementCurrentTime(0.2); @@ -453,10 +358,10 @@ describe("Sound", () => { expect(sound.currentTime).toBeCloseTo(0.2); }); - it("calls onended when stopped", () => { + it("calls onended when stopped", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer); + mock.audioContext.currentTime = 0.1; const onended = jest.fn().mockName("onended"); sound.onended = onended; @@ -467,10 +372,10 @@ describe("Sound", () => { expect(onended.mock.calls.length).toBe(1); }); - it("calls onended when sound buffer reaches end", () => { + it("calls onended when sound buffer reaches end", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer); + mock.audioContext.currentTime = 0.1; const onended = jest.fn().mockName("onended"); sound.onended = onended; @@ -480,10 +385,10 @@ describe("Sound", () => { expect(onended.mock.calls.length).toBe(1); }); - it("does not call onended when paused", () => { + it("does not call onended when paused", async () => { const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz"); - const sound = new Sound(expect.getState().currentTestName, audioSample.arrayBuffer); - mock.audioContext.currentTime = 0.1 + const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.audioBuffer); + mock.audioContext.currentTime = 0.1; const onended = jest.fn().mockName("onended"); sound.onended = onended; @@ -496,49 +401,59 @@ describe("Sound", () => { // For historical reasons, a sound's `isPlaying` property is set to `true` when it is constructed with the autoplay // option set, even if the audio context state is suspended. 
- it("sets isPlaying to true when constructed with autoplay option set while audio context is suspended", () => { + it("sets isPlaying to true when constructed with autoplay option set while audio context is suspended", async () => { mock.audioContext.state = "suspended"; - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { autoplay: true }); + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + autoplay: true, + }); expect(sound.isPlaying).toBe(true); }); - it("sets isPlaying to false when stopped while audio context is suspended", () => { + it("sets isPlaying to false when stopped while audio context is suspended", async () => { mock.audioContext.state = "suspended"; - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { autoplay: true }); + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + autoplay: true, + }); sound.stop(); expect(sound.isPlaying).toBe(false); }); - it("does not autoplay after 500 ms when stopped before audio context is resumed", () => { + it("does not autoplay after 500 ms when stopped before audio context is resumed", async () => { mock.audioContext.state = "suspended"; - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { autoplay: true }); + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + autoplay: true, + }); sound.stop(); mock.audioContext.state = "running"; jest.advanceTimersByTime(500); - expect(AudioTestHelper.SoundWasStarted()).toBe(false); + expect((sound as any)._soundV2.state).toBe(SoundState.Stopped); }); - it("does not autoplay when stopped before audio engine is unlocked", () => { + it("does not autoplay when stopped before audio engine is unlocked", async () => { mock.audioContext.state = "suspended"; - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { autoplay: true }); + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + autoplay: true, + }); AudioTestHelper.WaitForAudioContextSuspendedDoubleCheck(); sound.stop(); AbstractEngine.audioEngine!.unlock(); return AudioTestHelper.WhenAudioContextResumes(() => { - expect(AudioTestHelper.SoundWasStarted()).toBe(false); + expect((sound as any)._soundV2.state).toBe(SoundState.Stopped); }); }); - it("does not autoplay when played and stopped before audio engine is unlocked", () => { + it("does not autoplay when played and stopped before audio engine is unlocked", async () => { mock.audioContext.state = "suspended"; - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { autoplay: true }); + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + autoplay: true, + }); sound.play(); 
AudioTestHelper.WaitForAudioContextSuspendedDoubleCheck(); @@ -546,69 +461,95 @@ describe("Sound", () => { AbstractEngine.audioEngine!.unlock(); return AudioTestHelper.WhenAudioContextResumes(() => { - expect(AudioTestHelper.SoundWasStarted()).toBe(false); + expect((sound as any)._soundV2.state).toBe(SoundState.Stopped); }); }); - it("connects to gain node when not spatialized via constructor", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: false }); + it("connects to gain node when not spatialized via constructor", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: false, + }); sound.play(); - expect(mock.nodeIsGainNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(false); }); - it("connects to panner node when spatialized via constructor", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: true }); + it("connects to panner node when spatialized via constructor", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: true, + }); sound.play(); - expect(mock.nodeIsPannerNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(true); }); - it("connects to panner node when spatialized via property", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: false }); + it("connects to panner node when spatialized via property", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: false, + }); + sound.spatialSound = true; + await ZeroTimeoutAsync(); sound.play(); - expect(mock.nodeIsPannerNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(true); }); - it("connects to panner node when spatialized via updateOptions", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: false }); + it("connects to panner node when spatialized via updateOptions", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: false, + }); + sound.updateOptions({ spatialSound: true }); + await ZeroTimeoutAsync(); sound.play(); - expect(mock.nodeIsPannerNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(true); }); - it("connects to gain node when unspatialized via property", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: true }); + it("connects to gain node when unspatialized via property", async () => { + const 
sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: true, + }); + sound.spatialSound = false; + await ZeroTimeoutAsync(); sound.play(); - expect(mock.nodeIsGainNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(false); }); - it("connects to gain node when unspatialized via updateOptions", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: true }); + it("connects to gain node when unspatialized via updateOptions", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: true, + }); + sound.updateOptions({ spatialSound: false }); + await ZeroTimeoutAsync(); sound.play(); - expect(mock.nodeIsGainNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(false); }); - it("connects to panner node when playing and spatialSound property is set to false before being set to true", () => { - const sound = new Sound(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { spatialSound: true }); + it("connects to panner node when playing and spatialSound property is set to false before being set to true", async () => { + const sound = await CreateSoundAsync(expect.getState().currentTestName, AudioTestSamples.GetArrayBuffer("silence, 1 second, 1 channel, 48000 kHz"), null, null, { + spatialSound: true, + }); sound.play(); + sound.spatialSound = false; + await ZeroTimeoutAsync(); + sound.spatialSound = true; + await ZeroTimeoutAsync(); - expect(mock.nodeIsPannerNode(mock.audioBufferSource.destination)).toBe(true); + expect(mock.connectsToPannerNode(mock.audioBufferSource)).toBe(true); }); }); From eb0bcc400d2e627ebe78e9ef364b5de75aaa6aaf Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Mon, 17 Nov 2025 15:25:39 -0500 Subject: [PATCH 02/14] Fix case where `Sound` is constructed but audio engine is not available --- packages/dev/core/src/Audio/sound.ts | 45 ++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index 0c0c79a121b..70de0bb57a6 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -77,7 +77,10 @@ export class Sound { if (this._soundV2 instanceof _WebAudioSoundSource) { return; } - this._soundV2._getOptions().autoplay = value; + + if (this._soundV2) { + this._soundV2._getOptions().autoplay = value; + } } /** @@ -91,7 +94,10 @@ export class Sound { if (this._soundV2 instanceof _WebAudioSoundSource) { return; } - this._soundV2.loop = value; + + if (this._soundV2) { + this._soundV2.loop = value; + } } /** @@ -108,7 +114,7 @@ export class Sound { * Is this sound currently played. */ public get isPlaying(): boolean { - return this._soundV2 instanceof _WebAudioSoundSource ? true : this._soundV2.state === SoundState.Started || this._optionsV2.autoplay!; + return this._soundV2 instanceof _WebAudioSoundSource ? 
true : this._soundV2?.state === SoundState.Started || this._optionsV2.autoplay!; } /** @@ -137,7 +143,10 @@ export class Sound { } public set maxDistance(value: number) { this._optionsV2.spatialMaxDistance = value; - this._soundV2.spatial.maxDistance = value; + + if (this._soundV2) { + this._soundV2.spatial.maxDistance = value; + } } /** * Define the distance attenuation model the sound will follow. @@ -148,7 +157,10 @@ export class Sound { } public set distanceModel(value: "linear" | "inverse" | "exponential") { this._optionsV2.spatialDistanceModel = value; - this._soundV2.spatial.distanceModel = value; + + if (this._soundV2) { + this._soundV2.spatial.distanceModel = value; + } } /** * @internal @@ -177,7 +189,7 @@ export class Sound { * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ public get spatialSound(): boolean { - return this._soundV2._isSpatial; + return this._soundV2?._isSpatial ?? false; } /** @@ -185,7 +197,9 @@ export class Sound { * @see https://doc.babylonjs.com/legacy/audio#creating-a-spatial-3d-sound */ public set spatialSound(newValue: boolean) { - this._soundV2._isSpatial = newValue; + if (this._soundV2) { + this._soundV2._isSpatial = newValue; + } } private _localDirection: Vector3 = new Vector3(1, 0, 0); @@ -279,10 +293,17 @@ export class Sound { optionsV2.spatialRotationQuaternion = _SpatialAudioDefaults.rotationQuaternion; } + this._optionsV2 = optionsV2; + this.useCustomAttenuation = options.useCustomAttenuation ?? false; let streaming = options?.streaming || false; + const audioEngine = AbstractEngine.audioEngine; + if (!audioEngine) { + return; + } + const audioEngineV2 = (AbstractEngine.audioEngine as AudioEngine)._v2; const createSoundV2 = () => { @@ -336,8 +357,6 @@ export class Sound { this._soundV2 = createSoundV2(); } - this._optionsV2 = optionsV2; - if (!this._soundV2) { Logger.Error("Parameter must be a URL to the sound, an Array of URLs (.mp3 & .ogg) or an ArrayBuffer of the sound."); return; @@ -692,6 +711,10 @@ export class Sound { * @param time (optional) Stop the sound after X seconds. Stop immediately (0) by default. */ public stop(time?: number): void { + if (!this._soundV2) { + return; + } + // WebAudio sound sources have no `stop` function because they are always playing. if (this._soundV2 instanceof _WebAudioSoundSource) { return; @@ -708,6 +731,10 @@ export class Sound { * Put the sound in pause */ public pause(): void { + if (!this._soundV2) { + return; + } + // WebAudio sound sources have no `pause` function because they are always playing. 
         if (this._soundV2 instanceof _WebAudioSoundSource) {
             return;
 
From 870db991673cc73df1eefeee7705273bb00a5eb4 Mon Sep 17 00:00:00 2001
From: Andy Fillebrown
Date: Mon, 17 Nov 2025 17:13:33 -0500
Subject: [PATCH 03/14] Fix sound not setting playback `loop` option correctly

---
 packages/dev/core/src/Audio/sound.ts | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts
index 70de0bb57a6..15e57a88325 100644
--- a/packages/dev/core/src/Audio/sound.ts
+++ b/packages/dev/core/src/Audio/sound.ts
@@ -27,13 +27,9 @@ const TmpRampOptions: IAudioParameterRampOptions = {
     shape: AudioParameterRampShape.Linear,
 };
 
-const TmpPlayOptions: IStaticSoundPlayOptions = {
+const TmpPlayOptions: Partial<IStaticSoundPlayOptions> = {
     duration: 0,
-    loop: false,
-    loopEnd: 0,
-    loopStart: 0,
     startOffset: 0,
-    volume: 1,
     waitTime: 0,
 };

From 8fb0eb79764ad1ff52d0ac2481addd79354a4884 Mon Sep 17 00:00:00 2001
From: Andy Fillebrown
Date: Mon, 17 Nov 2025 17:16:23 -0500
Subject: [PATCH 04/14] Show unmute button when sound is set to autoplay and engine is locked

---
 packages/dev/core/src/Audio/sound.ts | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts
index 15e57a88325..e106fca6f5b 100644
--- a/packages/dev/core/src/Audio/sound.ts
+++ b/packages/dev/core/src/Audio/sound.ts
@@ -253,7 +253,7 @@ export class Sound {
 
         const optionsV2: Partial<IStaticSoundOptions> = {
             analyzerEnabled: false,
-            autoplay: options.autoplay || false,
+            autoplay: false, // `false` for now, but will be set to given option later
             duration: options.length || 0,
             loop: options.loop || false,
             loopEnd: 0,
@@ -323,7 +323,16 @@ export class Sound {
             const sound = new _WebAudioStaticSound(name, audioEngineV2, optionsV2);
 
             // eslint-disable-next-line github/no-then
-            void sound._initAsync(urlOrArrayBuffer, optionsV2).then(this._onReadyToPlay);
+            void sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => {
+                this._onReadyToPlay();
+
+                // We need to explicitly call this `Sound` class's `play` function when `autoplay` is `true` so the
+                // audio engine unlock mechanism is properly triggered.
+                if (options.autoplay) {
+                    this._optionsV2.autoplay = true;
+                    this.play();
+                }
+            });
 
             return sound;
         };

From f2464f66ad39e54ec2b89baae941c524661fde64 Mon Sep 17 00:00:00 2001
From: Andy Fillebrown
Date: Tue, 18 Nov 2025 09:09:41 -0500
Subject: [PATCH 05/14] Fix `autoplay` property not being set correctly after `Sound` is constructed

---
 packages/dev/core/src/Audio/sound.ts | 44 ++++++++++------------------
 1 file changed, 15 insertions(+), 29 deletions(-)

diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts
index e106fca6f5b..0fd3d3913c0 100644
--- a/packages/dev/core/src/Audio/sound.ts
+++ b/packages/dev/core/src/Audio/sound.ts
@@ -66,17 +66,11 @@ export class Sound {
      * Does the sound autoplay once loaded.
      */
     public get autoplay(): boolean {
-        return this._soundV2 instanceof _WebAudioSoundSource ? 
true : this._optionsV2.autoplay!; } public set autoplay(value: boolean) { - if (this._soundV2 instanceof _WebAudioSoundSource) { - return; - } - - if (this._soundV2) { - this._soundV2._getOptions().autoplay = value; - } + this._optionsV2.autoplay = value; } /** @@ -289,7 +283,8 @@ export class Sound { optionsV2.spatialRotationQuaternion = _SpatialAudioDefaults.rotationQuaternion; } - this._optionsV2 = optionsV2; + this._optionsV2 = { ...optionsV2 }; + this._optionsV2.autoplay = options.autoplay || false; this.useCustomAttenuation = options.useCustomAttenuation ?? false; @@ -311,30 +306,17 @@ export class Sound { const sound = new _WebAudioStreamingSound(name, audioEngineV2, streamingOptionsV2); - // eslint-disable-next-line github/no-then - void sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => { - // eslint-disable-next-line github/no-then - void sound.preloadInstancesAsync(1).then(this._onReadyToPlay); + sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => { + sound.preloadInstancesAsync(1).then(this._onReadyToPlay); }); return sound; - } - - const sound = new _WebAudioStaticSound(name, audioEngineV2, optionsV2); - - // eslint-disable-next-line github/no-then - void sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => { - this._onReadyToPlay(); - - // We need to explicitly call this `Sound` class's `play` function when `autoplay` is `true` so the - // audio engine unlock mechanism is properly triggered. - if (options.autoplay) { - this._optionsV2.autoplay = true; - this.play(); - } - }); + } else { + const sound = new _WebAudioStaticSound(name, audioEngineV2, optionsV2); + sound._initAsync(urlOrArrayBuffer, optionsV2).then(this._onReadyToPlay); - return sound; + return sound; + } }; // If no parameter is passed then the setAudioBuffer should be called to prepare the sound. @@ -376,6 +358,10 @@ export class Sound { this._scene.mainSoundTrack.addSound(this); this._isReadyToPlay = true; this._readyToPlayCallback(); + + if (this._optionsV2.autoplay) { + this.play(); + } }; /** From ca391fe1a76d538df971bbcc4a8857c7f91f68d1 Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Tue, 18 Nov 2025 09:33:18 -0500 Subject: [PATCH 06/14] Fix audio engine unlock when `useCustomUnlockedButton` is `true` See playground #KBA3JY#4. 
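The scenario this fixes, roughly (a minimal sketch, not part of the diff below;
the button id is a placeholder, and `useCustomUnlockedButton` and `unlock()` are
the audio engine's existing public API):

    import { AbstractEngine } from "core/Engines";

    const audioEngine = AbstractEngine.audioEngine!;

    // Opt out of the built-in unmute button; the app supplies its own.
    audioEngine.useCustomUnlockedButton = true;

    // The app's button click is the user gesture that resumes the suspended
    // AudioContext. With this change, `unlock()` retries `resume()` even when
    // an earlier attempt already set the internal `_tryToRun` flag.
    document.getElementById("unmute")?.addEventListener("click", () => {
        audioEngine.unlock();
    });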
--- packages/dev/core/src/Audio/audioEngine.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/dev/core/src/Audio/audioEngine.ts b/packages/dev/core/src/Audio/audioEngine.ts index 4e3b182487f..dca8f279851 100644 --- a/packages/dev/core/src/Audio/audioEngine.ts +++ b/packages/dev/core/src/Audio/audioEngine.ts @@ -263,6 +263,7 @@ export class AudioEngine implements IAudioEngine { private async _triggerRunningStateAsync() { if (this._tryToRun) { + void this._v2._audioContext.resume(); return; } this._tryToRun = true; From f26c933f91951f4f70998c43592942289ad4edf3 Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Tue, 18 Nov 2025 09:35:54 -0500 Subject: [PATCH 07/14] Fix lint errors re: promises --- packages/dev/core/src/Audio/sound.ts | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index 0fd3d3913c0..b3073bcd551 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -306,14 +306,18 @@ export class Sound { const sound = new _WebAudioStreamingSound(name, audioEngineV2, streamingOptionsV2); - sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => { - sound.preloadInstancesAsync(1).then(this._onReadyToPlay); + // eslint-disable-next-line github/no-then + void sound._initAsync(urlOrArrayBuffer, optionsV2).then(() => { + // eslint-disable-next-line github/no-then + void sound.preloadInstancesAsync(1).then(this._onReadyToPlay); }); return sound; } else { const sound = new _WebAudioStaticSound(name, audioEngineV2, optionsV2); - sound._initAsync(urlOrArrayBuffer, optionsV2).then(this._onReadyToPlay); + + // eslint-disable-next-line github/no-then + void sound._initAsync(urlOrArrayBuffer, optionsV2).then(this._onReadyToPlay); return sound; } From 811662b5a332975e3ebe07a2311c47945ecb9c57 Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Tue, 18 Nov 2025 10:56:50 -0500 Subject: [PATCH 08/14] Wait for Firefox to unlock audio context before playing after legit button press --- packages/dev/core/src/Audio/sound.ts | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index b3073bcd551..68aea053f87 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -670,7 +670,8 @@ export class Sound { * @param length (optional) Sound duration (in seconds) */ public play(time?: number, offset?: number, length?: number): void { - AbstractEngine.audioEngine?.unlock(); + const audioEngine = AbstractEngine.audioEngine; + audioEngine?.unlock(); // WebAudio sound sources have no `play` function because they are always playing. if (this._soundV2 instanceof _WebAudioSoundSource) { @@ -687,7 +688,15 @@ export class Sound { TmpPlayOptions.duration = length || 0; TmpPlayOptions.startOffset = offset !== undefined ? offset || this._optionsV2.startOffset! : this._optionsV2.startOffset!; TmpPlayOptions.waitTime = time || 0; - this._soundV2.play(TmpPlayOptions); + + if (audioEngine?.unlocked) { + this._soundV2.play(TmpPlayOptions); + } else { + // Wait a bit for FF as context seems late to be ready. 
+ setTimeout(() => { + (this._soundV2 as _WebAudioStaticSound | _WebAudioStreamingSound).play(TmpPlayOptions); + }, 500); + } } catch (ex) { Logger.Error("Error while trying to play audio: " + this.name + ", " + ex.message); } From 8132365f6d06ca4058bea7075f4cea7792b1cc8f Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Tue, 18 Nov 2025 11:13:32 -0500 Subject: [PATCH 09/14] Change `cancelScheduledValues` arg to `0` to ensure they're canceled on Firefox If the arg `startTime` is even slightly in the future, Firefox throws an error. This change avoids that error and achieves the same desired result of canceling any scheduled values. --- .../AudioV2/webAudio/components/webAudioParameterComponent.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/dev/core/src/AudioV2/webAudio/components/webAudioParameterComponent.ts b/packages/dev/core/src/AudioV2/webAudio/components/webAudioParameterComponent.ts index 3549afe71d4..160e2fe17c6 100644 --- a/packages/dev/core/src/AudioV2/webAudio/components/webAudioParameterComponent.ts +++ b/packages/dev/core/src/AudioV2/webAudio/components/webAudioParameterComponent.ts @@ -62,7 +62,7 @@ export class _WebAudioParameterComponent { const startTime = this._engine.currentTime; if (shape === AudioParameterRampShape.None) { - this._param.cancelScheduledValues(startTime); + this._param.cancelScheduledValues(0); this._param.value = this._targetValue = value; this._rampEndTime = startTime; return; @@ -75,7 +75,7 @@ export class _WebAudioParameterComponent { return; } - this._param.cancelScheduledValues(startTime); + this._param.cancelScheduledValues(0); this._param.setValueCurveAtTime(_GetAudioParamCurveValues(shape, this._param.value, (this._targetValue = value)), startTime, duration); this._rampEndTime = startTime + duration; From 1635d77f947e697d445c6733e9fab0b810be6cb0 Mon Sep 17 00:00:00 2001 From: Andy Fillebrown Date: Tue, 18 Nov 2025 11:24:45 -0500 Subject: [PATCH 10/14] Fix autoplay issue --- packages/dev/core/src/Audio/sound.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index 68aea053f87..7db9458eb9a 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -104,7 +104,7 @@ export class Sound { * Is this sound currently played. */ public get isPlaying(): boolean { - return this._soundV2 instanceof _WebAudioSoundSource ? true : this._soundV2?.state === SoundState.Started || this._optionsV2.autoplay!; + return this._soundV2 instanceof _WebAudioSoundSource ? true : this._soundV2?.state === SoundState.Started || (!this.isReady() && this._optionsV2.autoplay!); } /** @@ -726,9 +726,6 @@ export class Sound { TmpStopOptions.waitTime = time || 0; this._soundV2.stop(TmpStopOptions); - - // Set autoplay to `false` so `isPlaying` correctly returns `false`. 
-        this._optionsV2.autoplay = false;
     }
 
     /**

From 8b386d1b97d6132ec4b1e9eccd35d49732674cad Mon Sep 17 00:00:00 2001
From: Andy Fillebrown
Date: Tue, 18 Nov 2025 14:56:08 -0500
Subject: [PATCH 11/14] Return valid `GainNode` from `Sound.getSoundGain()`

---
 packages/dev/core/src/Audio/sound.ts | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts
index 7db9458eb9a..d4c38783738 100644
--- a/packages/dev/core/src/Audio/sound.ts
+++ b/packages/dev/core/src/Audio/sound.ts
@@ -197,7 +197,6 @@ export class Sound {
     private _isReadyToPlay: boolean = false;
     private _isDirectional: boolean = false;
     private _readyToPlayCallback: () => any;
-    private _soundGain: Nullable<GainNode>;
     private _scene: Scene;
     private _connectedTransformNode: Nullable<TransformNode>;
     private _customAttenuationFunction: (currentVolume: number, currentDistance: number, maxDistance: number, refDistance: number, rolloffFactor: number) => number;
@@ -381,10 +380,6 @@ export class Sound {
         } else if (this._scene.soundTracks) {
             this._scene.soundTracks[this.soundTrackId].removeSound(this);
         }
-        if (this._soundGain) {
-            this._soundGain.disconnect();
-            this._soundGain = null;
-        }
 
         if (this._connectedTransformNode && this._registerFunc) {
             this._connectedTransformNode.unregisterAfterWorldMatrixUpdate(this._registerFunc);
@@ -646,7 +641,7 @@ export class Sound {
 
     /** @internal */
     public updateDistanceFromListener() {
-        if (this._soundV2._outNode && this._connectedTransformNode && this.useCustomAttenuation && this._soundGain && this._scene.activeCamera) {
+        if (this._soundV2._outNode && this._connectedTransformNode && this.useCustomAttenuation && this._scene.activeCamera) {
             const distance = this._scene.audioListenerPositionProvider
                 ? this._connectedTransformNode.position.subtract(this._scene.audioListenerPositionProvider()).length()
                 : this._connectedTransformNode.getDistanceToCamera(this._scene.activeCamera);
@@ -886,7 +881,7 @@ export class Sound {
      * @returns the gain node
      */
     public getSoundGain(): Nullable<GainNode> {
-        return this._soundGain;
+        return this._soundV2._outNode as GainNode;
     }
 
     /**

From d208ba922f13a1ac4bfd15556524ab1ae2cc7fe3 Mon Sep 17 00:00:00 2001
From: Andy Fillebrown
Date: Tue, 18 Nov 2025 16:13:01 -0500
Subject: [PATCH 12/14] Remove `stereoPan` option so stereo panner node doesn't override spatial node

---
 packages/dev/core/src/Audio/sound.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts
index d4c38783738..0cd34099a5a 100644
--- a/packages/dev/core/src/Audio/sound.ts
+++ b/packages/dev/core/src/Audio/sound.ts
@@ -263,7 +263,7 @@ export class Sound {
             spatialMinDistance: options.refDistance,
             spatialRolloffFactor: options.rolloffFactor,
             stereoEnabled: false,
-            stereoPan: 0,
             startOffset: options.offset || 0,
             volume: options.volume ?? 1,
         };

From 0c69771b48893f2827219dec1bd7f875a7a9a66c Mon Sep 17 00:00:00 2001
From: Andy Fillebrown
Date: Tue, 18 Nov 2025 13:28:21 -0500
Subject: [PATCH 13/14] Fix spatial `maxDistance` default for old `Sound` class

New audio engine `maxDistance` defaults to `10000`, but old `Sound` class should
default to `100`. This change fixes it so it defaults to `100` correctly in all
cases.
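The intended behavior, sketched as a hypothetical regression test on top of the
`CreateSoundAsync` helper from `sound.test.ts` (the test name and assertion are
illustrative, not part of the diff below):

    it("defaults spatial maxDistance to 100 when the option is omitted", async () => {
        const audioSample = AudioTestSamples.Get("silence, 1 second, 1 channel, 48000 kHz");

        const sound = await CreateSoundAsync(expect.getState().currentTestName, audioSample.arrayBuffer, null, null, { spatialSound: true });

        // The new engine defaults to 10000; the legacy wrapper must report 100.
        expect(sound.maxDistance).toBe(100);
    });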
--- packages/dev/core/src/Audio/sound.ts | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index 0cd34099a5a..35cccc26186 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -135,6 +135,7 @@ export class Sound { this._optionsV2.spatialMaxDistance = value; if (this._soundV2) { + this._initSpatial(); this._soundV2.spatial.maxDistance = value; } } @@ -149,6 +150,7 @@ export class Sound { this._optionsV2.spatialDistanceModel = value; if (this._soundV2) { + this._initSpatial(); this._soundV2.spatial.distanceModel = value; } } @@ -188,7 +190,11 @@ export class Sound { */ public set spatialSound(newValue: boolean) { if (this._soundV2) { - this._soundV2._isSpatial = newValue; + if (newValue) { + this._initSpatial(); + } else { + this._soundV2._isSpatial = false; + } } } @@ -273,6 +279,7 @@ export class Sound { optionsV2.spatialConeInnerAngle = _SpatialAudioDefaults.coneInnerAngle; optionsV2.spatialConeOuterAngle = _SpatialAudioDefaults.coneOuterAngle; optionsV2.spatialConeOuterVolume = _SpatialAudioDefaults.coneOuterVolume; + optionsV2.spatialMaxDistance = options.maxDistance || 100; optionsV2.spatialMinUpdateTime = 0; optionsV2.spatialOrientation = _SpatialAudioDefaults.orientation; optionsV2.spatialPanningModel = (this._scene.headphone ? "HRTF" : "equalpower") as "equalpower" | "HRTF"; @@ -489,6 +496,7 @@ export class Sound { */ public switchPanningModelToHRTF() { if (this.spatialSound) { + this._initSpatial(); this._soundV2.spatial.panningModel = "HRTF"; } } @@ -500,6 +508,7 @@ export class Sound { */ public switchPanningModelToEqualPower() { if (this.spatialSound) { + this._initSpatial(); this._soundV2.spatial.panningModel = "equalpower"; } } @@ -535,6 +544,7 @@ export class Sound { this._optionsV2.spatialConeOuterAngle = D2r(coneOuterAngle); this._optionsV2.spatialConeOuterVolume = coneOuterGain; + this._initSpatial(); this._soundV2.spatial.coneInnerAngle = this._optionsV2.spatialConeInnerAngle; this._soundV2.spatial.coneOuterAngle = this._optionsV2.spatialConeOuterAngle; this._soundV2.spatial.coneOuterVolume = coneOuterGain; @@ -567,6 +577,7 @@ export class Sound { } this._optionsV2.spatialConeInnerAngle = value; if (this.spatialSound) { + this._initSpatial(); this._soundV2.spatial.coneInnerAngle = value; } } @@ -592,6 +603,7 @@ export class Sound { } this._optionsV2.spatialConeOuterAngle = value; if (this.spatialSound) { + this._initSpatial(); this._soundV2.spatial.coneOuterAngle = value; } } @@ -610,6 +622,7 @@ export class Sound { } this._optionsV2.spatialPosition.copyFrom(newPosition); if (this.spatialSound && !isNaN(newPosition.x) && !isNaN(newPosition.y) && !isNaN(newPosition.z)) { + this._initSpatial(); this._soundV2.spatial.position = newPosition; } } @@ -635,9 +648,18 @@ export class Sound { const direction = Vector3.TransformNormal(this._localDirection, mat); direction.normalize(); + this._initSpatial(); this._soundV2.spatial.orientation = direction; } + private _initSpatial() { + this._soundV2._isSpatial = true; + if (this._optionsV2.spatialMaxDistance === undefined) { + this._optionsV2.spatialMaxDistance = 100; + this._soundV2.spatial.maxDistance = 100; + } + } + /** @internal */ public updateDistanceFromListener() { if (this._soundV2._outNode && this._connectedTransformNode && this.useCustomAttenuation && this._scene.activeCamera) { From c7ab966ef22bba7ad9be158810f78441be72e54f Mon Sep 17 00:00:00 2001 From: Andy 
Fillebrown Date: Tue, 18 Nov 2025 16:31:19 -0500 Subject: [PATCH 14/14] Fix spatial `distanceModel` default for old `Sound` class New audio engine `distanceModel` defaults to `inverse`, but old `Sound` class should default to `linear`. This change fixes it so it defaults to `linear` correctly in all cases. --- packages/dev/core/src/Audio/sound.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/dev/core/src/Audio/sound.ts b/packages/dev/core/src/Audio/sound.ts index 35cccc26186..fb504cc5ed1 100644 --- a/packages/dev/core/src/Audio/sound.ts +++ b/packages/dev/core/src/Audio/sound.ts @@ -279,7 +279,6 @@ export class Sound { optionsV2.spatialConeInnerAngle = _SpatialAudioDefaults.coneInnerAngle; optionsV2.spatialConeOuterAngle = _SpatialAudioDefaults.coneOuterAngle; optionsV2.spatialConeOuterVolume = _SpatialAudioDefaults.coneOuterVolume; - optionsV2.spatialMaxDistance = options.maxDistance || 100; optionsV2.spatialMinUpdateTime = 0; optionsV2.spatialOrientation = _SpatialAudioDefaults.orientation; optionsV2.spatialPanningModel = (this._scene.headphone ? "HRTF" : "equalpower") as "equalpower" | "HRTF"; @@ -654,6 +653,12 @@ export class Sound { private _initSpatial() { this._soundV2._isSpatial = true; + + if (this._optionsV2.spatialDistanceModel === undefined) { + this._optionsV2.spatialDistanceModel = "linear"; + this._soundV2.spatial.distanceModel = "linear"; + } + if (this._optionsV2.spatialMaxDistance === undefined) { this._optionsV2.spatialMaxDistance = 100; this._soundV2.spatial.maxDistance = 100;