diff --git a/apps/common-app/src/examples/AudioFile/AudioFile.tsx b/apps/common-app/src/examples/AudioFile/AudioFile.tsx
index 1addb6681..fcac79ab7 100644
--- a/apps/common-app/src/examples/AudioFile/AudioFile.tsx
+++ b/apps/common-app/src/examples/AudioFile/AudioFile.tsx
@@ -10,8 +10,10 @@ import { Button, Container, Spacer } from '../../components';
import { colors } from '../../styles';
import AudioPlayer from './AudioPlayer';
-const URL =
- 'https://software-mansion.github.io/react-native-audio-api/audio/voice/example-voice-01.mp3';
+// const remoteAsset =
+// 'https://software-mansion.github.io/react-native-audio-api/audio/voice/example-voice-01.mp3';
+
+import staticAsset from './voice-sample-landing.mp3';
const AudioFile: FC = () => {
const [isPlaying, setIsPlaying] = useState(false);
@@ -56,7 +58,7 @@ const AudioFile: FC = () => {
const fetchAudioBuffer = useCallback(async () => {
setIsLoading(true);
- await AudioPlayer.loadBuffer(URL);
+ await AudioPlayer.loadBuffer(staticAsset);
setIsLoading(false);
}, []);
@@ -83,7 +85,7 @@ const AudioFile: FC = () => {
const setup = async () => {
await fetchAudioBuffer();
await setupNotification();
- }
+ };
setup();
return () => {
AudioPlayer.reset();
@@ -92,7 +94,6 @@ const AudioFile: FC = () => {
}, [fetchAudioBuffer]);
useEffect(() => {
-
AudioManager.observeAudioInterruptions(true);
// Listen to notification control events
diff --git a/apps/common-app/src/examples/AudioFile/AudioPlayer.ts b/apps/common-app/src/examples/AudioFile/AudioPlayer.ts
index d30b7ffb5..5441e86c3 100644
--- a/apps/common-app/src/examples/AudioFile/AudioPlayer.ts
+++ b/apps/common-app/src/examples/AudioFile/AudioPlayer.ts
@@ -4,6 +4,7 @@ import type {
} from 'react-native-audio-api';
import {
AudioContext,
+ decodeAudioData,
PlaybackNotificationManager,
} from 'react-native-audio-api';
@@ -36,7 +37,7 @@ class AudioPlayer {
this.isPlaying = true;
PlaybackNotificationManager.update({
state: 'playing',
- })
+ });
if (this.audioContext.state === 'suspended') {
await this.audioContext.resume();
@@ -53,11 +54,16 @@ class AudioPlayer {
this.sourceNode.onPositionChanged = (event) => {
this.currentElapsedTime = event.value;
if (this.onPositionChanged) {
- this.onPositionChanged(this.currentElapsedTime / this.audioBuffer!.duration);
+ this.onPositionChanged(
+ this.currentElapsedTime / this.audioBuffer!.duration
+ );
}
};
- this.sourceNode.start(this.audioContext.currentTime, this.currentElapsedTime);
+ this.sourceNode.start(
+ this.audioContext.currentTime,
+ this.currentElapsedTime
+ );
};
pause = async () => {
@@ -71,7 +77,7 @@ class AudioPlayer {
await this.audioContext.suspend();
PlaybackNotificationManager.update({
state: 'paused',
- })
+ });
this.isPlaying = false;
};
@@ -94,19 +100,13 @@ class AudioPlayer {
}
};
- loadBuffer = async (url: string) => {
- const buffer = await fetch(url, {
+ loadBuffer = async (asset: string | number) => {
+ const buffer = await decodeAudioData(asset, 0, {
headers: {
'User-Agent':
'Mozilla/5.0 (Android; Mobile; rv:122.0) Gecko/122.0 Firefox/122.0',
},
- })
- .then((response) => response.arrayBuffer())
- .then((arrayBuffer) => this.audioContext.decodeAudioData(arrayBuffer))
- .catch((error) => {
- console.error('Error decoding audio data source:', error);
- return null;
- });
+ });
if (buffer) {
this.audioBuffer = buffer;
@@ -141,7 +141,7 @@ class AudioPlayer {
getElapsedTime = (): number => {
return this.currentElapsedTime;
- }
+ };
}
export default new AudioPlayer();
diff --git a/apps/common-app/src/examples/AudioFile/voice-sample-landing.mp3 b/apps/common-app/src/examples/AudioFile/voice-sample-landing.mp3
new file mode 100644
index 000000000..11bd9e60d
Binary files /dev/null and b/apps/common-app/src/examples/AudioFile/voice-sample-landing.mp3 differ
diff --git a/apps/common-app/types.d.ts b/apps/common-app/types.d.ts
new file mode 100644
index 000000000..e61eb2f3c
--- /dev/null
+++ b/apps/common-app/types.d.ts
@@ -0,0 +1 @@
+declare module '*.mp3';
diff --git a/apps/fabric-example/babel.config.js b/apps/fabric-example/babel.config.js
index f9d000c8c..da69d304d 100644
--- a/apps/fabric-example/babel.config.js
+++ b/apps/fabric-example/babel.config.js
@@ -2,6 +2,29 @@ module.exports = function (api) {
api.cache(false);
return {
presets: ['module:@react-native/babel-preset'],
- plugins: ['react-native-worklets/plugin'],
+ plugins: [
+ 'react-native-worklets/plugin',
+ [
+ 'module-resolver',
+ {
+ alias: {
+ 'common-app': '../common-app',
+ },
+ extensions: [
+ '.js',
+ '.jsx',
+ '.ts',
+ '.tsx',
+ '.ios.js',
+ '.android.js',
+ '.json',
+ '.ios.ts',
+ '.android.ts',
+ '.ios.tsx',
+ '.android.tsx',
+ ],
+ },
+ ],
+ ],
};
};
diff --git a/apps/fabric-example/ios/Podfile.lock b/apps/fabric-example/ios/Podfile.lock
index 9a386a86d..f1ea067d6 100644
--- a/apps/fabric-example/ios/Podfile.lock
+++ b/apps/fabric-example/ios/Podfile.lock
@@ -3287,7 +3287,7 @@ SPEC CHECKSUMS:
FBLazyVector: 309703e71d3f2f1ed7dc7889d58309c9d77a95a4
fmt: a40bb5bd0294ea969aaaba240a927bd33d878cdd
glog: 5683914934d5b6e4240e497e0f4a3b42d1854183
- hermes-engine: f93b5009d8ccd9429fe2a772351980df8a22a413
+ hermes-engine: 42d6f09ee6ede2feb220e2fb772e8bebb42ca403
RCT-Folly: 846fda9475e61ec7bcbf8a3fe81edfcaeb090669
RCTDeprecation: a41bbdd9af30bf2e5715796b313e44ec43eefff1
RCTRequired: 7be34aabb0b77c3cefe644528df0fa0afad4e4d0
diff --git a/apps/fabric-example/metro.config.js b/apps/fabric-example/metro.config.js
index 07d019274..17641f419 100644
--- a/apps/fabric-example/metro.config.js
+++ b/apps/fabric-example/metro.config.js
@@ -1,8 +1,9 @@
-const {getDefaultConfig, mergeConfig} = require('@react-native/metro-config');
+const { getDefaultConfig, mergeConfig } = require('@react-native/metro-config');
const path = require('path');
-const root = path.resolve(__dirname, '../..');
+const monorepoRoot = path.resolve(__dirname, '../..');
+const appsRoot = path.resolve(monorepoRoot, 'apps');
/**
* Metro configuration https://reactnative.dev/docs/metro
@@ -10,7 +11,8 @@ const root = path.resolve(__dirname, '../..');
* @type {import('@react-native/metro-config').MetroConfig}
*/
const config = {
- watchFolders: [root],
+ projectRoot: __dirname,
+ watchFolders: [monorepoRoot, appsRoot],
};
module.exports = mergeConfig(getDefaultConfig(__dirname), config);
diff --git a/packages/audiodocs/docs/core/base-audio-context.mdx b/packages/audiodocs/docs/core/base-audio-context.mdx
index c6f31682f..98546d9e0 100644
--- a/packages/audiodocs/docs/core/base-audio-context.mdx
+++ b/packages/audiodocs/docs/core/base-audio-context.mdx
@@ -256,25 +256,13 @@ Creates [`WorkletProcessingNode`](/docs/worklets/worklet-processing-node).
#### Returns `WorkletProcessingNode`.
-:::caution
-Supported file formats:
-- flac
-- mp3
-- ogg
-- opus
-- wav
-- aac
-- m4a
-- mp4
-
-Last three formats are decoded with ffmpeg, [see for more info](/docs/other/ffmpeg-info).
-:::
-
### `decodeAudioData`
-Decodes audio data from either a file path or an ArrayBuffer. The optional `sampleRate` parameter lets you resample the decoded audio.
-If not provided, the audio will be automatically resampled to match the audio context's `sampleRate`.
+Decodes audio data from a file path, an asset module id, or an ArrayBuffer.
+The decoded audio is automatically resampled to match the audio context's `sampleRate`.
+**For the list of supported formats, visit [this page](/docs/utils/decoding).**
+
@@ -285,18 +273,22 @@ If not provided, the audio will be automatically resampled to match the audio co
-| input | ArrayBuffer | ArrayBuffer with audio data. |
-| input | string | Path to audio file located on the device. |
-| sampleRate | number | Target sample rate for the decoded audio. |
+| input | ArrayBuffer | ArrayBuffer with audio data. |
+| input | string | Path to a remote or local audio file. |
+| input | number | Asset module id. |
+| fetchOptions | [RequestInit](https://github.com/facebook/react-native/blob/ac06f3bdc76a9fd7c65ab899e82bff5cad9b94b6/packages/react-native/src/types/globals.d.ts#L265) | Additional fetch options (such as headers) used when a URL is passed to fetch. |
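+
+A minimal sketch of decoding through an existing context; `url` is a placeholder for a link to a supported audio file:
+
+```tsx
+import { AudioContext } from 'react-native-audio-api';
+
+const url = 'https://example.com/audio.mp3'; // placeholder url
+
+const context = new AudioContext();
+// the decoded audio is automatically resampled to context.sampleRate
+const buffer = await context.decodeAudioData(url);
+```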
diff --git a/packages/audiodocs/docs/system/recording-notification-manager.mdx b/packages/audiodocs/docs/system/recording-notification-manager.mdx
index 3eca35140..2f47391e9 100644
--- a/packages/audiodocs/docs/system/recording-notification-manager.mdx
+++ b/packages/audiodocs/docs/system/recording-notification-manager.mdx
@@ -1,4 +1,5 @@
---
+sidebar_label: RecordingNotificationManager
sidebar_position: 4
---
diff --git a/packages/audiodocs/docs/utils/decoding.mdx b/packages/audiodocs/docs/utils/decoding.mdx
index 7e530d44c..871bfd255 100644
--- a/packages/audiodocs/docs/utils/decoding.mdx
+++ b/packages/audiodocs/docs/utils/decoding.mdx
@@ -9,19 +9,25 @@ import { Optional, MobileOnly } from '@site/src/components/Badges';
You can decode audio data independently, without creating an AudioContext, using the exported functions [`decodeAudioData`](/docs/utils/decoding#decodeaudiodata) and
[`decodePCMInBase64`](/docs/utils/decoding#decodepcminbase64).
+:::warning
+On the web, decoding can only be done via an `AudioContext`.
+:::
+
If you already have an audio context, you can decode audio data directly using its [`decodeAudioData`](/docs/core/base-audio-context#decodeaudiodata) function;
the decoded audio will then be automatically resampled to match the context's `sampleRate`.
:::caution
Supported file formats:
-- aac
- flac
-- m4a
- mp3
-- mp4
- ogg
- opus
- wav
+- aac
+- m4a
+- mp4
+
+The last three formats are decoded with FFmpeg on mobile, [see this page for more info](/docs/other/ffmpeg-info).
:::
### `decodeAudioData`
@@ -39,54 +45,77 @@ if not provided, the original sample rate from the file is used.
-| input | ArrayBuffer | ArrayBuffer with audio data. |
-| input | string | Path to audio file located on the device. |
+| input | ArrayBuffer | ArrayBuffer with audio data. |
+| input | string | Path to a remote or local audio file. |
+| input | number | Asset module id. |
| sampleRate | number | Target sample rate for the decoded audio. |
+| fetchOptions | [RequestInit](https://github.com/facebook/react-native/blob/ac06f3bdc76a9fd7c65ab899e82bff5cad9b94b6/packages/react-native/src/types/globals.d.ts#L265) | Additional fetch options (such as headers) used when a URL is passed to fetch. |
+
#### Returns `Promise<AudioBuffer>`.
+:::caution
+If you pass a number (an asset module id) to the decode function, bear in mind that it internally uses the Image component provided
+by React Native. By default, only the .mp3, .wav, .mp4, .m4a and .aac audio file formats are supported.
+If you want to use other formats, refer to [this section](https://reactnative.dev/docs/images#static-non-image-resources) for more info.
+:::
+
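+Example decoding a bundled asset by module id (a minimal sketch; the asset path is hypothetical):
+
+```tsx
+import { decodeAudioData } from 'react-native-audio-api';
+
+// Metro turns require(...) for an .mp3 file into a numeric asset module id,
+// which is resolved internally via Image.resolveAssetSource
+const buffer = await decodeAudioData(require('./assets/example.mp3'));
+```
+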
-Example decoding with memory block
+Example decoding a remote URL
```tsx
+import { decodeAudioData } from 'react-native-audio-api';
+
const url = ... // url to an audio file
- const buffer = await fetch(url)
- .then((response) => response.arrayBuffer())
- // resample decoded audio to 48000 Hz
- .then((arrayBuffer) => decodeAudioData(arrayBuffer, 48000))
- .catch((error) => {
- console.error('Error decoding audio data source:', error);
- return null;
- });
+const buffer = await decodeAudioData(url);
```
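+
+If the request needs custom options, a hedged sketch passing `fetchOptions` as the third argument (the header value is a placeholder; `undefined` leaves the sample rate untouched):
+
+```tsx
+import { decodeAudioData } from 'react-native-audio-api';
+
+const url = 'https://example.com/audio.mp3'; // placeholder url
+// fetchOptions is forwarded to fetch() when a url string is passed
+const buffer = await decodeAudioData(url, undefined, {
+  headers: { 'User-Agent': 'my-app/1.0' },
+});
+```
+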
+:::caution
+Internally, decoding local files uses the Image component to retrieve the asset URI, which does not work on the web platform.
+There you can use the expo-asset library for this purpose, or retrieve an ArrayBuffer on your own and pass it to the decoding function.
+:::
Example using expo-asset library
```tsx
import { Asset } from 'expo-asset';
+import { AudioContext } from 'react-native-audio-api';
+
+const uri = await Asset.fromModule(require('@/assets/music/example.mp3'))
+ .downloadAsync()
+ .then((asset) => {
+ if (!asset.localUri) {
+ console.error('Failed to load audio asset');
+ }
+ return asset.localUri;
+ });
+
+const context = new AudioContext();
+if (uri) {
+ const buffer = await fetch(uri)
+ .then((response) => response.arrayBuffer())
+ .then((arrayBuffer) => context.decodeAudioData(arrayBuffer));
+ console.log('Audio buffer loaded:', buffer);
+}
-const buffer = await Asset.fromModule(require('@/assets/music/example.mp3'))
- .downloadAsync()
- .then((asset) => {
- if (!asset.localUri) {
- throw new Error('Failed to load audio asset');
- }
- // sampleRate not provided, so file will be decoded in original sampleRate
- return decodeAudioData(asset.localUri);
- })
```
diff --git a/packages/react-native-audio-api/src/core/AudioDecoder.ts b/packages/react-native-audio-api/src/core/AudioDecoder.ts
index 367f8f885..86c524a05 100644
--- a/packages/react-native-audio-api/src/core/AudioDecoder.ts
+++ b/packages/react-native-audio-api/src/core/AudioDecoder.ts
@@ -1,4 +1,12 @@
+import { Image } from 'react-native';
+
import { IAudioDecoder } from '../interfaces';
+import { DecodeDataInput } from '../types';
+import {
+ isBase64Source,
+ isDataBlobString,
+ isRemoteSource,
+} from '../utils/paths';
import AudioBuffer from './AudioBuffer';
class AudioDecoder {
@@ -9,35 +17,89 @@ class AudioDecoder {
this.decoder = global.createAudioDecoder();
}
+ private async decodeAudioDataImplementation(
+ input: DecodeDataInput,
+ sampleRate?: number,
+ fetchOptions?: RequestInit
+ ): Promise<AudioBuffer> {
+ if (input instanceof ArrayBuffer) {
+ const buffer = await this.decoder.decodeWithMemoryBlock(
+ new Uint8Array(input),
+ sampleRate ?? 0
+ );
+ return new AudioBuffer(buffer);
+ }
+
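+ // numeric inputs are Metro asset module ids; resolve them to a uri first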
+ const stringSource =
+ typeof input === 'number' ? Image.resolveAssetSource(input).uri : input;
+
+ // reject anything that is not an ArrayBuffer, an asset module id, or a string source
+ if (typeof stringSource !== 'string') {
+ throw new TypeError('Input must be a module, uri or ArrayBuffer');
+ }
+
+ // input is data:audio/...;base64,...
+ if (isBase64Source(stringSource)) {
+ throw new Error(
+ 'Base64 source decoding is not currently supported, to decode raw PCM base64 strings use decodePCMInBase64 method.'
+ );
+ }
+
+ // input is blob:...
+ if (isDataBlobString(stringSource)) {
+ throw new Error('Data Blob string decoding is not currently supported.');
+ }
+
+ // input is http(s)://...
+ if (isRemoteSource(stringSource)) {
+ const arrayBuffer = await fetch(stringSource, fetchOptions).then((res) =>
+ res.arrayBuffer()
+ );
+
+ const buffer = await this.decoder.decodeWithMemoryBlock(
+ new Uint8Array(arrayBuffer),
+ sampleRate ?? 0
+ );
+
+ return new AudioBuffer(buffer);
+ }
+
+ // Local file path
+ const filePath = stringSource.startsWith('file://')
+ ? stringSource.replace('file://', '')
+ : stringSource;
+
+ const buffer = await this.decoder.decodeWithFilePath(
+ filePath,
+ sampleRate ?? 0
+ );
+
+ return new AudioBuffer(buffer);
+ }
+
public static getInstance(): AudioDecoder {
if (!AudioDecoder.instance) {
AudioDecoder.instance = new AudioDecoder();
}
+
return AudioDecoder.instance;
}
public async decodeAudioDataInstance(
- input: string | ArrayBuffer,
- sampleRate?: number
+ input: DecodeDataInput,
+ sampleRate?: number,
+ fetchOptions?: RequestInit
): Promise<AudioBuffer> {
- let buffer;
- if (typeof input === 'string') {
- // Remove the file:// prefix if it exists
- if (input.startsWith('file://')) {
- input = input.replace('file://', '');
- }
- buffer = await this.decoder.decodeWithFilePath(input, sampleRate ?? 0);
- } else if (input instanceof ArrayBuffer) {
- buffer = await this.decoder.decodeWithMemoryBlock(
- new Uint8Array(input),
- sampleRate ?? 0
- );
- }
+ const audioBuffer = await this.decodeAudioDataImplementation(
+ input,
+ sampleRate,
+ fetchOptions
+ );
- if (!buffer) {
- throw new Error('Unsupported input type or failed to decode audio');
+ if (!audioBuffer) {
+ throw new Error('Failed to decode audio data.');
}
- return new AudioBuffer(buffer);
+
+ return audioBuffer;
}
public async decodePCMInBase64Instance(
@@ -57,10 +119,15 @@ class AudioDecoder {
}
export async function decodeAudioData(
- input: string | ArrayBuffer,
- sampleRate?: number
+ input: DecodeDataInput,
+ sampleRate?: number,
+ fetchOptions?: RequestInit
): Promise<AudioBuffer> {
- return AudioDecoder.getInstance().decodeAudioDataInstance(input, sampleRate);
+ return AudioDecoder.getInstance().decodeAudioDataInstance(
+ input,
+ sampleRate,
+ fetchOptions
+ );
}
export async function decodePCMInBase64(
diff --git a/packages/react-native-audio-api/src/core/BaseAudioContext.ts b/packages/react-native-audio-api/src/core/BaseAudioContext.ts
index 1718414e2..84353ad51 100644
--- a/packages/react-native-audio-api/src/core/BaseAudioContext.ts
+++ b/packages/react-native-audio-api/src/core/BaseAudioContext.ts
@@ -10,6 +10,7 @@ import {
AudioWorkletRuntime,
ContextState,
ConvolverNodeOptions,
+ DecodeDataInput,
IIRFilterNodeOptions,
PeriodicWaveConstraints,
} from '../types';
@@ -56,13 +57,10 @@ export default class BaseAudioContext {
}
public async decodeAudioData(
- input: string | ArrayBuffer,
- sampleRate?: number
+ input: DecodeDataInput,
+ fetchOptions?: RequestInit
): Promise<AudioBuffer> {
- if (!(typeof input === 'string' || input instanceof ArrayBuffer)) {
- throw new TypeError('Input must be a string or ArrayBuffer');
- }
- return await decodeAudioData(input, sampleRate ?? this.sampleRate);
+ return await decodeAudioData(input, this.sampleRate, fetchOptions);
}
public async decodePCMInBase64(
diff --git a/packages/react-native-audio-api/src/types.ts b/packages/react-native-audio-api/src/types.ts
index 550d14c5a..1a6aecec9 100644
--- a/packages/react-native-audio-api/src/types.ts
+++ b/packages/react-native-audio-api/src/types.ts
@@ -149,3 +149,5 @@ export interface IIRFilterNodeOptions {
feedforward: number[];
feedback: number[];
}
+
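+// number = Metro asset module id, string = local path or remote url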
+export type DecodeDataInput = number | string | ArrayBuffer;
diff --git a/packages/react-native-audio-api/src/utils/paths.ts b/packages/react-native-audio-api/src/utils/paths.ts
new file mode 100644
index 000000000..1d46adab0
--- /dev/null
+++ b/packages/react-native-audio-api/src/utils/paths.ts
@@ -0,0 +1,11 @@
+export function isRemoteSource(url: string): boolean {
+ return url.startsWith('http://') || url.startsWith('https://');
+}
+
+export function isBase64Source(data: string): boolean {
+ return data.startsWith('data:audio/') && data.includes(';base64,');
+}
+
+export function isDataBlobString(data: string): boolean {
+ return data.startsWith('blob:');
+}
diff --git a/packages/react-native-audio-api/src/web-core/AudioContext.tsx b/packages/react-native-audio-api/src/web-core/AudioContext.tsx
index ed800f75b..12ed194f2 100644
--- a/packages/react-native-audio-api/src/web-core/AudioContext.tsx
+++ b/packages/react-native-audio-api/src/web-core/AudioContext.tsx
@@ -1,28 +1,29 @@
+import { InvalidAccessError, NotSupportedError } from '../errors';
import {
- ContextState,
- PeriodicWaveConstraints,
- AudioContextOptions,
AudioBufferBaseSourceNodeOptions,
+ AudioContextOptions,
+ ContextState,
+ DecodeDataInput,
IIRFilterNodeOptions,
+ PeriodicWaveConstraints,
} from '../types';
-import { InvalidAccessError, NotSupportedError } from '../errors';
-import BaseAudioContext from './BaseAudioContext';
import AnalyserNode from './AnalyserNode';
-import AudioDestinationNode from './AudioDestinationNode';
import AudioBuffer from './AudioBuffer';
import AudioBufferSourceNode from './AudioBufferSourceNode';
+import AudioDestinationNode from './AudioDestinationNode';
+import BaseAudioContext from './BaseAudioContext';
import BiquadFilterNode from './BiquadFilterNode';
-import IIRFilterNode from './IIRFilterNode';
+import ConvolverNode from './ConvolverNode';
+import { ConvolverNodeOptions } from './ConvolverNodeOptions';
+import DelayNode from './DelayNode';
import GainNode from './GainNode';
+import IIRFilterNode from './IIRFilterNode';
import OscillatorNode from './OscillatorNode';
import PeriodicWave from './PeriodicWave';
import StereoPannerNode from './StereoPannerNode';
-import ConvolverNode from './ConvolverNode';
-import DelayNode from './DelayNode';
-import { ConvolverNodeOptions } from './ConvolverNodeOptions';
-import { globalWasmPromise, globalTag } from './custom/LoadCustomWasm';
import ConstantSourceNode from './ConstantSourceNode';
+import { globalTag, globalWasmPromise } from './custom/LoadCustomWasm';
import WaveShaperNode from './WaveShaperNode';
export default class AudioContext implements BaseAudioContext {
@@ -180,16 +181,30 @@ export default class AudioContext implements BaseAudioContext {
return new WaveShaperNode(this, this.context.createWaveShaper());
}
- async decodeAudioDataSource(source: string): Promise<AudioBuffer> {
- const arrayBuffer = await fetch(source).then((response) =>
- response.arrayBuffer()
- );
+ async decodeAudioData(
+ source: DecodeDataInput,
+ fetchOptions?: RequestInit
+ ): Promise<AudioBuffer> {
+ if (source instanceof ArrayBuffer) {
+ const decodedData = await this.context.decodeAudioData(source);
+ return new AudioBuffer(decodedData);
+ }
- return this.decodeAudioData(arrayBuffer);
- }
+ if (typeof source === 'string') {
+ const response = await fetch(source, fetchOptions);
+
+ if (!response.ok) {
+ throw new InvalidAccessError(
+ `Failed to fetch audio data from the provided source: ${source}`
+ );
+ }
+
+ const arrayBuffer = await response.arrayBuffer();
+ const decodedData = await this.context.decodeAudioData(arrayBuffer);
+ return new AudioBuffer(decodedData);
+ }
- async decodeAudioData(arrayBuffer: ArrayBuffer): Promise<AudioBuffer> {
- return new AudioBuffer(await this.context.decodeAudioData(arrayBuffer));
+ throw new TypeError('Unsupported source for decodeAudioData: ' + source);
}
async close(): Promise {
diff --git a/packages/react-native-audio-api/src/web-core/BaseAudioContext.tsx b/packages/react-native-audio-api/src/web-core/BaseAudioContext.tsx
index d94b2be64..96d15cb8b 100644
--- a/packages/react-native-audio-api/src/web-core/BaseAudioContext.tsx
+++ b/packages/react-native-audio-api/src/web-core/BaseAudioContext.tsx
@@ -1,21 +1,21 @@
import {
ContextState,
- PeriodicWaveConstraints,
IIRFilterNodeOptions,
+ PeriodicWaveConstraints,
} from '../types';
import AnalyserNode from './AnalyserNode';
-import AudioDestinationNode from './AudioDestinationNode';
import AudioBuffer from './AudioBuffer';
import AudioBufferSourceNode from './AudioBufferSourceNode';
+import AudioDestinationNode from './AudioDestinationNode';
import BiquadFilterNode from './BiquadFilterNode';
+import ConstantSourceNode from './ConstantSourceNode';
+import ConvolverNode from './ConvolverNode';
import DelayNode from './DelayNode';
-import IIRFilterNode from './IIRFilterNode';
import GainNode from './GainNode';
+import IIRFilterNode from './IIRFilterNode';
import OscillatorNode from './OscillatorNode';
import PeriodicWave from './PeriodicWave';
import StereoPannerNode from './StereoPannerNode';
-import ConstantSourceNode from './ConstantSourceNode';
-import ConvolverNode from './ConvolverNode';
import WaveShaperNode from './WaveShaperNode';
export default interface BaseAudioContext {
@@ -47,6 +47,8 @@ export default interface BaseAudioContext {
): PeriodicWave;
createAnalyser(): AnalyserNode;
createWaveShaper(): WaveShaperNode;
- decodeAudioDataSource(source: string): Promise<AudioBuffer>;
- decodeAudioData(arrayBuffer: ArrayBuffer): Promise<AudioBuffer>;
+ decodeAudioData(
+ arrayBuffer: ArrayBuffer,
+ fetchOptions?: RequestInit
+ ): Promise<AudioBuffer>;
}