Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions apps/common-app/src/examples/AudioFile/AudioFile.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,10 @@ import { Button, Container, Spacer } from '../../components';
import { colors } from '../../styles';
import AudioPlayer from './AudioPlayer';

const URL =
'https://software-mansion.github.io/react-native-audio-api/audio/voice/example-voice-01.mp3';
// const remoteAsset =
// 'https://software-mansion.github.io/react-native-audio-api/audio/voice/example-voice-01.mp3';

import staticAsset from './voice-sample-landing.mp3';

const AudioFile: FC = () => {
const [isPlaying, setIsPlaying] = useState(false);
Expand Down Expand Up @@ -56,7 +58,7 @@ const AudioFile: FC = () => {
const fetchAudioBuffer = useCallback(async () => {
setIsLoading(true);

await AudioPlayer.loadBuffer(URL);
await AudioPlayer.loadBuffer(staticAsset);

setIsLoading(false);
}, []);
Expand All @@ -83,7 +85,7 @@ const AudioFile: FC = () => {
const setup = async () => {
await fetchAudioBuffer();
await setupNotification();
}
};
setup();
return () => {
AudioPlayer.reset();
Expand All @@ -92,7 +94,6 @@ const AudioFile: FC = () => {
}, [fetchAudioBuffer]);

useEffect(() => {

AudioManager.observeAudioInterruptions(true);

// Listen to notification control events
Expand Down
28 changes: 14 additions & 14 deletions apps/common-app/src/examples/AudioFile/AudioPlayer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import type {
} from 'react-native-audio-api';
import {
AudioContext,
decodeAudioData,
PlaybackNotificationManager,
} from 'react-native-audio-api';

Expand Down Expand Up @@ -36,7 +37,7 @@ class AudioPlayer {
this.isPlaying = true;
PlaybackNotificationManager.update({
state: 'playing',
})
});

if (this.audioContext.state === 'suspended') {
await this.audioContext.resume();
Expand All @@ -53,11 +54,16 @@ class AudioPlayer {
this.sourceNode.onPositionChanged = (event) => {
this.currentElapsedTime = event.value;
if (this.onPositionChanged) {
this.onPositionChanged(this.currentElapsedTime / this.audioBuffer!.duration);
this.onPositionChanged(
this.currentElapsedTime / this.audioBuffer!.duration
);
}
};

this.sourceNode.start(this.audioContext.currentTime, this.currentElapsedTime);
this.sourceNode.start(
this.audioContext.currentTime,
this.currentElapsedTime
);
};

pause = async () => {
Expand All @@ -71,7 +77,7 @@ class AudioPlayer {
await this.audioContext.suspend();
PlaybackNotificationManager.update({
state: 'paused',
})
});

this.isPlaying = false;
};
Expand All @@ -94,19 +100,13 @@ class AudioPlayer {
}
};

loadBuffer = async (url: string) => {
const buffer = await fetch(url, {
loadBuffer = async (asset: string | number) => {
const buffer = await decodeAudioData(asset, 0, {
headers: {
'User-Agent':
'Mozilla/5.0 (Android; Mobile; rv:122.0) Gecko/122.0 Firefox/122.0',
},
})
.then((response) => response.arrayBuffer())
.then((arrayBuffer) => this.audioContext.decodeAudioData(arrayBuffer))
.catch((error) => {
console.error('Error decoding audio data source:', error);
return null;
});
});

if (buffer) {
this.audioBuffer = buffer;
Expand Down Expand Up @@ -141,7 +141,7 @@ class AudioPlayer {

getElapsedTime = (): number => {
return this.currentElapsedTime;
}
};
}

export default new AudioPlayer();
Binary file not shown.
1 change: 1 addition & 0 deletions apps/common-app/types.d.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
// Ambient module declaration so TypeScript accepts importing .mp3 assets
// (e.g. `import staticAsset from './voice-sample-landing.mp3'`).
// The `declare` modifier is required for top-level ambient declarations
// in a .d.ts file; a bare `module '*.mp3';` is rejected by the compiler.
declare module '*.mp3';
25 changes: 24 additions & 1 deletion apps/fabric-example/babel.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,29 @@ module.exports = function (api) {
api.cache(false);
return {
presets: ['module:@react-native/babel-preset'],
plugins: ['react-native-worklets/plugin'],
plugins: [
'react-native-worklets/plugin',
[
'module-resolver',
{
alias: {
'common-app': '../common-app',
},
extensions: [
'.js',
'.jsx',
'.ts',
'.tsx',
'.ios.js',
'.android.js',
'.json',
'.ios.ts',
'.android.ts',
'.ios.tsx',
'.android.tsx',
],
},
],
],
};
};
2 changes: 1 addition & 1 deletion apps/fabric-example/ios/Podfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -3287,7 +3287,7 @@ SPEC CHECKSUMS:
FBLazyVector: 309703e71d3f2f1ed7dc7889d58309c9d77a95a4
fmt: a40bb5bd0294ea969aaaba240a927bd33d878cdd
glog: 5683914934d5b6e4240e497e0f4a3b42d1854183
hermes-engine: f93b5009d8ccd9429fe2a772351980df8a22a413
hermes-engine: 42d6f09ee6ede2feb220e2fb772e8bebb42ca403
RCT-Folly: 846fda9475e61ec7bcbf8a3fe81edfcaeb090669
RCTDeprecation: a41bbdd9af30bf2e5715796b313e44ec43eefff1
RCTRequired: 7be34aabb0b77c3cefe644528df0fa0afad4e4d0
Expand Down
8 changes: 5 additions & 3 deletions apps/fabric-example/metro.config.js
Original file line number Diff line number Diff line change
@@ -1,16 +1,18 @@
const {getDefaultConfig, mergeConfig} = require('@react-native/metro-config');
const { getDefaultConfig, mergeConfig } = require('@react-native/metro-config');

const path = require('path');

const root = path.resolve(__dirname, '../..');
const monorepoRoot = path.resolve(__dirname, '../..');
const appsRoot = path.resolve(monorepoRoot, 'apps');

/**
* Metro configuration https://reactnative.dev/docs/metro
*
* @type {import('@react-native/metro-config').MetroConfig}
*/
const config = {
watchFolders: [root],
projectRoot: __dirname,
watchFolders: [monorepoRoot, appsRoot],
};

module.exports = mergeConfig(getDefaultConfig(__dirname), config);
28 changes: 10 additions & 18 deletions packages/audiodocs/docs/core/base-audio-context.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -256,25 +256,13 @@ Creates [`WorkletProcessingNode`](/docs/worklets/worklet-processing-node).

#### Returns `WorkletProcessingNode`.

:::caution
Supported file formats:
- flac
- mp3
- ogg
- opus
- wav
- aac
- m4a
- mp4

Last three formats are decoded with ffmpeg, [see for more info](/docs/other/ffmpeg-info).
:::

### `decodeAudioData`

Decodes audio data from either a file path or an ArrayBuffer. The optional `sampleRate` parameter lets you resample the decoded audio.
If not provided, the audio will be automatically resampled to match the audio context's `sampleRate`.

**For the list of supported formats visit [this page](/docs/utils/decoding).**

<table>
<thead>
<tr>
Expand All @@ -285,18 +273,22 @@ If not provided, the audio will be automatically resampled to match the audio co
</thead>
<tbody>
<tr>
<td rowspan="2" align="center"><code>input</code></td>
<td rowspan="3" align="center"><code>input</code></td>
<td align="center"><code>ArrayBuffer</code></td>
<td align="center">ArrayBuffer with audio data.</td>
</tr>
<tr>
<td align="center"><code>string</code></td>
<td align="center">Path to audio file located on the device.</td>
<td align="center">Path to remote or local audio file.</td>
</tr>
<tr>
<td align="center"><code>sampleRate</code><Optional /></td>
<td align="center"><code>number</code></td>
<td align="center">Target sample rate for the decoded audio.</td>
<td align="center">Asset module id. <MobileOnly/> </td>
</tr>
<tr>
<td align="center"><code>fetchOptions</code><Optional /></td>
<td align="center"><code>[RequestInit](https://github.com/facebook/react-native/blob/ac06f3bdc76a9fd7c65ab899e82bff5cad9b94b6/packages/react-native/src/types/globals.d.ts#L265)</code></td>
<td align="center">Additional fetch options (e.g. headers) applied when a URL is passed as the input.</td>
</tr>
</tbody>
</table>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
---
sidebar_label: RecordingNotificationManager
sidebar_position: 4
---

Expand Down
75 changes: 52 additions & 23 deletions packages/audiodocs/docs/utils/decoding.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,25 @@ import { Optional, MobileOnly } from '@site/src/components/Badges';
You can decode audio data independently, without creating an AudioContext, using the exported functions [`decodeAudioData`](/docs/utils/decoding#decodeaudiodata) and
[`decodePCMInBase64`](/docs/utils/decoding#decodepcminbase64).

:::warning
Decoding on the web has to be done via `AudioContext` only.
:::

If you already have an audio context, you can decode audio data directly using its [`decodeAudioData`](/docs/core/base-audio-context#decodeaudiodata) function;
the decoded audio will then be automatically resampled to match the context's `sampleRate`.

:::caution
Supported file formats:
- aac
- flac
- m4a
- mp3
- mp4
- ogg
- opus
- wav
- aac
- m4a
- mp4

The aac, m4a and mp4 formats are decoded with ffmpeg on mobile ([see this page](/docs/other/ffmpeg-info) for more info).
:::

### `decodeAudioData`
Expand All @@ -39,54 +45,77 @@ if not provided, the original sample rate from the file is used.
</thead>
<tbody>
<tr>
<td rowspan="2" align="center"><code>input</code></td>
<td rowspan="3" align="center"><code>input</code></td>
<td align="center"><code>ArrayBuffer</code></td>
<td align="center">ArrayBuffer with audio data.</td>
</tr>
<tr>
<td align="center"><code>string</code></td>
<td align="center">Path to audio file located on the device.</td>
<td align="center">Path to remote or local audio file.</td>
</tr>
<tr>
<td align="center"><code>number</code></td>
<td align="center">Asset module id. <MobileOnly/> </td>
</tr>
<tr>
<td align="center"><code>sampleRate</code><Optional /></td>
<td align="center"><code>number</code></td>
<td align="center">Target sample rate for the decoded audio.</td>
</tr>
<tr>
<td align="center"><code>fetchOptions</code><Optional /></td>
<td align="center"><code>[RequestInit](https://github.com/facebook/react-native/blob/ac06f3bdc76a9fd7c65ab899e82bff5cad9b94b6/packages/react-native/src/types/globals.d.ts#L265)</code></td>
<td align="center">Additional fetch options (e.g. headers) applied when a URL is passed as the input.</td>
</tr>
</tbody>
</table>

#### Returns `Promise<AudioBuffer>`.

:::caution
If you are passing a number (an asset module id) to the decode function, bear in mind that it internally uses the Image component provided
by React Native. By default, only the .mp3, .wav, .mp4, .m4a and .aac audio file formats are supported.
If you want to use other formats, refer to [this section](https://reactnative.dev/docs/images#static-non-image-resources) for more info.
:::

<details>
<summary>Example decoding with memory block</summary>
<summary>Example decoding remote URL</summary>
```tsx
import { decodeAudioData } from 'react-native-audio-api';

const url = ... // url to an audio

const buffer = await fetch(url)
.then((response) => response.arrayBuffer())
// resample decoded audio to 48000 Hz
.then((arrayBuffer) => decodeAudioData(arrayBuffer, 48000))
.catch((error) => {
console.error('Error decoding audio data source:', error);
return null;
});
const buffer = await decodeAudioData(url);
```
</details>

:::caution
Internally, decoding a local file uses the Image component to retrieve the asset URI, which does not work on the web platform.
There you can use the expo-asset library for this purpose, or retrieve an ArrayBuffer on your own and pass it to the decoding function.
:::
<details>
<summary>Example using expo-asset library</summary>
```tsx
import { Asset } from 'expo-asset';
import { AudioContext } from 'react-native-audio-api';

const uri = await Asset.fromModule(require('@/assets/music/example.mp3'))
.downloadAsync()
.then((asset) => {
if (!asset.localUri) {
console.error('Failed to load audio asset');
}
return asset.localUri;
})

const context = new AudioContext();
if (uri) {
const buffer = await fetch(uri)
.then((response) => response.arrayBuffer())
.then((arrayBuffer) => context.decodeAudioData(arrayBuffer));
console.log('Audio buffer loaded:', buffer);
}

const buffer = await Asset.fromModule(require('@/assets/music/example.mp3'))
.downloadAsync()
.then((asset) => {
if (!asset.localUri) {
throw new Error('Failed to load audio asset');
}
// sampleRate not provided, so file will be decoded in original sampleRate
return decodeAudioData(asset.localUri);
})
```
</details>

Expand Down
Loading