
Commit d680811

Merge branch 'develop' into V8
# Conflicts:
#   examples/ExpoMessaging/yarn.lock
#   package/expo-package/yarn.lock

2 parents: 4b91160 + e1f555f


11 files changed: +445 -36 lines


README.md

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@
 [![NPM](https://img.shields.io/npm/v/stream-chat-react-native.svg)](https://www.npmjs.com/package/stream-chat-react-native)
 [![Build Status](https://github.com/GetStream/stream-chat-react-native/actions/workflows/release.yml/badge.svg)](https://github.com/GetStream/stream-chat-react-native/actions)
 [![Component Reference](https://img.shields.io/badge/docs-component%20reference-blue.svg)](https://getstream.io/chat/docs/sdk/reactnative)
-![JS Bundle Size](https://img.shields.io/badge/js_bundle_size-459%20KB-blue)
+![JS Bundle Size](https://img.shields.io/badge/js_bundle_size-450%20KB-blue)

 <img align="right" src="https://getstream.imgix.net/images/ios-chat-tutorial/[email protected]?auto=format,enhance" width="50%" />

examples/ExpoMessaging/app.json

Lines changed: 5 additions & 5 deletions
@@ -51,16 +51,16 @@
       }
     ],
     [
-      "expo-av",
+      "expo-video",
       {
-        "microphonePermission": "$(PRODUCT_NAME) would like to use your microphone for voice recording."
+        "supportsBackgroundPlayback": true,
+        "supportsPictureInPicture": true
       }
     ],
     [
-      "expo-video",
+      "expo-audio",
       {
-        "supportsBackgroundPlayback": true,
-        "supportsPictureInPicture": true
+        "microphonePermission": "$(PRODUCT_NAME) would like to use your microphone for voice recording."
       }
     ]
   ]

examples/ExpoMessaging/components/ChatWrapper.tsx

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@ const streami18n = new Streami18n({
 });

 SqliteClient.logger = (level, message, extraData) => {
-  console.log(level, `SqliteClient: ${message}`, extraData);
+  // console.log(level, `SqliteClient: ${message}`, extraData);
 };

 export const ChatWrapper = ({ children }: PropsWithChildren<{}>) => {
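
A small alternative sketch (not part of this diff), assuming the same SqliteClient import used in ChatWrapper.tsx: instead of commenting the statement out, the log could be gated behind React Native's global __DEV__ flag so it only runs in development builds.

SqliteClient.logger = (level, message, extraData) => {
  // Log only in development builds; release builds stay silent.
  if (__DEV__) {
    console.log(level, `SqliteClient: ${message}`, extraData);
  }
};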

examples/ExpoMessaging/package.json

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@
     "@react-native-community/netinfo": "11.4.1",
     "@react-navigation/elements": "^1.3.31",
     "expo": "^53.0.12",
-    "expo-av": "~15.1.6",
+    "expo-audio": "~0.4.6",
     "expo-clipboard": "~7.1.4",
     "expo-constants": "~17.1.6",
     "expo-document-picker": "~13.1.5",

examples/ExpoMessaging/yarn.lock

Lines changed: 4 additions & 4 deletions
@@ -3633,10 +3633,10 @@ expo-asset@~11.1.5:
     "@expo/image-utils" "^0.7.4"
     expo-constants "~17.1.5"

-expo-av@~15.1.6:
-  version "15.1.6"
-  resolved "https://registry.yarnpkg.com/expo-av/-/expo-av-15.1.6.tgz#f1c4a404672500feb0274144a64bb3a956e85bdd"
-  integrity sha512-5ZbeXdCmdckZHwtEV+8tRZqLlUWR96gkkUIxpyZAEvK0L+aI/BnyhDCpjnSKWwZo4ZA6lx8/su9kyFNV/mQ/sQ==
+expo-audio@~0.4.6:
+  version "0.4.6"
+  resolved "https://registry.yarnpkg.com/expo-audio/-/expo-audio-0.4.6.tgz#39def71b91f2cb5c4d21a51aef6ffabcc4421ad4"
+  integrity sha512-/pgz0AnQHnyJWkPfTp/3gBDib6FZYnscpktFZgmPeTeCK8KkPV4+eV2oEDDbSe2ngUrqDA0WVO4MX2Mfeec9ZA==

 expo-clipboard@~7.1.4:
   version "7.1.4"

package/expo-package/package.json

Lines changed: 7 additions & 2 deletions
@@ -24,12 +24,16 @@
     "expo-image-picker": "*",
     "expo-media-library": "*",
     "expo-sharing": "*",
-    "expo-video": "*"
+    "expo-video": "*",
+    "expo-audio": "*"
   },
   "peerDependenciesMeta": {
     "expo-av": {
       "optional": true
     },
+    "expo-audio": {
+      "optional": true
+    },
     "expo-video": {
       "optional": true
     },
@@ -57,7 +61,8 @@
   },
   "devDependencies": {
     "expo": "^53.0.12",
-    "expo-image-manipulator": "^12.0.5"
+    "expo-image-manipulator": "^12.0.5",
+    "expo-audio": "~0.4.6"
   },
   "scripts": {
     "prepack": " cp ../../README.md .",

package/expo-package/src/optionalDependencies/Audio.ts

Lines changed: 241 additions & 2 deletions
@@ -1,22 +1,36 @@
+import { Platform } from 'react-native';
+
 import {
   AndroidAudioEncoder,
   AndroidOutputFormat,
   ExpoAudioRecordingConfiguration as AudioRecordingConfiguration,
   IOSAudioQuality,
   IOSOutputFormat,
   ExpoRecordingOptions as RecordingOptions,
+  RecordingStatus,
 } from 'stream-chat-react-native-core';

 import { AudioComponent, RecordingObject } from './AudioVideo';

+let ExpoAudioComponent;
+let ExpoRecordingComponent;
+
+try {
+  const { AudioModule } = require('expo-audio');
+  ExpoAudioComponent = AudioModule;
+  ExpoRecordingComponent = AudioModule.AudioRecorder;
+} catch (e) {
+  // do nothing
+}
+
 const sleep = (ms: number) =>
   new Promise<void>((resolve) => {
     setTimeout(() => {
       resolve();
     }, ms);
   });

-class _Audio {
+class _AudioExpoAV {
   recording: typeof RecordingObject | null = null;
   audioRecordingConfiguration: AudioRecordingConfiguration = {
     mode: {
@@ -105,8 +119,233 @@ class _Audio {
   };
 }

+class _AudioExpoAudio {
+  recording: typeof RecordingObject | null = null;
+  audioRecordingConfiguration: AudioRecordingConfiguration = {
+    mode: {
+      allowsRecordingIOS: true,
+      playsInSilentModeIOS: true,
+    },
+    options: {
+      android: {
+        audioEncoder: AndroidAudioEncoder.AAC,
+        extension: '.aac',
+        outputFormat: AndroidOutputFormat.AAC_ADTS,
+      },
+      ios: {
+        audioQuality: IOSAudioQuality.HIGH,
+        bitRate: 128000,
+        extension: '.aac',
+        numberOfChannels: 2,
+        outputFormat: IOSOutputFormat.MPEG4AAC,
+        sampleRate: 44100,
+      },
+      isMeteringEnabled: true,
+      web: {},
+    },
+  };
+
+  startRecording = async (recordingOptions: RecordingOptions, onRecordingStatusUpdate) => {
+    try {
+      const permissions = await ExpoAudioComponent.getRecordingPermissionsAsync();
+      const permissionsStatus = permissions.status;
+      let permissionsGranted = permissions.granted;
+
+      // If permissions have not been determined yet, ask the user for permissions.
+      if (permissionsStatus === 'undetermined') {
+        const newPermissions = await ExpoAudioComponent.requestRecordingPermissionsAsync();
+        permissionsGranted = newPermissions.granted;
+      }
+
+      // If they are explicitly denied after this, exit early by throwing an error
+      // that will be caught in the catch block below (as a single source of not
+      // starting the player). The player would error itself anyway if we did not do
+      // this, but there's no reason to run the asynchronous calls when we know
+      // immediately that the player will not be run.
+      if (!permissionsGranted) {
+        throw new Error('Missing audio recording permission.');
+      }
+      await ExpoAudioComponent.setAudioModeAsync(
+        expoAvToExpoAudioModeAdapter(this.audioRecordingConfiguration.mode),
+      );
+      const options = {
+        ...recordingOptions,
+        ...this.audioRecordingConfiguration.options,
+      };
+
+      this.recording = new ExpoAudioRecordingAdapter(options);
+      await this.recording.createAsync(
+        Platform.OS === 'android' ? 100 : 60,
+        onRecordingStatusUpdate,
+      );
+      return { accessGranted: true, recording: this.recording };
+    } catch (error) {
+      console.error('Failed to start recording', error);
+      this.recording = null;
+      return { accessGranted: false, recording: null };
+    }
+  };
+  stopRecording = async () => {
+    try {
+      if (this.recording) {
+        await this.recording.stopAndUnloadAsync();
+      }
+      this.recording = null;
+    } catch (error) {
+      console.log('Error stopping recording', error);
+    }
+  };
+}
+
+class ExpoAudioRecordingAdapter {
+  private recording;
+  private recordingStateInterval;
+  private uri;
+  private options;
+
+  constructor(options: RecordingOptions) {
+    // Currently, expo-audio has a bug where isMeteringEnabled is not respected
+    // whenever we pass it to the Recording class constructor - but rather it is
+    // only respected whenever you pass it to prepareToRecordAsync. That in turn
+    // however, means that all other audio related configuration will be overwritten
+    // and forgotten. So, we snapshot the configuration whenever we create an instance
+    // of a recorder and pass it to both places. Furthermore, the type of the options
+    // in prepareToRecordAsync is wrong - it's supposed to be the flattened config;
+    // otherwise none of the quality properties get respected either (only the top level
+    // ones).
+    this.options = flattenExpoAudioRecordingOptions(options);
+    this.recording = new ExpoRecordingComponent(this.options);
+    this.uri = null;
+  }
+
+  createAsync = async (
+    progressUpdateInterval: number = 500,
+    onRecordingStatusUpdate: (status: RecordingStatus) => void,
+  ) => {
+    this.recordingStateInterval = setInterval(() => {
+      const status = this.recording.getStatus();
+      onRecordingStatusUpdate(status);
+    }, progressUpdateInterval);
+    this.uri = null;
+    await this.recording.prepareToRecordAsync(this.options);
+    this.recording.record();
+  };
+
+  stopAndUnloadAsync = async () => {
+    clearInterval(this.recordingStateInterval);
+    await this.recording.stop();
+    this.uri = this.recording.uri;
+    this.recording.release();
+  };
+
+  getURI = () => this.uri;
+}
+
 export const overrideAudioRecordingConfiguration = (
   audioRecordingConfiguration: AudioRecordingConfiguration,
 ) => audioRecordingConfiguration;

-export const Audio = AudioComponent ? new _Audio() : null;
+const flattenExpoAudioRecordingOptions = (
+  options: RecordingOptions & {
+    bitRate?: number;
+    extension?: string;
+    numberOfChannels?: number;
+    sampleRate?: number;
+  },
+) => {
+  let commonOptions = {
+    bitRate: options.bitRate,
+    extension: options.extension,
+    isMeteringEnabled: options.isMeteringEnabled ?? false,
+    numberOfChannels: options.numberOfChannels,
+    sampleRate: options.sampleRate,
+  };
+
+  if (Platform.OS === 'ios') {
+    commonOptions = {
+      ...commonOptions,
+      ...options.ios,
+    };
+  } else if (Platform.OS === 'android') {
+    const audioEncoder = options.android.audioEncoder;
+    const audioEncoderConfig = audioEncoder
+      ? { audioEncoder: expoAvToExpoAudioAndroidEncoderAdapter(audioEncoder) }
+      : {};
+    const outputFormat = options.android.outputFormat;
+    const outputFormatConfig = outputFormat
+      ? { outputFormat: expoAvToExpoAudioAndroidOutputAdapter(outputFormat) }
+      : {};
+    commonOptions = {
+      ...commonOptions,
+      ...options.android,
+      ...audioEncoderConfig,
+      ...outputFormatConfig,
+    };
+  }
+  return commonOptions;
+};
+
+const expoAvToExpoAudioModeAdapter = (mode: AudioRecordingConfiguration['mode']) => {
+  const {
+    allowsRecordingIOS,
+    interruptionModeAndroid,
+    interruptionModeIOS,
+    playsInSilentModeIOS,
+    playThroughEarpieceAndroid,
+    staysActiveInBackground,
+  } = mode;
+
+  return {
+    allowsRecording: allowsRecordingIOS,
+    interruptionMode: interruptionModeIOS,
+    interruptionModeAndroid,
+    playsInSilentMode: playsInSilentModeIOS,
+    shouldPlayInBackground: staysActiveInBackground,
+    shouldRouteThroughEarpiece: playThroughEarpieceAndroid,
+  };
+};
+
+const expoAvToExpoAudioAndroidEncoderAdapter = (
+  audioEncoder: AudioRecordingConfiguration['options']['android']['audioEncoder'],
+) => {
+  const encoderMap = {
+    0: 'default',
+    1: 'amr_nb',
+    2: 'amr_wb',
+    3: 'aac',
+    4: 'he_aac',
+    5: 'aac_eld',
+  };
+
+  return Object.keys(encoderMap).includes(audioEncoder.toString())
+    ? encoderMap[audioEncoder]
+    : 'default';
+};
+
+const expoAvToExpoAudioAndroidOutputAdapter = (
+  outputFormat: AudioRecordingConfiguration['options']['android']['outputFormat'],
+) => {
+  const outputFormatMap = {
+    0: 'default',
+    1: '3gp',
+    2: 'mpeg4',
+    3: 'amrnb',
+    4: 'amrwb',
+    5: 'default',
+    6: 'aac_adts',
+    7: 'default',
+    8: 'mpeg2ts',
+    9: 'webm',
+  };
+
+  return Object.keys(outputFormatMap).includes(outputFormat.toString())
+    ? outputFormatMap[outputFormat]
+    : 'default';
+};
+
+// Always try to prioritize expo-audio if it's there.
+export const Audio = ExpoRecordingComponent
+  ? new _AudioExpoAudio()
+  : AudioComponent
+    ? new _AudioExpoAV()
+    : null;
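
A minimal usage sketch (not part of this diff) of the exported Audio wrapper, assuming it is imported via this internal module path and that either expo-audio or expo-av is installed. The call shapes mirror startRecording/stopRecording above; recordVoiceMessage and the empty options object are purely illustrative.

import { ExpoRecordingOptions, RecordingStatus } from 'stream-chat-react-native-core';

import { Audio } from './optionalDependencies/Audio';

const recordVoiceMessage = async () => {
  // Audio is null when neither expo-audio nor expo-av is installed.
  if (!Audio) {
    return;
  }

  const onRecordingStatusUpdate = (status: RecordingStatus) => {
    console.log('recording status:', status);
  };

  // startRecording resolves with { accessGranted, recording }; access is denied
  // when the microphone permission is missing or refused.
  const { accessGranted, recording } = await Audio.startRecording(
    {} as ExpoRecordingOptions, // real options come from the SDK's recording configuration
    onRecordingStatusUpdate,
  );
  if (!accessGranted || !recording) {
    return;
  }

  // ...record for a while, then stop and read back the file URI.
  await Audio.stopRecording();
  console.log('recorded file at:', recording.getURI());
};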
