Commit 40bd3b9

MRN-864
1 parent f59d7be commit 40bd3b9

File tree: 1 file changed, +57 / -44 lines changed

src/components/ChatInput.js

Lines changed: 57 additions & 44 deletions
@@ -1,5 +1,5 @@
 import { useFocusEffect } from '@react-navigation/native';
-import React, { createRef } from 'react';
+import React, { createRef, useRef } from 'react';
 import { Animated, Keyboard, PanResponder, Platform, StyleSheet, View } from 'react-native';
 import AudioRecorderPlayer, {
   AVEncoderAudioQualityIOSType,
@@ -82,6 +82,7 @@ export const cancelAudioRecord = () => {
 function ChatInput({ chatUser }) {
   userId = getUserIdFromJid(chatUser);
   const stringSet = getStringSet();
+  const recordTimeoutRef = useRef(null);
   const themeColorPalatte = useThemeColorPalatte();
   const dispatch = useDispatch();
   const appState = useAppState();
@@ -244,64 +245,76 @@

   const onStartRecord = async () => {
     try {
-      const roomId = getCurrentCallRoomId();
-      if (roomId) {
-        showToast(stringSet.TOAST_MESSAGES.AUDIO_CANNOT_BE_RECORDED_WHILE_IN_CALL);
-        return;
-      }
-      if (isAudioClicked) {
-        return;
-      }
-      isAudioClicked = true;
-      pauseAudio();
-      audioRecordClick += 1;
-      const res = await audioRecordPermission();
-      if (res === 'granted') {
-        fileName[userId] = `MFRN_${Date.now() + audioRecordClick}`;
-        const path = Platform.select({
-          ios: `file://${RNFS.DocumentDirectoryPath}/${fileName[userId]}.m4a`,
-          android: `${RNFS.CachesDirectoryPath}/${fileName[userId]}.m4a`,
+      clearTimeout(recordTimeoutRef.current); // Clear previous timeout if exists
+
+      recordTimeoutRef.current = setTimeout(async () => {
+        if (getCurrentCallRoomId()) {
+          return showToast(stringSet.TOAST_MESSAGES.AUDIO_CANNOT_BE_RECORDED_WHILE_IN_CALL);
+        }
+        if (isAudioClicked) {
+          return;
+        }
+
+        isAudioClicked = true;
+        pauseAudio();
+        audioRecordClick += 1;
+
+        if ((await audioRecordPermission()) !== 'granted') {
+          return;
+        }
+
+        const filePath = Platform.select({
+          ios: `file://${RNFS.DocumentDirectoryPath}/MFRN_${Date.now() + audioRecordClick}.m4a`,
+          android: `${RNFS.CachesDirectoryPath}/MFRN_${Date.now() + audioRecordClick}.m4a`,
         });

-        const audioSet = {
+        await audioRecorderPlayer.startRecorder(filePath, {
           AudioEncoderAndroid: AudioEncoderAndroidType.AAC,
           AudioSourceAndroid: AudioSourceAndroidType.MIC,
           AVModeIOS: AVModeIOSOption.measurement,
           AVEncoderAudioQualityKeyIOS: AVEncoderAudioQualityIOSType.high,
           AVNumberOfChannelsKeyIOS: 2,
           AVFormatIDKeyIOS: AVEncodingOption.aac,
-        };
-        setTimeout(async () => {
-          await audioRecorderPlayer.startRecorder(path, audioSet);
-          dispatch(setAudioRecording({ userId, message: audioRecord.RECORDING }));
-          audioRecorderPlayer.addRecordBackListener(onRecordBackListener);
-        }, 100);
-      }
+        });
+
+        dispatch(setAudioRecording({ userId, message: audioRecord.RECORDING }));
+        audioRecorderPlayer.addRecordBackListener(onRecordBackListener);
+      }, 500);
     } catch (error) {
       console.error('Failed to start recording:', error);
     }
   };

   const onStopRecord = async () => {
     try {
-      if (!getAudioRecording(userId)) {
-        return;
-      }
-      const result = await audioRecorderPlayer.stopRecorder();
-      isAudioClicked = false;
-      panRef.setValue({ x: 0.1, y: 0 });
-      removeListener();
-      dispatch(setAudioRecording({ userId, message: audioRecord.STOPPED }));
-      if (uriPattern.test(result)) {
-        fileInfo[userId] = await RNFS.stat(result);
-        fileInfo[userId].fileCopyUri = result;
-        fileInfo[userId].duration = getAudioRecordTime(userId);
-        fileInfo[userId].name = fileName[userId];
-        fileInfo[userId] = mediaObjContructor('AUDIO_RECORD', fileInfo[userId]);
-        fileInfo[userId].audioType = 'recording';
-        fileInfo[userId].type = 'audio/m4a';
-        console.log('fileInfo[userId] ==>', JSON.stringify(fileInfo[userId], null, 2));
-      }
+      clearTimeout(recordTimeoutRef.current); // Clear previous timeout if exists
+
+      recordTimeoutRef.current = setTimeout(async () => {
+        if (!getAudioRecording(userId)) {
+          return;
+        }
+
+        const result = await audioRecorderPlayer.stopRecorder();
+        isAudioClicked = false;
+        panRef.setValue({ x: 0.1, y: 0 });
+        removeListener();
+        dispatch(setAudioRecording({ userId, message: audioRecord.STOPPED }));
+
+        if (uriPattern.test(result)) {
+          const fileStats = await RNFS.stat(result);
+          fileInfo[userId] = {
+            ...mediaObjContructor('AUDIO_RECORD', {
+              ...fileStats,
+              fileCopyUri: result,
+              duration: getAudioRecordTime(userId),
+              name: fileName[userId],
+              audioType: 'recording',
+              type: 'audio/m4a',
            }),
+          };
+          console.log('fileInfo[userId] ==>', JSON.stringify(fileInfo[userId], null, 2));
+        }
+      }, 500); // 500ms debounce
     } catch (error) {
       console.log('Failed to stop audio recording:', error);
     }
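Note on the pattern: the change wraps the bodies of onStartRecord and onStopRecord in a 500 ms setTimeout stored in a shared recordTimeoutRef, clearing any pending timeout first, so rapid start/stop taps collapse into the last requested action. Below is a minimal standalone sketch of that shared-ref debounce, assuming React; the hook and wrapper names are illustrative and not part of this codebase.

import { useRef, useCallback } from 'react';

// Hypothetical hook: both wrappers share one timeout ref, so invoking either
// one cancels whatever the other still has pending -- the same shape as
// recordTimeoutRef in the diff above.
function useSharedDebounce(onStart, onStop, delayMs = 500) {
  const timeoutRef = useRef(null);

  const schedule = useCallback(
    fn => {
      clearTimeout(timeoutRef.current); // drop any pending start/stop
      timeoutRef.current = setTimeout(fn, delayMs);
    },
    [delayMs],
  );

  return {
    start: useCallback(() => schedule(onStart), [schedule, onStart]),
    stop: useCallback(() => schedule(onStop), [schedule, onStop]),
  };
}

// Usage (hypothetical): const { start, stop } = useSharedDebounce(onStartRecord, onStopRecord, 500);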
