diff --git a/android/src/main/java/com/audiowaveform/AudioRecorder.kt b/android/src/main/java/com/audiowaveform/AudioRecorder.kt
index 6a84919..8770dab 100644
--- a/android/src/main/java/com/audiowaveform/AudioRecorder.kt
+++ b/android/src/main/java/com/audiowaveform/AudioRecorder.kt
@@ -4,6 +4,7 @@ import android.Manifest
 import android.app.Activity
 import android.content.pm.PackageManager
 import android.media.MediaMetadataRetriever
+import android.media.MediaPlayer
 import android.media.MediaRecorder
 import android.os.Build
 import android.util.Log
@@ -54,10 +55,10 @@ class AudioRecorder {
     }

     fun getDecibel(recorder: MediaRecorder?): Double? {
-        if (useLegacyNormalization) {
-            if (recorder != null) {
+        if (recorder != null) {
+            if (useLegacyNormalization) {
                 try {
-                    val db = 20 * log10((recorder?.maxAmplitude?.toDouble() ?: (0.0 / 32768.0)))
+                    // Normalize against the 16-bit PCM peak before converting to decibels.
+                    val db = 20 * log10(recorder.maxAmplitude.toDouble() / 32768.0)
                     if (db == Double.NEGATIVE_INFINITY) {
                         Log.e(Constants.LOG_TAG, "Microphone might be turned off")
                     } else {
@@ -68,21 +69,16 @@ class AudioRecorder {
                     e.printStackTrace()
                     return null
                 }
-            }
-            else {
-                return null
-            }
-        } else {
-            if (recorder != null) {
+            } else {
                 try {
-                    return recorder?.maxAmplitude?.toDouble() ?: 0.0
+                    return recorder.maxAmplitude.toDouble()
                 } catch (e: IllegalStateException) {
                     e.printStackTrace()
                     return null
                 }
-            } else {
-                return null
             }
+        } else {
+            return null
         }
     }
@@ -114,7 +110,10 @@ class AudioRecorder {
             promise.resolve(true)
         } catch (e: IllegalArgumentException) {
             Log.e(Constants.LOG_TAG, "Invalid MediaRecorder configuration", e)
-            promise.reject("CONFIGURATION_ERROR", "Invalid MediaRecorder configuration: ${e.message}")
+            promise.reject(
+                "CONFIGURATION_ERROR",
+                "Invalid MediaRecorder configuration: ${e.message}"
+            )
         } catch (e: IOException) {
             Log.e(Constants.LOG_TAG, "Failed to stop initialize recorder")
         }
@@ -130,34 +129,40 @@ class AudioRecorder {
                 release()
             }
             isRecording = false
-            val tempArrayForCommunication : MutableList<String> = mutableListOf()
             val duration = getDuration(path)
-            tempArrayForCommunication.add(path)
-            tempArrayForCommunication.add(duration.toString())
-            promise.resolve(Arguments.fromList(tempArrayForCommunication))
+            val response = Arguments.createArray().apply {
+                pushString(path)
+                pushString(duration.toString())
+            }
+
+            promise.resolve(response)
+
             } else {
                 promise.reject("Error", "Recorder is not recording or has already been stopped")
             }
         } catch (e: IllegalStateException) {
-            Log.e(Constants.LOG_TAG, "Failed to stop recording",e)
+            Log.e(Constants.LOG_TAG, "Failed to stop recording", e)
         } catch (e: RuntimeException) {
            Log.e(Constants.LOG_TAG, "Runtime exception when stopping recording", e)
            promise.reject("Error", "Runtime exception: ${e.message}")
        }
    }

-    private fun getDuration(path: String): String {
-        val mediaMetadataRetriever = MediaMetadataRetriever()
-        try {
-            mediaMetadataRetriever.setDataSource(path)
-            val duration = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)
-            return duration ?: "-1"
+    /**
+     * Helper function to get the audio duration of the recorded file, in milliseconds.
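+     * Returns 0 when the file cannot be opened or prepared.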
+     */
+    private fun getDuration(filePath: String): Int {
+        val mediaPlayer = MediaPlayer()
+        return try {
+            mediaPlayer.setDataSource(filePath)
+            mediaPlayer.prepare()
+            mediaPlayer.duration
         } catch (e: Exception) {
-            Log.e(Constants.LOG_TAG, "Failed to get recording duration")
+            Log.e(Constants.LOG_TAG, "Failed to get audio duration", e)
+            0
         } finally {
-            mediaMetadataRetriever.release()
+            // Release the MediaPlayer whether preparation succeeded or threw, so it never leaks.
+            mediaPlayer.release()
         }
-        return "-1"
     }

     fun startRecorder(recorder: MediaRecorder?, useLegacy: Boolean, promise: Promise) {
@@ -210,6 +215,7 @@ class AudioRecorder {
                     MediaRecorder.AudioEncoder.AAC
                 }
             }
+            Constants.vorbis -> MediaRecorder.AudioEncoder.VORBIS
             else -> MediaRecorder.AudioEncoder.AAC
         }
@@ -227,6 +233,7 @@ class AudioRecorder {
                     MediaRecorder.OutputFormat.MPEG_4
                 }
             }
+            Constants.amr_wb -> MediaRecorder.OutputFormat.AMR_WB
             Constants.amr_nb -> MediaRecorder.OutputFormat.AMR_NB
             Constants.webm -> MediaRecorder.OutputFormat.WEBM
@@ -234,10 +241,14 @@ class AudioRecorder {
             if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                 MediaRecorder.OutputFormat.MPEG_2_TS
             } else {
-                Log.e(Constants.LOG_TAG, "Minimum android Q is required, Setting MPEG_4 output format.")
+                Log.e(
+                    Constants.LOG_TAG,
+                    "Minimum Android O is required for MPEG_2_TS; setting MPEG_4 output format."
+                )
                 MediaRecorder.OutputFormat.MPEG_4
             }
         }
+        Constants.aac_adts -> MediaRecorder.OutputFormat.AAC_ADTS
         else -> MediaRecorder.OutputFormat.MPEG_4
     }
diff --git a/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt b/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt
index 25b9376..9e6bd9f 100644
--- a/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt
+++ b/android/src/main/java/com/audiowaveform/AudioWaveformModule.kt
@@ -21,7 +21,7 @@ import java.util.Collections
 import java.util.Date
 import java.util.Locale

-class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJavaModule(context) {
+class AudioWaveformModule(context: ReactApplicationContext) : ReactContextBaseJavaModule(context) {
     private var extractors = mutableMapOf<String, WaveformExtractor?>()
     private var audioPlayers = mutableMapOf<String, AudioPlayer?>()
     private var audioRecorder: AudioRecorder = AudioRecorder()
@@ -73,7 +73,27 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
     }

     @ReactMethod
-    fun getDecibel(): Double? {
+    fun getDecibel(promise: Promise) {
+        // Check if recorder is initialized and recording
+        if (recorder == null || path == null) {
+            promise.reject("GET_DECIBEL", "No audio recording is running")
+            return
+        }
+
+        try {
+            // Get the decibel level
+            val decibel = getDecibelLevel()
+
+            // Resolve the promise with the decibel value
+            promise.resolve(decibel)
+        } catch (e: Exception) {
+            // Handle any exceptions and reject the promise
+            Log.e(Constants.LOG_TAG, "Failed to get decibel level", e)
+            promise.reject("GET_DECIBEL_ERROR", "Failed to retrieve decibel level: ${e.message}")
+        }
+    }
+
+    private fun getDecibelLevel(): Double?
{ return audioRecorder.getDecibel(recorder) } @@ -88,37 +108,31 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav @RequiresApi(Build.VERSION_CODES.N) @ReactMethod - fun pauseRecording(promise: Promise){ + fun pauseRecording(promise: Promise) { audioRecorder.pauseRecording(recorder, promise) stopEmittingRecorderValue() } @RequiresApi(Build.VERSION_CODES.N) @ReactMethod - fun resumeRecording(promise: Promise){ + fun resumeRecording(promise: Promise) { audioRecorder.resumeRecording(recorder, promise) startEmittingRecorderValue() } @ReactMethod fun stopRecording(promise: Promise) { - if (audioRecorder == null || recorder == null || path == null) { - promise.reject("STOP_RECORDING_ERROR", "Recording resources not properly initialized") + if (recorder == null || path == null) { + promise.reject("STOP_RECORDING_ERROR", "No audio recording is running") return } try { - val currentTime = System.currentTimeMillis() - if (currentTime - startTime < 500) { - promise.reject("SHORT_RECORDING", "Recording is too short") - return - } - stopEmittingRecorderValue() audioRecorder.stopRecording(recorder, path!!, promise) recorder = null path = null - } catch (e: Exception) { + } catch (e: Exception) { Log.e(Constants.LOG_TAG, "Failed to stop recording", e) promise.reject("Error", "Failed to stop recording: ${e.message}") } @@ -129,8 +143,12 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav obj: ReadableMap, promise: Promise ) { - if(audioPlayers.filter { it.value?.isHoldingAudioTrack() == true }.count() >= MAX_NUMBER_OF_AUDIO_PLAYER) { - promise.reject(Constants.LOG_TAG, "Too many players have been initialized. Please stop some players before continuing") + if (audioPlayers.filter { it.value?.isHoldingAudioTrack() == true } + .count() >= MAX_NUMBER_OF_AUDIO_PLAYER) { + promise.reject( + Constants.LOG_TAG, + "Too many players have been initialized. 
Please stop some players before continuing"
+            )
             return
         }
@@ -164,7 +182,7 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         val speed = obj.getDouble(Constants.speed)

         val player = getPlayerOrReject(obj, promise, "startPlayer Error");
-        player?.start(finishMode ?: 2, speed.toFloat(),promise)
+        player?.start(finishMode ?: 2, speed.toFloat(), promise)
     }

     @ReactMethod
@@ -200,7 +218,7 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
                 )
                 promise.resolve(false)
             }
-        } catch(e: Exception) {
+        } catch (e: Exception) {
             promise.reject("seekTo Error", e.toString())
         }
     }
@@ -227,7 +245,7 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         val key = obj.getString(Constants.playerKey)
         val path = obj.getString(Constants.path)
         val noOfSamples = obj.getInt(Constants.noOfSamples)
-        if(key != null) {
+        if (key != null) {
             createOrUpdateExtractor(key, noOfSamples, path, promise)
         } else {
             Log.e(Constants.LOG_TAG, "Can not get waveform data Player key is null")
@@ -237,8 +255,8 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
     @ReactMethod
     fun stopAllPlayers(promise: Promise) {
         try {
-            audioPlayers.values.forEach{
-                player -> player?.stop()
+            audioPlayers.values.forEach { player ->
+                player?.stop()
             }
             audioPlayers.clear()
             promise.resolve(true)
@@ -250,8 +268,8 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
     @ReactMethod
     fun stopAllWaveFormExtractors(promise: Promise) {
         try {
-            extractors.values.forEach{
-                extractor -> extractor?.forceStop()
+            extractors.values.forEach { extractor ->
+                extractor?.forceStop()
             }
             extractors.clear()
             promise.resolve(true)
@@ -277,7 +295,7 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
             } else {
                 promise.reject("setPlaybackSpeed Error", "Player key can't be null")
             }
-        } catch(e: Exception) {
+        } catch (e: Exception) {
             promise.reject("setPlaybackSpeed Error", e.toString())
         }
     }
@@ -300,7 +318,7 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         promise: Promise,
     ) {
         if (path == null) {
-            promise.reject("createOrUpdateExtractor Error" , "No Path Provided")
+            promise.reject("createOrUpdateExtractor Error", "No Path Provided")
             return
         }
         extractors[playerKey] = WaveformExtractor(
@@ -310,20 +328,27 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
             key = playerKey,
             extractorCallBack = object : ExtractorCallBack {
                 override fun onProgress(value: Float) {
-                    if (value == 1.0F) { 
+                    if (value == 1.0F) {
                         extractors[playerKey]?.sampleData?.let { data ->
                             val normalizedData = normalizeWaveformData(data, 0.12f)
-                            val tempArrayForCommunication: MutableList<MutableList<Float>> = mutableListOf(normalizedData)
+                            val tempArrayForCommunication: MutableList<MutableList<Float>> =
+                                mutableListOf(normalizedData)
                             promise.resolve(Arguments.fromList(tempArrayForCommunication))
                         }
                     }
                 }
+
                 override fun onReject(error: String?, message: String?) {
-                    promise.reject(error ?: "Error", message ?: "An error is thrown while decoding the audio file")
+                    promise.reject(
+                        error ?: "Error",
+                        message ?: "An error was thrown while decoding the audio file"
+                    )
                 }
+
                 override fun onResolve(value: MutableList<MutableList<Float>>) {
                     promise.resolve(Arguments.fromList(value))
                 }
+
                 override fun onForceStop() {
                     promise.resolve(Arguments.fromList(mutableListOf(emptyList())))
                 }
@@ -332,11 +357,16 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         extractors[playerKey]?.startDecode();
     }

-    private fun normalizeWaveformData(data: MutableList<Float>, scale: Float = 0.25f, threshold: Float = 0.01f): MutableList<Float> {
+    private fun normalizeWaveformData(
+        data: MutableList<Float>,
+        scale: Float = 0.25f,
+        threshold: Float = 0.01f
+    ): MutableList<Float> {
         val filteredData = data.filter { kotlin.math.abs(it) >= threshold }
         val maxAmplitude = filteredData.maxOrNull() ?: 1.0f
         return if (maxAmplitude > 0) {
-            data.map { if (kotlin.math.abs(it) < threshold) 0.0f else (it / maxAmplitude) * scale }.toMutableList()
+            data.map { if (kotlin.math.abs(it) < threshold) 0.0f else (it / maxAmplitude) * scale }
+                .toMutableList()
         } else {
             data
         }
@@ -363,12 +393,12 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         var sampleRateVal = sampleRate.toInt();
         var bitRateVal = bitRate.toInt();

-        if(obj != null) {
-            if(obj.hasKey(Constants.bitRate)){
-                bitRateVal = obj.getInt(Constants.bitRate);
+        if (obj != null) {
+            if (obj.hasKey(Constants.bitRate)) {
+                bitRateVal = obj.getInt(Constants.bitRate);
             }

-            if(obj.hasKey(Constants.sampleRate)){
+            if (obj.hasKey(Constants.sampleRate)) {
                 sampleRateVal = obj.getInt(Constants.sampleRate);
             }
         }
@@ -413,17 +443,18 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
     private val emitLiveRecordValue = object : Runnable {
         override fun run() {
-            val currentDecibel = getDecibel()
+            val currentDecibel = getDecibelLevel()
             val args: WritableMap = Arguments.createMap()
             if (currentDecibel == Double.NEGATIVE_INFINITY) {
                 args.putDouble(Constants.currentDecibel, 0.0)
             } else {
                 if (currentDecibel != null) {
-                    args.putDouble(Constants.currentDecibel, currentDecibel/1000)
+                    args.putDouble(Constants.currentDecibel, currentDecibel / 1000)
                 }
             }
             handler.postDelayed(this, UpdateFrequency.Low.value)
-            reactApplicationContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)?.emit(Constants.onCurrentRecordingWaveformData, args)
+            reactApplicationContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter::class.java)
+                ?.emit(Constants.onCurrentRecordingWaveformData, args)
         }
     }
@@ -435,7 +466,11 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         handler.removeCallbacks(emitLiveRecordValue)
     }

-    private fun getPlayerOrReject(arguments: ReadableMap, promise: Promise, errorCode: String): AudioPlayer? {
+    private fun getPlayerOrReject(
+        arguments: ReadableMap,
+        promise: Promise,
+        errorCode: String
+    ): AudioPlayer? {
         val key = getPlayerKeyOrReject(arguments, promise, errorCode)
         return audioPlayers[key] ?: run {
             promise.reject(errorCode, "$errorCode: Player not in the list")
@@ -443,7 +478,11 @@ class AudioWaveformModule(context: ReactApplicationContext): ReactContextBaseJav
         }
     }

-    private fun getPlayerKeyOrReject(arguments: ReadableMap, promise: Promise, errorCode: String): String? {
+    private fun getPlayerKeyOrReject(
+        arguments: ReadableMap,
+        promise: Promise,
+        errorCode: String
+    ): String?
{ return arguments.getString(Constants.playerKey) ?: run { promise.reject(errorCode, "$errorCode: Player key can't be null") null diff --git a/ios/AudioPlayer.swift b/ios/AudioPlayer.swift index 059f127..e1396af 100644 --- a/ios/AudioPlayer.swift +++ b/ios/AudioPlayer.swift @@ -9,148 +9,148 @@ import Foundation import AVKit class AudioPlayer: NSObject, AVAudioPlayerDelegate { - - private var seekToStart = true - private var stopWhenCompleted = false - private var timer: Timer? - private var player: AVAudioPlayer? - private var finishMode: FinishMode = FinishMode.stop + + private var seekToStart = true + private var stopWhenCompleted = false + private var timer: Timer? + private var player: AVAudioPlayer? + private var finishMode: FinishMode = FinishMode.stop private var updateFrequency = UpdateFrequency.medium - var plugin: AudioWaveform - var playerKey: String - var rnChannel: AnyObject - private var isComponentMounted: Bool = true // Add flag to track mounted state - - init(plugin: AudioWaveform, playerKey: String, channel: AnyObject) { - self.plugin = plugin - self.playerKey = playerKey - self.rnChannel = channel - super.init() - } - - func preparePlayer(_ path: String?, volume: Double?, updateFrequency: UpdateFrequency, time: Double, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) { - if(!(path ?? "").isEmpty) { - self.updateFrequency = updateFrequency - isComponentMounted = true - let audioUrl = URL.init(string: path!) - if(audioUrl == nil){ - reject(Constants.audioWaveforms, "Failed to initialise Url from provided audio file & If path contains `file://` try removing it", NSError(domain: Constants.audioWaveforms, code: 1)) - return - } - - do { - player = try AVAudioPlayer(contentsOf: audioUrl!) - player?.prepareToPlay() - player?.volume = Float(volume ?? 100.0) - player?.currentTime = Double(time / 1000) - player?.enableRate = true - resolve(true) - } catch let error as NSError { - reject(Constants.audioWaveforms, error.localizedDescription, error) - return - } - } else { - reject(Constants.audioWaveforms, "Audio file path can't be empty or null", NSError(domain: Constants.audioWaveforms, code: 1)) + var plugin: AudioWaveform + var playerKey: String + var rnChannel: AnyObject + private var isComponentMounted: Bool = true // Add flag to track mounted state + + init(plugin: AudioWaveform, playerKey: String, channel: AnyObject) { + self.plugin = plugin + self.playerKey = playerKey + self.rnChannel = channel + super.init() } - } - - func markPlayerAsUnmounted() { - isComponentMounted = false - } - - func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, - successfully flag: Bool) { - var finishType = FinishMode.stop.rawValue - switch self.finishMode { - case .loop: - self.player?.currentTime = 0 - self.player?.play() - finishType = FinishMode.loop.rawValue - case .pause: - self.player?.pause() - stopListening() - finishType = FinishMode.pause.rawValue - case .stop: - self.player?.stop() - stopListening() - self.player = nil - finishType = FinishMode.stop.rawValue + + func preparePlayer(_ path: String?, volume: Double?, updateFrequency: UpdateFrequency, time: Double, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) { + if(!(path ?? "").isEmpty) { + self.updateFrequency = updateFrequency + isComponentMounted = true + let audioUrl = URL.init(string: path!) 
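+            // URL(string:) returns nil when the string is not a well-formed URL; reject early with a clear error.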
+ if(audioUrl == nil){ + reject(Constants.audioWaveforms, "Failed to initialise Url from provided audio file & If path contains `file://` try removing it", NSError(domain: Constants.audioWaveforms, code: 1)) + return + } + + do { + player = try AVAudioPlayer(contentsOf: audioUrl!) + player?.prepareToPlay() + player?.volume = Float(volume ?? 100.0) + player?.currentTime = Double(time / 1000) + player?.enableRate = true + resolve(true) + } catch let error as NSError { + reject(Constants.audioWaveforms, error.localizedDescription, error) + return + } + } else { + reject(Constants.audioWaveforms, "Audio file path can't be empty or null", NSError(domain: Constants.audioWaveforms, code: 1)) + } } - self.sendEvent(withName: Constants.onDidFinishPlayingAudio, body: [Constants.finishType: finishType, Constants.playerKey: playerKey]) - } - - - public func sendEvent(withName: String, body: Any?) { - guard isComponentMounted else { - return + + func markPlayerAsUnmounted() { + isComponentMounted = false } - EventEmitter.sharedInstance.dispatch(name: withName, body: body) - } - + + func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, + successfully flag: Bool) { + var finishType = FinishMode.stop.rawValue + switch self.finishMode { + case .loop: + self.player?.currentTime = 0 + self.player?.play() + finishType = FinishMode.loop.rawValue + case .pause: + self.player?.pause() + stopListening() + finishType = FinishMode.pause.rawValue + case .stop: + self.player?.stop() + stopListening() + self.player = nil + finishType = FinishMode.stop.rawValue + } + self.sendEvent(withName: Constants.onDidFinishPlayingAudio, body: [Constants.finishType: finishType, Constants.playerKey: playerKey]) + } + + + public func sendEvent(withName: String, body: Any?) { + guard isComponentMounted else { + return + } + EventEmitter.sharedInstance.dispatch(name: withName, body: body) + } + func startPlayer(_ finishMode: Int?, speed: Float, result: RCTPromiseResolveBlock) { - if(finishMode != nil && finishMode == 0) { - self.finishMode = FinishMode.loop - } else if(finishMode != nil && finishMode == 1) { - self.finishMode = FinishMode.pause - } else { - self.finishMode = FinishMode.stop - } - player?.play() - player?.delegate = self - player?.rate = Float(speed) - timerUpdate() - startListening() - result(player?.isPlaying) - } - - func pausePlayer(result: @escaping RCTPromiseResolveBlock) { - stopListening() - player?.pause() - timerUpdate() - result(true) - } - - func stopPlayer() { - stopListening() - player?.stop() - timerUpdate() - player = nil - timer = nil - } - - func getDuration(_ type: DurationType, _ result: @escaping RCTPromiseResolveBlock) { - if type == .Current { - let ms = (player?.currentTime ?? 0) * 1000 - result(Int(ms)) - } else { - let ms = (player?.duration ?? 0) * 1000 - result(Int(ms)) + if(finishMode != nil && finishMode == 0) { + self.finishMode = FinishMode.loop + } else if(finishMode != nil && finishMode == 1) { + self.finishMode = FinishMode.pause + } else { + self.finishMode = FinishMode.stop + } + player?.play() + player?.delegate = self + player?.rate = Float(speed) + timerUpdate() + startListening() + result(player?.isPlaying) } - } - - func setVolume(_ volume: Double?, _ result: @escaping RCTPromiseResolveBlock) { - player?.volume = Float(volume ?? 1.0) - result(true) - } - - func seekTo(_ time: Double?, _ result: @escaping RCTPromiseResolveBlock) { - if(time != 0 && time != nil) { - player?.currentTime = Double(time! 
/ 1000) - result(true) - } else { - result(false) + + func pausePlayer(result: @escaping RCTPromiseResolveBlock) { + stopListening() + player?.pause() + timerUpdate() + result(true) + } + + func stopPlayer() { + stopListening() + player?.stop() + timerUpdate() + player = nil + timer = nil + } + + func getDuration(_ type: DurationType, _ result: @escaping RCTPromiseResolveBlock) { + if type == .Current { + let ms = (player?.currentTime ?? 0) * 1000 + result(Int(ms)) + } else { + let ms = (player?.duration ?? 0) * 1000 + result(Int(ms)) + } + } + + func setVolume(_ volume: Double?, _ result: @escaping RCTPromiseResolveBlock) { + player?.volume = Float(volume ?? 1.0) + result(true) + } + + func seekTo(_ time: Double?, _ result: @escaping RCTPromiseResolveBlock) { + if(time != 0 && time != nil) { + player?.currentTime = Double(time! / 1000) + result(true) + } else { + result(false) + } } - } - + @objc func timerUpdate() { let ms = (self.player?.currentTime ?? 0) * 1000 self.sendEvent(withName: Constants.onCurrentDuration, body: [ Constants.currentDuration: Int(ms), Constants.playerKey: self.playerKey] as [String : Any]) } func startListening() { - stopListening() + stopListening() DispatchQueue.main.async { [weak self] in - guard let strongSelf = self else {return } + guard let strongSelf = self else {return } strongSelf.timer = Timer.scheduledTimer(timeInterval: TimeInterval((Float(strongSelf.updateFrequency.rawValue) / 1000)), target: strongSelf, selector: #selector(strongSelf.timerUpdate), userInfo: nil, repeats: true) } } @@ -164,9 +164,9 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate { return false } } - - func stopListening() { - timer?.invalidate() - timer = nil - } + + func stopListening() { + timer?.invalidate() + timer = nil + } } diff --git a/ios/AudioRecorder.swift b/ios/AudioRecorder.swift index d571ca0..f6d98af 100644 --- a/ios/AudioRecorder.swift +++ b/ios/AudioRecorder.swift @@ -10,72 +10,72 @@ import Accelerate import UIKit public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ - var audioRecorder: AVAudioRecorder? - var path: String? - var useLegacyNormalization: Bool = false - var audioUrl: URL? - var recordedDuration: CMTime = CMTime.zero - private var timer: Timer? + var audioRecorder: AVAudioRecorder? + var path: String? + var useLegacyNormalization: Bool = false + var audioUrl: URL? + var recordedDuration: CMTime = CMTime.zero + private var timer: Timer? var updateFrequency = UpdateFrequency.medium - - private func createAudioRecordPath(fileNameFormat: String?) -> URL? { - let format = DateFormatter() - format.dateFormat = fileNameFormat ?? "yyyy-MM-dd-HH-mm-ss-SSS" - let currentFileName = "\(format.string(from: Date()))" + ".m4a" - let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] - let url = documentsDirectory.appendingPathComponent(currentFileName) - return url - } - - func startRecording(_ path: String?, encoder : Int?, updateFrequency: UpdateFrequency, sampleRate : Int?, bitRate : Int?, fileNameFormat: String?, useLegacy: Bool?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - useLegacyNormalization = useLegacy ?? false - self.updateFrequency = updateFrequency - let settings = [ - AVFormatIDKey: getEncoder(encoder ?? 0), - AVSampleRateKey: sampleRate ?? 44100, - AVNumberOfChannelsKey: 1, - AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue, - AVEncoderBitRateKey: bitRate ?? 128000 - ] - let settingsWithBitrate = [ - AVEncoderBitRateKey: bitRate ?? 
128000, - AVFormatIDKey: getEncoder(encoder ?? 0), - AVSampleRateKey: sampleRate ?? 44100, - AVNumberOfChannelsKey: 1, - AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue - ] - let options: AVAudioSession.CategoryOptions = [.defaultToSpeaker, .allowBluetooth, .mixWithOthers] - - if (path == nil) { - guard let newPath = self.createAudioRecordPath(fileNameFormat: fileNameFormat) else { - reject(Constants.audioWaveforms, "Failed to initialise file URL", nil) - return - } - audioUrl = newPath - } else { - audioUrl = URL(fileURLWithPath: path!) + private func createAudioRecordPath(fileNameFormat: String?) -> URL? { + let format = DateFormatter() + format.dateFormat = fileNameFormat ?? "yyyy-MM-dd-HH-mm-ss-SSS" + let currentFileName = "\(format.string(from: Date()))" + ".m4a" + let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] + let url = documentsDirectory.appendingPathComponent(currentFileName) + return url } - - do { - try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: options) - try AVAudioSession.sharedInstance().setActive(true) - guard let newPath = audioUrl else { - reject(Constants.audioWaveforms, "Failed to initialise file URL", nil) - return - } - audioRecorder = try AVAudioRecorder(url: newPath, settings: settings as [String : Any]) - audioRecorder?.delegate = self - audioRecorder?.isMeteringEnabled = true - audioRecorder?.record() - startListening() - resolve(true) - } catch let error as NSError { - print(error.localizedDescription) - reject(Constants.audioWaveforms, "Failed to start recording", error) + func startRecording(_ path: String?, encoder : Int?, updateFrequency: UpdateFrequency, sampleRate : Int?, bitRate : Int?, fileNameFormat: String?, useLegacy: Bool?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + useLegacyNormalization = useLegacy ?? false + self.updateFrequency = updateFrequency + let settings = [ + AVFormatIDKey: getEncoder(encoder ?? 0), + AVSampleRateKey: sampleRate ?? 44100, + AVNumberOfChannelsKey: 1, + AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue, + AVEncoderBitRateKey: bitRate ?? 128000 + ] + let settingsWithBitrate = [ + AVEncoderBitRateKey: bitRate ?? 128000, + AVFormatIDKey: getEncoder(encoder ?? 0), + AVSampleRateKey: sampleRate ?? 44100, + AVNumberOfChannelsKey: 1, + AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue + ] + + let options: AVAudioSession.CategoryOptions = [.defaultToSpeaker, .allowBluetooth, .mixWithOthers] + + if (path == nil) { + guard let newPath = self.createAudioRecordPath(fileNameFormat: fileNameFormat) else { + reject(Constants.audioWaveforms, "Failed to initialise file URL", nil) + return + } + audioUrl = newPath + } else { + audioUrl = URL(fileURLWithPath: path!) 
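+            // fileURLWithPath treats the caller-supplied string as a plain file-system path (no scheme needed).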
+ } + + + do { + try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, options: options) + try AVAudioSession.sharedInstance().setActive(true) + guard let newPath = audioUrl else { + reject(Constants.audioWaveforms, "Failed to initialise file URL", nil) + return + } + audioRecorder = try AVAudioRecorder(url: newPath, settings: settings as [String : Any]) + audioRecorder?.delegate = self + audioRecorder?.isMeteringEnabled = true + audioRecorder?.record() + startListening() + resolve(true) + } catch let error as NSError { + print(error.localizedDescription) + reject(Constants.audioWaveforms, "Failed to start recording", error) + } } - } @objc func timerUpdate(_ sender:Timer) { if (audioRecorder?.isRecording ?? false) { @@ -84,124 +84,128 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ } func startListening() { - stopListening() + stopListening() DispatchQueue.main.async { [weak self] in - guard let strongSelf = self else {return } + guard let strongSelf = self else {return } strongSelf.timer = Timer.scheduledTimer(timeInterval: TimeInterval((Float(strongSelf.updateFrequency.rawValue) / 1000)), target: strongSelf, selector: #selector(strongSelf.timerUpdate(_:)), userInfo: nil, repeats: true) } } - - func stopListening() { - timer?.invalidate() - timer = nil - } - - public func stopRecording(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { - stopListening() - audioRecorder?.stop() - if(audioUrl != nil) { - let asset = AVURLAsset(url: audioUrl!) - if #available(iOS 15.0, *) { - Task { - do { - recordedDuration = try await asset.load(.duration) - resolve([asset.url.absoluteString,Int(recordedDuration.seconds * 1000).description]) - } catch let err { - debugPrint(err.localizedDescription) - reject(Constants.audioWaveforms, "Failed to stop recording 3", err) - } + + func stopListening() { + timer?.invalidate() + timer = nil + } + + public func stopRecording(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) { + guard let audioUrl = audioUrl, let audioRecorder = audioRecorder else { + reject(Constants.audioWaveforms, "No audio recording is running", nil) + return + } + + guard audioRecorder.isRecording else { + reject(Constants.audioWaveforms, "Recorder is not recording or has already been stopped", nil) + return } - } else { - recordedDuration = asset.duration - resolve([asset.url.absoluteString,Int(recordedDuration.seconds * 1000).description]) - } - } else { - reject(Constants.audioWaveforms, "Failed to stop recording", nil) + + stopListening() + audioRecorder.stop() + self.audioRecorder = nil + + let asset = AVURLAsset(url: audioUrl) + let recordedDuration = asset.duration + let durationMs = Int(CMTimeGetSeconds(recordedDuration) * 1000) + + self.audioUrl = nil + resolve([asset.url.absoluteString, String(durationMs)]) + } + + public func pauseRecording(_ resolve: RCTPromiseResolveBlock) -> Void { + audioRecorder?.pause() + resolve(true) + } + + public func resumeRecording(_ resolve: RCTPromiseResolveBlock) -> Void { + audioRecorder?.record() + resolve(true) } - audioRecorder = nil - } - - public func pauseRecording(_ resolve: RCTPromiseResolveBlock) -> Void { - audioRecorder?.pause() - resolve(true) - } - - public func resumeRecording(_ resolve: RCTPromiseResolveBlock) -> Void { - audioRecorder?.record() - resolve(true) - } func getDecibelLevel() -> Float { audioRecorder?.updateMeters() if(useLegacyNormalization){ - let amp = 
audioRecorder?.averagePower(forChannel: 0) ?? 0.0 + let amp = audioRecorder?.averagePower(forChannel: 0) ?? 0.0 return amp } else { - let amp = audioRecorder?.peakPower(forChannel: 0) ?? 0.0 - let linear = pow(10, amp / 20); + let amp = audioRecorder?.peakPower(forChannel: 0) ?? 0.0 + let linear = pow(10, amp / 20); return linear } } - - public func getDecibel(_ resolve: RCTPromiseResolveBlock) -> Void { - resolve(getDecibelLevel()) - } - - public func checkHasAudioRecorderPermission(_ resolve: RCTPromiseResolveBlock) -> Void{ - var hasPermission = "" - switch AVAudioSession.sharedInstance().recordPermission{ - case .granted: - hasPermission = "granted" - break - case .undetermined: - hasPermission = "undetermined" - case .denied: - hasPermission = "denied" - @unknown default: - hasPermission = "denied" - break + + public func getDecibel(_ resolve: RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { + guard let audioRecorder = self.audioRecorder, audioRecorder.isRecording else { + reject(Constants.audioWaveforms, "No active audio recording", nil) + return + } + let decibelLevel = getDecibelLevel() + resolve(decibelLevel) + + } + + public func checkHasAudioRecorderPermission(_ resolve: RCTPromiseResolveBlock) -> Void{ + var hasPermission = "" + switch AVAudioSession.sharedInstance().recordPermission{ + case .granted: + hasPermission = "granted" + break + case .undetermined: + hasPermission = "undetermined" + case .denied: + hasPermission = "denied" + @unknown default: + hasPermission = "denied" + break + } + resolve(hasPermission) } - resolve(hasPermission) - } - - public func getAudioRecorderPermission(_ resolve: @escaping RCTPromiseResolveBlock) -> Void{ - AVAudioSession.sharedInstance().requestRecordPermission() { allowed in - DispatchQueue.main.async { - print("Permission \(allowed)") - resolve(allowed ? "granted" : "denied") - } + + public func getAudioRecorderPermission(_ resolve: @escaping RCTPromiseResolveBlock) -> Void{ + AVAudioSession.sharedInstance().requestRecordPermission() { allowed in + DispatchQueue.main.async { + print("Permission \(allowed)") + resolve(allowed ? 
"granted" : "denied") + } + } } - } - - public func getEncoder(_ enCoder: Int) -> Int { - switch(enCoder) { - case Constants.kAudioFormatMPEG4AAC: - return Int(kAudioFormatMPEG4AAC) - case Constants.kAudioFormatMPEGLayer1: - return Int(kAudioFormatMPEGLayer1) - case Constants.kAudioFormatMPEGLayer2: - return Int(kAudioFormatMPEGLayer2) - case Constants.kAudioFormatMPEGLayer3: - return Int(kAudioFormatMPEGLayer3) - case Constants.kAudioFormatMPEG4AAC_ELD: - return Int(kAudioFormatMPEG4AAC_ELD) - case Constants.kAudioFormatMPEG4AAC_HE: - return Int(kAudioFormatMPEG4AAC_HE) - case Constants.kAudioFormatOpus: - return Int(kAudioFormatOpus) - case Constants.kAudioFormatAMR: - return Int(kAudioFormatAMR) - case Constants.kAudioFormatAMR_WB: - return Int(kAudioFormatAMR_WB) - case Constants.kAudioFormatLinearPCM: - return Int(kAudioFormatLinearPCM) - case Constants.kAudioFormatAppleLossless: - return Int(kAudioFormatAppleLossless) - case Constants.kAudioFormatMPEG4AAC_HE_V2: - return Int(kAudioFormatMPEG4AAC_HE_V2) - default: - return Int(kAudioFormatMPEG4AAC) + + public func getEncoder(_ enCoder: Int) -> Int { + switch(enCoder) { + case Constants.kAudioFormatMPEG4AAC: + return Int(kAudioFormatMPEG4AAC) + case Constants.kAudioFormatMPEGLayer1: + return Int(kAudioFormatMPEGLayer1) + case Constants.kAudioFormatMPEGLayer2: + return Int(kAudioFormatMPEGLayer2) + case Constants.kAudioFormatMPEGLayer3: + return Int(kAudioFormatMPEGLayer3) + case Constants.kAudioFormatMPEG4AAC_ELD: + return Int(kAudioFormatMPEG4AAC_ELD) + case Constants.kAudioFormatMPEG4AAC_HE: + return Int(kAudioFormatMPEG4AAC_HE) + case Constants.kAudioFormatOpus: + return Int(kAudioFormatOpus) + case Constants.kAudioFormatAMR: + return Int(kAudioFormatAMR) + case Constants.kAudioFormatAMR_WB: + return Int(kAudioFormatAMR_WB) + case Constants.kAudioFormatLinearPCM: + return Int(kAudioFormatLinearPCM) + case Constants.kAudioFormatAppleLossless: + return Int(kAudioFormatAppleLossless) + case Constants.kAudioFormatMPEG4AAC_HE_V2: + return Int(kAudioFormatMPEG4AAC_HE_V2) + default: + return Int(kAudioFormatMPEG4AAC) + } } - } } diff --git a/ios/AudioWaveform.swift b/ios/AudioWaveform.swift index 6ccb19e..30ba983 100644 --- a/ios/AudioWaveform.swift +++ b/ios/AudioWaveform.swift @@ -9,258 +9,258 @@ import UIKit @objc(AudioWaveform) class AudioWaveform: RCTEventEmitter { - final var audioRecorder = AudioRecorder() - var audioPlayers = [String: AudioPlayer]() - var extractors = [String: WaveformExtractor]() - - override init() { - super.init() - NotificationCenter.default.addObserver(self, selector: #selector(didReceiveMeteringLevelUpdate), - name: .audioRecorderManagerMeteringLevelDidUpdateNotification, object: nil) - } - - deinit { - audioPlayers.removeAll() - extractors.removeAll() - NotificationCenter.default.removeObserver(self) - } - - @objc private func didReceiveMeteringLevelUpdate(_ notification: Notification) { - let percentage = notification.userInfo?[Constants.waveformData] as? 
Float - DispatchQueue.main.async { - print("current power: \(String(describing: notification.userInfo)) dB \(percentage)") + final var audioRecorder = AudioRecorder() + var audioPlayers = [String: AudioPlayer]() + var extractors = [String: WaveformExtractor]() + + override init() { + super.init() + NotificationCenter.default.addObserver(self, selector: #selector(didReceiveMeteringLevelUpdate), + name: .audioRecorderManagerMeteringLevelDidUpdateNotification, object: nil) + } + + deinit { + audioPlayers.removeAll() + extractors.removeAll() + NotificationCenter.default.removeObserver(self) + } + + @objc private func didReceiveMeteringLevelUpdate(_ notification: Notification) { + let percentage = notification.userInfo?[Constants.waveformData] as? Float + DispatchQueue.main.async { + print("current power: \(String(describing: notification.userInfo)) dB \(percentage)") + } } - } - + @objc override static func requiresMainQueueSetup() -> Bool { return true } - // we need to override this method and - // return an array of event names that we can listen to - override func supportedEvents() -> [String]! { - return ["AudioPlayerEvent"] - } - - @objc func checkHasAudioRecorderPermission(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - audioRecorder.checkHasAudioRecorderPermission(resolve) - } + // we need to override this method and + // return an array of event names that we can listen to + override func supportedEvents() -> [String]! { + return ["AudioPlayerEvent"] + } + + @objc func checkHasAudioRecorderPermission(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + audioRecorder.checkHasAudioRecorderPermission(resolve) + } @objc func checkHasAudioReadPermission(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { // iOS does not need to ask for permission to read files so this will resolve "granted" every time resolve("granted") } - + @objc func getAudioReadPermission(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { // iOS does not need to ask for permission to read files so this will resolve "granted" every time resolve("granted") } - - @objc func getAudioRecorderPermission(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - audioRecorder.getAudioRecorderPermission(resolve) - } - - @objc func markPlayerAsUnmounted() { - if audioPlayers.isEmpty { - return + + @objc func getAudioRecorderPermission(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + audioRecorder.getAudioRecorderPermission(resolve) + } + + @objc func markPlayerAsUnmounted() { + if audioPlayers.isEmpty { + return + } + + for (_, player) in audioPlayers { + player.markPlayerAsUnmounted() + } } - for (_, player) in audioPlayers { - player.markPlayerAsUnmounted() + @objc func startRecording(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + audioRecorder.startRecording(args?[Constants.path] as? String, + encoder: args?[Constants.encoder] as? Int, + updateFrequency: UpdateFrequency(rawValue: (args?[Constants.updateFrequency]) as? Double ?? 0) ?? UpdateFrequency.medium, + sampleRate: args?[Constants.sampleRate] as? Int, + bitRate: args?[Constants.bitRate] as? Int, + fileNameFormat: args?[Constants.fileNameFormat] as? String, + useLegacy: args?[Constants.useLegacyNormalization] as? 
Bool, + resolver: resolve, + rejecter: reject) } - } - - @objc func startRecording(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - audioRecorder.startRecording(args?[Constants.path] as? String, - encoder: args?[Constants.encoder] as? Int, - updateFrequency: UpdateFrequency(rawValue: (args?[Constants.updateFrequency]) as? Double ?? 0) ?? UpdateFrequency.medium, - sampleRate: args?[Constants.sampleRate] as? Int, - bitRate: args?[Constants.bitRate] as? Int, - fileNameFormat: args?[Constants.fileNameFormat] as? String, - useLegacy: args?[Constants.useLegacyNormalization] as? Bool, - resolver: resolve, - rejecter: reject) - } - - @objc func stopRecording(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { - audioRecorder.stopRecording(resolve, rejecter: reject) - } - - @objc func pauseRecording(_ resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - audioRecorder.pauseRecording(resolve) - } - - @objc func resumeRecording(_ resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - audioRecorder.resumeRecording(resolve) - } - - @objc func getDecibel(_ resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - audioRecorder.getDecibel(resolve) - } - - @objc func extractWaveformData(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - let path = args?[Constants.path] as? String - let noOfSamples = args?[Constants.noOfSamples] as? Int - if(key != nil) { - createOrUpdateExtractor(playerKey: key!, path: path, noOfSamples: noOfSamples, resolve: resolve, rejecter: reject) - } else { - reject(Constants.audioWaveforms,"Can not get waveform data",nil) + + @objc func stopRecording(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { + audioRecorder.stopRecording(resolve, rejecter: reject) } - } - - func createOrUpdateExtractor(playerKey: String, path: String?, noOfSamples: Int?, resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) { - if(!(path ?? "").isEmpty) { - do { - let audioUrl = URL.init(string: path!) - if(audioUrl == nil){ - reject(Constants.audioWaveforms, "Failed to initialise Url from provided audio file If path contains `file://` try removing it", nil) - return + + @objc func pauseRecording(_ resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + audioRecorder.pauseRecording(resolve) + } + + @objc func resumeRecording(_ resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + audioRecorder.resumeRecording(resolve) + } + + @objc func getDecibel(_ resolve: RCTPromiseResolveBlock, rejecter reject: @escaping RCTPromiseRejectBlock) -> Void { + audioRecorder.getDecibel(resolve, rejecter: reject) + } + + @objc func extractWaveformData(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + let path = args?[Constants.path] as? String + let noOfSamples = args?[Constants.noOfSamples] as? 
Int + if(key != nil) { + createOrUpdateExtractor(playerKey: key!, path: path, noOfSamples: noOfSamples, resolve: resolve, rejecter: reject) + } else { + reject(Constants.audioWaveforms,"Can not get waveform data",nil) } - let newExtractor = try WaveformExtractor(url: audioUrl!, channel: self, resolve: resolve, rejecter: reject) - extractors[playerKey] = newExtractor - let data = newExtractor.extractWaveform(samplesPerPixel: noOfSamples, playerKey: playerKey) - newExtractor.cancel() - if(newExtractor.progress == 1.0) { - // Normalize the waveform data - let normalizedData = normalizeWaveformData(data: data!, scale: 0.12) - let waveformData = newExtractor.getChannelMean(data: normalizedData) - resolve([waveformData]) + } + + func createOrUpdateExtractor(playerKey: String, path: String?, noOfSamples: Int?, resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) { + if(!(path ?? "").isEmpty) { + do { + let audioUrl = URL.init(string: path!) + if(audioUrl == nil){ + reject(Constants.audioWaveforms, "Failed to initialise Url from provided audio file If path contains `file://` try removing it", nil) + return + } + let newExtractor = try WaveformExtractor(url: audioUrl!, channel: self, resolve: resolve, rejecter: reject) + extractors[playerKey] = newExtractor + let data = newExtractor.extractWaveform(samplesPerPixel: noOfSamples, playerKey: playerKey) + newExtractor.cancel() + if(newExtractor.progress == 1.0) { + // Normalize the waveform data + let normalizedData = normalizeWaveformData(data: data!, scale: 0.12) + let waveformData = newExtractor.getChannelMean(data: normalizedData) + resolve([waveformData]) + } + } catch let e { + reject(Constants.audioWaveforms, "Failed to decode audio file", e) + } + } else { + reject(Constants.audioWaveforms, "Audio file path can't be empty or null", nil) + } - } catch let e { - reject(Constants.audioWaveforms, "Failed to decode audio file", e) - } - } else { - reject(Constants.audioWaveforms, "Audio file path can't be empty or null", nil) - } - } - - func normalizeWaveformData(data: [[Float]], scale: Float = 0.25, threshold: Float = 0.01) -> [[Float]] { - return data.map { channelData in - let filteredData = channelData.filter { abs($0) >= threshold } - let maxAmplitude = filteredData.max() ?? 1.0 - guard maxAmplitude > 0 else { return channelData } - return channelData.map { (abs($0) < threshold ? 0 : ($0 / maxAmplitude) * scale) } + + func normalizeWaveformData(data: [[Float]], scale: Float = 0.25, threshold: Float = 0.01) -> [[Float]] { + return data.map { channelData in + let filteredData = channelData.filter { abs($0) >= threshold } + let maxAmplitude = filteredData.max() ?? 1.0 + guard maxAmplitude > 0 else { return channelData } + return channelData.map { (abs($0) < threshold ? 0 : ($0 / maxAmplitude) * scale) } + } } - } - - // Plyer - @objc func preparePlayer(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - if(key != nil){ - initPlayer(playerKey: key!) - audioPlayers[key!]?.preparePlayer(args?[Constants.path] as? String, - volume: args?[Constants.volume] as? Double, - updateFrequency: UpdateFrequency(rawValue: (args?[Constants.updateFrequency]) as? Double ?? 0) ?? UpdateFrequency.medium, - time: args?[Constants.progress] as? Double ?? 
0, - resolver: resolve, - rejecter: reject) - } else { - reject(Constants.audioWaveforms, "Can not prepare player", nil) + + // Plyer + @objc func preparePlayer(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + if(key != nil){ + initPlayer(playerKey: key!) + audioPlayers[key!]?.preparePlayer(args?[Constants.path] as? String, + volume: args?[Constants.volume] as? Double, + updateFrequency: UpdateFrequency(rawValue: (args?[Constants.updateFrequency]) as? Double ?? 0) ?? UpdateFrequency.medium, + time: args?[Constants.progress] as? Double ?? 0, + resolver: resolve, + rejecter: reject) + } else { + reject(Constants.audioWaveforms, "Can not prepare player", nil) + } } - } - - @objc func startPlayer(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - let finishMode = args?[Constants.finishMode] as? Int - let speed = (args?[Constants.speed] as? NSNumber)?.floatValue ?? 1.0 - - if(key != nil && audioPlayers[key!] != nil){ - audioPlayers[key!]?.startPlayer(finishMode, speed: speed, result:resolve) - } else { - reject(Constants.audioWaveforms, "Can not start player", nil) + + @objc func startPlayer(_ args: NSDictionary?, resolver resolve: RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + let finishMode = args?[Constants.finishMode] as? Int + let speed = (args?[Constants.speed] as? NSNumber)?.floatValue ?? 1.0 + + if(key != nil && audioPlayers[key!] != nil){ + audioPlayers[key!]?.startPlayer(finishMode, speed: speed, result:resolve) + } else { + reject(Constants.audioWaveforms, "Can not start player", nil) + } } - } - - @objc func pausePlayer(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - if(key != nil && audioPlayers[key!] != nil){ - audioPlayers[key!]?.pausePlayer(result: resolve) - } else { - reject(Constants.audioWaveforms, "Can not pause player, Player key is null", nil) + + @objc func pausePlayer(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + if(key != nil && audioPlayers[key!] != nil){ + audioPlayers[key!]?.pausePlayer(result: resolve) + } else { + reject(Constants.audioWaveforms, "Can not pause player, Player key is null", nil) + } } - } - - @objc func stopPlayer(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - if(key != nil){ - audioPlayers[key!]?.stopPlayer() - audioPlayers[key!] = nil // Release the player after stopping it - resolve(true) - } else { - reject(Constants.audioWaveforms, "Can not stop player, Player key is null", nil) + + @objc func stopPlayer(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + if(key != nil){ + audioPlayers[key!]?.stopPlayer() + audioPlayers[key!] 
= nil // Release the player after stopping it + resolve(true) + } else { + reject(Constants.audioWaveforms, "Can not stop player, Player key is null", nil) + } } - } - - @objc func seekToPlayer(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - if(key != nil && audioPlayers[key!] != nil){ - audioPlayers[key!]?.seekTo(args?[Constants.progress] as? Double,resolve) - } else { - reject(Constants.audioWaveforms, "Can not seek to postion, Player key is null", nil) + + @objc func seekToPlayer(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + if(key != nil && audioPlayers[key!] != nil){ + audioPlayers[key!]?.seekTo(args?[Constants.progress] as? Double,resolve) + } else { + reject(Constants.audioWaveforms, "Can not seek to postion, Player key is null", nil) + } } - } - - @objc func setVolume(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let key = args?[Constants.playerKey] as? String - if(key != nil && audioPlayers[key!] != nil){ - audioPlayers[key!]?.setVolume(args?[Constants.volume] as? Double,resolve) - } else { - reject(Constants.audioWaveforms, "Can not set volume, Player key is null", nil) + + @objc func setVolume(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let key = args?[Constants.playerKey] as? String + if(key != nil && audioPlayers[key!] != nil){ + audioPlayers[key!]?.setVolume(args?[Constants.volume] as? Double,resolve) + } else { + reject(Constants.audioWaveforms, "Can not set volume, Player key is null", nil) + } } - } - - @objc func getDuration(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - let type = args?[Constants.durationType] as? Int - let key = args?[Constants.playerKey] as? String - if(key != nil && audioPlayers[key!] != nil){ - do{ - if(type == 0) { - try audioPlayers[key!]?.getDuration(DurationType.Current,resolve) + + @objc func getDuration(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + let type = args?[Constants.durationType] as? Int + let key = args?[Constants.playerKey] as? String + if(key != nil && audioPlayers[key!] 
!= nil){ + do{ + if(type == 0) { + try audioPlayers[key!]?.getDuration(DurationType.Current,resolve) + } else { + try audioPlayers[key!]?.getDuration(DurationType.Max,resolve) + } + } catch let e { + reject(Constants.audioWaveforms, "Failed to get duration", e) + } } else { - try audioPlayers[key!]?.getDuration(DurationType.Max,resolve) + reject(Constants.audioWaveforms, "Can not get duration", nil) } - } catch let e { - reject(Constants.audioWaveforms, "Failed to get duration", e) - } - } else { - reject(Constants.audioWaveforms, "Can not get duration", nil) } - } - - @objc func stopAllPlayers(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - for (playerKey,_) in audioPlayers{ - audioPlayers[playerKey]?.stopPlayer() + + @objc func stopAllPlayers(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + for (playerKey,_) in audioPlayers{ + audioPlayers[playerKey]?.stopPlayer() + } + audioPlayers.removeAll() + resolve(true) } - audioPlayers.removeAll() - resolve(true) - } - - @objc func stopAllWaveFormExtractors(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { - for (extractorKey,_) in extractors{ - extractors[extractorKey]?.cancel() + + @objc func stopAllWaveFormExtractors(_ resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) -> Void { + for (extractorKey,_) in extractors{ + extractors[extractorKey]?.cancel() + } + extractors.removeAll() + resolve(true) } - extractors.removeAll() - resolve(true) - } - - - func getUpdateFrequency(freq: Int?) -> Int{ - if(freq == 2){ - return 50 - } else if(freq == 1){ - return 100 + + + func getUpdateFrequency(freq: Int?) -> Int{ + if(freq == 2){ + return 50 + } else if(freq == 1){ + return 100 + } + return 200 } - return 200 - } - - func initPlayer(playerKey: String) { - if audioPlayers[playerKey] == nil { - let newPlayer = AudioPlayer(plugin: self,playerKey: playerKey, channel: "Waveforms" as AnyObject) - audioPlayers[playerKey] = newPlayer + + func initPlayer(playerKey: String) { + if audioPlayers[playerKey] == nil { + let newPlayer = AudioPlayer(plugin: self,playerKey: playerKey, channel: "Waveforms" as AnyObject) + audioPlayers[playerKey] = newPlayer + } } - } @objc func setPlaybackSpeed(_ args: NSDictionary?, resolver resolve: @escaping RCTPromiseResolveBlock, @@ -269,10 +269,10 @@ class AudioWaveform: RCTEventEmitter { let speed = (args?[Constants.speed] as? NSNumber)?.floatValue ?? 
1.0

         if(key != nil){
-            let status = audioPlayers[key!]?.setPlaybackSpeed(speed)
-            resolve(status)
+            let status = audioPlayers[key!]?.setPlaybackSpeed(speed)
+            resolve(status)
         } else {
-            reject(Constants.audioWaveforms, "Can not pause player, Player key is null", nil)
+            reject(Constants.audioWaveforms, "Can not set playback speed, Player key is null", nil)
         }
     }
 }
diff --git a/ios/AudioWaveformsEventEmitter.swift b/ios/AudioWaveformsEventEmitter.swift
index 806883e..bcf92b9 100644
--- a/ios/AudioWaveformsEventEmitter.swift
+++ b/ios/AudioWaveformsEventEmitter.swift
@@ -2,22 +2,22 @@ import Foundation

 @objc(AudioWaveformsEventEmitter)
 open class AudioWaveformsEventEmitter: RCTEventEmitter {
-  
-  override init() {
-    super.init()
-    EventEmitter.sharedInstance.registerEventEmitter(eventEmitter: self)
-  }
-  
+
+    override init() {
+        super.init()
+        EventEmitter.sharedInstance.registerEventEmitter(eventEmitter: self)
+    }
+
     @objc public override static func requiresMainQueueSetup() -> Bool {
         return true
     }
-  
-  /// Base overide for RCTEventEmitter.
-  ///
-  /// - Returns: all supported events
-  @objc open override func supportedEvents() -> [String] {
-    return EventEmitter.sharedInstance.allEvents
-  }
-  
+
+    /// Base override for RCTEventEmitter.
+    ///
+    /// - Returns: all supported events
+    @objc open override func supportedEvents() -> [String] {
+        return EventEmitter.sharedInstance.allEvents
+    }
+
 }
diff --git a/ios/EventEmitter.swift b/ios/EventEmitter.swift
index 5d0d289..cab0bf2 100644
--- a/ios/EventEmitter.swift
+++ b/ios/EventEmitter.swift
@@ -1,29 +1,29 @@
 class EventEmitter {
-  
-  /// Shared Instance.
-  public static var sharedInstance = EventEmitter()
-  
-  // ReactNativeEventEmitter is instantiated by React Native with the bridge.
-  private static var eventEmitter: AudioWaveformsEventEmitter!
-  
-  private init() {}
-  
-  // When React Native instantiates the emitter it is registered here.
-  func registerEventEmitter(eventEmitter: AudioWaveformsEventEmitter) {
-    EventEmitter.eventEmitter = eventEmitter
-  }
-  
-  func dispatch(name: String, body: Any?) {
-    EventEmitter.eventEmitter.sendEvent(withName: name, body: body)
-  }
-  
-  /// All Events which must be support by React Native.
-  lazy var allEvents: [String] = {
-    var allEventNames: [String] = ["onDidFinishPlayingAudio", "onCurrentDuration", "onCurrentExtractedWaveformData", "onCurrentRecordingWaveformData"]
-    // Append all events here
+    /// Shared Instance.
+    public static var sharedInstance = EventEmitter()
+
+    // ReactNativeEventEmitter is instantiated by React Native with the bridge.
+    private static var eventEmitter: AudioWaveformsEventEmitter!
+
+    private init() {}
+
+    // When React Native instantiates the emitter it is registered here.
+    func registerEventEmitter(eventEmitter: AudioWaveformsEventEmitter) {
+        EventEmitter.eventEmitter = eventEmitter
+    }
+
+    func dispatch(name: String, body: Any?) {
+        EventEmitter.eventEmitter.sendEvent(withName: name, body: body)
+    }
+
+    /// All Events which must be supported by React Native.
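+    /// Keep this list in sync with the event names the native recorder and players dispatch via EventEmitter.dispatch(name:body:).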
diff --git a/ios/EventEmitter.swift b/ios/EventEmitter.swift
index 5d0d289..cab0bf2 100644
--- a/ios/EventEmitter.swift
+++ b/ios/EventEmitter.swift
@@ -1,29 +1,29 @@
 class EventEmitter {
-
-  /// Shared Instance.
-  public static var sharedInstance = EventEmitter()
-
-  // ReactNativeEventEmitter is instantiated by React Native with the bridge.
-  private static var eventEmitter: AudioWaveformsEventEmitter!
-
-  private init() {}
-
-  // When React Native instantiates the emitter it is registered here.
-  func registerEventEmitter(eventEmitter: AudioWaveformsEventEmitter) {
-    EventEmitter.eventEmitter = eventEmitter
-  }
-
-  func dispatch(name: String, body: Any?) {
-    EventEmitter.eventEmitter.sendEvent(withName: name, body: body)
-  }
-
-  /// All Events which must be support by React Native.
-  lazy var allEvents: [String] = {
-    var allEventNames: [String] = ["onDidFinishPlayingAudio", "onCurrentDuration", "onCurrentExtractedWaveformData", "onCurrentRecordingWaveformData"]
-    // Append all events here
+    /// Shared Instance.
+    public static var sharedInstance = EventEmitter()
+
+    // ReactNativeEventEmitter is instantiated by React Native with the bridge.
+    private static var eventEmitter: AudioWaveformsEventEmitter!
+
+    private init() {}
+
+    // When React Native instantiates the emitter it is registered here.
+    func registerEventEmitter(eventEmitter: AudioWaveformsEventEmitter) {
+        EventEmitter.eventEmitter = eventEmitter
+    }
+
+    func dispatch(name: String, body: Any?) {
+        EventEmitter.eventEmitter.sendEvent(withName: name, body: body)
+    }
+
+    /// All events which must be supported by React Native.
+    lazy var allEvents: [String] = {
+        var allEventNames: [String] = ["onDidFinishPlayingAudio", "onCurrentDuration", "onCurrentExtractedWaveformData", "onCurrentRecordingWaveformData"]
+
+        // Append all events here
+
+        return allEventNames
+    }()
-    return allEventNames
-  }()
-
 }
diff --git a/ios/Utils.swift b/ios/Utils.swift
index 9a10019..71d29d0 100644
--- a/ios/Utils.swift
+++ b/ios/Utils.swift
@@ -8,91 +8,91 @@ import Foundation
 
 enum DurationType {
-  case Current
-  case Max
+    case Current
+    case Max
 }
 
 struct Constants {
-  static let methodChannelName = "simform_audio_waveforms_plugin/methods"
-  static let audioWaveforms = "AudioWaveforms"
-  static let startRecording = "startRecording"
-  static let pauseRecording = "pauseRecording"
-  static let stopRecording = "stopRecording"
-  static let getDecibel = "getDecibel"
-  static let checkPermission = "checkPermission"
-  static let getPermission = "getPermission"
-  static let path = "path"
-  static let encoder = "encoder"
-  static let sampleRate = "sampleRate"
-  static let bitRate = "bitRate"
-  static let fileNameFormat = "fileNameFormat"
-  static let resumeRecording = "resumeRecording"
-
-  static let kAudioFormatMPEG4AAC = 1
-  static let kAudioFormatMPEGLayer1 = 2
-  static let kAudioFormatMPEGLayer2 = 3
-  static let kAudioFormatMPEGLayer3 = 4
-  static let kAudioFormatMPEG4AAC_ELD = 5
-  static let kAudioFormatMPEG4AAC_HE = 6
-  static let kAudioFormatOpus = 7
-  static let kAudioFormatAMR = 8
-  static let kAudioFormatAMR_WB = 9
-  static let kAudioFormatLinearPCM = 10
-  static let kAudioFormatAppleLossless = 11
-  static let kAudioFormatMPEG4AAC_HE_V2 = 12
-
-  static let readAudioFile = "readAudioFile"
-  static let durationEventChannel = "durationEventChannel"
-  static let startPlayer = "startPlayer"
-  static let stopPlayer = "stopPlayer"
-  static let pausePlayer = "pausePlayer"
-  static let seekTo = "seekTo"
-  static let progress = "progress"
-  static let setVolume = "setVolume"
-  static let volume = "volume"
-  static let getDuration = "getDuration"
-  static let durationType = "durationType"
-  static let preparePlayer = "preparePlayer"
-  static let onCurrentDuration = "onCurrentDuration"
-  static let currentDuration = "currentDuration"
+    static let methodChannelName = "simform_audio_waveforms_plugin/methods"
+    static let audioWaveforms = "AudioWaveforms"
+    static let startRecording = "startRecording"
+    static let pauseRecording = "pauseRecording"
+    static let stopRecording = "stopRecording"
+    static let getDecibel = "getDecibel"
+    static let checkPermission = "checkPermission"
+    static let getPermission = "getPermission"
+    static let path = "path"
+    static let encoder = "encoder"
+    static let sampleRate = "sampleRate"
+    static let bitRate = "bitRate"
+    static let fileNameFormat = "fileNameFormat"
+    static let resumeRecording = "resumeRecording"
+
+    static let kAudioFormatMPEG4AAC = 1
+    static let kAudioFormatMPEGLayer1 = 2
+    static let kAudioFormatMPEGLayer2 = 3
+    static let kAudioFormatMPEGLayer3 = 4
+    static let kAudioFormatMPEG4AAC_ELD = 5
+    static let kAudioFormatMPEG4AAC_HE = 6
+    static let kAudioFormatOpus = 7
+    static let kAudioFormatAMR = 8
+    static let kAudioFormatAMR_WB = 9
+    static let kAudioFormatLinearPCM = 10
+    static let kAudioFormatAppleLossless = 11
+    static let kAudioFormatMPEG4AAC_HE_V2 = 12
+
+    static let readAudioFile = "readAudioFile"
+    static let durationEventChannel = "durationEventChannel"
+    static let startPlayer = "startPlayer"
+    static let stopPlayer = "stopPlayer"
+    static let pausePlayer = "pausePlayer"
+    static let seekTo = "seekTo"
+    static let progress = "progress"
+    static let setVolume = "setVolume"
+    static let volume = "volume"
+    static let getDuration = "getDuration"
+    static let durationType = "durationType"
+    static let preparePlayer = "preparePlayer"
+    static let onCurrentDuration = "onCurrentDuration"
+    static let currentDuration = "currentDuration"
     static let currentDecibel = "currentDecibel"
-  static let playerKey = "playerKey"
-  static let stopAllPlayers = "stopAllPlayers"
-  static let stopAllWaveFormExtractors = "stopAllWaveFormExtractors"
-  static let onDidFinishPlayingAudio = "onDidFinishPlayingAudio"
-  static let finishMode = "finishMode"
-  static let speed = "speed"
-  static let finishType = "finishType"
-  static let extractWaveformData = "extractWaveformData"
-  static let noOfSamples = "noOfSamples"
-  static let onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData"
+    static let playerKey = "playerKey"
+    static let stopAllPlayers = "stopAllPlayers"
+    static let stopAllWaveFormExtractors = "stopAllWaveFormExtractors"
+    static let onDidFinishPlayingAudio = "onDidFinishPlayingAudio"
+    static let finishMode = "finishMode"
+    static let speed = "speed"
+    static let finishType = "finishType"
+    static let extractWaveformData = "extractWaveformData"
+    static let noOfSamples = "noOfSamples"
+    static let onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData"
     static let onCurrentRecordingWaveformData = "onCurrentRecordingWaveformData"
-  static let waveformData = "waveformData"
-  static let onExtractionProgressUpdate = "onExtractionProgressUpdate"
-  static let useLegacyNormalization = "useLegacyNormalization"
-  static let updateFrequency = "updateFrequency"
-  static let onGetAudioBuffers = "onGetAudioBuffers"
+    static let waveformData = "waveformData"
+    static let onExtractionProgressUpdate = "onExtractionProgressUpdate"
+    static let useLegacyNormalization = "useLegacyNormalization"
+    static let updateFrequency = "updateFrequency"
+    static let onGetAudioBuffers = "onGetAudioBuffers"
 }
 
 enum FinishMode : Int{
-  case loop = 0
-  case pause = 1
-  case stop = 2
+    case loop = 0
+    case pause = 1
+    case stop = 2
 }
 
 // Note: If you make changes here, make the same changes on the Android and React Native sides as well; otherwise the values will mismatch.
 // Use the same values on the Android and React Native sides as well.
 enum UpdateFrequency : Double {
     case high = 250.0
-  case medium = 500.0
-  case low = 1000.0
+    case medium = 500.0
+    case low = 1000.0
 }
 
 /// Creates a 2D array of floats
 public typealias FloatChannelData = [[Float]]
 
 /// Extension to fill array with zeros
 public extension RangeReplaceableCollection where Iterator.Element: ExpressibleByIntegerLiteral {
-  init(zeros count: Int) {
-    self.init(repeating: 0, count: count)
-  }
+    init(zeros count: Int) {
+        self.init(repeating: 0, count: count)
+    }
 }
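The note in Utils.swift warns that these raw values form a cross-platform contract. As a hypothetical illustration only (the real constants live in `src/constants`, which this patch does not show), a TS mirror of the Swift `UpdateFrequency` enum would have to carry identical values:

```ts
// Hypothetical mirror of the Swift UpdateFrequency enum above; illustrative
// only. The values must stay identical across iOS, Android, and JS, or the
// native side will interpret the requested update interval incorrectly.
export enum UpdateFrequency {
  high = 250.0,
  medium = 500.0,
  low = 1000.0,
}
```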
diff --git a/ios/WaveformExtractor.swift b/ios/WaveformExtractor.swift
index 71b487e..1d12ce4 100644
--- a/ios/WaveformExtractor.swift
+++ b/ios/WaveformExtractor.swift
@@ -9,163 +9,163 @@ import Accelerate
 import AVFoundation
 
 extension Notification.Name {
-  static let audioRecorderManagerMeteringLevelDidUpdateNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidUpdateNotification")
-  static let audioRecorderManagerMeteringLevelDidFinishNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidFinishNotification")
-  static let audioRecorderManagerMeteringLevelDidFailNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidFailNotification")
+    static let audioRecorderManagerMeteringLevelDidUpdateNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidUpdateNotification")
+    static let audioRecorderManagerMeteringLevelDidFinishNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidFinishNotification")
+    static let audioRecorderManagerMeteringLevelDidFailNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidFailNotification")
 }
 
 public class WaveformExtractor {
-  public private(set) var audioFile: AVAudioFile?
-  private var result: RCTPromiseResolveBlock
-  var flutterChannel: AudioWaveform
-  private var waveformData = Array<Float>()
-  var progress: Float = 0.0
-  var channelCount: Int = 1
-  private var currentProgress: Float = 0.0
-  private let abortWaveformDataQueue = DispatchQueue(label: "WaveformExtractor",attributes: .concurrent)
-
-  private var _abortGetWaveformData: Bool = false
-
-  public var abortGetWaveformData: Bool {
-    get { _abortGetWaveformData }
-    set {
-      _abortGetWaveformData = newValue
-    }
-}
-  init(url: URL, channel: AudioWaveform, resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) throws {
-    audioFile = try AVAudioFile(forReading: url)
-    result = resolve
-    self.flutterChannel = channel
-  }
-
-  deinit {
-    audioFile = nil
-  }
-
-  public func extractWaveform(samplesPerPixel: Int?,
-                              offset: Int? = 0,
-                              length: UInt? = nil, playerKey: String) -> FloatChannelData?
-  {
-    guard let audioFile = audioFile else { return nil }
-
-    /// prevent division by zero, + minimum resolution
-    let samplesPerPixel = max(1, samplesPerPixel ?? 100)
-
-    let currentFrame = audioFile.framePosition
-
-    let totalFrameCount = AVAudioFrameCount(audioFile.length)
-    var framesPerBuffer: AVAudioFrameCount = totalFrameCount / AVAudioFrameCount(samplesPerPixel)
+    public private(set) var audioFile: AVAudioFile?
+    private var result: RCTPromiseResolveBlock
+    var flutterChannel: AudioWaveform
+    private var waveformData = Array<Float>()
+    var progress: Float = 0.0
+    var channelCount: Int = 1
+    private var currentProgress: Float = 0.0
+    private let abortWaveformDataQueue = DispatchQueue(label: "WaveformExtractor",attributes: .concurrent)
 
-    guard let rmsBuffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
-                                           frameCapacity: AVAudioFrameCount(framesPerBuffer)) else { return nil }
+    private var _abortGetWaveformData: Bool = false
 
-    channelCount = Int(audioFile.processingFormat.channelCount)
-    var data = Array(repeating: [Float](zeros: samplesPerPixel), count: channelCount)
-
-    var start: Int
-    if let offset = offset, offset >= 0 {
-      start = offset
-    } else {
-      start = Int(currentFrame / Int64(framesPerBuffer))
-      if let offset = offset, offset < 0 {
-        start += offset
-      }
-
-      if start < 0 {
-        start = 0
-      }
+    public var abortGetWaveformData: Bool {
+        get { _abortGetWaveformData }
+        set {
+            _abortGetWaveformData = newValue
+        }
     }
-    var startFrame: AVAudioFramePosition = offset == nil ? currentFrame : Int64(start * Int(framesPerBuffer))
-
-    var end = samplesPerPixel
-    if let length = length {
-      end = start + Int(length)
+    init(url: URL, channel: AudioWaveform, resolve: @escaping RCTPromiseResolveBlock, rejecter reject: RCTPromiseRejectBlock) throws {
+        audioFile = try AVAudioFile(forReading: url)
+        result = resolve
+        self.flutterChannel = channel
     }
 
-    if end > samplesPerPixel {
-      end = samplesPerPixel
-    }
-    if start > end {
-      let resultsDict = ["code": Constants.audioWaveforms, "message": "offset is larger than total length. Please select less number of samples"] as [String : Any];
-      result([resultsDict])
-
-      return nil
+    deinit {
+        audioFile = nil
     }
 
-    for i in start ..< end {
-
-      if abortGetWaveformData {
-        audioFile.framePosition = currentFrame
-        abortGetWaveformData = false
-        return nil
-      }
-
-      do {
-        audioFile.framePosition = startFrame
-        /// Read portion of the buffer
-        try audioFile.read(into: rmsBuffer, frameCount: framesPerBuffer)
+    public func extractWaveform(samplesPerPixel: Int?,
+                                offset: Int? = 0,
+                                length: UInt? = nil, playerKey: String) -> FloatChannelData?
+    {
+        guard let audioFile = audioFile else { return nil }
+
+        /// Prevent division by zero and enforce a minimum resolution
+        let samplesPerPixel = max(1, samplesPerPixel ?? 100)
 
-      } catch let err as NSError {
-        let resultsDict = ["code": Constants.audioWaveforms, "message": "Couldn't read into buffer. \(err)"] as [String : Any];
-        result([resultsDict])
+        let currentFrame = audioFile.framePosition
 
-        return nil
-      }
-
-      guard let floatData = rmsBuffer.floatChannelData else { return nil }
-      /// Calculating RMS(Root mean square)
-      for channel in 0 ..< channelCount {
-        var rms: Float = 0.0
-        vDSP_rmsqv(floatData[channel], 1, &rms, vDSP_Length(rmsBuffer.frameLength))
-        data[channel][i] = rms
+        let totalFrameCount = AVAudioFrameCount(audioFile.length)
+        var framesPerBuffer: AVAudioFrameCount = totalFrameCount / AVAudioFrameCount(samplesPerPixel)
 
-      }
-
-      /// Update progress
-      currentProgress += 1
-      progress = currentProgress / Float(samplesPerPixel)
-
-      /// Send to RN channel
-      self.sendEvent(withName: Constants.onCurrentExtractedWaveformData, body:[Constants.waveformData: getChannelMean(data: data) as Any, Constants.progress: progress, Constants.playerKey: playerKey])
-
-      startFrame += AVAudioFramePosition(framesPerBuffer)
-
-      if startFrame + AVAudioFramePosition(framesPerBuffer) > totalFrameCount {
-        framesPerBuffer = totalFrameCount - AVAudioFrameCount(startFrame)
-        if framesPerBuffer <= 0 { break }
-      }
+        guard let rmsBuffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
+                                               frameCapacity: AVAudioFrameCount(framesPerBuffer)) else { return nil }
+
+        channelCount = Int(audioFile.processingFormat.channelCount)
+        var data = Array(repeating: [Float](zeros: samplesPerPixel), count: channelCount)
+
+        var start: Int
+        if let offset = offset, offset >= 0 {
+            start = offset
+        } else {
+            start = Int(currentFrame / Int64(framesPerBuffer))
+            if let offset = offset, offset < 0 {
+                start += offset
+            }
+
+            if start < 0 {
+                start = 0
+            }
+        }
+        var startFrame: AVAudioFramePosition = offset == nil ? currentFrame : Int64(start * Int(framesPerBuffer))
+
+        var end = samplesPerPixel
+        if let length = length {
+            end = start + Int(length)
+        }
+
+        if end > samplesPerPixel {
+            end = samplesPerPixel
+        }
+        if start > end {
+            let resultsDict = ["code": Constants.audioWaveforms, "message": "offset is larger than total length. Please select a smaller number of samples"] as [String : Any];
+            result([resultsDict])
+
+            return nil
+        }
+
+        for i in start ..< end {
+
+            if abortGetWaveformData {
+                audioFile.framePosition = currentFrame
+                abortGetWaveformData = false
+                return nil
+            }
+
+            do {
+                audioFile.framePosition = startFrame
+                /// Read a portion of the buffer
+                try audioFile.read(into: rmsBuffer, frameCount: framesPerBuffer)
+
+            } catch let err as NSError {
+                let resultsDict = ["code": Constants.audioWaveforms, "message": "Couldn't read into buffer. \(err)"] as [String : Any];
+                result([resultsDict])
+
+                return nil
+            }
+
+            guard let floatData = rmsBuffer.floatChannelData else { return nil }
+            /// Calculating RMS (root mean square)
+            for channel in 0 ..< channelCount {
+                var rms: Float = 0.0
+                vDSP_rmsqv(floatData[channel], 1, &rms, vDSP_Length(rmsBuffer.frameLength))
+                data[channel][i] = rms
+
+            }
+
+            /// Update progress
+            currentProgress += 1
+            progress = currentProgress / Float(samplesPerPixel)
+
+            /// Send to RN channel
+            self.sendEvent(withName: Constants.onCurrentExtractedWaveformData, body:[Constants.waveformData: getChannelMean(data: data) as Any, Constants.progress: progress, Constants.playerKey: playerKey])
+
+            startFrame += AVAudioFramePosition(framesPerBuffer)
+
+            if startFrame + AVAudioFramePosition(framesPerBuffer) > totalFrameCount {
+                framesPerBuffer = totalFrameCount - AVAudioFrameCount(startFrame)
+                if framesPerBuffer <= 0 { break }
+            }
+        }
+
+        audioFile.framePosition = currentFrame
+
+        return data
     }
 
-    audioFile.framePosition = currentFrame
+    func sendEvent(withName: String, body: Any?) {
+        EventEmitter.sharedInstance.dispatch(name: withName, body: body)
+    }
 
-    return data
-  }
-
-  func sendEvent(withName: String, body: Any?) {
-    EventEmitter.sharedInstance.dispatch(name: withName, body: body)
-  }
-
-  func getChannelMean(data: FloatChannelData) -> [Float] {
-    waveformData.removeAll()
-    if(channelCount == 2 && data[0].isEmpty == false && data[1].isEmpty == false) {
-      for (ele1, ele2) in zip(data[0], data[1]) {
-        waveformData.append((ele1 + ele2) / 2)
-      }
-    } else if(data[0].isEmpty == false) {
-      waveformData = data[0]
+    func getChannelMean(data: FloatChannelData) -> [Float] {
+        waveformData.removeAll()
+        if(channelCount == 2 && data[0].isEmpty == false && data[1].isEmpty == false) {
+            for (ele1, ele2) in zip(data[0], data[1]) {
+                waveformData.append((ele1 + ele2) / 2)
+            }
+        } else if(data[0].isEmpty == false) {
+            waveformData = data[0]
+        }
+        else if (data[1].isEmpty == false) {
+            waveformData = data[1]
+        } else {
+            let resultsDict = ["code": Constants.audioWaveforms, "message": "Cannot get waveform mean. Both audio channels are empty"] as [String : Any];
+            result([resultsDict])
+        }
+        return waveformData
     }
-    else if (data[1].isEmpty == false) {
-      waveformData = data[1]
-    } else {
-      let resultsDict = ["code": Constants.audioWaveforms, "message": "Can not get waveform mean. Both audio channels are null"] as [String : Any];
-      result([resultsDict])
+
+    public func cancel() {
+        abortGetWaveformData = true
     }
-    return waveformData
-  }
-
-  public func cancel() {
-    abortGetWaveformData = true
-  }
 }
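For reference, a sketch of the JS-side listener for the extraction events sent by `sendEvent` above, assuming the payload keys used there (`waveformData`, `progress`, `playerKey`); the library's own `onCurrentExtractedWaveformData` hook wraps a subscription like this.

```ts
import { NativeEventEmitter, NativeModules } from 'react-native';

// Payload shape follows the body built in WaveformExtractor.extractWaveform.
type ExtractionEvent = {
  waveformData: number[]; // channel-averaged RMS values computed so far
  progress: number; // currentProgress / samplesPerPixel, in 0..1
  playerKey: string; // identifies which extractor emitted the event
};

const emitter = new NativeEventEmitter(NativeModules.AudioWaveformsEventEmitter);
emitter.addListener('onCurrentExtractedWaveformData', (e: ExtractionEvent) => {
  if (e.playerKey === 'myPlayer') {
    console.log(`extraction ${Math.round(e.progress * 100)}% complete`);
  }
});
```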
diff --git a/src/components/Waveform/Waveform.tsx b/src/components/Waveform/Waveform.tsx
index cef932b..dc563a8 100644
--- a/src/components/Waveform/Waveform.tsx
+++ b/src/components/Waveform/Waveform.tsx
@@ -23,7 +23,6 @@ import {
   PermissionStatus,
   playbackSpeedThreshold,
   PlayerState,
-  RecorderState,
   UpdateFrequency,
 } from '../../constants';
 import {
@@ -80,7 +79,6 @@ export const Waveform = forwardRef((props, ref) => {
   const [currentProgress, setCurrentProgress] = useState(0);
   const [panMoving, setPanMoving] = useState(false);
   const [playerState, setPlayerState] = useState(PlayerState.stopped);
-  const [recorderState, setRecorderState] = useState(RecorderState.stopped);
   const [isWaveformExtracted, setWaveformExtracted] = useState(false);
   const audioSpeed: number =
     playbackSpeed > playbackSpeedThreshold ? 1.0 : playbackSpeed;
@@ -95,13 +93,18 @@ export const Waveform = forwardRef((props, ref) => {
     pausePlayer,
     onCurrentDuration,
     onDidFinishPlayingAudio,
-    onCurrentRecordingWaveformData,
     setPlaybackSpeed,
     markPlayerAsUnmounted,
   } = useAudioPlayer();
 
-  const { startRecording, stopRecording, pauseRecording, resumeRecording } =
-    useAudioRecorder();
+  const {
+    startRecording,
+    stopRecording,
+    pauseRecording,
+    resumeRecording,
+    onCurrentRecordingWaveformData,
+    recorderState,
+  } = useAudioRecorder();
 
   const { checkHasAudioRecorderPermission } = useAudioPermission();
 
@@ -316,13 +319,7 @@ export const Waveform = forwardRef((props, ref) => {
       const hasPermission = await checkHasAudioRecorderPermission();
 
       if (hasPermission === PermissionStatus.granted) {
-        const start = await startRecording(args);
-        if (!isNil(start) && start) {
-          setRecorderState(RecorderState.recording);
-          return Promise.resolve(true);
-        } else {
-          return Promise.reject(new Error('error in start recording action'));
-        }
+        return await startRecording(args);
       } else {
         return Promise.reject(
           new Error(
@@ -344,19 +341,9 @@ export const Waveform = forwardRef((props, ref) => {
     if (mode === 'live') {
       try {
         const data = await stopRecording();
-        if (!isNil(data) && !isEmpty(data)) {
-          setWaveform([]);
-          const pathData = head(data);
-          if (!isNil(pathData)) {
-            setRecorderState(RecorderState.stopped);
-            return Promise.resolve(pathData);
-          } else {
-            return Promise.reject(
-              new Error(
-                'error in stopping recording. can not get path of recording'
-              )
-            );
-          }
+        setWaveform([]);
+        const pathData = head(data);
+        if (!isNil(pathData)) {
+          return Promise.resolve(pathData);
         } else {
           return Promise.reject(
             new Error(
@@ -364,6 +351,7 @@ export const Waveform = forwardRef((props, ref) => {
             )
           );
         }
       } catch (err) {
         return Promise.reject(err);
       }
@@ -377,13 +365,7 @@ export const Waveform = forwardRef((props, ref) => {
   const pauseRecordingAction = async () => {
     if (mode === 'live') {
       try {
-        const pause = await pauseRecording();
-        if (!isNil(pause) && pause) {
-          setRecorderState(RecorderState.paused);
-          return Promise.resolve(pause);
-        } else {
-          return Promise.reject(new Error('Error in pausing recording audio'));
-        }
+        return await pauseRecording();
       } catch (err) {
         return Promise.reject(err);
       }
@@ -399,13 +381,7 @@ export const Waveform = forwardRef((props, ref) => {
     try {
       const hasPermission = await checkHasAudioRecorderPermission();
       if (hasPermission === PermissionStatus.granted) {
-        const resume = await resumeRecording();
-        if (!isNil(resume)) {
-          setRecorderState(RecorderState.recording);
-          return Promise.resolve(resume);
-        } else {
-          return Promise.reject(new Error('Error in resume recording'));
-        }
+        return await resumeRecording();
       } else {
         return Promise.reject(
           new Error(
diff --git a/src/hooks/useAudioPlayer.tsx b/src/hooks/useAudioPlayer.tsx
index c231c65..cfe7b00 100644
--- a/src/hooks/useAudioPlayer.tsx
+++ b/src/hooks/useAudioPlayer.tsx
@@ -7,7 +7,6 @@ import {
   type IGetDuration,
   type IOnCurrentDurationChange,
   type IOnCurrentExtractedWaveForm,
-  type IOnCurrentRecordingWaveForm,
   type IPausePlayer,
   type IPreparePlayer,
   type ISeekPlayer,
@@ -71,14 +70,6 @@ export const useAudioPlayer = () => {
       result => callback(result)
     );
 
-  const onCurrentRecordingWaveformData = (
-    callback: (result: IOnCurrentRecordingWaveForm) => void
-  ) =>
-    audioPlayerEmitter.addListener(
-      NativeEvents.onCurrentRecordingWaveformData,
-      result => callback(result)
-    );
-
   const setPlaybackSpeed = (args: ISetPlaybackSpeed) =>
     AudioWaveform.setPlaybackSpeed(args);
 
@@ -99,7 +90,6 @@ export const useAudioPlayer = () => {
     onCurrentDuration,
     onCurrentExtractedWaveformData,
     getDuration,
-    onCurrentRecordingWaveformData,
     setPlaybackSpeed,
     markPlayerAsUnmounted,
     stopAllWaveFormExtractors,
diff --git a/src/hooks/useAudioRecorder.tsx b/src/hooks/useAudioRecorder.tsx
index 092414f..c4b0181 100644
--- a/src/hooks/useAudioRecorder.tsx
+++ b/src/hooks/useAudioRecorder.tsx
@@ -1,23 +1,76 @@
+import { isEmpty, isNil } from 'lodash';
+import { useState } from 'react';
+import { NativeEventEmitter, NativeModules } from 'react-native';
 import { AudioWaveform } from '../AudioWaveform';
-import type { IStartRecording } from '../types';
+import { NativeEvents, RecorderState } from '../constants';
+import type { IOnCurrentRecordingWaveForm, IStartRecording } from '../types';
 
 export const useAudioRecorder = () => {
-  const startRecording = (args?: Partial<IStartRecording>) =>
-    AudioWaveform.startRecording(args);
+  const [recorderState, setRecorderState] = useState(RecorderState.stopped);
 
-  const stopRecording = () => AudioWaveform.stopRecording();
+  const audioPlayerEmitter = new NativeEventEmitter(
+    NativeModules.AudioWaveformsEventEmitter
+  );
 
-  const pauseRecording = () => AudioWaveform.pauseRecording();
+  const startRecording = async (args?: Partial<IStartRecording>) => {
+    const start = await AudioWaveform.startRecording(args);
+    if (!isNil(start) && start) {
+      setRecorderState(RecorderState.recording);
+      return Promise.resolve(true);
+    } else {
+      return Promise.reject(new Error('Error in starting recording'));
+    }
+  };
+
+  const stopRecording = async () => {
+    const data = await AudioWaveform.stopRecording();
+    if (!isNil(data) && !isEmpty(data)) {
+      setRecorderState(RecorderState.stopped);
+      return Promise.resolve(data);
+    } else {
+      return Promise.reject(
+        new Error('Error in stopping recording: cannot get path of recording')
+      );
+    }
+  };
+
+  const pauseRecording = async () => {
+    const pause = await AudioWaveform.pauseRecording();
+    if (!isNil(pause) && pause) {
+      setRecorderState(RecorderState.paused);
+      return Promise.resolve(pause);
+    } else {
+      return Promise.reject(new Error('Error in pausing recording'));
+    }
+  };
+
+  const resumeRecording = async () => {
+    const resume = await AudioWaveform.resumeRecording();
+    if (!isNil(resume)) {
+      setRecorderState(RecorderState.recording);
+      return Promise.resolve(resume);
+    } else {
+      return Promise.reject(new Error('Error in resuming recording'));
+    }
+  };
 
-  const resumeRecording = () => AudioWaveform.resumeRecording();
+  const getDecibel = async () => await AudioWaveform.getDecibel();
 
-  const getDecibel = () => AudioWaveform.getDecibel();
+  const onCurrentRecordingWaveformData = (
+    callback: (result: IOnCurrentRecordingWaveForm) => void
+  ) =>
+    audioPlayerEmitter.addListener(
+      NativeEvents.onCurrentRecordingWaveformData,
+      result => callback(result)
+    );
 
   return {
+    recorderState,
     getDecibel,
     pauseRecording,
     resumeRecording,
     startRecording,
     stopRecording,
+    onCurrentRecordingWaveformData,
   };
 };
diff --git a/src/index.ts b/src/index.ts
index d056c9c..0626586 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -10,4 +10,4 @@ export {
   RecorderState,
   UpdateFrequency,
 } from './constants';
-export { useAudioPermission, useAudioPlayer } from './hooks';
+export { useAudioPermission, useAudioPlayer, useAudioRecorder } from './hooks';
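With `useAudioRecorder` now exported from the package root, recording state and the live waveform stream can be consumed directly, without the bookkeeping removed from Waveform.tsx. A minimal usage sketch (component name and package specifier are illustrative):

```tsx
import React, { useEffect } from 'react';
// Import from the package root (re-exported in src/index.ts above);
// the package name shown here is illustrative.
import { useAudioRecorder } from '@simform_solutions/react-native-audio-waveform';

export const RecorderExample: React.FC = () => {
  const {
    recorderState,
    startRecording,
    stopRecording,
    onCurrentRecordingWaveformData,
  } = useAudioRecorder();

  useEffect(() => {
    // Recorder waveform events now come from the recorder hook,
    // not the player hook.
    const subscription = onCurrentRecordingWaveformData(result => {
      console.log('live waveform sample:', result);
    });
    return () => subscription.remove();
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // recorderState is tracked inside the hook; startRecording()/stopRecording()
  // resolve or reject themselves, so callers can simply await them.
  console.log('recorder state:', recorderState);

  return null;
};
```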