Commit 8f97a05

✨ Add legacy normalization and update stopAllPlayers behaviour
1 parent 58bfa4f commit 8f97a05

14 files changed (+114, -64 lines)

CHANGELOG.md

Lines changed: 5 additions & 0 deletions
@@ -1,3 +1,8 @@
+## 1.0.2 (Unreleased)
+
+- Calling `stopAllPlayers` is no longer mandatory for disposing streams, and it no longer disposes controllers. Streams are disposed along with the last remaining player and can be re-initialised by creating a new `PlayerController`.
+- Added legacy normalization; this fixes [#144](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/144).
+
 ## 1.0.1

 - Added `onCompletion` stream to get event when audio is finished playing.
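
For context, a minimal Dart sketch of what these entries mean for users of the package, based on the example changes in this commit (the tearDownPlayer helper is illustrative only):

    import 'package:audio_waveforms/audio_waveforms.dart';

    // Opt in to the legacy decibel normalization added in 1.0.2.
    final recorderController = RecorderController(useLegacyNormalization: true);

    // Players can now be disposed individually; once the last remaining
    // player is disposed, the shared streams are released as well, so
    // calling stopAllPlayers() first is optional rather than mandatory.
    final playerController = PlayerController();

    void tearDownPlayer() {
      playerController.dispose();
    }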

android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt

Lines changed: 19 additions & 18 deletions
@@ -12,14 +12,26 @@ import io.flutter.plugin.common.MethodChannel
 import io.flutter.plugin.common.PluginRegistry
 import java.io.IOException
 import java.lang.IllegalStateException
+import kotlin.math.log10

 private const val LOG_TAG = "AudioWaveforms"
 private const val RECORD_AUDIO_REQUEST_CODE = 1001

 class AudioRecorder : PluginRegistry.RequestPermissionsResultListener {
     private var permissions = arrayOf(Manifest.permission.RECORD_AUDIO)
+    private var useLegacyNormalization = false
+
     fun getDecibel(result: MethodChannel.Result, recorder: MediaRecorder?) {
-        result.success(recorder?.maxAmplitude?.toDouble() ?: 0.0)
+        if (useLegacyNormalization) {
+            val db = 20 * log10(((recorder?.maxAmplitude?.toDouble() ?: (0.0 / 32768.0))))
+            if (db == Double.NEGATIVE_INFINITY) {
+                Log.d(LOG_TAG, "Microphone might be turned off")
+            } else {
+                result.success(db)
+            }
+        } else {
+            result.success(recorder?.maxAmplitude?.toDouble() ?: 0.0)
+        }
     }

     fun initRecorder(
@@ -62,8 +74,9 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener {
         }
     }

-    fun startRecorder(result: MethodChannel.Result, recorder: MediaRecorder?) {
+    fun startRecorder(result: MethodChannel.Result, recorder: MediaRecorder?, useLegacy: Boolean) {
         try {
+            useLegacyNormalization = useLegacy
             recorder?.start()
             result.success(true)
         } catch (e: IllegalStateException) {
@@ -137,14 +150,8 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener {
                     MediaRecorder.AudioEncoder.AAC
                 }
             }
-            Constants.vorbis -> {
-                return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
-                    MediaRecorder.AudioEncoder.VORBIS
-                } else {
-                    Log.e(LOG_TAG, "Minimum android Q is required, Setting Acc encoder.")
-                    MediaRecorder.AudioEncoder.AAC
-                }
-            }
+            Constants.vorbis -> return MediaRecorder.AudioEncoder.VORBIS
+
             else -> return MediaRecorder.AudioEncoder.AAC
         }
     }
@@ -164,14 +171,8 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener {

             Constants.amr_wb -> return MediaRecorder.OutputFormat.AMR_WB
             Constants.amr_nb -> return MediaRecorder.OutputFormat.AMR_NB
-            Constants.webm -> {
-                return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
-                    MediaRecorder.OutputFormat.WEBM
-                } else {
-                    Log.e(LOG_TAG, "Minimum android Q is required, Setting MPEG_4 output format.")
-                    MediaRecorder.OutputFormat.MPEG_4
-                }
-            }
+            Constants.webm ->
+                return MediaRecorder.OutputFormat.WEBM
             Constants.mpeg_2_ts -> {
                 return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                     MediaRecorder.OutputFormat.MPEG_2_TS
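
The legacy branch of getDecibel above converts MediaRecorder.maxAmplitude into decibels. For reference, a minimal Dart sketch of the conventional dBFS formula such normalization is based on, assuming 16-bit PCM with a full-scale value of 32768 (the function name is illustrative, not part of the plugin):

    import 'dart:math';

    /// Converts a raw 16-bit amplitude (0..32767) to decibels relative to
    /// full scale; an amplitude of 0 yields negative infinity.
    double amplitudeToDbfs(double maxAmplitude) {
      // dart:math only has the natural log, so divide by ln10 to get log10.
      return 20 * (log(maxAmplitude / 32768.0) / ln10);
    }

    void main() {
      print(amplitudeToDbfs(32768));  // 0.0 dBFS (full scale)
      print(amplitudeToDbfs(3276.8)); // about -20 dBFS
    }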

android/src/main/kotlin/com/simform/audio_waveforms/AudioWaveformsPlugin.kt

Lines changed: 5 additions & 1 deletion
@@ -54,7 +54,11 @@ class AudioWaveformsPlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
                 bitRate = (call.argument(Constants.bitRate) as Int?)
                 checkPathAndInitialiseRecorder(result, encoder, outputFormat, sampleRate, bitRate)
             }
-            Constants.startRecording -> audioRecorder.startRecorder(result, recorder)
+            Constants.startRecording -> {
+                var useLegacyNormalization =
+                    (call.argument(Constants.useLegacyNormalization) as Boolean?) ?: false
+                audioRecorder.startRecorder(result, recorder, useLegacyNormalization)
+            }
             Constants.stopRecording -> {
                 audioRecorder.stopRecording(result, recorder, path!!)
                 recorder = null

android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt

Lines changed: 1 addition & 0 deletions
@@ -61,6 +61,7 @@ object Constants {
     const val noOfSamples = "noOfSamples"
     const val onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData"
     const val waveformData = "waveformData"
+    const val useLegacyNormalization = "useLegacyNormalization"
 }

 enum class FinishMode(val value:Int) {

example/lib/chat_bubble.dart

Lines changed: 0 additions & 5 deletions
@@ -53,7 +53,6 @@ class WaveBubble extends StatefulWidget {
   final int? index;
   final String? path;
   final double? width;
-  final bool isLastWidget;
   final Directory appDirectory;

   const WaveBubble({
@@ -63,7 +62,6 @@ class WaveBubble extends StatefulWidget {
     this.index,
     this.isSender = false,
     this.path,
-    this.isLastWidget = false,
   }) : super(key: key);

   @override
@@ -124,9 +122,6 @@ class _WaveBubbleState extends State<WaveBubble> {
   @override
   void dispose() {
     playerStateSubscription.cancel();
-    if (widget.isLastWidget) {
-      controller.stopAllPlayers();
-    }
     controller.dispose();
     super.dispose();
   }

example/lib/main.dart

Lines changed: 2 additions & 6 deletions
@@ -28,7 +28,7 @@ class Home extends StatefulWidget {
   State<Home> createState() => _HomeState();
 }

-class _HomeState extends State<Home> with WidgetsBindingObserver {
+class _HomeState extends State<Home> {
   late final RecorderController recorderController;

   String? path;
@@ -53,7 +53,7 @@ class _HomeState extends State<Home> with WidgetsBindingObserver {
   }

   void _initialiseControllers() {
-    recorderController = RecorderController()
+    recorderController = RecorderController(useLegacyNormalization: true)
       ..androidEncoder = AndroidEncoder.aac
       ..androidOutputFormat = AndroidOutputFormat.mpeg4
       ..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC
@@ -113,8 +113,6 @@ class _HomeState extends State<Home> with WidgetsBindingObserver {
                           index: index + 1,
                           isSender: index.isOdd,
                           width: MediaQuery.of(context).size.width / 2,
-                          isLastWidget:
-                              !isRecordingCompleted || musicFile == null,
                           appDirectory: appDirectory,
                         );
                       },
@@ -124,14 +122,12 @@ class _HomeState extends State<Home> with WidgetsBindingObserver {
                     WaveBubble(
                       path: path,
                       isSender: true,
-                      isLastWidget: isRecordingCompleted && musicFile == null,
                       appDirectory: appDirectory,
                     ),
                   if (musicFile != null)
                     WaveBubble(
                       path: musicFile,
                       isSender: true,
-                      isLastWidget: true,
                       appDirectory: appDirectory,
                     ),
                   SafeArea(

ios/Classes/AudioRecorder.swift

Lines changed: 16 additions & 10 deletions
@@ -5,21 +5,22 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{
     var audioRecorder: AVAudioRecorder?
     var path: String?
     var hasPermission: Bool = false
-    public var meteringLevels: [Float]?
+    var useLegacyNormalization: Bool = false

-    public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ bitRate : Int?,_ fileNameFormat: String){
+    public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ bitRate : Int?,_ fileNameFormat: String, _ useLegacy: Bool?){
+        useLegacyNormalization = useLegacy ?? false
         let settings = [
             AVFormatIDKey: getEncoder(encoder ?? 0),
             AVSampleRateKey: sampleRate ?? 44100,
             AVNumberOfChannelsKey: 1,
             AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
         ]
         let settingsWithBitrate = [
-                AVEncoderBitRateKey: bitRate,
-                AVFormatIDKey: getEncoder(encoder ?? 0),
-                AVSampleRateKey: sampleRate ?? 44100,
-                AVNumberOfChannelsKey: 1,
-                AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
+            AVEncoderBitRateKey: bitRate,
+            AVFormatIDKey: getEncoder(encoder ?? 0),
+            AVSampleRateKey: sampleRate ?? 44100,
+            AVNumberOfChannelsKey: 1,
+            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
         ]

         let options: AVAudioSession.CategoryOptions = [.defaultToSpeaker, .allowBluetooth]
@@ -69,9 +70,14 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{

     public func getDecibel(_ result: @escaping FlutterResult) {
         audioRecorder?.updateMeters()
-        let amp = audioRecorder?.peakPower(forChannel: 0) ?? 0.0
-        let linear = pow(10, amp / 20);
-        result(linear)
+        if(useLegacyNormalization){
+            let amp = audioRecorder?.averagePower(forChannel: 0) ?? 0.0
+            result(amp)
+        } else {
+            let amp = audioRecorder?.peakPower(forChannel: 0) ?? 0.0
+            let linear = pow(10, amp / 20);
+            result(linear)
+        }
     }

     public func checkHasPermission(_ result: @escaping FlutterResult){
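
In the non-legacy branch of getDecibel above, the peak power reported in decibels is converted to a linear amplitude with pow(10, amp / 20); the legacy branch returns the averagePower decibel value as-is. A rough Dart sketch of that dB-to-linear relationship (the helper name is illustrative only):

    import 'dart:math';

    /// Converts a decibel value (AVAudioRecorder metering is roughly
    /// -160..0 dB) into a linear 0..1 amplitude.
    double dbToLinear(double db) => pow(10, db / 20).toDouble();

    void main() {
      print(dbToLinear(0));   // 1.0 (full scale)
      print(dbToLinear(-20)); // 0.1
      print(dbToLinear(-60)); // about 0.001
    }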

ios/Classes/SwiftAudioWaveformsPlugin.swift

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ public class SwiftAudioWaveformsPlugin: NSObject, FlutterPlugin {
         switch call.method {
         case Constants.startRecording:
             audioRecorder.startRecording(result, args?[Constants.path] as? String,
-                                         args?[Constants.encoder] as? Int, args?[Constants.sampleRate] as? Int, args?[Constants.bitRate] as? Int,Constants.fileNameFormat)
+                                         args?[Constants.encoder] as? Int, args?[Constants.sampleRate] as? Int, args?[Constants.bitRate] as? Int,Constants.fileNameFormat, args?[Constants.useLegacyNormalization] as? Bool)
             break
         case Constants.pauseRecording:
             audioRecorder.pauseRecording(result)

ios/Classes/Utils.swift

Lines changed: 1 addition & 0 deletions
@@ -55,6 +55,7 @@ struct Constants {
     static let onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData"
     static let waveformData = "waveformData"
     static let onExtractionProgressUpdate = "onExtractionProgressUpdate"
+    static let useLegacyNormalization = "useLegacyNormalization"
 }

 enum FinishMode : Int{

lib/src/base/audio_waveforms_interface.dart

Lines changed: 14 additions & 9 deletions
@@ -14,17 +14,22 @@ class AudioWaveformsInterface {
     required int sampleRate,
     int? bitRate,
     String? path,
+    bool useLegacyNormalization = false,
   }) async {
     final isRecording = await _methodChannel.invokeMethod(
-        Constants.startRecording,
-        Platform.isIOS
-            ? {
-                Constants.path: path,
-                Constants.encoder: audioFormat,
-                Constants.sampleRate: sampleRate,
-                Constants.bitRate: bitRate,
-              }
-            : null);
+      Constants.startRecording,
+      Platform.isIOS
+          ? {
+              Constants.path: path,
+              Constants.encoder: audioFormat,
+              Constants.sampleRate: sampleRate,
+              Constants.bitRate: bitRate,
+              Constants.useLegacyNormalization: useLegacyNormalization,
+            }
+          : {
+              Constants.useLegacyNormalization: useLegacyNormalization,
+            },
+    );
     return isRecording ?? false;
   }
