diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index eb76ef7cac..0bbe967c5d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -25,7 +25,7 @@ jobs:
       - name: Dart Format Check
         run: dart format lib/ test/ --set-exit-if-changed
       - name: Import Sorter Check
-        run: flutter pub run import_sorter:main --no-comments --exit-if-changed
+        run: dart pub run import_sorter:main --no-comments --exit-if-changed
       - name: Dart Analyze Check
         run: flutter analyze
       - name: Dart Test Check
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5f87f5e592..de9f923488 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,9 @@
 # Changelog
 
+[1.0.7] - 2025-06-10
+* Added `handleCallInterruptionCallbacks` method to provide an option to handle system audio interruptions such as incoming calls or other media playback
+
 [1.0.6] - 2025-05-27
 * [iOS] Added native audio route picker for iOS
 * [Android] Expanded the mapping for audio device types
diff --git a/android/src/main/java/io/getstream/webrtc/flutter/FlutterWebRTCPlugin.java b/android/src/main/java/io/getstream/webrtc/flutter/FlutterWebRTCPlugin.java
index dec7cf9d9d..b399841a81 100644
--- a/android/src/main/java/io/getstream/webrtc/flutter/FlutterWebRTCPlugin.java
+++ b/android/src/main/java/io/getstream/webrtc/flutter/FlutterWebRTCPlugin.java
@@ -45,7 +45,11 @@ public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventC
   public EventChannel.EventSink eventSink;
 
   public FlutterWebRTCPlugin() {
-    sharedSingleton = this;
+    if (sharedSingleton == null) {
+      sharedSingleton = this;
+    } else {
+      Log.w(TAG, "Warning - Multiple plugin instances detected. Keeping existing singleton.");
+    }
   }
 
   public static FlutterWebRTCPlugin sharedSingleton;
diff --git a/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java b/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java
index 2459dd6f3b..890f3fb9c5 100644
--- a/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java
+++ b/android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java
@@ -24,6 +24,7 @@
 import io.getstream.webrtc.flutter.audio.AudioProcessingFactoryProvider;
 import io.getstream.webrtc.flutter.audio.AudioProcessingController;
 import io.getstream.webrtc.flutter.audio.AudioSwitchManager;
+import io.getstream.webrtc.flutter.audio.AudioFocusManager;
 import io.getstream.webrtc.flutter.audio.AudioUtils;
 import io.getstream.webrtc.flutter.audio.LocalAudioTrack;
 // import io.getstream.webrtc.flutter.audio.PlaybackSamplesReadyCallbackAdapter;
@@ -124,6 +125,8 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
 
   private AudioDeviceModule audioDeviceModule;
 
+  private AudioFocusManager audioFocusManager;
+
   private FlutterRTCFrameCryptor frameCryptor;
 
   private Activity activity;
@@ -147,6 +150,10 @@ static private void resultError(String method, String error, Result result) {
   }
 
   void dispose() {
+    if (audioFocusManager != null) {
+      audioFocusManager.setAudioFocusChangeListener(null);
+      audioFocusManager = null;
+    }
     for (final MediaStream mediaStream : localStreams.values()) {
       streamDispose(mediaStream);
       mediaStream.dispose();
@@ -161,6 +168,7 @@ void dispose() {
     }
     mPeerConnectionObservers.clear();
   }
+
   private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList,
                           @Nullable ConstraintsMap androidAudioConfiguration) {
     if (mFactory != null) {
@@ -359,6 +367,44 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
         result.success(null);
         break;
       }
+      case "handleCallInterruptionCallbacks": {
+        String interruptionSource = call.argument("androidInterruptionSource");
+        AudioFocusManager.InterruptionSource source;
+
+        switch (interruptionSource == null ? "" : interruptionSource) {
+          case "audioFocusOnly":
+            source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_ONLY;
+            break;
+          case "telephonyOnly":
+            source = AudioFocusManager.InterruptionSource.TELEPHONY_ONLY;
+            break;
+          case "audioFocusAndTelephony":
+            source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
+            break;
+          default:
+            source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
+            break;
+        }
+
+        audioFocusManager = new AudioFocusManager(context, source);
+        audioFocusManager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
+          @Override
+          public void onInterruptionStart() {
+            ConstraintsMap params = new ConstraintsMap();
+            params.putString("event", "onInterruptionStart");
+            FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
+          }
+
+          @Override
+          public void onInterruptionEnd() {
+            ConstraintsMap params = new ConstraintsMap();
+            params.putString("event", "onInterruptionEnd");
+            FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
+          }
+        });
+        result.success(null);
+        break;
+      }
       case "createPeerConnection": {
         Map<String, Object> constraints = call.argument("constraints");
         Map<String, Object> configuration = call.argument("configuration");
diff --git a/android/src/main/java/io/getstream/webrtc/flutter/audio/AudioFocusManager.java b/android/src/main/java/io/getstream/webrtc/flutter/audio/AudioFocusManager.java
new file mode 100644
index 0000000000..8d4ef6ede3
--- /dev/null
+++ b/android/src/main/java/io/getstream/webrtc/flutter/audio/AudioFocusManager.java
@@ -0,0 +1,216 @@
+package io.getstream.webrtc.flutter.audio;
+
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFocusRequest;
+import android.media.AudioManager;
+import android.os.Build;
+import android.telephony.PhoneStateListener;
+import android.telephony.TelephonyCallback;
+import android.telephony.TelephonyManager;
+import android.util.Log;
+
+public class AudioFocusManager {
+  private static final String TAG = "AudioFocusManager";
+
+  public enum InterruptionSource {
+    AUDIO_FOCUS_ONLY,
+    TELEPHONY_ONLY,
+    AUDIO_FOCUS_AND_TELEPHONY
+  }
+
+  private AudioManager audioManager;
+  private TelephonyManager telephonyManager;
+
+  private PhoneStateListener phoneStateListener;
+  private AudioFocusChangeListener focusChangeListener;
+
+  private TelephonyCallback telephonyCallback;
+  private AudioFocusRequest audioFocusRequest;
+  // Kept so the same listener can be passed to abandonAudioFocus() on pre-O devices.
+  private AudioManager.OnAudioFocusChangeListener systemFocusListener;
+
+  private InterruptionSource interruptionSource;
+  private Context context;
+
+  public interface AudioFocusChangeListener {
+    void onInterruptionStart();
+    void onInterruptionEnd();
+  }
+
+  public AudioFocusManager(Context context) {
+    this(context, InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY);
+  }
+
+  public AudioFocusManager(Context context, InterruptionSource interruptionSource) {
+    this.context = context;
+    this.interruptionSource = interruptionSource;
+
+    if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
+        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
+      audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
+    }
+
+    if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
+        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
+      telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
+    }
+  }
+
+  public void setAudioFocusChangeListener(AudioFocusChangeListener listener) {
+    this.focusChangeListener = listener;
+
+    if (listener != null) {
+      startMonitoring();
+    } else {
+      stopMonitoring();
+    }
+  }
+
+  public void startMonitoring() {
+    if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
+        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
+      requestAudioFocusInternal();
+    }
+
+    if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
+        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
+      registerTelephonyListener();
+    }
+  }
+
+  public void stopMonitoring() {
+    if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
+        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
+      abandonAudioFocusInternal();
+    }
+
+    if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
+        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
+      unregisterTelephonyListener();
+    }
+  }
+
+  private void requestAudioFocusInternal() {
+    if (audioManager == null) {
+      Log.w(TAG, "AudioManager is null, cannot request audio focus");
+      return;
+    }
+
+    systemFocusListener = focusChange -> {
+      switch (focusChange) {
+        case AudioManager.AUDIOFOCUS_LOSS:
+        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+          Log.d(TAG, "Audio focus lost");
+          if (focusChangeListener != null) {
+            focusChangeListener.onInterruptionStart();
+          }
+          break;
+        case AudioManager.AUDIOFOCUS_GAIN:
+          Log.d(TAG, "Audio focus gained");
+          if (focusChangeListener != null) {
+            focusChangeListener.onInterruptionEnd();
+          }
+          break;
+      }
+    };
+
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+      AudioAttributes audioAttributes = new AudioAttributes.Builder()
+          .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
+          .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
+          .build();
+
+      audioFocusRequest = new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
+          .setAudioAttributes(audioAttributes)
+          .setOnAudioFocusChangeListener(systemFocusListener)
+          .build();
+
+      audioManager.requestAudioFocus(audioFocusRequest);
+    } else {
+      audioManager.requestAudioFocus(systemFocusListener,
+          AudioManager.STREAM_VOICE_CALL,
+          AudioManager.AUDIOFOCUS_GAIN);
+    }
+  }
+
+  private void registerTelephonyListener() {
+    if (telephonyManager == null) {
+      Log.w(TAG, "TelephonyManager is null, cannot register telephony listener");
+      return;
+    }
+
+    try {
+      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
+        // Use TelephonyCallback for Android 12+ (API 31+)
+        class CallStateCallback extends TelephonyCallback implements TelephonyCallback.CallStateListener {
+          @Override
+          public void onCallStateChanged(int state) {
+            handleCallStateChange(state);
+          }
+        }
+        telephonyCallback = new CallStateCallback();
+        telephonyManager.registerTelephonyCallback(context.getMainExecutor(), telephonyCallback);
+      } else {
+        // Use PhoneStateListener for older Android versions
+        phoneStateListener = new PhoneStateListener() {
+          @Override
+          public void onCallStateChanged(int state, String phoneNumber) {
+            handleCallStateChange(state);
+          }
+        };
+        telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
+      }
+    } catch (SecurityException e) {
+      // Listening for call state requires READ_PHONE_STATE on API 31+.
+      Log.w(TAG, "Cannot register telephony listener; READ_PHONE_STATE may be missing", e);
+    }
+  }
+
+  private void handleCallStateChange(int state) {
+    if (focusChangeListener == null) {
+      return;
+    }
+
+    switch (state) {
+      case TelephonyManager.CALL_STATE_RINGING:
+      case TelephonyManager.CALL_STATE_OFFHOOK:
Log.d(TAG, "Phone call interruption began"); + focusChangeListener.onInterruptionStart(); + break; + case TelephonyManager.CALL_STATE_IDLE: + Log.d(TAG, "Phone call interruption ended"); + focusChangeListener.onInterruptionEnd(); + break; + } + } + + private void abandonAudioFocusInternal() { + if (audioManager == null) { + return; + } + + int result; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && audioFocusRequest != null) { + result = audioManager.abandonAudioFocusRequest(audioFocusRequest); + } else { + result = audioManager.abandonAudioFocus(null); + } + } + + private void unregisterTelephonyListener() { + if (telephonyManager == null) { + return; + } + + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && telephonyCallback != null) { + telephonyManager.unregisterTelephonyCallback(telephonyCallback); + telephonyCallback = null; + } else if (phoneStateListener != null) { + telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE); + phoneStateListener = null; + } + } +} \ No newline at end of file diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.h b/common/darwin/Classes/FlutterWebRTCPlugin.h index c656746afb..ef8ea50e37 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.h +++ b/common/darwin/Classes/FlutterWebRTCPlugin.h @@ -56,7 +56,9 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler); @property(nonatomic, strong) NSObject* _Nonnull messenger; @property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer; @property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer; +#if TARGET_OS_IPHONE @property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput; +#endif @property (nonatomic, strong) VideoEffectProcessor* _Nullable videoEffectProcessor; @property(nonatomic, strong) NSString* _Nonnull focusMode; diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m index 8e99f8c3e8..fa0122f5f8 100644 --- a/common/darwin/Classes/FlutterWebRTCPlugin.m +++ b/common/darwin/Classes/FlutterWebRTCPlugin.m @@ -106,7 +106,9 @@ @implementation FlutterWebRTCPlugin { id _textures; BOOL _speakerOn; BOOL _speakerOnButPreferBluetooth; +#if TARGET_OS_IPHONE AVAudioSessionPort _preferredInput; +#endif AudioManager* _audioManager; #if TARGET_OS_IPHONE FLutterRTCVideoPlatformViewFactory *_platformViewFactory; @@ -125,7 +127,9 @@ + (FlutterWebRTCPlugin *)sharedSingleton @synthesize messenger = _messenger; @synthesize eventSink = _eventSink; +#if TARGET_OS_IPHONE @synthesize preferredInput = _preferredInput; +#endif @synthesize audioManager = _audioManager; + (void)registerWithRegistrar:(NSObject*)registrar { @@ -250,6 +254,19 @@ - (void)didSessionRouteChange:(NSNotification*)notification { #endif } +- (void)handleInterruption:(NSNotification*)notification { +#if TARGET_OS_IPHONE + NSDictionary* info = notification.userInfo; + AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue]; + + if (type == AVAudioSessionInterruptionTypeBegan) { + postEvent(self.eventSink, @{@"event": @"onInterruptionStart"}); + } else if (type == AVAudioSessionInterruptionTypeEnded) { + postEvent(self.eventSink, @{@"event": @"onInterruptionEnd"}); + } +#endif +} + - (void)initialize:(NSArray*)networkIgnoreMask bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose); @@ -312,6 +329,14 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSArray* names = 
argsMap[@"names"]; [self mediaStreamTrackSetVideoEffects:trackId names:names]; + } else if ([@"handleCallInterruptionCallbacks" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(handleInterruption:) + name:AVAudioSessionInterruptionNotification + object:[AVAudioSession sharedInstance]]; +#endif + result(@""); } else if ([@"createPeerConnection" isEqualToString:call.method]) { NSDictionary* argsMap = call.arguments; NSDictionary* configuration = argsMap[@"configuration"]; diff --git a/ios/stream_webrtc_flutter.podspec b/ios/stream_webrtc_flutter.podspec index 1b6b58e070..8d12dd1542 100644 --- a/ios/stream_webrtc_flutter.podspec +++ b/ios/stream_webrtc_flutter.podspec @@ -3,7 +3,7 @@ # Pod::Spec.new do |s| s.name = 'stream_webrtc_flutter' - s.version = '1.0.6' + s.version = '1.0.7' s.summary = 'Flutter WebRTC plugin for iOS.' s.description = <<-DESC A new flutter plugin project. diff --git a/lib/src/android_interruption_source.dart b/lib/src/android_interruption_source.dart new file mode 100644 index 0000000000..bd34a79d23 --- /dev/null +++ b/lib/src/android_interruption_source.dart @@ -0,0 +1,5 @@ +enum AndroidInterruptionSource { + audioFocusOnly, + telephonyOnly, + audioFocusAndTelephony, +} diff --git a/lib/src/native/factory_impl.dart b/lib/src/native/factory_impl.dart index 0ca8820952..821ae1a862 100644 --- a/lib/src/native/factory_impl.dart +++ b/lib/src/native/factory_impl.dart @@ -1,7 +1,9 @@ import 'dart:async'; +import 'dart:io'; import 'package:webrtc_interface/webrtc_interface.dart'; +import '../android_interruption_source.dart'; import '../desktop_capturer.dart'; import 'desktop_capturer_impl.dart'; import 'frame_cryptor_impl.dart'; @@ -25,6 +27,30 @@ class RTCFactoryNative extends RTCFactory { }); } + Future handleCallInterruptionCallbacks( + void Function()? onInterruptionStart, + void Function()? onInterruptionEnd, { + AndroidInterruptionSource androidInterruptionSource = + AndroidInterruptionSource.audioFocusAndTelephony, + }) async { + if (!Platform.isAndroid && !Platform.isIOS) { + throw UnimplementedError( + 'handleCallInterruptionCallbacks is only supported on Android and iOS'); + } + + await WebRTC.invokeMethod( + 'handleCallInterruptionCallbacks', + { + if (Platform.isAndroid) + 'androidInterruptionSource': androidInterruptionSource.name, + }, + ); + + final mediaDeviceNative = mediaDevices as MediaDeviceNative; + mediaDeviceNative.onInterruptionStart = onInterruptionStart; + mediaDeviceNative.onInterruptionEnd = onInterruptionEnd; + } + @override Future createLocalMediaStream(String label) async { final response = await WebRTC.invokeMethod('createLocalMediaStream'); @@ -105,6 +131,20 @@ Future setVideoEffects( .setVideoEffects(trackId, names); } +Future handleCallInterruptionCallbacks( + void Function()? onInterruptionStart, + void Function()? 
+  AndroidInterruptionSource androidInterruptionSource =
+      AndroidInterruptionSource.audioFocusAndTelephony,
+}) {
+  return (RTCFactoryNative.instance as RTCFactoryNative)
+      .handleCallInterruptionCallbacks(
+    onInterruptionStart,
+    onInterruptionEnd,
+    androidInterruptionSource: androidInterruptionSource,
+  );
+}
+
 Future<RTCPeerConnection> createPeerConnection(
     Map<String, dynamic> configuration,
     [Map<String, dynamic> constraints = const {}]) async {
diff --git a/lib/src/native/mediadevices_impl.dart b/lib/src/native/mediadevices_impl.dart
index 47f585615c..d9599df252 100644
--- a/lib/src/native/mediadevices_impl.dart
+++ b/lib/src/native/mediadevices_impl.dart
@@ -24,9 +24,18 @@
       case 'onDeviceChange':
         ondevicechange?.call(null);
         break;
+      case 'onInterruptionStart':
+        onInterruptionStart?.call();
+        break;
+      case 'onInterruptionEnd':
+        onInterruptionEnd?.call();
+        break;
     }
   }
 
+  Function()? onInterruptionStart;
+  Function()? onInterruptionEnd;
+
   @override
   Future<MediaStream> getUserMedia(
       Map<String, dynamic> mediaConstraints) async {
diff --git a/lib/src/web/factory_impl.dart b/lib/src/web/factory_impl.dart
index 9fe2821b31..a77273ad9d 100644
--- a/lib/src/web/factory_impl.dart
+++ b/lib/src/web/factory_impl.dart
@@ -1,3 +1,4 @@
+import '../android_interruption_source.dart';
 import '../desktop_capturer.dart';
 
 export 'package:dart_webrtc/dart_webrtc.dart'
@@ -11,3 +12,12 @@ Future<void> setVideoEffects(
 }) async {
   throw UnimplementedError('setVideoEffects() is not supported on web');
 }
+
+Future<void> handleCallInterruptionCallbacks(
+  void Function()? onInterruptionStart,
+  void Function()? onInterruptionEnd, {
+  AndroidInterruptionSource? androidInterruptionSource,
+}) {
+  throw UnimplementedError(
+      'handleCallInterruptionCallbacks() is not supported on web');
+}
diff --git a/lib/stream_webrtc_flutter.dart b/lib/stream_webrtc_flutter.dart
index b7dd3a8fc6..283641a0da 100644
--- a/lib/stream_webrtc_flutter.dart
+++ b/lib/stream_webrtc_flutter.dart
@@ -3,6 +3,7 @@ library flutter_webrtc;
 export 'package:webrtc_interface/webrtc_interface.dart'
     hide MediaDevices, MediaRecorder, Navigator;
 
+export 'src/android_interruption_source.dart';
 export 'src/helper.dart';
 export 'src/desktop_capturer.dart';
 export 'src/media_devices.dart';
diff --git a/macos/stream_webrtc_flutter.podspec b/macos/stream_webrtc_flutter.podspec
index 0318e0e3bb..9660d1cd7b 100644
--- a/macos/stream_webrtc_flutter.podspec
+++ b/macos/stream_webrtc_flutter.podspec
@@ -3,7 +3,7 @@
 #
 Pod::Spec.new do |s|
   s.name             = 'stream_webrtc_flutter'
-  s.version          = '1.0.6'
+  s.version          = '1.0.7'
   s.summary          = 'Flutter WebRTC plugin for macOS.'
   s.description      = <<-DESC
 A new flutter plugin project.
diff --git a/pubspec.yaml b/pubspec.yaml
index 399f59586e..53cd44011d 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,6 +1,6 @@
 name: stream_webrtc_flutter
 description: Flutter WebRTC plugin for iOS/Android/Destkop/Web, based on GoogleWebRTC.
-version: 1.0.6
+version: 1.0.7
 homepage: https://github.com/GetStream/webrtc-flutter
 environment:
   sdk: ">=3.3.0 <4.0.0"
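---

Usage sketch (reviewer note, not part of the patch): the snippet below shows how an app might wire up the new API. It assumes `handleCallInterruptionCallbacks` is exported at the package root the same way `createPeerConnection` and `setVideoEffects` are (which the paired native/web `factory_impl.dart` additions suggest); the function name `setUpInterruptionHandling` and the track-muting handler bodies are illustrative, not part of the plugin.

```dart
import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as webrtc;

/// Registers interruption callbacks before a call starts, muting the
/// local audio track while another app or an incoming phone call holds
/// the audio session (iOS) or audio focus / telephony state (Android).
Future<void> setUpInterruptionHandling(
    webrtc.MediaStreamTrack localAudioTrack) async {
  await webrtc.handleCallInterruptionCallbacks(
    () {
      // Interruption began (incoming call, focus loss): stop sending audio.
      localAudioTrack.enabled = false;
    },
    () {
      // Interruption ended: resume sending audio.
      localAudioTrack.enabled = true;
    },
    // Android only; ignored on iOS, which relies on
    // AVAudioSessionInterruptionNotification instead.
    androidInterruptionSource:
        webrtc.AndroidInterruptionSource.audioFocusAndTelephony,
  );
}
```

One deployment caveat worth noting: on Android 12+ (API 31+), registering for call-state updates via `TelephonyCallback` requires the `READ_PHONE_STATE` permission, so apps using `telephonyOnly` or `audioFocusAndTelephony` should declare and request it; with the guard added in `registerTelephonyListener` above, a missing permission downgrades to a logged warning rather than a crash.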