2 changes: 1 addition & 1 deletion .github/workflows/build.yml
@@ -25,7 +25,7 @@ jobs:
- name: Dart Format Check
run: dart format lib/ test/ --set-exit-if-changed
- name: Import Sorter Check
run: flutter pub run import_sorter:main --no-comments --exit-if-changed
run: dart pub run import_sorter:main --no-comments --exit-if-changed
- name: Dart Analyze Check
run: flutter analyze
- name: Dart Test Check
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,9 @@

# Changelog

[1.0.7] - 2025-06-10
* Added `handleCallInterruptionCallbacks` method, providing an option to handle system audio interruptions such as incoming calls or other media playback

[1.0.6] - 2025-05-27
* [iOS] Added native audio route picker for iOS
* [Android] Expanded the mapping for audio device types
@@ -45,7 +45,11 @@ public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventC
public EventChannel.EventSink eventSink;

public FlutterWebRTCPlugin() {
sharedSingleton = this;
if (sharedSingleton == null) {
sharedSingleton = this;
} else {
Log.w(TAG, "Multiple plugin instances detected; keeping existing singleton.");
}
}

public static FlutterWebRTCPlugin sharedSingleton;
@@ -24,6 +24,7 @@
import io.getstream.webrtc.flutter.audio.AudioProcessingFactoryProvider;
import io.getstream.webrtc.flutter.audio.AudioProcessingController;
import io.getstream.webrtc.flutter.audio.AudioSwitchManager;
import io.getstream.webrtc.flutter.audio.AudioFocusManager;
import io.getstream.webrtc.flutter.audio.AudioUtils;
import io.getstream.webrtc.flutter.audio.LocalAudioTrack;
// import io.getstream.webrtc.flutter.audio.PlaybackSamplesReadyCallbackAdapter;
@@ -124,6 +125,8 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {

private AudioDeviceModule audioDeviceModule;

private AudioFocusManager audioFocusManager;

private FlutterRTCFrameCryptor frameCryptor;

private Activity activity;
@@ -147,6 +150,10 @@ static private void resultError(String method, String error, Result result) {
}

void dispose() {
if (audioFocusManager != null) {
audioFocusManager.setAudioFocusChangeListener(null);
audioFocusManager = null;
}
for (final MediaStream mediaStream : localStreams.values()) {
streamDispose(mediaStream);
mediaStream.dispose();
Expand All @@ -161,6 +168,7 @@ void dispose() {
}
mPeerConnectionObservers.clear();
}

private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList,
@Nullable ConstraintsMap androidAudioConfiguration) {
if (mFactory != null) {
@@ -359,6 +367,43 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
result.success(null);
break;
}
case "handleCallInterruptionCallbacks": {
String interruptionSource = call.argument("androidInterruptionSource");
AudioFocusManager.InterruptionSource source;

switch (interruptionSource) {
case "audioFocusOnly":
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_ONLY;
break;
case "telephonyOnly":
source = AudioFocusManager.InterruptionSource.TELEPHONY_ONLY;
break;
case "audioFocusAndTelephony":
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
break;
default:
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
break;
}

        if (audioFocusManager != null) {
          // Detach any previous manager so listeners are not registered twice.
          audioFocusManager.setAudioFocusChangeListener(null);
        }
        audioFocusManager = new AudioFocusManager(context, source);
audioFocusManager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
@Override
public void onInterruptionStart() {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onInterruptionStart");
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}

@Override
public void onInterruptionEnd() {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onInterruptionEnd");
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}
});
        result.success(null);
        break;
      }
case "createPeerConnection": {
Map<String, Object> constraints = call.argument("constraints");
Map<String, Object> configuration = call.argument("configuration");
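
For reference, a short sketch of the method-call contract the new case above accepts. This is illustrative only, not part of the PR: `InterruptionCallDemo` and `buildCall` are hypothetical names, while `MethodCall` is Flutter's Android embedding type. The "androidInterruptionSource" value may be "audioFocusOnly", "telephonyOnly", or "audioFocusAndTelephony" (the default when the key is absent).

import io.flutter.plugin.common.MethodCall;

import java.util.HashMap;
import java.util.Map;

class InterruptionCallDemo {
  // Builds the call that MethodCallHandlerImpl.onMethodCall dispatches to the
  // "handleCallInterruptionCallbacks" case above.
  static MethodCall buildCall(String source) {
    Map<String, Object> args = new HashMap<>();
    args.put("androidInterruptionSource", source);
    return new MethodCall("handleCallInterruptionCallbacks", args);
  }
}

The new AudioFocusManager class that backs this handler follows.
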
@@ -0,0 +1,209 @@
package io.getstream.webrtc.flutter.audio;

import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFocusRequest;
import android.media.AudioManager;
import android.os.Build;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyCallback;
import android.telephony.TelephonyManager;
import android.util.Log;


public class AudioFocusManager {
private static final String TAG = "AudioFocusManager";

public enum InterruptionSource {
AUDIO_FOCUS_ONLY,
TELEPHONY_ONLY,
AUDIO_FOCUS_AND_TELEPHONY
}

private AudioManager audioManager;
private TelephonyManager telephonyManager;

private PhoneStateListener phoneStateListener;
private AudioFocusChangeListener focusChangeListener;

private TelephonyCallback telephonyCallback;
private AudioFocusRequest audioFocusRequest;
private AudioManager.OnAudioFocusChangeListener legacyFocusListener;

private InterruptionSource interruptionSource;
private Context context;

public interface AudioFocusChangeListener {
void onInterruptionStart();
void onInterruptionEnd();
}

public AudioFocusManager(Context context) {
this(context, InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY);
}

public AudioFocusManager(Context context, InterruptionSource interruptionSource) {
this.context = context;
this.interruptionSource = interruptionSource;

if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}

if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
}
}

public void setAudioFocusChangeListener(AudioFocusChangeListener listener) {
this.focusChangeListener = listener;

if (listener != null) {
startMonitoring();
} else {
stopMonitoring();
}
}

public void startMonitoring() {
if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
requestAudioFocusInternal();
}

if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
registerTelephonyListener();
}
}

public void stopMonitoring() {
if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
abandonAudioFocusInternal();
}

if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
unregisterTelephonyListener();
}
}

private void requestAudioFocusInternal() {
if (audioManager == null) {
Log.w(TAG, "AudioManager is null, cannot request audio focus");
return;
}

AudioManager.OnAudioFocusChangeListener onAudioFocusChangeListener = focusChange -> {
switch (focusChange) {
case AudioManager.AUDIOFOCUS_LOSS:
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
Log.d(TAG, "Audio focus lost");
if (focusChangeListener != null) {
focusChangeListener.onInterruptionStart();
}
break;
case AudioManager.AUDIOFOCUS_GAIN:
Log.d(TAG, "Audio focus gained");
if (focusChangeListener != null) {
focusChangeListener.onInterruptionEnd();
}
break;
}
};

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
AudioAttributes audioAttributes = new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.build();

audioFocusRequest = new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
.setAudioAttributes(audioAttributes)
.setOnAudioFocusChangeListener(onAudioFocusChangeListener)
.build();

audioManager.requestAudioFocus(audioFocusRequest);
    } else {
      // Keep a reference so the same listener can later be passed to abandonAudioFocus().
      legacyFocusListener = onAudioFocusChangeListener;
      audioManager.requestAudioFocus(legacyFocusListener,
          AudioManager.STREAM_VOICE_CALL,
          AudioManager.AUDIOFOCUS_GAIN);
    }
}

private void registerTelephonyListener() {
if (telephonyManager == null) {
Log.w(TAG, "TelephonyManager is null, cannot register telephony listener");
return;
}

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
// Use TelephonyCallback for Android 12+ (API 31+)
class CallStateCallback extends TelephonyCallback implements TelephonyCallback.CallStateListener {
@Override
public void onCallStateChanged(int state) {
handleCallStateChange(state);
}
}
telephonyCallback = new CallStateCallback();
telephonyManager.registerTelephonyCallback(context.getMainExecutor(), telephonyCallback);
} else {
// Use PhoneStateListener for older Android versions
phoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String phoneNumber) {
handleCallStateChange(state);
}
};
telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
}
}

private void handleCallStateChange(int state) {
if (focusChangeListener == null) {
return;
}

switch (state) {
case TelephonyManager.CALL_STATE_RINGING:
case TelephonyManager.CALL_STATE_OFFHOOK:
Log.d(TAG, "Phone call interruption began");
focusChangeListener.onInterruptionStart();
break;
case TelephonyManager.CALL_STATE_IDLE:
Log.d(TAG, "Phone call interruption ended");
focusChangeListener.onInterruptionEnd();
break;
}
}

  private void abandonAudioFocusInternal() {
    if (audioManager == null) {
      return;
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && audioFocusRequest != null) {
      audioManager.abandonAudioFocusRequest(audioFocusRequest);
      audioFocusRequest = null;
    } else if (legacyFocusListener != null) {
      // Abandon with the listener used in requestAudioFocus(); passing null
      // would not release the focus held on behalf of that listener.
      audioManager.abandonAudioFocus(legacyFocusListener);
      legacyFocusListener = null;
    }
  }

private void unregisterTelephonyListener() {
if (telephonyManager == null) {
return;
}

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && telephonyCallback != null) {
telephonyManager.unregisterTelephonyCallback(telephonyCallback);
telephonyCallback = null;
} else if (phoneStateListener != null) {
telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE);
phoneStateListener = null;
}
}
}
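
Below is a minimal usage sketch for the new class, assuming an Android Context from the embedding Activity; `AudioFocusManagerDemo` and its methods are hypothetical names. Per setAudioFocusChangeListener above, a non-null listener starts monitoring and a null listener stops it.

import android.content.Context;

class AudioFocusManagerDemo {
  // Attaches interruption callbacks for both audio focus and telephony state.
  static AudioFocusManager attach(Context context) {
    AudioFocusManager manager = new AudioFocusManager(
        context, AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY);
    manager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
      @Override
      public void onInterruptionStart() {
        // e.g. mute local audio tracks while the phone call is active
      }

      @Override
      public void onInterruptionEnd() {
        // e.g. unmute once audio focus returns or the call goes idle
      }
    });
    return manager;
  }

  static void detach(AudioFocusManager manager) {
    // Passing null stops monitoring and abandons audio focus.
    manager.setAudioFocusChangeListener(null);
  }
}
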
2 changes: 2 additions & 0 deletions common/darwin/Classes/FlutterWebRTCPlugin.h
@@ -56,7 +56,9 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
@property(nonatomic, strong) NSObject<FlutterBinaryMessenger>* _Nonnull messenger;
@property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer;
@property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer;
#if TARGET_OS_IPHONE
@property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput;
#endif
@property (nonatomic, strong) VideoEffectProcessor* _Nullable videoEffectProcessor;

@property(nonatomic, strong) NSString* _Nonnull focusMode;
25 changes: 25 additions & 0 deletions common/darwin/Classes/FlutterWebRTCPlugin.m
@@ -106,7 +106,9 @@ @implementation FlutterWebRTCPlugin {
id _textures;
BOOL _speakerOn;
BOOL _speakerOnButPreferBluetooth;
#if TARGET_OS_IPHONE
AVAudioSessionPort _preferredInput;
#endif
AudioManager* _audioManager;
#if TARGET_OS_IPHONE
FLutterRTCVideoPlatformViewFactory *_platformViewFactory;
@@ -125,7 +127,9 @@ + (FlutterWebRTCPlugin *)sharedSingleton

@synthesize messenger = _messenger;
@synthesize eventSink = _eventSink;
#if TARGET_OS_IPHONE
@synthesize preferredInput = _preferredInput;
#endif
@synthesize audioManager = _audioManager;

+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
@@ -250,6 +254,19 @@ - (void)didSessionRouteChange:(NSNotification*)notification {
#endif
}

- (void)handleInterruption:(NSNotification*)notification {
#if TARGET_OS_IPHONE
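  // Mirrors the Android AudioFocusManager callbacks: both platforms emit
  // "onInterruptionStart" / "onInterruptionEnd" on the plugin event channel.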
NSDictionary* info = notification.userInfo;
AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];

if (type == AVAudioSessionInterruptionTypeBegan) {
postEvent(self.eventSink, @{@"event": @"onInterruptionStart"});
} else if (type == AVAudioSessionInterruptionTypeEnded) {
postEvent(self.eventSink, @{@"event": @"onInterruptionEnd"});
}
#endif
}

- (void)initialize:(NSArray*)networkIgnoreMask
bypassVoiceProcessing:(BOOL)bypassVoiceProcessing {
// RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose);
@@ -312,6 +329,14 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
NSArray* names = argsMap[@"names"];

[self mediaStreamTrackSetVideoEffects:trackId names:names];
} else if ([@"handleCallInterruptionCallbacks" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(handleInterruption:)
name:AVAudioSessionInterruptionNotification
object:[AVAudioSession sharedInstance]];
#endif
result(@"");
} else if ([@"createPeerConnection" isEqualToString:call.method]) {
NSDictionary* argsMap = call.arguments;
NSDictionary* configuration = argsMap[@"configuration"];
2 changes: 1 addition & 1 deletion ios/stream_webrtc_flutter.podspec
@@ -3,7 +3,7 @@
#
Pod::Spec.new do |s|
s.name = 'stream_webrtc_flutter'
s.version = '1.0.6'
s.version = '1.0.7'
s.summary = 'Flutter WebRTC plugin for iOS.'
s.description = <<-DESC
A new flutter plugin project.