3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,9 @@

# Changelog

[1.0.11] - 2025-08-13
* [Android] Added option to configure Android audio attributes in AudioFocusManager

[1.0.10] - 2025-08-07
* [Linux/Windows] added missing cloneTrack method

@@ -137,6 +137,8 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {

public AudioProcessingFactoryProvider audioProcessingFactoryProvider;

private ConstraintsMap initializedAndroidAudioConfiguration;

MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry) {
this.context = context;
this.textures = textureRegistry;
@@ -175,6 +177,8 @@ private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, bo
return;
}

this.initializedAndroidAudioConfiguration = androidAudioConfiguration;

PeerConnectionFactory.initialize(
InitializationOptions.builder(context)
.setEnableInternalTracer(true)
@@ -387,7 +391,28 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
break;
}

audioFocusManager = new AudioFocusManager(context, source);
Integer usage = null, content = null;

// Prefer override values if provided, else fall back to the persisted config
String overrideUsageStr = call.argument("androidAudioAttributesUsageType");
String overrideContentStr = call.argument("androidAudioAttributesContentType");

if (overrideUsageStr != null) {
usage = AudioUtils.getAudioAttributesUsageTypeForString(overrideUsageStr);
} else if (initializedAndroidAudioConfiguration != null) {
usage = AudioUtils.getAudioAttributesUsageTypeForString(
initializedAndroidAudioConfiguration.getString("androidAudioAttributesUsageType"));
}

if (overrideContentStr != null) {
content = AudioUtils.getAudioAttributesContentTypeFromString(overrideContentStr);
} else if (initializedAndroidAudioConfiguration != null) {
content = AudioUtils.getAudioAttributesContentTypeFromString(
initializedAndroidAudioConfiguration.getString("androidAudioAttributesContentType"));
}

audioFocusManager = new AudioFocusManager(context, source, usage, content);

audioFocusManager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
@Override
public void onInterruptionStart() {
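
For reference, the precedence implemented in this hunk is: attribute values supplied with the call's arguments win, and the audio configuration stored during initialize() is only consulted when a key is absent; if neither is set, the defaults USAGE_VOICE_COMMUNICATION / CONTENT_TYPE_SPEECH apply. A minimal sketch of the argument map the Dart side sends (key names are taken from this diff; the 'media'/'music' values are illustrative and assume members of the existing AndroidAudioAttributesUsageType / AndroidAudioAttributesContentType enums):

// Sketch only: per-call overrides travel as enum names over the method channel.
// Omitting a key makes the Java side fall back to the configuration persisted
// at initialize(), and finally to the voice-call defaults above.
final arguments = <String, dynamic>{
  'androidInterruptionSource':
      AndroidInterruptionSource.audioFocusAndTelephony.name,
  'androidAudioAttributesUsageType': 'media', // assumed enum name
  'androidAudioAttributesContentType': 'music', // assumed enum name
};
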
@@ -33,18 +33,27 @@ public enum InterruptionSource {
private InterruptionSource interruptionSource;
private Context context;

private Integer focusUsageType; // AudioAttributes.USAGE_*
private Integer focusContentType; // AudioAttributes.CONTENT_TYPE_*

public interface AudioFocusChangeListener {
void onInterruptionStart();
void onInterruptionEnd();
}

public AudioFocusManager(Context context) {
this(context, InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY);
this(context, InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY, null, null);
}

public AudioFocusManager(Context context, InterruptionSource interruptionSource) {
this(context, interruptionSource, null, null);
}

public AudioFocusManager(Context context, InterruptionSource interruptionSource, Integer usageType, Integer contentType) {
this.context = context;
this.interruptionSource = interruptionSource;
this.focusUsageType = usageType;
this.focusContentType = contentType;

if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
@@ -117,8 +126,8 @@ private void requestAudioFocusInternal() {

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
AudioAttributes audioAttributes = new AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.setUsage(focusUsageType != null ? focusUsageType : AudioAttributes.USAGE_VOICE_COMMUNICATION)
.setContentType(focusContentType != null ? focusContentType : AudioAttributes.CONTENT_TYPE_SPEECH)
.build();

audioFocusRequest = new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
@@ -128,12 +137,47 @@ private void requestAudioFocusInternal() {

audioManager.requestAudioFocus(audioFocusRequest);
} else {
int streamType = inferPreOStreamType(focusUsageType, focusContentType);
audioManager.requestAudioFocus(onAudioFocusChangeListener,
AudioManager.STREAM_VOICE_CALL,
streamType,
AudioManager.AUDIOFOCUS_GAIN);
}
}

private int inferPreOStreamType(Integer usageType, Integer contentType) {
if (usageType != null) {
if (usageType == AudioAttributes.USAGE_MEDIA
|| usageType == AudioAttributes.USAGE_GAME
|| usageType == AudioAttributes.USAGE_ASSISTANT) {
return AudioManager.STREAM_MUSIC;
}
if (usageType == AudioAttributes.USAGE_VOICE_COMMUNICATION
|| usageType == AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING) {
return AudioManager.STREAM_VOICE_CALL;
}
if (usageType == AudioAttributes.USAGE_NOTIFICATION
|| usageType == AudioAttributes.USAGE_NOTIFICATION_RINGTONE
|| usageType == AudioAttributes.USAGE_NOTIFICATION_COMMUNICATION_REQUEST) {
return AudioManager.STREAM_NOTIFICATION;
}
if (usageType == AudioAttributes.USAGE_ALARM) {
return AudioManager.STREAM_ALARM;
}
}

if (contentType != null) {
if (contentType == AudioAttributes.CONTENT_TYPE_MUSIC
|| contentType == AudioAttributes.CONTENT_TYPE_MOVIE) {
return AudioManager.STREAM_MUSIC;
}
if (contentType == AudioAttributes.CONTENT_TYPE_SPEECH) {
return AudioManager.STREAM_VOICE_CALL;
}
}

return AudioManager.STREAM_VOICE_CALL;
}

private void registerTelephonyListener() {
if (telephonyManager == null) {
Log.w(TAG, "TelephonyManager is null, cannot register telephony listener");
2 changes: 1 addition & 1 deletion ios/stream_webrtc_flutter.podspec
@@ -3,7 +3,7 @@
#
Pod::Spec.new do |s|
s.name = 'stream_webrtc_flutter'
s.version = '1.0.10'
s.version = '1.0.11'
s.summary = 'Flutter WebRTC plugin for iOS.'
s.description = <<-DESC
A new flutter plugin project.
5 changes: 0 additions & 5 deletions lib/src/android_interruption_source.dart

This file was deleted.

6 changes: 6 additions & 0 deletions lib/src/native/android/audio_configuration.dart
@@ -25,6 +25,12 @@ extension AndroidAudioFocusModeEnumEx on String {
AndroidAudioFocusMode.values.firstWhere((d) => d.name == toLowerCase());
}

enum AndroidInterruptionSource {
audioFocusOnly,
telephonyOnly,
audioFocusAndTelephony,
}

enum AndroidAudioStreamType {
accessibility,
alarm,
14 changes: 13 additions & 1 deletion lib/src/native/factory_impl.dart
@@ -3,8 +3,8 @@ import 'dart:io';

import 'package:webrtc_interface/webrtc_interface.dart';

import '../android_interruption_source.dart';
import '../desktop_capturer.dart';
import 'android/audio_configuration.dart';
import 'desktop_capturer_impl.dart';
import 'frame_cryptor_impl.dart';
import 'media_recorder_impl.dart';
@@ -32,6 +32,8 @@ class RTCFactoryNative extends RTCFactory {
void Function()? onInterruptionEnd, {
AndroidInterruptionSource androidInterruptionSource =
AndroidInterruptionSource.audioFocusAndTelephony,
AndroidAudioAttributesUsageType? androidAudioAttributesUsageType,
AndroidAudioAttributesContentType? androidAudioAttributesContentType,
}) async {
if (!Platform.isAndroid && !Platform.isIOS) {
throw UnimplementedError(
@@ -43,6 +45,12 @@
<String, dynamic>{
if (Platform.isAndroid)
'androidInterruptionSource': androidInterruptionSource.name,
if (Platform.isAndroid && androidAudioAttributesUsageType != null)
'androidAudioAttributesUsageType':
androidAudioAttributesUsageType.name,
if (Platform.isAndroid && androidAudioAttributesContentType != null)
'androidAudioAttributesContentType':
androidAudioAttributesContentType.name,
},
);

@@ -136,12 +144,16 @@ Future<void> handleCallInterruptionCallbacks(
void Function()? onInterruptionEnd, {
AndroidInterruptionSource androidInterruptionSource =
AndroidInterruptionSource.audioFocusAndTelephony,
AndroidAudioAttributesUsageType? androidAudioAttributesUsageType,
AndroidAudioAttributesContentType? androidAudioAttributesContentType,
}) {
return (RTCFactoryNative.instance as RTCFactoryNative)
.handleCallInterruptionCallbacks(
onInterruptionStart,
onInterruptionEnd,
androidInterruptionSource: androidInterruptionSource,
androidAudioAttributesUsageType: androidAudioAttributesUsageType,
androidAudioAttributesContentType: androidAudioAttributesContentType,
);
}
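
For callers, the new parameters slot into the existing top-level helper. A minimal usage sketch, assuming the helper and the AndroidAudioAttributes* enums from audio_configuration.dart are exported from the package entry point; the media/music members are illustrative and do not appear in this diff:

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';

Future<void> setUpInterruptionHandling() async {
  await handleCallInterruptionCallbacks(
    () => print('call interruption started'),
    () => print('call interruption ended'),
    androidInterruptionSource: AndroidInterruptionSource.audioFocusAndTelephony,
    // Optional overrides; when omitted, the attributes supplied at plugin
    // initialization (if any) are used, otherwise the defaults
    // USAGE_VOICE_COMMUNICATION / CONTENT_TYPE_SPEECH apply.
    androidAudioAttributesUsageType: AndroidAudioAttributesUsageType.media,
    androidAudioAttributesContentType: AndroidAudioAttributesContentType.music,
  );
}

On iOS the Android-specific arguments are simply not sent, and on web the call throws UnimplementedError, as the web stub further down shows.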

5 changes: 3 additions & 2 deletions lib/src/web/factory_impl.dart
@@ -1,4 +1,3 @@
import '../android_interruption_source.dart';
import '../desktop_capturer.dart';

export 'package:dart_webrtc/dart_webrtc.dart'
@@ -16,7 +15,9 @@ Future<void> setVideoEffects(
Future<void> handleCallInterruptionCallbacks(
void Function()? onInterruptionStart,
void Function()? onInterruptionEnd, {
AndroidInterruptionSource? androidInterruptionSource,
Object? androidInterruptionSource,
Object? androidAudioAttributesUsageType,
Object? androidAudioAttributesContentType,
}) {
throw UnimplementedError(
'handleCallInterruptionCallbacks() is not supported on web');
1 change: 0 additions & 1 deletion lib/stream_webrtc_flutter.dart
@@ -3,7 +3,6 @@ library flutter_webrtc;
export 'package:webrtc_interface/webrtc_interface.dart'
hide MediaDevices, MediaRecorder, Navigator;

export 'src/android_interruption_source.dart';
export 'src/helper.dart';
export 'src/desktop_capturer.dart';
export 'src/media_devices.dart';
2 changes: 1 addition & 1 deletion macos/stream_webrtc_flutter.podspec
@@ -3,7 +3,7 @@
#
Pod::Spec.new do |s|
s.name = 'stream_webrtc_flutter'
s.version = '1.0.10'
s.version = '1.0.11'
s.summary = 'Flutter WebRTC plugin for macOS.'
s.description = <<-DESC
A new flutter plugin project.
2 changes: 1 addition & 1 deletion pubspec.yaml
@@ -1,6 +1,6 @@
name: stream_webrtc_flutter
description: Flutter WebRTC plugin for iOS/Android/Desktop/Web, based on GoogleWebRTC.
version: 1.0.10
version: 1.0.11
homepage: https://github.com/GetStream/webrtc-flutter
environment:
sdk: ">=3.3.0 <4.0.0"