
Commit 3898604

android screen share audio
1 parent d5f1099 commit 3898604

5 files changed, +601 −48 lines changed

android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java

Lines changed: 108 additions & 11 deletions
@@ -36,6 +36,9 @@
 import io.getstream.webrtc.flutter.audio.AudioSwitchManager;
 import io.getstream.webrtc.flutter.audio.AudioUtils;
 import io.getstream.webrtc.flutter.audio.LocalAudioTrack;
+import io.getstream.webrtc.flutter.audio.ScreenAudioCapturer;
+
+import java.nio.ByteBuffer;
 import io.getstream.webrtc.flutter.record.AudioChannel;
 import io.getstream.webrtc.flutter.record.AudioSamplesInterceptor;
 import io.getstream.webrtc.flutter.record.MediaRecorderImpl;
@@ -127,6 +130,9 @@ public class GetUserMediaImpl {
   private boolean isTorchOn;
   private Intent mediaProjectionData = null;
 
+  private ScreenAudioCapturer screenAudioCapturer;
+  private volatile boolean screenAudioEnabled = false;
+  private OrientationAwareScreenCapturer currentScreenCapturer;
 
   public void screenRequestPermissions(ResultReceiver resultReceiver) {
     mediaProjectionData = null;
@@ -495,6 +501,9 @@ public void invoke(Object... args) {
 
   void getDisplayMedia(
       final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) {
+    // Check if audio is requested for screen share
+    final boolean includeAudio = parseIncludeAudio(constraints);
+
     if (mediaProjectionData == null) {
       screenRequestPermissions(
           new ResultReceiver(new Handler(Looper.getMainLooper())) {
@@ -507,41 +516,76 @@ protected void onReceiveResult(int requestCode, Bundle resultData) {
             resultError("screenRequestPermissions", "User didn't give permission to capture the screen.", result);
             return;
           }
-          getDisplayMedia(result, mediaStream, mediaProjectionData);
+          getDisplayMedia(result, mediaStream, mediaProjectionData, includeAudio);
         }
       });
     } else {
-      getDisplayMedia(result, mediaStream, mediaProjectionData);
+      getDisplayMedia(result, mediaStream, mediaProjectionData, includeAudio);
+    }
+  }
+
+  /**
+   * Parses the includeAudio flag from constraints.
+   * Checks for audio: true or audio: { ... } in constraints.
+   */
+  private boolean parseIncludeAudio(ConstraintsMap constraints) {
+    if (constraints == null || !constraints.hasKey("audio")) {
+      return false;
+    }
+
+    ObjectType audioType = constraints.getType("audio");
+    if (audioType == ObjectType.Boolean) {
+      return constraints.getBoolean("audio");
+    } else if (audioType == ObjectType.Map) {
+      // If audio is a map/object, we treat it as audio enabled
+      return true;
     }
+
+    return false;
   }
 
-  private void getDisplayMedia(final Result result, final MediaStream mediaStream, final Intent mediaProjectionData) {
+  private void getDisplayMedia(final Result result, final MediaStream mediaStream,
+      final Intent mediaProjectionData, final boolean includeAudio) {
     /* Create ScreenCapture */
     VideoTrack displayTrack = null;
-    VideoCapturer videoCapturer = null;
     String trackId = stateProvider.getNextTrackUUID();
 
-    videoCapturer = new OrientationAwareScreenCapturer(
+    OrientationAwareScreenCapturer videoCapturer = new OrientationAwareScreenCapturer(
         applicationContext,
         mediaProjectionData,
         new MediaProjection.Callback() {
          @Override
          public void onStop() {
            super.onStop();
 
+            // Stop screen audio capture when screen sharing stops
+            stopScreenAudioCapture();
+
            ConstraintsMap params = new ConstraintsMap();
            params.putString("event", EVENT_DISPLAY_MEDIA_STOPPED);
            params.putString("trackId", trackId);
            FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
          }
-        }
-    );
+        });
 
-    if (videoCapturer == null) {
-      resultError("screenRequestPermissions", "GetDisplayMediaFailed, User revoked permission to capture the screen.", result);
-      return;
+    // Set up screen audio capture listener if audio is requested
+    if (includeAudio && ScreenAudioCapturer.isSupported()) {
+      videoCapturer.setMediaProjectionReadyListener(
+          new OrientationAwareScreenCapturer.MediaProjectionReadyListener() {
+            @Override
+            public void onMediaProjectionReady(MediaProjection mediaProjection) {
+              startScreenAudioCapture(mediaProjection);
+            }
+
+            @Override
+            public void onMediaProjectionStopped() {
+              stopScreenAudioCapture();
+            }
+          });
     }
 
+    currentScreenCapturer = videoCapturer;
+
     PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory();
     VideoSource videoSource = pcFactory.createVideoSource(true);
 
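Note: the setMediaProjectionReadyListener call above lands in OrientationAwareScreenCapturer, one of the five changed files whose diff is not shown on this page. A minimal sketch of the hook those call sites imply, assuming the capturer simply stores the listener and fires it around its MediaProjection lifecycle (the field and wiring below are inferred from the calls above, not confirmed by the hidden diff):

  // Sketch of the assumed additions inside OrientationAwareScreenCapturer.
  public interface MediaProjectionReadyListener {
    void onMediaProjectionReady(MediaProjection mediaProjection);
    void onMediaProjectionStopped();
  }

  @Nullable
  private MediaProjectionReadyListener mediaProjectionReadyListener;

  public void setMediaProjectionReadyListener(MediaProjectionReadyListener listener) {
    this.mediaProjectionReadyListener = listener;
  }

  // Assumption: the capturer invokes onMediaProjectionReady once startCapture has
  // obtained its MediaProjection, and onMediaProjectionStopped when the projection ends.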

@@ -566,7 +610,8 @@ public void onStop() {
     info.capturer = videoCapturer;
 
     videoCapturer.startCapture(info.width, info.height, info.fps);
-    Log.d(TAG, "OrientationAwareScreenCapturer.startCapture: " + info.width + "x" + info.height + "@" + info.fps);
+    Log.d(TAG, "OrientationAwareScreenCapturer.startCapture: " + info.width + "x" + info.height + "@" + info.fps +
+        ", includeAudio: " + includeAudio);
 
     mVideoCapturers.put(trackId, info);
     mVideoSources.put(trackId, videoSource);
@@ -609,6 +654,58 @@ public void onStop() {
     result.success(successResult.toMap());
   }
 
+  @RequiresApi(api = Build.VERSION_CODES.Q)
+  private void startScreenAudioCapture(MediaProjection mediaProjection) {
+    if (!ScreenAudioCapturer.isSupported()) {
+      Log.w(TAG, "Screen audio capture not supported on this device");
+      return;
+    }
+
+    if (screenAudioCapturer == null) {
+      screenAudioCapturer = new ScreenAudioCapturer(applicationContext);
+    }
+
+    boolean started = screenAudioCapturer.startCapture(mediaProjection);
+    screenAudioEnabled = started;
+
+    if (started) {
+      Log.d(TAG, "Screen audio capture started successfully");
+    } else {
+      Log.w(TAG, "Failed to start screen audio capture");
+    }
+  }
+
+  private synchronized void stopScreenAudioCapture() {
+    screenAudioEnabled = false;
+
+    ScreenAudioCapturer localCapturer = screenAudioCapturer;
+    if (localCapturer != null) {
+      localCapturer.stopCapture();
+      Log.d(TAG, "Screen audio capture stopped");
+    }
+
+    currentScreenCapturer = null;
+  }
+
+  /**
+   * Returns whether screen audio capture is currently enabled.
+   */
+  public boolean isScreenAudioEnabled() {
+    return screenAudioEnabled && screenAudioCapturer != null && screenAudioCapturer.isCapturing();
+  }
+
+  /**
+   * Gets screen audio bytes for mixing with microphone audio.
+   * Returns null if screen audio capture is not active.
+   */
+  public ByteBuffer getScreenAudioBytes(int bytesRequested) {
+    if (!isScreenAudioEnabled()) {
+      return null;
+    }
+
+    return screenAudioCapturer.getScreenAudioBytes(bytesRequested);
+  }
+
   /**
    * Implements {@code getUserMedia} with the knowledge that the necessary permissions have already
    * been granted. If the necessary permissions have not been granted yet, they will NOT be
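ScreenAudioCapturer itself is another of the changed files not visible on this page. Judging from the calls above (isSupported(), startCapture(MediaProjection), isCapturing(), getScreenAudioBytes(int), stopCapture()) and the @RequiresApi(Q) guard, it presumably wraps Android's AudioPlaybackCaptureConfiguration API, which makes playback capture possible from API 29 on. A self-contained sketch under those assumptions — the sample rate, channel mask, and usage filters here are illustrative choices, not the commit's actual values, and the app still needs the RECORD_AUDIO permission:

  package io.getstream.webrtc.flutter.audio;

  import android.content.Context;
  import android.media.AudioAttributes;
  import android.media.AudioFormat;
  import android.media.AudioPlaybackCaptureConfiguration;
  import android.media.AudioRecord;
  import android.media.projection.MediaProjection;
  import android.os.Build;

  import androidx.annotation.RequiresApi;

  import java.nio.ByteBuffer;
  import java.nio.ByteOrder;

  public class ScreenAudioCapturer {
    // Illustrative format: 48 kHz mono 16-bit PCM (assumed, not from the commit).
    private static final int SAMPLE_RATE = 48000;

    private AudioRecord audioRecord;
    private volatile boolean capturing = false;

    public ScreenAudioCapturer(Context context) {
      // Context parameter kept for parity with the constructor call above; unused here.
    }

    // Playback capture exists only on Android 10 (API 29) and newer.
    public static boolean isSupported() {
      return Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
    }

    @RequiresApi(api = Build.VERSION_CODES.Q)
    public boolean startCapture(MediaProjection mediaProjection) {
      // Capture only audio other apps play with media/game usage; the mic path is separate.
      AudioPlaybackCaptureConfiguration config =
          new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
              .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
              .addMatchingUsage(AudioAttributes.USAGE_GAME)
              .build();

      AudioFormat format = new AudioFormat.Builder()
          .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
          .setSampleRate(SAMPLE_RATE)
          .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
          .build();

      try {
        audioRecord = new AudioRecord.Builder()
            .setAudioFormat(format)
            .setAudioPlaybackCaptureConfig(config)
            .build();
        audioRecord.startRecording();
        capturing = true;
      } catch (Exception e) {
        capturing = false;
      }
      return capturing;
    }

    public boolean isCapturing() {
      return capturing;
    }

    // Non-blocking read of up to bytesRequested bytes; null when nothing is available.
    public ByteBuffer getScreenAudioBytes(int bytesRequested) {
      if (!capturing || audioRecord == null) {
        return null;
      }
      ByteBuffer buffer = ByteBuffer.allocateDirect(bytesRequested).order(ByteOrder.nativeOrder());
      int read = audioRecord.read(buffer, bytesRequested, AudioRecord.READ_NON_BLOCKING);
      if (read <= 0) {
        return null;
      }
      buffer.limit(read);
      return buffer;
    }

    public void stopCapture() {
      capturing = false;
      if (audioRecord != null) {
        try {
          audioRecord.stop();
        } catch (IllegalStateException ignored) {
          // stop() throws if the recorder never started; safe to ignore in a sketch.
        }
        audioRecord.release();
        audioRecord = null;
      }
    }
  }

Reading with READ_NON_BLOCKING keeps the WebRTC audio callback from stalling when the screen produces no sound; returning null lets the caller skip mixing for that buffer.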

android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java

Lines changed: 36 additions & 0 deletions
@@ -23,6 +23,7 @@
 import androidx.annotation.Nullable;
 import androidx.annotation.RequiresApi;
 
+import io.getstream.webrtc.flutter.audio.AudioBufferMixer;
 import io.getstream.webrtc.flutter.audio.AudioDeviceKind;
 import io.getstream.webrtc.flutter.audio.AudioProcessingFactoryProvider;
 import io.getstream.webrtc.flutter.audio.AudioProcessingController;
@@ -202,6 +203,23 @@ void dispose() {
     mPeerConnectionObservers.clear();
   }
 
+  /**
+   * Checks if the microphone is muted by examining all local audio tracks.
+   * Returns true if all audio tracks are disabled or if there are no audio
+   * tracks.
+   */
+  private boolean isMicrophoneMuted() {
+    for (LocalTrack track : localTracks.values()) {
+      if (track instanceof LocalAudioTrack) {
+        if (track.enabled()) {
+          return false;
+        }
+      }
+    }
+
+    return true;
+  }
+
   private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList,
       @Nullable ConstraintsMap androidAudioConfiguration, Severity logSeverity, @Nullable Integer audioSampleRate, @Nullable Integer audioOutputSampleRate) {
     if (mFactory != null) {
@@ -315,6 +333,24 @@ public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples a
       audioDeviceModuleBuilder.setAudioAttributes(audioAttributes);
     }
 
+    // Set up audio buffer callback for screen audio mixing
+    audioDeviceModuleBuilder.setAudioBufferCallback(
+        (audioBuffer, audioFormat, channelCount, sampleRate, bytesRead, captureTimeNs) -> {
+          boolean isMicrophoneMuted = isMicrophoneMuted();
+          if (!isMicrophoneMuted && bytesRead > 0 && getUserMediaImpl != null && getUserMediaImpl.isScreenAudioEnabled()) {
+            // Get screen audio bytes and mix with microphone audio
+            ByteBuffer screenAudioBuffer = getUserMediaImpl.getScreenAudioBytes(bytesRead);
+            if (screenAudioBuffer != null && screenAudioBuffer.remaining() > 0) {
+              AudioBufferMixer.mixScreenAudioWithMicrophone(
+                  audioBuffer,
+                  screenAudioBuffer,
+                  bytesRead);
+            }
+          }
+
+          return captureTimeNs;
+        });
+
     audioDeviceModule = audioDeviceModuleBuilder.createAudioDeviceModule();
 
     if(!bypassVoiceProcessing) {
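AudioBufferMixer is likewise among the changed files not shown in this view; its single call site above passes the microphone buffer, the screen-audio buffer, and a byte count. A plausible sketch, assuming both buffers carry interleaved 16-bit PCM at the same sample rate and channel count, is additive mixing with saturation so that loud passages clip instead of wrapping around:

  package io.getstream.webrtc.flutter.audio;

  import java.nio.ByteBuffer;
  import java.nio.ByteOrder;

  public final class AudioBufferMixer {

    private AudioBufferMixer() {}

    /**
     * Mixes screen audio into the microphone buffer in place.
     * Assumes both buffers hold 16-bit PCM in the same format:
     * sums each sample pair and clamps to the 16-bit range so
     * overflow saturates instead of wrapping.
     */
    public static void mixScreenAudioWithMicrophone(
        ByteBuffer micBuffer, ByteBuffer screenBuffer, int bytesRead) {
      micBuffer.order(ByteOrder.nativeOrder());
      screenBuffer.order(ByteOrder.nativeOrder());

      // Mix only as many whole 16-bit samples as both buffers actually contain.
      int samples =
          Math.min(bytesRead, Math.min(micBuffer.remaining(), screenBuffer.remaining())) / 2;

      int micPos = micBuffer.position();
      int screenPos = screenBuffer.position();

      for (int i = 0; i < samples; i++) {
        int mixed = micBuffer.getShort(micPos + i * 2) + screenBuffer.getShort(screenPos + i * 2);
        // Saturate to the 16-bit range.
        if (mixed > Short.MAX_VALUE) mixed = Short.MAX_VALUE;
        if (mixed < Short.MIN_VALUE) mixed = Short.MIN_VALUE;
        micBuffer.putShort(micPos + i * 2, (short) mixed);
      }
    }
  }

A production mixer might also scale each source by a gain factor before summing, to reduce how often the clamp engages.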
