146 changes: 133 additions & 13 deletions android/src/main/java/io/getstream/webrtc/flutter/GetUserMediaImpl.java
@@ -36,6 +36,9 @@
import io.getstream.webrtc.flutter.audio.AudioSwitchManager;
import io.getstream.webrtc.flutter.audio.AudioUtils;
import io.getstream.webrtc.flutter.audio.LocalAudioTrack;
import io.getstream.webrtc.flutter.audio.ScreenAudioCapturer;

import java.nio.ByteBuffer;
import io.getstream.webrtc.flutter.record.AudioChannel;
import io.getstream.webrtc.flutter.record.AudioSamplesInterceptor;
import io.getstream.webrtc.flutter.record.MediaRecorderImpl;
@@ -127,6 +130,9 @@ public class GetUserMediaImpl {
private boolean isTorchOn;
private Intent mediaProjectionData = null;

private ScreenAudioCapturer screenAudioCapturer;
private volatile boolean screenAudioEnabled = false;
private OrientationAwareScreenCapturer currentScreenCapturer;

public void screenRequestPermissions(ResultReceiver resultReceiver) {
mediaProjectionData = null;
@@ -176,15 +182,40 @@ public static class ScreenRequestPermissionsFragment extends Fragment {

private ResultReceiver resultReceiver = null;
private int requestCode = 0;
private final int resultCode = 0;
private int resultCode = 0;
private boolean hasRequestedPermission = false;

private void checkSelfPermissions(boolean requestPermissions) {
// Avoid requesting permission multiple times
if (hasRequestedPermission) {
return;
}

if (resultCode != Activity.RESULT_OK) {
Activity activity = this.getActivity();
if (activity == null || activity.isFinishing()) {
return;
}

Bundle args = getArguments();
if (args == null) {
return;
}

resultReceiver = args.getParcelable(RESULT_RECEIVER);
requestCode = args.getInt(REQUEST_CODE);
requestStart(activity, requestCode);

hasRequestedPermission = true;

// Post the permission request to allow the activity to fully stabilize.
// This helps prevent the app from going to background on Samsung and other
// devices when the MediaProjection permission dialog appears.
new Handler(Looper.getMainLooper()).postDelayed(() -> {
Activity currentActivity = getActivity();
if (currentActivity != null && !currentActivity.isFinishing() && isAdded()) {
requestStart(currentActivity, requestCode);
}
}, 100);
}
}

@@ -495,6 +526,9 @@ public void invoke(Object... args) {

void getDisplayMedia(
final ConstraintsMap constraints, final Result result, final MediaStream mediaStream) {
// Check if audio is requested for screen share
final boolean includeAudio = parseIncludeAudio(constraints);

if (mediaProjectionData == null) {
screenRequestPermissions(
new ResultReceiver(new Handler(Looper.getMainLooper())) {
@@ -507,41 +541,76 @@ protected void onReceiveResult(int requestCode, Bundle resultData) {
resultError("screenRequestPermissions", "User didn't give permission to capture the screen.", result);
return;
}
getDisplayMedia(result, mediaStream, mediaProjectionData);
getDisplayMedia(result, mediaStream, mediaProjectionData, includeAudio);
}
});
} else {
getDisplayMedia(result, mediaStream, mediaProjectionData);
getDisplayMedia(result, mediaStream, mediaProjectionData, includeAudio);
}
}

private void getDisplayMedia(final Result result, final MediaStream mediaStream, final Intent mediaProjectionData) {
/**
* Parses the includeAudio flag from constraints.
* Checks for audio: true or audio: { ... } in constraints.
*/
private boolean parseIncludeAudio(ConstraintsMap constraints) {
if (constraints == null || !constraints.hasKey("audio")) {
return false;
}

ObjectType audioType = constraints.getType("audio");
if (audioType == ObjectType.Boolean) {
return constraints.getBoolean("audio");
} else if (audioType == ObjectType.Map) {
// If audio is a map/object, we treat it as audio enabled
return true;
}

return false;
}

private void getDisplayMedia(final Result result, final MediaStream mediaStream,
final Intent mediaProjectionData, final boolean includeAudio) {
/* Create ScreenCapture */
VideoTrack displayTrack = null;
VideoCapturer videoCapturer = null;
String trackId = stateProvider.getNextTrackUUID();

videoCapturer = new OrientationAwareScreenCapturer(
OrientationAwareScreenCapturer videoCapturer = new OrientationAwareScreenCapturer(
applicationContext,
mediaProjectionData,
new MediaProjection.Callback() {
@Override
public void onStop() {
super.onStop();

// Stop screen audio capture when screen sharing stops
stopScreenAudioCapture();

ConstraintsMap params = new ConstraintsMap();
params.putString("event", EVENT_DISPLAY_MEDIA_STOPPED);
params.putString("trackId", trackId);
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}
}
);
});

if (videoCapturer == null) {
resultError("screenRequestPermissions", "GetDisplayMediaFailed, User revoked permission to capture the screen.", result);
return;
// Set up screen audio capture listener if audio is requested
if (includeAudio && ScreenAudioCapturer.isSupported()) {
videoCapturer.setMediaProjectionReadyListener(
new OrientationAwareScreenCapturer.MediaProjectionReadyListener() {
@Override
public void onMediaProjectionReady(MediaProjection mediaProjection) {
startScreenAudioCapture(mediaProjection);
}

@Override
public void onMediaProjectionStopped() {
stopScreenAudioCapture();
}
});
}

currentScreenCapturer = videoCapturer;

PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory();
VideoSource videoSource = pcFactory.createVideoSource(true);

@@ -566,7 +635,8 @@ public void onStop() {
info.capturer = videoCapturer;

videoCapturer.startCapture(info.width, info.height, info.fps);
Log.d(TAG, "OrientationAwareScreenCapturer.startCapture: " + info.width + "x" + info.height + "@" + info.fps);
Log.d(TAG, "OrientationAwareScreenCapturer.startCapture: " + info.width + "x" + info.height + "@" + info.fps +
", includeAudio: " + includeAudio);

mVideoCapturers.put(trackId, info);
mVideoSources.put(trackId, videoSource);
@@ -609,6 +679,56 @@ public void onStop() {
result.success(successResult.toMap());
}

@RequiresApi(api = Build.VERSION_CODES.Q)
private void startScreenAudioCapture(MediaProjection mediaProjection) {
if (!ScreenAudioCapturer.isSupported()) {
Log.w(TAG, "Screen audio capture not supported on this device");
return;
}

if (screenAudioCapturer == null) {
screenAudioCapturer = new ScreenAudioCapturer(applicationContext);
}

boolean started = screenAudioCapturer.startCapture(mediaProjection);
screenAudioEnabled = started;

if (started) {
Log.d(TAG, "Screen audio capture started successfully");
} else {
Log.w(TAG, "Failed to start screen audio capture");
}
}

private synchronized void stopScreenAudioCapture() {
screenAudioEnabled = false;

ScreenAudioCapturer localCapturer = screenAudioCapturer;
if (localCapturer != null) {
localCapturer.stopCapture();
Log.d(TAG, "Screen audio capture stopped");
}
}

/**
* Returns whether screen audio capture is currently enabled.
*/
public boolean isScreenAudioEnabled() {
return screenAudioEnabled && screenAudioCapturer != null && screenAudioCapturer.isCapturing();
}

/**
* Gets screen audio bytes for mixing with microphone audio.
* Returns null if screen audio capture is not active.
*/
public ByteBuffer getScreenAudioBytes(int bytesRequested) {
if (!isScreenAudioEnabled()) {
return null;
}

return screenAudioCapturer.getScreenAudioBytes(bytesRequested);
}

/**
* Implements {@code getUserMedia} with the knowledge that the necessary permissions have already
* been granted. If the necessary permissions have not been granted yet, they will NOT be
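The hunks above call into io.getstream.webrtc.flutter.audio.ScreenAudioCapturer (isSupported(), startCapture(MediaProjection), stopCapture(), isCapturing(), getScreenAudioBytes(int)), but that class is not part of the hunks shown here. Below is a minimal sketch of what such a capturer could look like, assuming it wraps Android's AudioPlaybackCapture API (Android 10 / API 29+), captures 48 kHz mono 16-bit PCM, and that the app already holds RECORD_AUDIO and runs its mediaProjection foreground service. It is illustrative only, not the plugin's actual implementation.

// Hypothetical sketch only -- the real ScreenAudioCapturer is not shown in this PR view.
import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioPlaybackCaptureConfiguration;
import android.media.AudioRecord;
import android.media.projection.MediaProjection;
import android.os.Build;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import java.nio.ByteBuffer;

public class ScreenAudioCapturer {
  private static final int SAMPLE_RATE = 48000; // assumption: match WebRTC's capture rate

  @Nullable private AudioRecord audioRecord;

  public ScreenAudioCapturer(Context context) {
    // Context kept for parity with the call site in GetUserMediaImpl; unused in this sketch.
  }

  /** AudioPlaybackCapture requires Android 10 (API 29) or newer. */
  public static boolean isSupported() {
    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
  }

  @RequiresApi(api = Build.VERSION_CODES.Q)
  public boolean startCapture(MediaProjection mediaProjection) {
    AudioPlaybackCaptureConfiguration config =
        new AudioPlaybackCaptureConfiguration.Builder(mediaProjection)
            .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
            .addMatchingUsage(AudioAttributes.USAGE_GAME)
            .build();
    AudioFormat format = new AudioFormat.Builder()
        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
        .setSampleRate(SAMPLE_RATE)
        .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
        .build();
    audioRecord = new AudioRecord.Builder()
        .setAudioPlaybackCaptureConfig(config)
        .setAudioFormat(format)
        .setBufferSizeInBytes(SAMPLE_RATE) // roughly 0.5 s of 16-bit mono audio
        .build();
    audioRecord.startRecording();
    return audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING;
  }

  public boolean isCapturing() {
    return audioRecord != null
        && audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING;
  }

  /** Non-blocking read of up to bytesRequested bytes; returns null when nothing is buffered. */
  @Nullable
  public ByteBuffer getScreenAudioBytes(int bytesRequested) {
    if (!isCapturing()) {
      return null;
    }
    ByteBuffer buffer = ByteBuffer.allocateDirect(bytesRequested);
    int read = audioRecord.read(buffer, bytesRequested, AudioRecord.READ_NON_BLOCKING);
    if (read <= 0) {
      return null;
    }
    buffer.limit(read);
    return buffer;
  }

  public void stopCapture() {
    if (audioRecord != null) {
      audioRecord.stop();
      audioRecord.release();
      audioRecord = null;
    }
  }
}

The remaining hunks belong to a second file in this PR (its header was not captured in this view) that builds the JavaAudioDeviceModule and mixes the captured screen audio into the microphone path.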
@@ -23,6 +23,7 @@
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;

import io.getstream.webrtc.flutter.audio.AudioBufferMixer;
import io.getstream.webrtc.flutter.audio.AudioDeviceKind;
import io.getstream.webrtc.flutter.audio.AudioProcessingFactoryProvider;
import io.getstream.webrtc.flutter.audio.AudioProcessingController;
@@ -202,6 +203,25 @@ void dispose() {
mPeerConnectionObservers.clear();
}

/**
* Checks if the microphone is muted by examining all local audio tracks.
* Returns true if all audio tracks are disabled or if there are no audio
* tracks.
*/
private boolean isMicrophoneMuted() {
synchronized (localTracks) {
for (LocalTrack track : localTracks.values()) {
if (track instanceof LocalAudioTrack) {
if (track.enabled()) {
return false;
}
}
}
}

return true;
}

private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList,
@Nullable ConstraintsMap androidAudioConfiguration, Severity logSeverity, @Nullable Integer audioSampleRate, @Nullable Integer audioOutputSampleRate) {
if (mFactory != null) {
@@ -315,6 +335,24 @@ public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples a
audioDeviceModuleBuilder.setAudioAttributes(audioAttributes);
}

// Set up audio buffer callback for screen audio mixing
audioDeviceModuleBuilder.setAudioBufferCallback(
(audioBuffer, audioFormat, channelCount, sampleRate, bytesRead, captureTimeNs) -> {
boolean isMicrophoneMuted = isMicrophoneMuted();
if (!isMicrophoneMuted && bytesRead > 0 && getUserMediaImpl != null && getUserMediaImpl.isScreenAudioEnabled()) {
// Get screen audio bytes and mix with microphone audio
ByteBuffer screenAudioBuffer = getUserMediaImpl.getScreenAudioBytes(bytesRead);
if (screenAudioBuffer != null && screenAudioBuffer.remaining() > 0) {
AudioBufferMixer.mixScreenAudioWithMicrophone(
audioBuffer,
screenAudioBuffer,
bytesRead);
}
}

return captureTimeNs;
});

audioDeviceModule = audioDeviceModuleBuilder.createAudioDeviceModule();

if(!bypassVoiceProcessing) {
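AudioBufferMixer.mixScreenAudioWithMicrophone is referenced by the audio buffer callback above, but its implementation is not included in these hunks. A minimal sketch of an in-place 16-bit PCM mix with clamping follows, assuming both buffers carry PCM-16 samples in native byte order, which is how JavaAudioDeviceModule hands microphone data to its buffer callback; the class name and method signature are taken from the call site, and the body is an assumption.

// Illustrative sketch -- the plugin's actual AudioBufferMixer is not included in these hunks.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

public final class AudioBufferMixer {

  private AudioBufferMixer() {}

  /** Mixes screen audio into the microphone buffer in place, clamping to the 16-bit range. */
  public static void mixScreenAudioWithMicrophone(
      ByteBuffer micBuffer, ByteBuffer screenBuffer, int bytesRead) {
    // The views share storage with the originals, so writing to `mic` updates the callback's buffer.
    ShortBuffer mic = micBuffer.duplicate().order(ByteOrder.nativeOrder()).asShortBuffer();
    ShortBuffer screen = screenBuffer.duplicate().order(ByteOrder.nativeOrder()).asShortBuffer();
    int samples = Math.min(bytesRead / 2, Math.min(mic.remaining(), screen.remaining()));
    for (int i = 0; i < samples; i++) {
      int mixed = mic.get(i) + screen.get(i);
      // Clamp instead of letting the 16-bit sum wrap around, which would produce harsh distortion.
      mic.put(i, (short) Math.max(Short.MIN_VALUE, Math.min(Short.MAX_VALUE, mixed)));
    }
  }
}

Because the callback only mixes when isMicrophoneMuted() returns false, muting every local audio track also stops screen audio from reaching the sent stream; that trade-off is inherent to mixing into the microphone buffer rather than publishing a separate screen-audio track.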