Skip to content

Commit 08282de

Browse files
authored
Move media devices API logic into submodule in medea-flutter-webrtc-native crate (#232, #210)
1 parent 650c6ec commit 08282de

File tree

16 files changed

+608
-485
lines changed

16 files changed

+608
-485
lines changed
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
//! [MediaStreamConstraints][1] for [`Webrtc::get_media()`] audio configuration.
//!
//! [1]: https://w3.org/TR/mediacapture-streams#dom-mediastreamconstraints

use crate::api::NoiseSuppressionLevel;
// Imported only so rustdoc intra-doc links below can resolve.
#[cfg(doc)]
use crate::{
    Webrtc,
    api::{AudioProcessingConfig, MediaStreamTrack},
};

/// Nature and settings of the audio [`MediaStreamTrack`] returned by
/// [`Webrtc::get_media()`].
#[derive(Debug)]
pub struct AudioConstraints {
    /// Identifier of the device generating the content of the
    /// [`MediaStreamTrack`].
    ///
    /// The first device will be chosen if an empty [`String`] is provided.
    pub device_id: Option<String>,

    /// Audio processing configuration constraints of the [`MediaStreamTrack`].
    pub processing: AudioProcessingConstraints,
}

/// Constraints of an [`AudioProcessingConfig`].
///
/// Each field is an [`Option`]: [`None`] means "no preference" and leaves the
/// corresponding setting at its default.
#[derive(Debug, Default)]
pub struct AudioProcessingConstraints {
    /// Indicator whether the audio volume level should be automatically tuned
    /// to maintain a steady overall volume level.
    pub auto_gain_control: Option<bool>,

    /// Indicator whether a high-pass filter should be enabled to eliminate
    /// low-frequency noise.
    pub high_pass_filter: Option<bool>,

    /// Indicator whether noise suppression should be enabled to reduce
    /// background sounds.
    pub noise_suppression: Option<bool>,

    /// Level of aggressiveness for noise suppression, if enabled.
    pub noise_suppression_level: Option<NoiseSuppressionLevel>,

    /// Indicator whether echo cancellation should be enabled to prevent
    /// feedback.
    pub echo_cancellation: Option<bool>,
}
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
//! [MediaStreamConstraints][1] for [`Webrtc::get_media()`] configuration.
//!
//! [1]: https://w3.org/TR/mediacapture-streams#dom-mediastreamconstraints

pub mod audio;
pub mod video;

pub use self::{
    audio::{AudioConstraints, AudioProcessingConstraints},
    video::VideoConstraints,
};
// Imported only so rustdoc intra-doc links below can resolve.
#[cfg(doc)]
use crate::{Webrtc, api::MediaStreamTrack};

/// [MediaStreamConstraints][1], instructing what sort of
/// [`MediaStreamTrack`]s should be returned by [`Webrtc::get_media()`].
///
/// [1]: https://w3.org/TR/mediacapture-streams#dom-mediastreamconstraints
#[derive(Debug)]
pub struct MediaStreamConstraints {
    /// Specifies the nature and settings of the audio [`MediaStreamTrack`].
    ///
    /// [`None`] means no audio track is requested.
    pub audio: Option<AudioConstraints>,

    /// Specifies the nature and settings of the video [`MediaStreamTrack`].
    ///
    /// [`None`] means no video track is requested.
    pub video: Option<VideoConstraints>,
}
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
//! [MediaStreamConstraints][1] for [`Webrtc::get_media()`] video configuration.
//!
//! [1]: https://w3.org/TR/mediacapture-streams#dom-mediastreamconstraints

// Imported only so rustdoc intra-doc links below can resolve.
#[cfg(doc)]
use crate::{Webrtc, api::MediaStreamTrack};

/// Nature and settings of the video [`MediaStreamTrack`] returned by
/// [`Webrtc::get_media()`].
#[derive(Debug)]
pub struct VideoConstraints {
    /// Identifier of the device generating the content of the
    /// [`MediaStreamTrack`].
    ///
    /// The first device will be chosen if an empty [`String`] is provided.
    pub device_id: Option<String>,

    /// Width in pixels.
    pub width: u32,

    /// Height in pixels.
    pub height: u32,

    /// Exact frame rate (frames per second).
    pub frame_rate: u32,

    /// Indicator whether the requested video track should be acquired via
    /// screen capturing (instead of a camera device).
    pub is_display: bool,
}

crates/native/src/api/media/mod.rs

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
//! Devices and displays managing API.
2+
3+
pub mod constraints;
4+
5+
use std::sync::atomic::{AtomicBool, Ordering};
6+
7+
pub use self::constraints::{
8+
AudioConstraints, AudioProcessingConstraints, MediaStreamConstraints,
9+
VideoConstraints,
10+
};
11+
use crate::{
12+
api::{MediaDeviceInfo, MediaDisplayInfo, WEBRTC},
13+
devices,
14+
frb_generated::StreamSink,
15+
};
16+
17+
/// Indicator whether application is configured to use fake media devices.
18+
static FAKE_MEDIA: AtomicBool = AtomicBool::new(false);
19+
20+
/// Configures media acquisition to use fake devices instead of actual camera
21+
/// and microphone.
22+
pub fn enable_fake_media() {
23+
FAKE_MEDIA.store(true, Ordering::Release);
24+
}
25+
26+
/// Indicates whether application is configured to use fake media devices.
27+
pub fn is_fake_media() -> bool {
28+
FAKE_MEDIA.load(Ordering::Acquire)
29+
}
30+
31+
/// Returns a list of all available media input and output devices, such as
32+
/// microphones, cameras, headsets, and so forth.
33+
pub fn enumerate_devices() -> anyhow::Result<Vec<MediaDeviceInfo>> {
34+
WEBRTC.lock().unwrap().enumerate_devices()
35+
}
36+
37+
/// Returns a list of all available displays that can be used for screen
38+
/// capturing.
39+
#[must_use]
40+
pub fn enumerate_displays() -> Vec<MediaDisplayInfo> {
41+
devices::enumerate_displays()
42+
}
43+
44+
/// Sets the specified `audio playout` device.
45+
pub fn set_audio_playout_device(device_id: String) -> anyhow::Result<()> {
46+
WEBRTC.lock().unwrap().set_audio_playout_device(device_id)
47+
}
48+
49+
/// Indicates whether the microphone is available to set volume.
50+
pub fn microphone_volume_is_available() -> anyhow::Result<bool> {
51+
WEBRTC.lock().unwrap().microphone_volume_is_available()
52+
}
53+
54+
/// Sets the microphone system volume according to the specified `level` in
55+
/// percents.
56+
///
57+
/// Valid values range is `[0; 100]`.
58+
pub fn set_microphone_volume(level: u8) -> anyhow::Result<()> {
59+
WEBRTC.lock().unwrap().set_microphone_volume(level)
60+
}
61+
62+
/// Returns the current level of the microphone volume in `[0; 100]` range.
63+
pub fn microphone_volume() -> anyhow::Result<u32> {
64+
WEBRTC.lock().unwrap().microphone_volume()
65+
}
66+
67+
/// Sets the provided `OnDeviceChangeCallback` as the callback to be called
68+
/// whenever a set of available media devices changes.
69+
///
70+
/// Only one callback can be set at a time, so the previous one will be dropped,
71+
/// if any.
72+
pub fn set_on_device_changed(cb: StreamSink<()>) {
73+
WEBRTC.lock().unwrap().set_on_device_changed(cb);
74+
}

crates/native/src/api/mod.rs

Lines changed: 11 additions & 141 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
//! API surface and implementation for Flutter.
22
33
pub mod capability;
4+
pub mod media;
45
pub mod media_info;
56
pub mod media_stream_track;
67
pub mod peer;
@@ -12,10 +13,7 @@ pub mod transceiver;
1213
#[cfg(doc)]
1314
use std::sync::mpsc;
1415
use std::{
15-
sync::{
16-
Arc, LazyLock, Mutex,
17-
atomic::{AtomicBool, Ordering},
18-
},
16+
sync::{Arc, LazyLock, Mutex},
1917
time::Duration,
2018
};
2119

@@ -29,6 +27,13 @@ pub use self::{
2927
ScalabilityMode, get_rtp_receiver_capabilities,
3028
get_rtp_sender_capabilities, set_codec_preferences,
3129
},
30+
media::{
31+
AudioConstraints, AudioProcessingConstraints, MediaStreamConstraints,
32+
VideoConstraints, enable_fake_media, enumerate_devices,
33+
enumerate_displays, is_fake_media, microphone_volume,
34+
microphone_volume_is_available, set_audio_playout_device,
35+
set_microphone_volume, set_on_device_changed,
36+
},
3237
media_info::{MediaDeviceInfo, MediaDeviceKind, MediaDisplayInfo},
3338
media_stream_track::{
3439
AudioProcessingConfig, GetMediaError, GetMediaResult, MediaStreamTrack,
@@ -68,11 +73,9 @@ pub use crate::{
6873
renderer::TextureEvent,
6974
};
7075
use crate::{
71-
Webrtc, devices,
76+
Webrtc,
7277
frb::{FrbHandler, new_frb_handler},
73-
frb_generated::{
74-
FLUTTER_RUST_BRIDGE_CODEGEN_VERSION, RustOpaque, StreamSink,
75-
},
78+
frb_generated::{FLUTTER_RUST_BRIDGE_CODEGEN_VERSION, RustOpaque},
7679
};
7780

7881
/// Custom [`Handler`] for executing Rust code called from Dart.
@@ -100,9 +103,6 @@ pub(crate) static WEBRTC: LazyLock<Mutex<Webrtc>> =
100103
/// Timeout for [`mpsc::Receiver::recv_timeout()`] operations.
101104
pub static RX_TIMEOUT: Duration = Duration::from_secs(5);
102105

103-
/// Indicator whether application is configured to use fake media devices.
104-
static FAKE_MEDIA: AtomicBool = AtomicBool::new(false);
105-
106106
/// [MediaStreamTrack.kind][1] representation.
107107
///
108108
/// [1]: https://w3.org/TR/mediacapture-streams#dfn-kind
@@ -124,104 +124,6 @@ impl From<sys::TrackKind> for TrackKind {
124124
}
125125
}
126126

127-
/// [MediaStreamConstraints], used to instruct what sort of
128-
/// [`MediaStreamTrack`]s to return by the [`Webrtc::get_media()`].
129-
///
130-
/// [1]: https://w3.org/TR/mediacapture-streams#dom-mediastreamconstraints
131-
#[derive(Debug)]
132-
pub struct MediaStreamConstraints {
133-
/// Specifies the nature and settings of the audio [`MediaStreamTrack`].
134-
pub audio: Option<AudioConstraints>,
135-
136-
/// Specifies the nature and settings of the video [`MediaStreamTrack`].
137-
pub video: Option<VideoConstraints>,
138-
}
139-
140-
/// Nature and settings of the video [`MediaStreamTrack`] returned by
141-
/// [`Webrtc::get_media()`].
142-
#[derive(Debug)]
143-
pub struct VideoConstraints {
144-
/// Identifier of the device generating the content of the
145-
/// [`MediaStreamTrack`].
146-
///
147-
/// The first device will be chosen if an empty [`String`] is provided.
148-
pub device_id: Option<String>,
149-
150-
/// Width in pixels.
151-
pub width: u32,
152-
153-
/// Height in pixels.
154-
pub height: u32,
155-
156-
/// Exact frame rate (frames per second).
157-
pub frame_rate: u32,
158-
159-
/// Indicator whether the request video track should be acquired via screen
160-
/// capturing.
161-
pub is_display: bool,
162-
}
163-
164-
/// Nature and settings of the audio [`MediaStreamTrack`] returned by
165-
/// [`Webrtc::get_media()`].
166-
#[derive(Debug)]
167-
pub struct AudioConstraints {
168-
/// Identifier of the device generating the content of the
169-
/// [`MediaStreamTrack`].
170-
///
171-
/// First device will be chosen if an empty [`String`] is provided.
172-
pub device_id: Option<String>,
173-
174-
/// Audio processing configuration constraints of the [`MediaStreamTrack`].
175-
pub processing: AudioProcessingConstraints,
176-
}
177-
178-
/// Constraints of an [`AudioProcessingConfig`].
179-
#[derive(Debug, Default)]
180-
pub struct AudioProcessingConstraints {
181-
/// Indicator whether the audio volume level should be automatically tuned
182-
/// to maintain a steady overall volume level.
183-
pub auto_gain_control: Option<bool>,
184-
185-
/// Indicator whether a high-pass filter should be enabled to eliminate
186-
/// low-frequency noise.
187-
pub high_pass_filter: Option<bool>,
188-
189-
/// Indicator whether noise suppression should be enabled to reduce
190-
/// background sounds.
191-
pub noise_suppression: Option<bool>,
192-
193-
/// Level of aggressiveness for noise suppression.
194-
pub noise_suppression_level: Option<NoiseSuppressionLevel>,
195-
196-
/// Indicator whether echo cancellation should be enabled to prevent
197-
/// feedback.
198-
pub echo_cancellation: Option<bool>,
199-
}
200-
201-
/// Configures media acquisition to use fake devices instead of actual camera
202-
/// and microphone.
203-
pub fn enable_fake_media() {
204-
FAKE_MEDIA.store(true, Ordering::Release);
205-
}
206-
207-
/// Indicates whether application is configured to use fake media devices.
208-
pub fn is_fake_media() -> bool {
209-
FAKE_MEDIA.load(Ordering::Acquire)
210-
}
211-
212-
/// Returns a list of all available media input and output devices, such as
213-
/// microphones, cameras, headsets, and so forth.
214-
pub fn enumerate_devices() -> anyhow::Result<Vec<MediaDeviceInfo>> {
215-
WEBRTC.lock().unwrap().enumerate_devices()
216-
}
217-
218-
/// Returns a list of all available displays that can be used for screen
219-
/// capturing.
220-
#[must_use]
221-
pub fn enumerate_displays() -> Vec<MediaDisplayInfo> {
222-
devices::enumerate_displays()
223-
}
224-
225127
/// Replaces the specified [`AudioTrack`] (or [`VideoTrack`]) on the
226128
/// [`sys::RtpTransceiverInterface`]'s `sender`.
227129
///
@@ -253,35 +155,3 @@ pub fn sender_set_parameters(
253155
) -> anyhow::Result<()> {
254156
transceiver.sender_set_parameters(params)
255157
}
256-
257-
/// Sets the specified `audio playout` device.
258-
pub fn set_audio_playout_device(device_id: String) -> anyhow::Result<()> {
259-
WEBRTC.lock().unwrap().set_audio_playout_device(device_id)
260-
}
261-
262-
/// Indicates whether the microphone is available to set volume.
263-
pub fn microphone_volume_is_available() -> anyhow::Result<bool> {
264-
WEBRTC.lock().unwrap().microphone_volume_is_available()
265-
}
266-
267-
/// Sets the microphone system volume according to the specified `level` in
268-
/// percents.
269-
///
270-
/// Valid values range is `[0; 100]`.
271-
pub fn set_microphone_volume(level: u8) -> anyhow::Result<()> {
272-
WEBRTC.lock().unwrap().set_microphone_volume(level)
273-
}
274-
275-
/// Returns the current level of the microphone volume in `[0; 100]` range.
276-
pub fn microphone_volume() -> anyhow::Result<u32> {
277-
WEBRTC.lock().unwrap().microphone_volume()
278-
}
279-
280-
/// Sets the provided `OnDeviceChangeCallback` as the callback to be called
281-
/// whenever a set of available media devices changes.
282-
///
283-
/// Only one callback can be set at a time, so the previous one will be dropped,
284-
/// if any.
285-
pub fn set_on_device_changed(cb: StreamSink<()>) {
286-
WEBRTC.lock().unwrap().set_on_device_changed(cb);
287-
}

0 commit comments

Comments
 (0)