Skip to content

Commit e4cf6fa

Browse files
authored
Move MediaStreamTrack API into submodule in medea-flutter-webrtc-native crate (#222, #210)
1 parent 16a6edd commit e4cf6fa

27 files changed

+3398
-3452
lines changed
Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
//! Configuration of audio processing.
2+
3+
use libwebrtc_sys as sys;
4+
5+
#[cfg(doc)]
6+
use crate::api::MediaStreamTrack;
7+
use crate::api::WEBRTC;
8+
9+
/// Audio processing configuration for some local audio [`MediaStreamTrack`].
#[expect(clippy::struct_excessive_bools, reason = "that's ok")]
#[derive(Debug)]
pub struct AudioProcessingConfig {
    /// Indicator whether the audio volume level should be automatically tuned
    /// to maintain a steady overall volume level.
    pub auto_gain_control: bool,

    /// Indicator whether a high-pass filter should be enabled to eliminate
    /// low-frequency noise.
    pub high_pass_filter: bool,

    /// Indicator whether noise suppression should be enabled to reduce
    /// background sounds.
    pub noise_suppression: bool,

    /// Level of aggressiveness for noise suppression.
    ///
    /// NOTE(review): presumably only meaningful when `noise_suppression` is
    /// enabled — confirm against the `libwebrtc` audio processing module.
    pub noise_suppression_level: NoiseSuppressionLevel,

    /// Indicator whether echo cancellation should be enabled to prevent
    /// feedback.
    pub echo_cancellation: bool,
}
32+
33+
/// [`AudioProcessingConfig`] noise suppression aggressiveness.
///
/// Mirrors [`sys::NoiseSuppressionLevel`] (see the `From` conversions below).
#[derive(Clone, Copy, Debug)]
pub enum NoiseSuppressionLevel {
    /// Minimal noise suppression.
    Low,

    /// Moderate level of suppression.
    Moderate,

    /// Aggressive noise suppression.
    High,

    /// Maximum suppression.
    VeryHigh,
}
48+
49+
impl From<NoiseSuppressionLevel> for sys::NoiseSuppressionLevel {
50+
fn from(level: NoiseSuppressionLevel) -> Self {
51+
match level {
52+
NoiseSuppressionLevel::Low => Self::kLow,
53+
NoiseSuppressionLevel::Moderate => Self::kModerate,
54+
NoiseSuppressionLevel::High => Self::kHigh,
55+
NoiseSuppressionLevel::VeryHigh => Self::kVeryHigh,
56+
}
57+
}
58+
}
59+
60+
impl From<sys::NoiseSuppressionLevel> for NoiseSuppressionLevel {
61+
fn from(level: sys::NoiseSuppressionLevel) -> Self {
62+
match level {
63+
sys::NoiseSuppressionLevel::kLow => Self::Low,
64+
sys::NoiseSuppressionLevel::kModerate => Self::Moderate,
65+
sys::NoiseSuppressionLevel::kHigh => Self::High,
66+
sys::NoiseSuppressionLevel::kVeryHigh => Self::VeryHigh,
67+
_ => unreachable!(),
68+
}
69+
}
70+
}
71+
72+
/// Returns the current [`AudioProcessingConfig`] for the specified local audio
/// track.
///
/// # Errors
///
/// If the [`WEBRTC`] controller fails to provide an [`AudioProcessingConfig`]
/// for the specified `track_id`.
///
/// # Panics
///
/// If the [`WEBRTC`] mutex is poisoned.
pub fn get_audio_processing_config(
    track_id: String,
) -> anyhow::Result<AudioProcessingConfig> {
    WEBRTC.lock().unwrap().get_audio_processing_config(track_id)
}
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
//! Media types of a [`MediaStreamTrack`].
2+
3+
use libwebrtc_sys as sys;
4+
5+
#[cfg(doc)]
6+
use crate::api::MediaStreamTrack;
7+
8+
/// Possible media types of a [`MediaStreamTrack`].
///
/// Mirrors [`sys::MediaType`] (see the `From` conversions below).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum MediaType {
    /// Audio [`MediaStreamTrack`].
    Audio,

    /// Video [`MediaStreamTrack`].
    Video,
}
17+
18+
impl From<MediaType> for sys::MediaType {
19+
fn from(state: MediaType) -> Self {
20+
match state {
21+
MediaType::Audio => Self::MEDIA_TYPE_AUDIO,
22+
MediaType::Video => Self::MEDIA_TYPE_VIDEO,
23+
}
24+
}
25+
}
26+
27+
impl From<sys::MediaType> for MediaType {
28+
fn from(state: sys::MediaType) -> Self {
29+
match state {
30+
sys::MediaType::MEDIA_TYPE_AUDIO => Self::Audio,
31+
sys::MediaType::MEDIA_TYPE_VIDEO => Self::Video,
32+
_ => unreachable!(),
33+
}
34+
}
35+
}
Lines changed: 260 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,260 @@
1+
//! Representation of a [MediaStreamTrack][0].
2+
//!
3+
//! [0]: https://w3.org/TR/mediacapture-streams#dom-mediastreamtrack
4+
5+
pub mod audio_processing_config;
6+
pub mod media_type;
7+
pub mod track_event;
8+
pub mod track_state;
9+
10+
#[cfg(doc)]
11+
use libwebrtc_sys as sys;
12+
13+
pub use self::{
14+
audio_processing_config::{
15+
AudioProcessingConfig, NoiseSuppressionLevel,
16+
get_audio_processing_config,
17+
},
18+
media_type::MediaType,
19+
track_event::TrackEvent,
20+
track_state::TrackState,
21+
};
22+
#[cfg(doc)]
23+
use crate::PeerConnection;
24+
use crate::{
25+
api::{
26+
AudioProcessingConstraints, MediaStreamConstraints, TextureEvent,
27+
WEBRTC,
28+
},
29+
frb_generated::StreamSink,
30+
media::TrackOrigin,
31+
pc::PeerConnectionId,
32+
renderer::FrameHandler,
33+
};
34+
35+
/// Representation of a single media track within a [MediaStream].
///
/// Typically, these are audio or video tracks, but other track types may exist
/// as well.
///
/// [MediaStream]: https://w3.org/TR/mediacapture-streams#dom-mediastream
#[derive(Clone, Debug)]
pub struct MediaStreamTrack {
    /// Unique identifier (GUID) of this [`MediaStreamTrack`].
    pub id: String,

    /// Unique identifier of the [`PeerConnection`] from which this
    /// [`MediaStreamTrack`] was received.
    ///
    /// Always [`None`] for local [`MediaStreamTrack`]s.
    pub peer_id: Option<u32>,

    /// Label identifying the track source, as in "internal microphone".
    pub device_id: String,

    /// [`MediaType`] of this [`MediaStreamTrack`].
    pub kind: MediaType,

    /// Indicator whether this [`MediaStreamTrack`] is allowed to render the
    /// source stream.
    ///
    /// This can be used to intentionally mute a track.
    pub enabled: bool,
}
64+
65+
/// [`get_media()`] function result.
///
/// NOTE(review): looks like a dedicated enum (instead of a plain [`Result`])
/// so it can cross the FFI boundary — confirm with the generated bindings.
pub enum GetMediaResult {
    /// Requested media tracks.
    Ok(Vec<MediaStreamTrack>),

    /// Failed to get requested media.
    Err(GetMediaError),
}
73+
74+
/// Media acquisition error.
pub enum GetMediaError {
    /// Could not acquire audio track.
    ///
    /// Carries a textual description of the error.
    Audio(String),

    /// Could not acquire video track.
    ///
    /// Carries a textual description of the error.
    Video(String),
}
82+
83+
/// Creates a [MediaStream] with tracks according to provided
84+
/// [`MediaStreamConstraints`].
85+
///
86+
/// [MediaStream]: https://w3.org/TR/mediacapture-streams#dom-mediastream
87+
#[must_use]
88+
pub fn get_media(constraints: MediaStreamConstraints) -> GetMediaResult {
89+
#[expect(clippy::significant_drop_in_scrutinee, reason = "no problems")]
90+
match WEBRTC.lock().unwrap().get_media(constraints) {
91+
Ok(tracks) => GetMediaResult::Ok(tracks),
92+
Err(err) => GetMediaResult::Err(err),
93+
}
94+
}
95+
96+
/// Disposes the specified [`MediaStreamTrack`].
97+
pub fn dispose_track(track_id: String, peer_id: Option<u32>, kind: MediaType) {
98+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
99+
100+
WEBRTC.lock().unwrap().dispose_track(track_origin, track_id, kind, false);
101+
}
102+
103+
/// Returns the [readyState][0] property of the [`MediaStreamTrack`] by its ID
104+
/// and [`MediaType`].
105+
///
106+
/// [0]: https://w3.org/TR/mediacapture-streams#dfn-readystate
107+
pub fn track_state(
108+
track_id: String,
109+
peer_id: Option<u32>,
110+
kind: MediaType,
111+
) -> TrackState {
112+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
113+
114+
WEBRTC.lock().unwrap().track_state(track_id, track_origin, kind)
115+
}
116+
117+
/// Returns the [height] property of the media track by its ID and
118+
/// [`MediaType`].
119+
///
120+
/// Blocks until the [height] is initialized.
121+
///
122+
/// [height]: https://w3.org/TR/mediacapture-streams#dfn-height
123+
pub fn track_height(
124+
track_id: String,
125+
peer_id: Option<u32>,
126+
kind: MediaType,
127+
) -> Option<i32> {
128+
if kind == MediaType::Audio {
129+
return None;
130+
}
131+
132+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
133+
134+
WEBRTC.lock().unwrap().track_height(track_id, track_origin)
135+
}
136+
137+
/// Returns the [width] property of the media track by its ID and [`MediaType`].
138+
///
139+
/// Blocks until the [width] is initialized.
140+
///
141+
/// [width]: https://w3.org/TR/mediacapture-streams#dfn-height
142+
pub fn track_width(
143+
track_id: String,
144+
peer_id: Option<u32>,
145+
kind: MediaType,
146+
) -> Option<i32> {
147+
if kind == MediaType::Audio {
148+
return None;
149+
}
150+
151+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
152+
153+
WEBRTC.lock().unwrap().track_width(track_id, track_origin)
154+
}
155+
156+
/// Changes the [enabled][1] property of the [`MediaStreamTrack`] by its ID and
157+
/// [`MediaType`].
158+
///
159+
/// [1]: https://w3.org/TR/mediacapture-streams#track-enabled
160+
pub fn set_track_enabled(
161+
track_id: String,
162+
peer_id: Option<u32>,
163+
kind: MediaType,
164+
enabled: bool,
165+
) {
166+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
167+
168+
WEBRTC.lock().unwrap().set_track_enabled(
169+
track_id,
170+
track_origin,
171+
kind,
172+
enabled,
173+
);
174+
}
175+
176+
/// Clones the specified [`MediaStreamTrack`].
177+
pub fn clone_track(
178+
track_id: String,
179+
peer_id: Option<u32>,
180+
kind: MediaType,
181+
) -> Option<MediaStreamTrack> {
182+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
183+
184+
WEBRTC.lock().unwrap().clone_track(track_id, track_origin, kind)
185+
}
186+
187+
/// Registers an observer to the [`MediaStreamTrack`] events.
188+
pub fn register_track_observer(
189+
cb: StreamSink<TrackEvent>,
190+
peer_id: Option<u32>,
191+
track_id: String,
192+
kind: MediaType,
193+
) {
194+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
195+
196+
WEBRTC.lock().unwrap().register_track_observer(
197+
track_id,
198+
track_origin,
199+
kind,
200+
cb,
201+
);
202+
}
203+
204+
/// Enables or disables audio level observing of the audio [`MediaStreamTrack`]
205+
/// with the provided `track_id`.
206+
pub fn set_audio_level_observer_enabled(
207+
track_id: String,
208+
peer_id: Option<u32>,
209+
enabled: bool,
210+
) {
211+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
212+
WEBRTC.lock().unwrap().set_audio_level_observer_enabled(
213+
track_id,
214+
track_origin,
215+
enabled,
216+
);
217+
}
218+
219+
/// Applies the provided [`AudioProcessingConstraints`] to specified local audio
/// track.
///
/// # Errors
///
/// If the [`WEBRTC`] controller fails to apply the provided configuration to
/// the track with the specified `track_id`.
#[expect(clippy::needless_pass_by_value, reason = "FFI")]
pub fn update_audio_processing(
    track_id: String,
    conf: AudioProcessingConstraints,
) -> anyhow::Result<()> {
    WEBRTC.lock().unwrap().apply_audio_processing_config(track_id, &conf)
}
228+
229+
/// Creates a new [`VideoSink`] attached to the specified video track.
230+
///
231+
/// `callback_ptr` argument should be a pointer to an [`UniquePtr`] pointing to
232+
/// an [`sys::OnFrameCallback`].
233+
///
234+
/// [`UniquePtr`]: cxx::UniquePtr
235+
/// [`VideoSink`]: crate::VideoSink
236+
pub fn create_video_sink(
237+
cb: StreamSink<TextureEvent>,
238+
sink_id: i64,
239+
peer_id: Option<u32>,
240+
track_id: String,
241+
callback_ptr: i64,
242+
texture_id: i64,
243+
) {
244+
let handler = FrameHandler::new(callback_ptr as _, cb, texture_id);
245+
let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from));
246+
247+
WEBRTC.lock().unwrap().create_video_sink(
248+
sink_id,
249+
track_id,
250+
track_origin,
251+
handler,
252+
);
253+
}
254+
255+
/// Destroys a [`VideoSink`] by the provided ID.
///
/// # Panics
///
/// If the [`WEBRTC`] mutex is poisoned.
///
/// [`VideoSink`]: crate::VideoSink
pub fn dispose_video_sink(sink_id: i64) {
    WEBRTC.lock().unwrap().dispose_video_sink(sink_id);
}
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
//! Events happening in a [`MediaStreamTrack`].
2+
3+
#[cfg(doc)]
4+
use crate::api::MediaStreamTrack;
5+
6+
/// Possible events of a [`MediaStreamTrack`].
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TrackEvent {
    /// Ended event of the [`MediaStreamTrack`] interface is fired when playback
    /// or streaming has stopped because the end of the media was reached or
    /// because no further data is available.
    Ended,

    /// Event indicating an audio level change in the [`MediaStreamTrack`].
    ///
    /// Carries the new audio level value.
    AudioLevelUpdated(u32),

    /// Event indicating that the [`MediaStreamTrack`] has completely
    /// initialized and can be used on Flutter side.
    TrackCreated,
}

0 commit comments

Comments
 (0)