|
| 1 | +//! Representation of a [MediaStreamTrack][0]. |
| 2 | +//! |
| 3 | +//! [0]: https://w3.org/TR/mediacapture-streams#dom-mediastreamtrack |
| 4 | +
|
| 5 | +pub mod audio_processing_config; |
| 6 | +pub mod media_type; |
| 7 | +pub mod track_event; |
| 8 | +pub mod track_state; |
| 9 | + |
| 10 | +#[cfg(doc)] |
| 11 | +use libwebrtc_sys as sys; |
| 12 | + |
| 13 | +pub use self::{ |
| 14 | + audio_processing_config::{ |
| 15 | + AudioProcessingConfig, NoiseSuppressionLevel, |
| 16 | + get_audio_processing_config, |
| 17 | + }, |
| 18 | + media_type::MediaType, |
| 19 | + track_event::TrackEvent, |
| 20 | + track_state::TrackState, |
| 21 | +}; |
| 22 | +#[cfg(doc)] |
| 23 | +use crate::PeerConnection; |
| 24 | +use crate::{ |
| 25 | + api::{ |
| 26 | + AudioProcessingConstraints, MediaStreamConstraints, TextureEvent, |
| 27 | + WEBRTC, |
| 28 | + }, |
| 29 | + frb_generated::StreamSink, |
| 30 | + media::TrackOrigin, |
| 31 | + pc::PeerConnectionId, |
| 32 | + renderer::FrameHandler, |
| 33 | +}; |
| 34 | + |
/// Representation of a single media track within a [MediaStream].
///
/// Typically, these are audio or video tracks, but other track types may exist
/// as well.
///
/// [MediaStream]: https://w3.org/TR/mediacapture-streams#dom-mediastream
#[derive(Clone, Debug)]
pub struct MediaStreamTrack {
    /// Unique identifier (GUID) of this [`MediaStreamTrack`].
    pub id: String,

    /// Unique identifier of the [`PeerConnection`] from which this
    /// [`MediaStreamTrack`] was received.
    ///
    /// Always [`None`] for local [`MediaStreamTrack`]s.
    pub peer_id: Option<u32>,

    /// Label identifying the track source, as in "internal microphone".
    pub device_id: String,

    /// [`MediaType`] of this [`MediaStreamTrack`] (audio or video).
    pub kind: MediaType,

    /// Indicator whether this [`MediaStreamTrack`] is allowed to render the
    /// source stream.
    ///
    /// This can be used to intentionally mute a track without stopping it.
    pub enabled: bool,
}
| 64 | + |
| 65 | +/// [`get_media()`] function result. |
| 66 | +pub enum GetMediaResult { |
| 67 | + /// Requested media tracks. |
| 68 | + Ok(Vec<MediaStreamTrack>), |
| 69 | + |
| 70 | + /// Failed to get requested media. |
| 71 | + Err(GetMediaError), |
| 72 | +} |
| 73 | + |
/// Media acquisition error.
//
// `Debug` is derived so this error can be logged and used with `unwrap()`/
// `expect()` in tests; public error types should be `Debug`.
#[derive(Debug)]
pub enum GetMediaError {
    /// Could not acquire audio track.
    Audio(String),

    /// Could not acquire video track.
    Video(String),
}
| 82 | + |
| 83 | +/// Creates a [MediaStream] with tracks according to provided |
| 84 | +/// [`MediaStreamConstraints`]. |
| 85 | +/// |
| 86 | +/// [MediaStream]: https://w3.org/TR/mediacapture-streams#dom-mediastream |
| 87 | +#[must_use] |
| 88 | +pub fn get_media(constraints: MediaStreamConstraints) -> GetMediaResult { |
| 89 | + #[expect(clippy::significant_drop_in_scrutinee, reason = "no problems")] |
| 90 | + match WEBRTC.lock().unwrap().get_media(constraints) { |
| 91 | + Ok(tracks) => GetMediaResult::Ok(tracks), |
| 92 | + Err(err) => GetMediaResult::Err(err), |
| 93 | + } |
| 94 | +} |
| 95 | + |
| 96 | +/// Disposes the specified [`MediaStreamTrack`]. |
| 97 | +pub fn dispose_track(track_id: String, peer_id: Option<u32>, kind: MediaType) { |
| 98 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 99 | + |
| 100 | + WEBRTC.lock().unwrap().dispose_track(track_origin, track_id, kind, false); |
| 101 | +} |
| 102 | + |
| 103 | +/// Returns the [readyState][0] property of the [`MediaStreamTrack`] by its ID |
| 104 | +/// and [`MediaType`]. |
| 105 | +/// |
| 106 | +/// [0]: https://w3.org/TR/mediacapture-streams#dfn-readystate |
| 107 | +pub fn track_state( |
| 108 | + track_id: String, |
| 109 | + peer_id: Option<u32>, |
| 110 | + kind: MediaType, |
| 111 | +) -> TrackState { |
| 112 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 113 | + |
| 114 | + WEBRTC.lock().unwrap().track_state(track_id, track_origin, kind) |
| 115 | +} |
| 116 | + |
| 117 | +/// Returns the [height] property of the media track by its ID and |
| 118 | +/// [`MediaType`]. |
| 119 | +/// |
| 120 | +/// Blocks until the [height] is initialized. |
| 121 | +/// |
| 122 | +/// [height]: https://w3.org/TR/mediacapture-streams#dfn-height |
| 123 | +pub fn track_height( |
| 124 | + track_id: String, |
| 125 | + peer_id: Option<u32>, |
| 126 | + kind: MediaType, |
| 127 | +) -> Option<i32> { |
| 128 | + if kind == MediaType::Audio { |
| 129 | + return None; |
| 130 | + } |
| 131 | + |
| 132 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 133 | + |
| 134 | + WEBRTC.lock().unwrap().track_height(track_id, track_origin) |
| 135 | +} |
| 136 | + |
| 137 | +/// Returns the [width] property of the media track by its ID and [`MediaType`]. |
| 138 | +/// |
| 139 | +/// Blocks until the [width] is initialized. |
| 140 | +/// |
| 141 | +/// [width]: https://w3.org/TR/mediacapture-streams#dfn-height |
| 142 | +pub fn track_width( |
| 143 | + track_id: String, |
| 144 | + peer_id: Option<u32>, |
| 145 | + kind: MediaType, |
| 146 | +) -> Option<i32> { |
| 147 | + if kind == MediaType::Audio { |
| 148 | + return None; |
| 149 | + } |
| 150 | + |
| 151 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 152 | + |
| 153 | + WEBRTC.lock().unwrap().track_width(track_id, track_origin) |
| 154 | +} |
| 155 | + |
| 156 | +/// Changes the [enabled][1] property of the [`MediaStreamTrack`] by its ID and |
| 157 | +/// [`MediaType`]. |
| 158 | +/// |
| 159 | +/// [1]: https://w3.org/TR/mediacapture-streams#track-enabled |
| 160 | +pub fn set_track_enabled( |
| 161 | + track_id: String, |
| 162 | + peer_id: Option<u32>, |
| 163 | + kind: MediaType, |
| 164 | + enabled: bool, |
| 165 | +) { |
| 166 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 167 | + |
| 168 | + WEBRTC.lock().unwrap().set_track_enabled( |
| 169 | + track_id, |
| 170 | + track_origin, |
| 171 | + kind, |
| 172 | + enabled, |
| 173 | + ); |
| 174 | +} |
| 175 | + |
| 176 | +/// Clones the specified [`MediaStreamTrack`]. |
| 177 | +pub fn clone_track( |
| 178 | + track_id: String, |
| 179 | + peer_id: Option<u32>, |
| 180 | + kind: MediaType, |
| 181 | +) -> Option<MediaStreamTrack> { |
| 182 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 183 | + |
| 184 | + WEBRTC.lock().unwrap().clone_track(track_id, track_origin, kind) |
| 185 | +} |
| 186 | + |
| 187 | +/// Registers an observer to the [`MediaStreamTrack`] events. |
| 188 | +pub fn register_track_observer( |
| 189 | + cb: StreamSink<TrackEvent>, |
| 190 | + peer_id: Option<u32>, |
| 191 | + track_id: String, |
| 192 | + kind: MediaType, |
| 193 | +) { |
| 194 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 195 | + |
| 196 | + WEBRTC.lock().unwrap().register_track_observer( |
| 197 | + track_id, |
| 198 | + track_origin, |
| 199 | + kind, |
| 200 | + cb, |
| 201 | + ); |
| 202 | +} |
| 203 | + |
| 204 | +/// Enables or disables audio level observing of the audio [`MediaStreamTrack`] |
| 205 | +/// with the provided `track_id`. |
| 206 | +pub fn set_audio_level_observer_enabled( |
| 207 | + track_id: String, |
| 208 | + peer_id: Option<u32>, |
| 209 | + enabled: bool, |
| 210 | +) { |
| 211 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 212 | + WEBRTC.lock().unwrap().set_audio_level_observer_enabled( |
| 213 | + track_id, |
| 214 | + track_origin, |
| 215 | + enabled, |
| 216 | + ); |
| 217 | +} |
| 218 | + |
/// Applies the provided [`AudioProcessingConstraints`] to specified local audio
/// track.
///
/// # Errors
///
/// If the underlying `WEBRTC` controller fails to apply the provided audio
/// processing configuration to the track.
#[expect(clippy::needless_pass_by_value, reason = "FFI")]
pub fn update_audio_processing(
    track_id: String,
    conf: AudioProcessingConstraints,
) -> anyhow::Result<()> {
    WEBRTC.lock().unwrap().apply_audio_processing_config(track_id, &conf)
}
| 228 | + |
| 229 | +/// Creates a new [`VideoSink`] attached to the specified video track. |
| 230 | +/// |
| 231 | +/// `callback_ptr` argument should be a pointer to an [`UniquePtr`] pointing to |
| 232 | +/// an [`sys::OnFrameCallback`]. |
| 233 | +/// |
| 234 | +/// [`UniquePtr`]: cxx::UniquePtr |
| 235 | +/// [`VideoSink`]: crate::VideoSink |
| 236 | +pub fn create_video_sink( |
| 237 | + cb: StreamSink<TextureEvent>, |
| 238 | + sink_id: i64, |
| 239 | + peer_id: Option<u32>, |
| 240 | + track_id: String, |
| 241 | + callback_ptr: i64, |
| 242 | + texture_id: i64, |
| 243 | +) { |
| 244 | + let handler = FrameHandler::new(callback_ptr as _, cb, texture_id); |
| 245 | + let track_origin = TrackOrigin::from(peer_id.map(PeerConnectionId::from)); |
| 246 | + |
| 247 | + WEBRTC.lock().unwrap().create_video_sink( |
| 248 | + sink_id, |
| 249 | + track_id, |
| 250 | + track_origin, |
| 251 | + handler, |
| 252 | + ); |
| 253 | +} |
| 254 | + |
/// Destroys a [`VideoSink`] by the provided ID.
///
/// Delegates to the global `WEBRTC` controller, which owns the sinks.
///
/// [`VideoSink`]: crate::VideoSink
pub fn dispose_video_sink(sink_id: i64) {
    WEBRTC.lock().unwrap().dispose_video_sink(sink_id);
}
0 commit comments