diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6ca43a60e..6fdd867e1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### 🔄 Changed
+# [1.39.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.39.0)
+_December 24, 2025_
+
+### ✅ Added
+- Support for capturing in-app audio during screen sharing sessions. [#1020](https://github.com/GetStream/stream-video-swift/pull/1020)
+
# [1.38.2](https://github.com/GetStream/stream-video-swift/releases/tag/1.38.2)
_December 22, 2025_
diff --git a/DemoApp/Resources/audio-tracks/track_1.mp3 b/DemoApp/Resources/audio-tracks/track_1.mp3
new file mode 100644
index 000000000..69534ea06
Binary files /dev/null and b/DemoApp/Resources/audio-tracks/track_1.mp3 differ
diff --git a/DemoApp/Resources/audio-tracks/track_2.mp3 b/DemoApp/Resources/audio-tracks/track_2.mp3
new file mode 100644
index 000000000..d7f7afe21
Binary files /dev/null and b/DemoApp/Resources/audio-tracks/track_2.mp3 differ
diff --git a/DemoApp/Sources/Components/AudioTrackPlayer/AudioTrackPlayer.swift b/DemoApp/Sources/Components/AudioTrackPlayer/AudioTrackPlayer.swift
new file mode 100644
index 000000000..2161c05c8
--- /dev/null
+++ b/DemoApp/Sources/Components/AudioTrackPlayer/AudioTrackPlayer.swift
@@ -0,0 +1,83 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import StreamVideo
+
+final class AudioTrackPlayer: NSObject, AVAudioPlayerDelegate {
+ enum Track: String, Equatable, CaseIterable {
+ case track1 = "track_1"
+ case track2 = "track_2"
+
+ var fileExtension: String {
+ switch self {
+ case .track1:
+ return ".mp3"
+ case .track2:
+ return ".mp3"
+ }
+ }
+ }
+
+ @Published private(set) var isPlaying: Bool = false
+ @Published private(set) var track: Track?
+
+ private var audioPlayer: AVAudioPlayer?
+ private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+
+ func play(_ track: Track) {
+ processingQueue.addTaskOperation { @MainActor [weak self] in
+ guard
+ let self,
+ self.track != track,
+ let url = Bundle.main.url(forResource: track.rawValue, withExtension: track.fileExtension),
+ let audioPlayer = try? AVAudioPlayer(contentsOf: url)
+ else {
+ return
+ }
+
+            self.audioPlayer = audioPlayer
+            // Without this assignment `audioPlayerDidFinishPlaying` is never
+            // delivered and playback state would not reset on completion.
+            audioPlayer.delegate = self
+            audioPlayer.numberOfLoops = 1000
+            audioPlayer.play()
+ self.track = track
+ self.isPlaying = true
+ }
+ }
+
+ func stop() {
+ processingQueue.addTaskOperation { @MainActor [weak self] in
+ guard
+ let self
+ else {
+ return
+ }
+
+ audioPlayer?.stop()
+ audioPlayer = nil
+ isPlaying = false
+ track = nil
+ }
+ }
+
+ // MARK: - AVAudioPlayerDelegate
+
+ func audioPlayerDidFinishPlaying(
+ _ player: AVAudioPlayer,
+ successfully flag: Bool
+ ) {
+ stop()
+ }
+}
+
+extension AudioTrackPlayer: InjectionKey {
+ static var currentValue: AudioTrackPlayer = .init()
+}
+
+extension InjectedValues {
+ var audioPlayer: AudioTrackPlayer {
+ get { Self[AudioTrackPlayer.self] }
+ set { _ = newValue }
+ }
+}
diff --git a/DemoApp/Sources/Controls/DemoMoreControls/DemoBroadcastMoreControlsListButtonView.swift b/DemoApp/Sources/Controls/DemoMoreControls/DemoBroadcastMoreControlsListButtonView.swift
index 695a59908..0440f4437 100644
--- a/DemoApp/Sources/Controls/DemoMoreControls/DemoBroadcastMoreControlsListButtonView.swift
+++ b/DemoApp/Sources/Controls/DemoMoreControls/DemoBroadcastMoreControlsListButtonView.swift
@@ -40,15 +40,29 @@ struct DemoBroadcastMoreControlsListButtonView: View {
@ViewBuilder
private var inAppScreenshareButtonView: some View {
- DemoMoreControlListButtonView(
- action: {
- viewModel.startScreensharing(type: .inApp)
+ Menu {
+ Button {
+ viewModel.startScreensharing(type: .inApp, includeAudio: false)
selection = .inApp
- },
- label: "Screenshare"
- ) {
- Image(systemName: "record.circle")
- .foregroundColor(appearance.colors.text)
+ } label: {
+ Text("Without audio")
+ }
+
+ Button {
+ viewModel.startScreensharing(type: .inApp, includeAudio: true)
+ selection = .inApp
+ } label: {
+ Text("With audio")
+ }
+
+ } label: {
+ DemoMoreControlListButtonView(
+ action: {},
+ label: "Screenshare"
+ ) {
+ Image(systemName: "record.circle")
+ .foregroundColor(appearance.colors.text)
+ }
}
}
diff --git a/DemoApp/Sources/ViewModifiers/MoreControls/DemoAudioTrackButtonView.swift b/DemoApp/Sources/ViewModifiers/MoreControls/DemoAudioTrackButtonView.swift
new file mode 100644
index 000000000..f72a9b1ba
--- /dev/null
+++ b/DemoApp/Sources/ViewModifiers/MoreControls/DemoAudioTrackButtonView.swift
@@ -0,0 +1,61 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import StreamVideo
+import SwiftUI
+
+struct DemoAudioTrackButtonView: View {
+ @Injected(\.audioPlayer) var audioPlayer: AudioTrackPlayer
+
+ @State private var isPlaying: Bool = AudioTrackPlayer.currentValue.isPlaying
+ @State private var track: AudioTrackPlayer.Track? = AudioTrackPlayer.currentValue.track
+
+ var body: some View {
+ Menu {
+ Button {
+ audioPlayer.stop()
+ } label: {
+ Label {
+ Text("None")
+ } icon: {
+ if track == nil {
+ Image(systemName: "checkmark")
+ }
+ }
+ }
+
+ Divider()
+
+ ForEach(AudioTrackPlayer.Track.allCases, id: \.self) { track in
+ Button {
+ if self.track == track {
+ audioPlayer.stop()
+ } else {
+ audioPlayer.play(track)
+ }
+ } label: {
+ Label {
+ Text(track.rawValue)
+ } icon: {
+ if self.track == track {
+ Image(systemName: "checkmark")
+ }
+ }
+ }
+ }
+ } label: {
+ DemoMoreControlListButtonView(
+ action: {},
+ label: "In-App audio"
+ ) {
+ Image(
+ systemName: isPlaying ? "pause.circle" : "play.circle"
+ )
+ }
+ }
+ .onReceive(audioPlayer.$isPlaying.receive(on: DispatchQueue.main)) { isPlaying = $0 }
+ .onReceive(audioPlayer.$track.receive(on: DispatchQueue.main)) { track = $0 }
+ }
+}
diff --git a/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift b/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift
index 6765a07c5..06ab688fb 100644
--- a/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift
+++ b/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift
@@ -77,6 +77,8 @@ struct DemoMoreControlsViewModifier: ViewModifier {
VStack {
Divider()
+ DemoAudioTrackButtonView()
+
DemoMoreLogsAndGleapButtonView()
DemoBroadcastMoreControlsListButtonView(
diff --git a/DocumentationTests/DocumentationTests/DocumentationTests/06-advanced/04-screensharing.swift b/DocumentationTests/DocumentationTests/DocumentationTests/06-advanced/04-screensharing.swift
index 8f3894b3b..8754d7191 100644
--- a/DocumentationTests/DocumentationTests/DocumentationTests/06-advanced/04-screensharing.swift
+++ b/DocumentationTests/DocumentationTests/DocumentationTests/06-advanced/04-screensharing.swift
@@ -17,6 +17,14 @@ private func content() {
}
}
+ asyncContainer {
+ Task {
+ let call = streamVideo.call(callType: "default", callId: "123")
+ try await call.join()
+ try await call.startScreensharing(type: .inApp, includeAudio: true)
+ }
+ }
+
asyncContainer {
Task {
try await call.stopScreensharing()
diff --git a/README.md b/README.md
index 9b377f710..2b1335e83 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
-
+
diff --git a/Sources/StreamVideo/Call.swift b/Sources/StreamVideo/Call.swift
index ed44e255b..c8954282c 100644
--- a/Sources/StreamVideo/Call.swift
+++ b/Sources/StreamVideo/Call.swift
@@ -546,9 +546,18 @@ public class Call: @unchecked Sendable, WSEventsSubscriber {
}
/// Starts screensharing from the device.
- /// - Parameter type: The screensharing type (in-app or broadcasting).
- public func startScreensharing(type: ScreensharingType) async throws {
- try await callController.startScreensharing(type: type)
+ /// - Parameters:
+ /// - type: The screensharing type (in-app or broadcasting).
+ /// - includeAudio: Whether to capture app audio during screensharing.
+ /// Only valid for `.inApp`; ignored otherwise.
+ public func startScreensharing(
+ type: ScreensharingType,
+ includeAudio: Bool = true
+ ) async throws {
+ try await callController.startScreensharing(
+ type: type,
+ includeAudio: includeAudio
+ )
}
/// Stops screensharing from the current device.
diff --git a/Sources/StreamVideo/Controllers/CallController.swift b/Sources/StreamVideo/Controllers/CallController.swift
index bc5a05bb1..2e85f96c3 100644
--- a/Sources/StreamVideo/Controllers/CallController.swift
+++ b/Sources/StreamVideo/Controllers/CallController.swift
@@ -211,8 +211,19 @@ class CallController: @unchecked Sendable {
}
}
- func startScreensharing(type: ScreensharingType) async throws {
- try await webRTCCoordinator.startScreensharing(type: type)
+ /// Starts screensharing for the current call.
+ /// - Parameters:
+ /// - type: The screensharing type (in-app or broadcasting).
+ /// - includeAudio: Whether to capture app audio during screensharing.
+ /// Only valid for `.inApp`; ignored otherwise.
+ func startScreensharing(
+ type: ScreensharingType,
+ includeAudio: Bool
+ ) async throws {
+ try await webRTCCoordinator.startScreensharing(
+ type: type,
+ includeAudio: includeAudio
+ )
}
func stopScreensharing() async throws {
diff --git a/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift b/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift
index a6a25e305..efff8568e 100644
--- a/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift
+++ b/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift
@@ -7,7 +7,7 @@ import Foundation
extension SystemEnvironment {
/// A Stream Video version.
- public static let version: String = "1.38.2"
+ public static let version: String = "1.39.0"
/// The WebRTC version.
public static let webRTCVersion: String = "137.0.54"
}
diff --git a/Sources/StreamVideo/Info.plist b/Sources/StreamVideo/Info.plist
index ec8879a18..539de2234 100644
--- a/Sources/StreamVideo/Info.plist
+++ b/Sources/StreamVideo/Info.plist
@@ -15,7 +15,7 @@
CFBundlePackageType
$(PRODUCT_BUNDLE_PACKAGE_TYPE)
CFBundleShortVersionString
- 1.38.2
+ 1.39.0
CFBundleVersion
$(CURRENT_PROJECT_VERSION)
NSHumanReadableCopyright
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift
index a7aa4911d..b0848ca1b 100644
--- a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift
@@ -96,6 +96,14 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
}
}
+ private struct AudioBufferInjectionPreState {
+ var isAdvancedDuckingEnabled: Bool
+ var duckingLevel: Int
+ var isVoiceProcessingBypassed: Bool
+ var isVoiceProcessingEnabled: Bool
+ var isVoiceProcessingAGCEnabled: Bool
+ }
+
/// Tracks whether WebRTC is currently playing back audio.
    private let isPlayingSubject: CurrentValueSubject<Bool, Never>
/// `true` while audio playout is active.
@@ -168,6 +176,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
/// Strong reference to the current engine so we can introspect it if needed.
private var engine: AVAudioEngine?
+ @Atomic private var engineInputContext: AVAudioEngine.InputContext? {
+ didSet { audioBufferRenderer.configure(with: engineInputContext) }
+ }
+
+ private let audioBufferRenderer: AudioBufferRenderer = .init()
+
+ private var preAudioBufferInjectionSnapshot: AudioBufferInjectionPreState?
/// Textual diagnostics for logging and debugging.
override var description: String {
@@ -317,6 +332,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
source.refreshStereoPlayoutState()
}
+ // MARK: - Audio Buffer injection
+
+ /// Enqueues a screen share audio sample buffer for playback.
+ func enqueue(_ sampleBuffer: CMSampleBuffer) {
+ audioBufferRenderer.enqueue(sampleBuffer)
+ }
+
// MARK: - RTCAudioDeviceModuleDelegate
/// Receives speech activity notifications emitted by WebRTC VAD.
@@ -403,6 +425,7 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
)
isPlayingSubject.send(isPlayoutEnabled)
isRecordingSubject.send(isRecordingEnabled)
+ audioBufferRenderer.reset()
return Constant.successResult
}
@@ -423,6 +446,8 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
)
isPlayingSubject.send(isPlayoutEnabled)
isRecordingSubject.send(isRecordingEnabled)
+ audioBufferRenderer.reset()
+ engineInputContext = nil
return Constant.successResult
}
@@ -434,6 +459,8 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
self.engine = nil
subject.send(.willReleaseAudioEngine(engine))
audioLevelsAdapter.uninstall(on: 0)
+ audioBufferRenderer.reset()
+ engineInputContext = nil
return Constant.successResult
}
@@ -447,6 +474,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
format: AVAudioFormat,
context: [AnyHashable: Any]
) -> Int {
+ engineInputContext = .init(
+ engine: engine,
+ source: source,
+ destination: destination,
+ format: format
+ )
+
subject.send(
.configureInputFromSource(
engine,
@@ -455,12 +489,14 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
format: format
)
)
+
audioLevelsAdapter.installInputTap(
on: destination,
format: format,
bus: 0,
bufferSize: 1024
)
+
return Constant.successResult
}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Components/AudioBufferRenderer.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Components/AudioBufferRenderer.swift
new file mode 100644
index 000000000..d080c836e
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Components/AudioBufferRenderer.swift
@@ -0,0 +1,212 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+/// Renders injected audio buffers through an AVAudioEngine graph.
+final class AudioBufferRenderer {
+
+ /// A stable identifier for diagnostics.
+ let identifier = UUID()
+
+ private var context: AVAudioEngine.InputContext?
+ private let playerNode = AVAudioPlayerNode()
+ private let mixerNode = AVAudioMixerNode()
+ private let audioConverter = AudioConverter()
+
+ // MARK: - Called from AudioBufferCapturer
+
+ /// Enqueues a ReplayKit audio sample buffer for playback.
+ func enqueue(_ sampleBuffer: CMSampleBuffer) {
+ guard
+ let context
+ else {
+ return
+ }
+
+ let info = sampleBuffer.rmsAndPeak
+
+ guard
+ !info.isSilent
+ else {
+ return
+ }
+
+ guard
+ let inputBuffer = AVAudioPCMBuffer.from(sampleBuffer)
+ else {
+ return
+ }
+
+ guard
+ let outputBuffer = audioConverter.convertIfRequired(
+ inputBuffer,
+ to: context.format
+ )
+ else {
+ return
+ }
+
+ self.enqueue(outputBuffer)
+ }
+
+ // MARK: - Called from AudioDeviceModule
+
+ /// Configures the renderer with the active audio engine input context.
+ func configure(
+ with newContext: AVAudioEngine.InputContext?
+ ) {
+ if context?.engine !== newContext?.engine, context != nil {
+ reset()
+ }
+
+ self.context = newContext
+
+ guard let context else {
+ log.debug("Configured with nil context ")
+ return
+ }
+
+ #if STREAM_TESTS
+ // Avoid making changes to AVAudioEngine instances during tests as they
+ // cause crashes.
+ #else
+ attachIfNeeded(playerNode, to: context.engine)
+ attachIfNeeded(mixerNode, to: context.engine)
+
+ context.engine.disconnectNodeOutput(playerNode)
+ context.engine.disconnectNodeOutput(mixerNode)
+
+ if let source = context.source {
+ context.engine.disconnectNodeOutput(source)
+ context.engine.connect(
+ source,
+ to: mixerNode,
+ format: context.format
+ )
+ context.engine.connect(
+ playerNode,
+ to: mixerNode,
+ format: context.format
+ )
+ context.engine.connect(
+ mixerNode,
+ to: context.destination,
+ format: context.format
+ )
+ } else {
+ context.engine.connect(
+ playerNode,
+ to: context.destination,
+ format: context.format
+ )
+ }
+ #endif
+
+ log.debug("Configured with non-nil context and playerNode.engine is not nil.")
+ }
+
+ /// Stops playback without tearing down the engine graph.
+ func stop() {
+ guard playerNode.isPlaying else {
+ return
+ }
+ playerNode.stop()
+ }
+
+ /// Resets the graph and converter state for a new session.
+ func reset() {
+ guard
+ let engine = context?.engine
+ else {
+ context = nil
+        log.debug("Reset completed.")
+ return
+ }
+
+ playerNode.stop()
+ audioConverter.reset()
+ #if STREAM_TESTS
+ // Avoid making changes to AVAudioEngine instances during tests as they
+ // cause crashes.
+ #else
+ engine.disconnectNodeOutput(playerNode)
+ engine.disconnectNodeOutput(mixerNode)
+ detachIfNeeded(playerNode, from: engine)
+ detachIfNeeded(mixerNode, from: engine)
+ #endif
+ context = nil
+        log.debug("Reset completed.")
+ }
+
+ // MARK: - Private Helpers
+
+ private func playIfRequired() {
+ guard
+ let context
+ else {
+ log.warning("Context is nil. PlayerNode cannot start playing.")
+ return
+ }
+
+ guard !playerNode.isPlaying else {
+ return
+ }
+
+ guard
+ playerNode.engine != nil,
+ playerNode.engine === context.engine
+ else {
+ log
+ .warning(
+ "PlayerNode cannot start playing playerNode.engine:\(playerNode.engine != nil) context.engine == playerNode.engine:\(playerNode.engine === context.engine)"
+ )
+ return
+ }
+ playerNode.play()
+ log.debug("PlayerNode started playing")
+ }
+
+ private func enqueue(_ buffer: AVAudioPCMBuffer) {
+ let info = buffer.rmsAndPeak
+
+ guard !info.isSilent else {
+ return
+ }
+
+ playIfRequired()
+
+ guard playerNode.isPlaying else {
+ return
+ }
+
+ playerNode.scheduleBuffer(buffer, completionHandler: nil)
+
+ log.debug(
+ "AVAudioPCMBuffer:\(buffer) with info:\(info) was enqueued.",
+ subsystems: .audioRecording
+ )
+ }
+
+ private func attachIfNeeded(
+ _ node: AVAudioNode,
+ to engine: AVAudioEngine
+ ) {
+ let isAttached = engine.attachedNodes.contains { $0 === node }
+ if !isAttached {
+ engine.attach(node)
+ }
+ }
+
+ private func detachIfNeeded(
+ _ node: AVAudioNode,
+ from engine: AVAudioEngine
+ ) {
+ let isAttached = engine.attachedNodes.contains { $0 === node }
+ if isAttached {
+ engine.detach(node)
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioEngine+InputContext.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioEngine+InputContext.swift
new file mode 100644
index 000000000..abfa6f96d
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioEngine+InputContext.swift
@@ -0,0 +1,36 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension AVAudioEngine {
+ /// Captures the input wiring for an audio engine.
+ ///
+ /// This snapshot is passed across components so they can attach nodes using
+ /// the same engine instance and connect them with a consistent format.
+ struct InputContext: Equatable {
+ /// The engine that owns the input/output graph.
+ var engine: AVAudioEngine
+ /// The optional upstream node that feeds the input chain.
+ var source: AVAudioNode?
+ /// The node that receives the input stream for rendering.
+ var destination: AVAudioNode
+ /// The audio format that the graph expects.
+ var format: AVAudioFormat
+
+ static func == (
+ lhs: InputContext,
+ rhs: InputContext
+ ) -> Bool {
+ // Engine identity must match to avoid cross-engine wiring.
+ lhs.engine === rhs.engine
+            // Nodes are compared with NSObject equality (pointer identity
+            // by default for AVAudioNode).
+ && lhs.source == rhs.source
+ && lhs.destination == rhs.destination
+ // Formats must match to avoid converter mismatches.
+ && lhs.format == rhs.format
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer+CustomStringConvertible.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer+CustomStringConvertible.swift
new file mode 100644
index 000000000..43a967556
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer+CustomStringConvertible.swift
@@ -0,0 +1,47 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+
+extension AVAudioPCMBuffer {
+ /// A debug-friendly summary of the buffer contents.
+ override open var description: String {
+ // Build a compact, readable representation for logs and debugging.
+ var result = "{"
+ // Pointer identity helps correlate buffers across logs.
+ result += " address:\(Unmanaged.passUnretained(self).toOpaque())"
+ // Include the full format to capture sample rate and layout details.
+ result += ", format:\(self.format)"
+ // Channel count is used to reason about mono vs stereo paths.
+ result += ", channelCount:\(self.format.channelCount)"
+ // Common format highlights float vs int and bit depth.
+ result += ", commonFormat:\(self.format.commonFormat)"
+ // Interleaving affects how samples are packed in memory.
+ result += ", isInterleaved:\(self.format.isInterleaved)"
+ // Float channel data is non-nil for float formats.
+ result += ", floatChannelData:"
+ result += "\(String(describing: self.floatChannelData))"
+ // Int16 channel data is non-nil for 16-bit integer formats.
+ result += ", int16ChannelData:"
+ result += "\(String(describing: self.int16ChannelData))"
+ result += " }"
+ return result
+ }
+}
+
+extension CMSampleBuffer {
+ /// A debug-friendly summary of the sample buffer.
+ public var description: String {
+ // Build a compact, readable representation for logs and debugging.
+ var result = "{"
+ // Pointer identity helps correlate buffers across logs.
+ result += " address:\(Unmanaged.passUnretained(self).toOpaque())"
+ // Include the resolved audio format when available.
+ result += ", format:\(String(describing: self.format))"
+ // Channel count provides quick context for layout.
+ result += ", channelCount:\(self.format?.mChannelsPerFrame ?? 0)"
+ result += " }"
+ return result
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer+Info.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer+Info.swift
new file mode 100644
index 000000000..b94662f4d
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer+Info.swift
@@ -0,0 +1,166 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Accelerate
+import AudioToolbox
+import AVFoundation
+import Foundation
+
+extension AVAudioPCMBuffer {
+ /// RMS and peak levels computed from a PCM buffer.
+ ///
+ /// Values are stored in linear amplitude and converted to decibels on
+ /// demand. This keeps computation lightweight while exposing log-friendly
+ /// values when needed.
+ struct RMSAndPeakInfo: Equatable, CustomStringConvertible {
+ /// Root-mean-square amplitude in linear scale.
+ var rms: Float
+ /// Peak amplitude in linear scale.
+ var peak: Float
+
+ /// RMS converted to decibels with a floor to avoid log(0).
+ var rmsDb: Float { 20 * log10(max(rms, 1.0e-7)) }
+ /// Peak converted to decibels with a floor to avoid log(0).
+ var peakDb: Float { 20 * log10(max(peak, 1.0e-7)) }
+ /// A conservative silence threshold in decibels.
+ var isSilent: Bool { rmsDb <= -60 }
+
+ var description: String {
+ "{ rms:\(rms), peak:\(peak), rmsDb:\(rmsDb), " +
+ "peakDb:\(peakDb), isSilent:\(isSilent) }"
+ }
+
+ static let empty = Self(rms: 0, peak: 0)
+ }
+
+ /// RMS and peak information computed from the buffer samples.
+ var rmsAndPeak: RMSAndPeakInfo {
+ // Convert to Int once to avoid repeated casting.
+ let frameLength = Int(self.frameLength)
+ // Empty buffers cannot yield meaningful audio statistics.
+ guard frameLength > 0 else {
+ return .empty
+ }
+
+ // Only linear PCM formats are supported for raw sample access.
+ let streamDescription = format.streamDescription
+ guard
+ streamDescription.pointee.mFormatID == kAudioFormatLinearPCM
+ else {
+ return .empty
+ }
+
+ // Extract format flags that describe sample layout and encoding.
+ let formatFlags = streamDescription.pointee.mFormatFlags
+ // Endianness matters for integer data conversion.
+ let isBigEndian = (formatFlags & kAudioFormatFlagIsBigEndian) != 0
+ // Float indicates samples are already normalized floats.
+ let isFloat = (formatFlags & kAudioFormatFlagIsFloat) != 0
+ // Signed integer indicates PCM int samples need scaling.
+ let isSignedInt = (formatFlags & kAudioFormatFlagIsSignedInteger) != 0
+ // Bit depth drives bytes-per-sample and conversion strategy.
+ let bitsPerChannel = Int(streamDescription.pointee.mBitsPerChannel)
+ // Bytes per sample is used to compute sample counts.
+ let bytesPerSample = bitsPerChannel / 8
+ guard bytesPerSample > 0 else {
+ return .empty
+ }
+
+ // Aggregate RMS and peak across channels by keeping maxima.
+ var rms: Float = 0
+ var peak: Float = 0
+ // Use the underlying buffer list to access each channel buffer.
+ let bufferList = UnsafeMutableAudioBufferListPointer(
+ self.mutableAudioBufferList
+ )
+
+ for buffer in bufferList {
+ // Skip buffers with no data.
+ guard let mData = buffer.mData else {
+ continue
+ }
+
+ if isFloat && bitsPerChannel == 32 {
+ // Sample count is derived from byte size and sample width.
+ let sampleCount = Int(buffer.mDataByteSize) / bytesPerSample
+ // No samples means no statistics for this channel.
+ guard sampleCount > 0 else {
+ continue
+ }
+ // Treat memory as float samples for vDSP routines.
+ let floatPtr = mData.assumingMemoryBound(to: Float.self)
+ var chRms: Float = 0
+ var chPeak: Float = 0
+ // RMS over the channel samples.
+ vDSP_rmsqv(floatPtr, 1, &chRms, vDSP_Length(sampleCount))
+ // Peak magnitude over the channel samples.
+ vDSP_maxmgv(floatPtr, 1, &chPeak, vDSP_Length(sampleCount))
+ // Keep the max across channels for a conservative summary.
+ rms = max(rms, chRms)
+ peak = max(peak, chPeak)
+
+ } else if isSignedInt && bitsPerChannel == 16 {
+ // Sample count is derived from byte size and sample width.
+ let sampleCount = Int(buffer.mDataByteSize) / bytesPerSample
+ guard sampleCount > 0 else {
+ continue
+ }
+ // Interpret raw data as signed 16-bit PCM.
+ let intPtr = mData.assumingMemoryBound(to: Int16.self)
+ // Convert to float so vDSP can operate efficiently.
+ var floatData = [Float](repeating: 0, count: sampleCount)
+ if isBigEndian {
+ // Swap endianness to native before conversion.
+ var swapped = [Int16](repeating: 0, count: sampleCount)
+                    for index in 0..<sampleCount {
+                        swapped[index] = Int16(bigEndian: intPtr[index])
+                    }
+                    // Convert the byte-swapped samples to float.
+                    vDSP_vflt16(swapped, 1, &floatData, 1, vDSP_Length(sampleCount))
+                } else {
+                    // Convert int16 samples to float.
+                    vDSP_vflt16(intPtr, 1, &floatData, 1, vDSP_Length(sampleCount))
+                }
+                // Normalize to [-1, 1] using Int16 max value.
+                var scale: Float = 1.0 / Float(Int16.max)
+                vDSP_vsmul(
+                    floatData,
+                    1,
+                    &scale,
+                    &floatData,
+                    1,
+                    vDSP_Length(sampleCount)
+                )
+                var chRms: Float = 0
+                var chPeak: Float = 0
+                // RMS over normalized samples.
+                vDSP_rmsqv(floatData, 1, &chRms, vDSP_Length(sampleCount))
+                // Peak magnitude over normalized samples.
+                vDSP_maxmgv(floatData, 1, &chPeak, vDSP_Length(sampleCount))
+                // Keep the max across channels for a conservative summary.
+                rms = max(rms, chRms)
+                peak = max(peak, chPeak)
+            } else {
+                // Unsupported formats return an empty summary.
+                return .empty
+            }
+        }
+
+        // Return the aggregate RMS and peak values.
+        return .init(rms: rms, peak: peak)
+    }
+}
+
+extension CMSampleBuffer {
+    /// RMS and peak information computed from the sample buffer.
+    ///
+    /// NOTE(review): this span was reconstructed from a corrupted diff
+    /// (sanitizer stripped an `<`-to-`>` range) — verify against the
+    /// upstream commit before applying.
+    var rmsAndPeak: AVAudioPCMBuffer.RMSAndPeakInfo {
+        // Resolve the audio format description for the samples.
+        guard
+            let formatDescription = CMSampleBufferGetFormatDescription(self),
+            let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(
+                formatDescription
+            )?.pointee
+        else {
+            return .empty
+        }
+
+        // Only linear PCM formats are supported for raw sample access.
+        guard asbd.mFormatID == kAudioFormatLinearPCM else {
+            return .empty
+        }
+
+        // Extract format flags that describe sample layout and encoding.
+        let formatFlags = asbd.mFormatFlags
+        // Float indicates samples are already normalized floats.
+        let isFloat = (formatFlags & kAudioFormatFlagIsFloat) != 0
+        // Signed integer indicates PCM int samples need scaling.
+        let isSignedInt = (formatFlags & kAudioFormatFlagIsSignedInteger) != 0
+        // Bit depth drives bytes-per-sample and conversion strategy.
+        let bitsPerChannel = Int(asbd.mBitsPerChannel)
+        // Bytes per sample is used to compute sample counts.
+        let bytesPerSample = bitsPerChannel / 8
+        guard bytesPerSample > 0 else { return .empty }
+
+ // First call obtains the required AudioBufferList size.
+ var bufferListSizeNeeded = 0
+ var status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
+ self,
+ bufferListSizeNeededOut: &bufferListSizeNeeded,
+ bufferListOut: nil,
+ bufferListSize: 0,
+ blockBufferAllocator: nil,
+ blockBufferMemoryAllocator: nil,
+ flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
+ blockBufferOut: nil
+ )
+ guard status == noErr else { return .empty }
+
+ // Allocate a buffer list with correct alignment for AudioBufferList.
+ let rawPointer = UnsafeMutableRawPointer.allocate(
+ byteCount: bufferListSizeNeeded,
+            alignment: MemoryLayout<AudioBufferList>.alignment
+ )
+ defer { rawPointer.deallocate() }
+
+ // Bind the raw memory to AudioBufferList for Core Media APIs.
+ let audioBufferListPointer = rawPointer.bindMemory(
+ to: AudioBufferList.self,
+ capacity: 1
+ )
+
+ // Second call fills the buffer list and retains the block buffer.
+ var blockBuffer: CMBlockBuffer?
+ status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
+ self,
+ bufferListSizeNeededOut: nil,
+ bufferListOut: audioBufferListPointer,
+ bufferListSize: bufferListSizeNeeded,
+ blockBufferAllocator: nil,
+ blockBufferMemoryAllocator: nil,
+ flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
+ blockBufferOut: &blockBuffer
+ )
+ guard status == noErr else { return .empty }
+
+ // Iterate each AudioBuffer in the list.
+ let bufferList = UnsafeMutableAudioBufferListPointer(
+ audioBufferListPointer
+ )
+ // Aggregate RMS and peak across channels by keeping maxima.
+ var rms: Float = 0
+ var peak: Float = 0
+
+ for buffer in bufferList {
+ // Skip buffers with no data.
+ guard let mData = buffer.mData else { continue }
+ // Sample count is derived from bytes and sample width.
+ let sampleCount = Int(buffer.mDataByteSize) / bytesPerSample
+ guard sampleCount > 0 else { continue }
+
+ if isFloat && bitsPerChannel == 32 {
+ // Treat memory as float samples for vDSP routines.
+ let floatPtr = mData.assumingMemoryBound(to: Float.self)
+ var chRms: Float = 0
+ var chPeak: Float = 0
+ // RMS over the channel samples.
+ vDSP_rmsqv(floatPtr, 1, &chRms, vDSP_Length(sampleCount))
+ // Peak magnitude over the channel samples.
+ vDSP_maxmgv(floatPtr, 1, &chPeak, vDSP_Length(sampleCount))
+ // Keep the max across channels for a conservative summary.
+ rms = max(rms, chRms)
+ peak = max(peak, chPeak)
+ } else if isSignedInt && bitsPerChannel == 16 {
+ // Interpret raw data as signed 16-bit PCM.
+ let intPtr = mData.assumingMemoryBound(to: Int16.self)
+ // Convert to float so vDSP can operate efficiently.
+ var floatData = [Float](repeating: 0, count: sampleCount)
+ // Convert int16 samples to float.
+ vDSP_vflt16(intPtr, 1, &floatData, 1, vDSP_Length(sampleCount))
+ // Normalize to [-1, 1] using Int16 max value.
+ var scale: Float = 1.0 / Float(Int16.max)
+ vDSP_vsmul(
+ floatData,
+ 1,
+ &scale,
+ &floatData,
+ 1,
+ vDSP_Length(sampleCount)
+ )
+ var chRms: Float = 0
+ var chPeak: Float = 0
+ // RMS over normalized samples.
+ vDSP_rmsqv(floatData, 1, &chRms, vDSP_Length(sampleCount))
+ // Peak magnitude over normalized samples.
+ vDSP_maxmgv(floatData, 1, &chPeak, vDSP_Length(sampleCount))
+ // Keep the max across channels for a conservative summary.
+ rms = max(rms, chRms)
+ peak = max(peak, chPeak)
+ } else {
+ // Unsupported formats return an empty summary.
+ return .empty
+ }
+ }
+
+ // Return the aggregate RMS and peak values.
+ return .init(rms: rms, peak: peak)
+ }
+}
diff --git a/Sources/StreamVideo/Utils/Swift6Migration/CustomStringConvertible+Retroactive.swift b/Sources/StreamVideo/Utils/Swift6Migration/CustomStringConvertible+Retroactive.swift
index ea426c872..c25d72fd7 100644
--- a/Sources/StreamVideo/Utils/Swift6Migration/CustomStringConvertible+Retroactive.swift
+++ b/Sources/StreamVideo/Utils/Swift6Migration/CustomStringConvertible+Retroactive.swift
@@ -22,6 +22,7 @@ extension RTCSignalingState: @retroactive CustomStringConvertible {}
extension RTCDataChannelState: @retroactive CustomStringConvertible {}
extension RTCBundlePolicy: @retroactive CustomStringConvertible {}
extension RTCContinualGatheringPolicy: @retroactive CustomStringConvertible {}
+extension CMSampleBuffer: @retroactive CustomStringConvertible {}
#else
extension AVAudioSession.Category: CustomStringConvertible {}
extension AVAudioSession.CategoryOptions: CustomStringConvertible {}
@@ -38,4 +39,5 @@ extension RTCSignalingState: CustomStringConvertible {}
extension RTCDataChannelState: CustomStringConvertible {}
extension RTCBundlePolicy: CustomStringConvertible {}
extension RTCContinualGatheringPolicy: CustomStringConvertible {}
+extension CMSampleBuffer: CustomStringConvertible {}
#endif
diff --git a/Sources/StreamVideo/Utils/Swift6Migration/Sendable+Extensions.swift b/Sources/StreamVideo/Utils/Swift6Migration/Sendable+Extensions.swift
index ed35639de..7592ee4f0 100644
--- a/Sources/StreamVideo/Utils/Swift6Migration/Sendable+Extensions.swift
+++ b/Sources/StreamVideo/Utils/Swift6Migration/Sendable+Extensions.swift
@@ -28,6 +28,8 @@ extension Published.Publisher: @retroactive @unchecked Sendable {}
extension RTCVideoFrame: @retroactive @unchecked Sendable {}
extension AnyPublisher: @retroactive @unchecked Sendable {}
extension Publishers.Filter: @retroactive @unchecked Sendable {}
+/// Allows audio buffers to cross concurrency boundaries.
+extension AVAudioPCMBuffer: @retroactive @unchecked Sendable {}
#else
extension AnyCancellable: @unchecked Sendable {}
extension AVCaptureDevice: @unchecked Sendable {}
@@ -48,4 +50,6 @@ extension Published.Publisher: @unchecked Sendable {}
extension RTCVideoFrame: @unchecked Sendable {}
extension AnyPublisher: @unchecked Sendable {}
extension Publishers.Filter: @unchecked Sendable {}
+/// Allows audio buffers to cross concurrency boundaries.
+extension AVAudioPCMBuffer: @unchecked Sendable {}
#endif
diff --git a/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift b/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift
index 47c8f735f..ef801bd08 100644
--- a/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift
+++ b/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift
@@ -7,23 +7,26 @@ import StreamWebRTC
extension RTCMediaConstraints {
+ /// Optional constraints shared by default and ICE restart configurations.
+ private static let commonOptionalConstraints: [String: String] = [
+ "DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue,
+ /// Added support for Google's media constraints to improve transmitted audio
+ /// https://github.com/GetStream/react-native-webrtc/pull/20/commits/6476119456005dc35ba00e9bf4d4c4124c6066e8
+ "googAutoGainControl": kRTCMediaConstraintsValueTrue,
+ "googNoiseSuppression": kRTCMediaConstraintsValueTrue,
+ "googEchoCancellation": kRTCMediaConstraintsValueTrue,
+ "googHighpassFilter": kRTCMediaConstraintsValueTrue,
+ "googTypingNoiseDetection": kRTCMediaConstraintsValueTrue,
+ "googAudioMirroring": kRTCMediaConstraintsValueFalse
+ ]
+
nonisolated(unsafe) static let defaultConstraints = RTCMediaConstraints(
mandatoryConstraints: nil,
- optionalConstraints: [
- "DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue,
- /// Added support for Google's media constraints to improve transmitted audio
- /// https://github.com/GetStream/react-native-webrtc/pull/20/commits/6476119456005dc35ba00e9bf4d4c4124c6066e8
- "googAutoGainControl": kRTCMediaConstraintsValueTrue,
- "googNoiseSuppression": kRTCMediaConstraintsValueTrue,
- "googEchoCancellation": kRTCMediaConstraintsValueTrue,
- "googHighpassFilter": kRTCMediaConstraintsValueTrue,
- "googTypingNoiseDetection": kRTCMediaConstraintsValueTrue,
- "googAudioMirroring": kRTCMediaConstraintsValueFalse
- ]
+ optionalConstraints: commonOptionalConstraints
)
nonisolated(unsafe) static let iceRestartConstraints = RTCMediaConstraints(
mandatoryConstraints: [kRTCMediaConstraintsIceRestart: kRTCMediaConstraintsValueTrue],
- optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue]
+ optionalConstraints: commonOptionalConstraints
)
}
diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift
index 39ed1cdc4..ea7142820 100644
--- a/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift
+++ b/Sources/StreamVideo/WebRTC/VideoCapturing/VideoCapturerProviding.swift
@@ -13,29 +13,38 @@ protocol VideoCapturerProviding {
/// Builds a camera capturer with the specified source.
///
- /// - Parameter source: The video source for the capturer, responsible for
- /// providing captured frames.
+ /// - Parameters:
+ /// - source: The video source for the capturer, responsible for
+ /// providing captured frames.
+ /// - audioDeviceModule: The audio device module used by the capturer.
/// - Returns: An instance of `StreamVideoCapturer` for managing camera-based
/// video capturing.
///
/// This method is used for creating a video capturer for a camera input,
/// which can be further configured to process video frames.
func buildCameraCapturer(
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule
) -> StreamVideoCapturing
/// Builds a screen capturer based on the specified type and source.
///
/// - Parameters:
/// - type: The type of screen sharing (`.inApp` or `.broadcast`).
- /// - source: The video source for the capturer, providing the captured frames.
+ /// - source: The video source for the capturer, providing the captured
+ /// frames.
+ /// - audioDeviceModule: The audio device module used by the capturer.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
/// - Returns: An instance of `StreamVideoCapturer` for managing screen sharing.
///
/// Depending on the screen sharing type, this method creates a capturer that
/// supports either in-app screen sharing or broadcasting functionality.
func buildScreenCapturer(
_ type: ScreensharingType,
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule,
+ includeAudio: Bool
) -> StreamVideoCapturing
}
@@ -48,23 +57,33 @@ final class StreamVideoCapturerFactory: VideoCapturerProviding {
/// Creates a camera capturer using the specified video source.
///
- /// - Parameter source: The video source for the capturer, responsible for
- /// providing captured frames.
+ /// - Parameters:
+ /// - source: The video source for the capturer, responsible for
+ /// providing captured frames.
+ /// - audioDeviceModule: The audio device module used by the capturer.
/// - Returns: A `StreamVideoCapturer` instance configured for camera capturing.
///
/// This method initializes a camera capturer, suitable for use in scenarios
/// where a camera is the video input source.
func buildCameraCapturer(
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule
) -> StreamVideoCapturing {
- StreamVideoCapturer.cameraCapturer(with: source)
+ StreamVideoCapturer.cameraCapturer(
+ with: source,
+ audioDeviceModule: audioDeviceModule
+ )
}
/// Creates a screen capturer based on the provided type and source.
///
/// - Parameters:
/// - type: The type of screen sharing (`.inApp` or `.broadcast`).
- /// - source: The video source for the capturer, providing the captured frames.
+ /// - source: The video source for the capturer, providing the captured
+ /// frames.
+ /// - audioDeviceModule: The audio device module used by the capturer.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
/// - Returns: A `StreamVideoCapturer` instance configured for screen sharing.
///
/// This method dynamically creates a capturer based on the screen sharing type:
@@ -74,13 +93,22 @@ final class StreamVideoCapturerFactory: VideoCapturerProviding {
/// Use this method to support flexible screen sharing needs.
func buildScreenCapturer(
_ type: ScreensharingType,
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule,
+ includeAudio: Bool
) -> StreamVideoCapturing {
switch type {
case .inApp:
- return StreamVideoCapturer.screenShareCapturer(with: source)
+ return StreamVideoCapturer.screenShareCapturer(
+ with: source,
+ audioDeviceModule: audioDeviceModule,
+ includeAudio: includeAudio
+ )
case .broadcast:
- return StreamVideoCapturer.broadcastCapturer(with: source)
+ return StreamVideoCapturer.broadcastCapturer(
+ with: source,
+ audioDeviceModule: audioDeviceModule
+ )
}
}
}
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift
index 76c79643d..6e58b4f29 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter.swift
@@ -43,6 +43,8 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+ private let audioDeviceModule: AudioDeviceModule
+
/// The primary video track used for screen sharing.
let primaryTrack: RTCVideoTrack
@@ -63,7 +65,9 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
/// - publishOptions: Initial publishing options for video tracks.
/// - subject: A subject for publishing track-related events.
/// - screenShareSessionProvider: Provider for managing screen sharing sessions.
- /// - capturerFactory: Factory for creating video capturers. Defaults to `StreamVideoCapturerFactory`.
+ /// - capturerFactory: Factory for creating video capturers. Defaults to
+ /// `StreamVideoCapturerFactory`.
+ /// - audioDeviceModule: The audio device module used for screen share audio.
init(
sessionID: String,
peerConnection: StreamRTCPeerConnectionProtocol,
@@ -72,7 +76,8 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
publishOptions: [PublishOptions.VideoPublishOptions],
subject: PassthroughSubject,
screenShareSessionProvider: ScreenShareSessionProvider,
- capturerFactory: VideoCapturerProviding = StreamVideoCapturerFactory()
+ capturerFactory: VideoCapturerProviding = StreamVideoCapturerFactory(),
+ audioDeviceModule: AudioDeviceModule
) {
self.sessionID = sessionID
self.peerConnection = peerConnection
@@ -82,6 +87,7 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
self.subject = subject
self.screenShareSessionProvider = screenShareSessionProvider
self.capturerFactory = capturerFactory
+ self.audioDeviceModule = audioDeviceModule
// Initialize the primary track, using the existing session's local track if available.
primaryTrack = {
@@ -325,9 +331,12 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
/// - Parameters:
/// - type: The type of screen sharing to begin.
/// - ownCapabilities: The capabilities of the local participant.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
func beginScreenSharing(
of type: ScreensharingType,
- ownCapabilities: [OwnCapability]
+ ownCapabilities: [OwnCapability],
+ includeAudio: Bool
) async throws {
guard ownCapabilities.contains(.screenshare) else {
try await stopScreenShareCapturingSession()
@@ -336,7 +345,8 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
try await configureActiveScreenShareSession(
screenSharingType: type,
- track: primaryTrack
+ track: primaryTrack,
+ includeAudio: includeAudio
)
try await startScreenShareCapturingSession()
@@ -407,21 +417,27 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
/// - Parameters:
/// - screenSharingType: The type of screen sharing.
/// - track: The video track to use for the session.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
private func configureActiveScreenShareSession(
screenSharingType: ScreensharingType,
- track: RTCVideoTrack
+ track: RTCVideoTrack,
+ includeAudio: Bool
) async throws {
if screenShareSessionProvider.activeSession == nil {
let videoCapturer = capturerFactory.buildScreenCapturer(
screenSharingType,
- source: track.source
+ source: track.source,
+ audioDeviceModule: audioDeviceModule,
+ includeAudio: includeAudio
)
capturer = videoCapturer
screenShareSessionProvider.activeSession = .init(
localTrack: track,
screenSharingType: screenSharingType,
- capturer: videoCapturer
+ capturer: videoCapturer,
+ includeAudio: includeAudio
)
} else if
let activeSession = screenShareSessionProvider.activeSession,
@@ -430,14 +446,17 @@ final class LocalScreenShareMediaAdapter: LocalMediaAdapting, @unchecked Sendabl
let videoCapturer = capturerFactory.buildScreenCapturer(
screenSharingType,
- source: track.source
+ source: track.source,
+ audioDeviceModule: audioDeviceModule,
+ includeAudio: includeAudio
)
capturer = videoCapturer
screenShareSessionProvider.activeSession = .init(
localTrack: track,
screenSharingType: screenSharingType,
- capturer: videoCapturer
+ capturer: videoCapturer,
+ includeAudio: includeAudio
)
}
}
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift
index b96d852e6..f923cca78 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter.swift
@@ -58,6 +58,8 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable {
/// The capturer responsible for capturing video frames.
private var capturer: StreamVideoCapturing?
+ private let audioDeviceModule: AudioDeviceModule
+
/// A publisher that emits events related to video track changes.
let subject: PassthroughSubject
@@ -81,7 +83,9 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable {
/// - publishOptions: Initial publish options for the video track.
/// - subject: A publisher for track-related events.
/// - capturerFactory: A factory for creating video capturers. Defaults to `StreamVideoCapturerFactory`.
- /// - videoCaptureSessionProvider: A provider for managing video capture sessions.
+ /// - videoCaptureSessionProvider: A provider for managing video capture
+ /// sessions.
+ /// - audioDeviceModule: The audio device module used by video capture.
init(
sessionID: String,
peerConnection: StreamRTCPeerConnectionProtocol,
@@ -92,7 +96,8 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable {
publishOptions: [PublishOptions.VideoPublishOptions],
subject: PassthroughSubject,
capturerFactory: VideoCapturerProviding = StreamVideoCapturerFactory(),
- videoCaptureSessionProvider: VideoCaptureSessionProvider
+ videoCaptureSessionProvider: VideoCaptureSessionProvider,
+ audioDeviceModule: AudioDeviceModule
) {
self.sessionID = sessionID
self.peerConnection = peerConnection
@@ -104,6 +109,7 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable {
self.subject = subject
self.capturerFactory = capturerFactory
self.videoCaptureSessionProvider = videoCaptureSessionProvider
+ self.audioDeviceModule = audioDeviceModule
backgroundMuteAdapter = .init(sessionID: sessionID, sfuAdapter: sfuAdapter)
// Initialize the primary video track, either from the active session or a new source.
@@ -623,7 +629,8 @@ final class LocalVideoMediaAdapter: LocalMediaAdapting, @unchecked Sendable {
) async throws {
if videoCaptureSessionProvider.activeSession == nil {
let cameraCapturer = capturerFactory.buildCameraCapturer(
- source: track.source
+ source: track.source,
+ audioDeviceModule: audioDeviceModule
)
capturer = cameraCapturer
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift
index 7b05b3497..2a17078d1 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/MediaAdapter.swift
@@ -42,7 +42,9 @@ final class MediaAdapter {
/// - publishOptions: The publishOptions to use for creating the initial tracks
/// - audioSession: The audio session manager.
/// - videoCaptureSessionProvider: Provides access to the active video capturing session.
- /// - screenShareSessionProvider: Provides access to the active screen sharing session.
+ /// - screenShareSessionProvider: Provides access to the active screen
+ /// sharing session.
+ /// - audioDeviceModule: The audio device module shared with capturers.
convenience init(
sessionID: String,
peerConnectionType: PeerConnectionType,
@@ -53,7 +55,8 @@ final class MediaAdapter {
videoConfig: VideoConfig,
publishOptions: PublishOptions,
videoCaptureSessionProvider: VideoCaptureSessionProvider,
- screenShareSessionProvider: ScreenShareSessionProvider
+ screenShareSessionProvider: ScreenShareSessionProvider,
+ audioDeviceModule: AudioDeviceModule
) {
let subject = PassthroughSubject()
@@ -104,7 +107,8 @@ final class MediaAdapter {
videoConfig: videoConfig,
publishOptions: publishOptions.video,
subject: subject,
- videoCaptureSessionProvider: videoCaptureSessionProvider
+ videoCaptureSessionProvider: videoCaptureSessionProvider,
+ audioDeviceModule: audioDeviceModule
),
screenShareMediaAdapter: .init(
sessionID: sessionID,
@@ -113,7 +117,8 @@ final class MediaAdapter {
sfuAdapter: sfuAdapter,
publishOptions: publishOptions.screenShare,
subject: subject,
- screenShareSessionProvider: screenShareSessionProvider
+ screenShareSessionProvider: screenShareSessionProvider,
+ audioDeviceModule: audioDeviceModule
)
)
}
@@ -339,13 +344,17 @@ final class MediaAdapter {
/// - Parameters:
/// - type: The type of screen sharing to begin.
/// - ownCapabilities: The capabilities of the local participant.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
func beginScreenSharing(
of type: ScreensharingType,
- ownCapabilities: [OwnCapability]
+ ownCapabilities: [OwnCapability],
+ includeAudio: Bool
) async throws {
try await screenShareMediaAdapter.beginScreenSharing(
of: type,
- ownCapabilities: ownCapabilities
+ ownCapabilities: ownCapabilities,
+ includeAudio: includeAudio
)
}
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift
index 1f06ee6d9..eadf4a997 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareMediaAdapter.swift
@@ -46,6 +46,7 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable {
/// - subject: A subject for publishing track events.
/// - screenShareSessionProvider: Provides access to the active screen
/// sharing session.
+ /// - audioDeviceModule: The audio device module used for screen share audio.
convenience init(
sessionID: String,
peerConnection: StreamRTCPeerConnectionProtocol,
@@ -53,7 +54,8 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable {
sfuAdapter: SFUAdapter,
publishOptions: [PublishOptions.VideoPublishOptions],
subject: PassthroughSubject,
- screenShareSessionProvider: ScreenShareSessionProvider
+ screenShareSessionProvider: ScreenShareSessionProvider,
+ audioDeviceModule: AudioDeviceModule
) {
self.init(
sessionID: sessionID,
@@ -66,7 +68,8 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable {
sfuAdapter: sfuAdapter,
publishOptions: publishOptions,
subject: subject,
- screenShareSessionProvider: screenShareSessionProvider
+ screenShareSessionProvider: screenShareSessionProvider,
+ audioDeviceModule: audioDeviceModule
),
subject: subject
)
@@ -170,9 +173,12 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable {
/// - Parameters:
/// - type: The type of screen sharing to begin.
/// - ownCapabilities: The capabilities of the local participant.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
func beginScreenSharing(
of type: ScreensharingType,
- ownCapabilities: [OwnCapability]
+ ownCapabilities: [OwnCapability],
+ includeAudio: Bool
) async throws {
guard
let localScreenShareMediaManager = localMediaManager as? LocalScreenShareMediaAdapter
@@ -182,7 +188,8 @@ final class ScreenShareMediaAdapter: MediaAdapting, @unchecked Sendable {
try await localScreenShareMediaManager.beginScreenSharing(
of: type,
- ownCapabilities: ownCapabilities
+ ownCapabilities: ownCapabilities,
+ includeAudio: includeAudio
)
}
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift
index 3c3f94f36..77a11c72c 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/ScreenShareSession.swift
@@ -15,6 +15,9 @@ struct ScreenShareSession {
/// The video capturer for the screen share.
var capturer: StreamVideoCapturing
+
+ /// Whether app audio is captured alongside the screen share.
+ var includeAudio: Bool
}
/// A class that provides and manages the active screen sharing session.
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift
index 630bbd29a..d82ca43f3 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/MediaAdapters/VideoMediaAdapter.swift
@@ -48,6 +48,7 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable {
/// - subject: A subject for publishing track events.
/// - videoCaptureSessionProvider: The provider for video capture
/// sessions.
+ /// - audioDeviceModule: The audio device module used by video capture.
convenience init(
sessionID: String,
peerConnection: StreamRTCPeerConnectionProtocol,
@@ -57,7 +58,8 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable {
videoConfig: VideoConfig,
publishOptions: [PublishOptions.VideoPublishOptions],
subject: PassthroughSubject,
- videoCaptureSessionProvider: VideoCaptureSessionProvider
+ videoCaptureSessionProvider: VideoCaptureSessionProvider,
+ audioDeviceModule: AudioDeviceModule
) {
self.init(
sessionID: sessionID,
@@ -72,7 +74,8 @@ final class VideoMediaAdapter: MediaAdapting, @unchecked Sendable {
videoConfig: videoConfig,
publishOptions: publishOptions,
subject: subject,
- videoCaptureSessionProvider: videoCaptureSessionProvider
+ videoCaptureSessionProvider: videoCaptureSessionProvider,
+ audioDeviceModule: audioDeviceModule
),
subject: subject
)
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift
index 4ce7fc55d..49ffb38cd 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/RTCPeerConnectionCoordinatorProviding.swift
@@ -27,6 +27,7 @@ protocol RTCPeerConnectionCoordinatorProviding: Sendable {
/// - screenShareSessionProvider: Provider for screen sharing functionality.
/// - clientCapabilities: A set of client capabilities that affect how the
/// coordinator behaves (e.g., enabling paused tracks support).
+ /// - audioDeviceModule: The audio device module used by media adapters.
///
/// This parameter affects features such as support for paused tracks.
/// - Returns: An initialized `RTCPeerConnectionCoordinator` instance.
@@ -43,7 +44,8 @@ protocol RTCPeerConnectionCoordinatorProviding: Sendable {
sfuAdapter: SFUAdapter,
videoCaptureSessionProvider: VideoCaptureSessionProvider,
screenShareSessionProvider: ScreenShareSessionProvider,
- clientCapabilities: Set
+ clientCapabilities: Set,
+ audioDeviceModule: AudioDeviceModule
) -> RTCPeerConnectionCoordinator
}
@@ -69,6 +71,7 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina
/// - screenShareSessionProvider: Provider for screen sharing functionality.
/// - clientCapabilities: A set of client capabilities that affect how the
/// coordinator behaves (e.g., enabling paused tracks support).
+ /// - audioDeviceModule: The audio device module used by media adapters.
///
/// This parameter affects features such as support for paused tracks.
/// - Returns: A newly created `RTCPeerConnectionCoordinator` instance.
@@ -85,7 +88,8 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina
sfuAdapter: SFUAdapter,
videoCaptureSessionProvider: VideoCaptureSessionProvider,
screenShareSessionProvider: ScreenShareSessionProvider,
- clientCapabilities: Set
+ clientCapabilities: Set,
+ audioDeviceModule: AudioDeviceModule
) -> RTCPeerConnectionCoordinator {
RTCPeerConnectionCoordinator(
sessionId: sessionId,
@@ -100,7 +104,8 @@ final class StreamRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordina
sfuAdapter: sfuAdapter,
videoCaptureSessionProvider: videoCaptureSessionProvider,
screenShareSessionProvider: screenShareSessionProvider,
- clientCapabilities: clientCapabilities
+ clientCapabilities: clientCapabilities,
+ audioDeviceModule: audioDeviceModule
)
}
}
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift
index 869cebe4a..0441fffe2 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift
@@ -130,6 +130,7 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
/// - audioSession: The audio session to be used.
/// - videoCaptureSessionProvider: Provider for video capturing sessions.
/// - screenShareSessionProvider: Provider for screen sharing sessions.
+ /// - audioDeviceModule: The audio device module used by media adapters.
/// - tracesAdapter: The adapter used to enqueue traces
convenience init(
sessionId: String,
@@ -144,7 +145,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
sfuAdapter: SFUAdapter,
videoCaptureSessionProvider: VideoCaptureSessionProvider,
screenShareSessionProvider: ScreenShareSessionProvider,
- clientCapabilities: Set
+ clientCapabilities: Set,
+ audioDeviceModule: AudioDeviceModule
) {
self.init(
sessionId: sessionId,
@@ -165,7 +167,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
videoConfig: videoConfig,
publishOptions: publishOptions,
videoCaptureSessionProvider: videoCaptureSessionProvider,
- screenShareSessionProvider: screenShareSessionProvider
+ screenShareSessionProvider: screenShareSessionProvider,
+ audioDeviceModule: audioDeviceModule
),
iceAdapter: .init(
sessionID: sessionId,
@@ -696,14 +699,18 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
/// - Parameters:
/// - type: The type of screen sharing to begin.
/// - ownCapabilities: The capabilities of the local participant.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
/// - Throws: An error if starting screen sharing fails.
func beginScreenSharing(
of type: ScreensharingType,
- ownCapabilities: [OwnCapability]
+ ownCapabilities: [OwnCapability],
+ includeAudio: Bool
) async throws {
try await mediaAdapter.beginScreenSharing(
of: type,
- ownCapabilities: ownCapabilities
+ ownCapabilities: ownCapabilities,
+ includeAudio: includeAudio
)
}
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastCaptureHandler.swift
index 6cd5119cc..5771a8a53 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastCaptureHandler.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Broadcast/BroadcastCaptureHandler.swift
@@ -22,9 +22,10 @@ final class BroadcastCaptureHandler: StreamVideoCapturerActionHandler, @unchecke
// MARK: - StreamVideoCapturerActionHandler
+ /// Handles broadcast capture actions.
func handle(_ action: StreamVideoCapturer.Action) async throws {
switch action {
- case let .startCapture(_, dimensions, _, videoSource, videoCapturer, videoCapturerDelegate):
+ case let .startCapture(_, dimensions, _, videoSource, videoCapturer, videoCapturerDelegate, _):
try await execute(
dimensions: dimensions,
videoSource: videoSource,
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCaptureHandler.swift
index c1ac315ea..bf1aff14e 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCaptureHandler.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraCaptureHandler.swift
@@ -26,9 +26,10 @@ final class CameraCaptureHandler: StreamVideoCapturerActionHandler, @unchecked S
// MARK: - StreamVideoCapturerActionHandler
+ /// Handles camera capture actions.
func handle(_ action: StreamVideoCapturer.Action) async throws {
switch action {
- case let .startCapture(position, dimensions, frameRate, videoSource, videoCapturer, videoCapturerDelegate):
+ case let .startCapture(position, dimensions, frameRate, videoSource, videoCapturer, videoCapturerDelegate, _):
guard let cameraCapturer = videoCapturer as? RTCCameraVideoCapturer else {
return
}
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraInterruptionsHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraInterruptionsHandler.swift
index 52f4afa82..8600607da 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraInterruptionsHandler.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Camera/CameraInterruptionsHandler.swift
@@ -25,10 +25,11 @@ final class CameraInterruptionsHandler: StreamVideoCapturerActionHandler, @unche
// MARK: - StreamVideoCapturerActionHandler
/// Handles camera-related actions triggered by the video capturer.
+ /// On `startCapture`, begins observing the capture session for interruptions.
func handle(_ action: StreamVideoCapturer.Action) async throws {
switch action {
/// Handle start capture event and register for interruption notifications.
- case let .startCapture(_, _, _, _, videoCapturer, _):
+ case let .startCapture(_, _, _, _, videoCapturer, _, _):
if let cameraCapturer = videoCapturer as? RTCCameraVideoCapturer {
didStartCapture(session: cameraCapturer.captureSession)
} else {
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioConverter+Convert.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioConverter+Convert.swift
new file mode 100644
index 000000000..2bf284b14
--- /dev/null
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioConverter+Convert.swift
@@ -0,0 +1,77 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AudioToolbox
+import AVFoundation
+import Foundation
+
+extension AVAudioConverter {
+
+ /// Converts an audio buffer to the requested output format.
+ func convert(
+ from inputBuffer: AVAudioPCMBuffer,
+ to outputFormat: AVAudioFormat
+ ) -> AVAudioPCMBuffer? {
+ // Frame length is stored as an integer; convert to Double for math.
+ let inputFrames = Double(inputBuffer.frameLength)
+ // Ratio between output and input sample rates drives resampling.
+ let ratio = outputFormat.sampleRate / inputBuffer.format.sampleRate
+ // Compute how many frames the output buffer must hold.
+ let outputFrameCapacity = AVAudioFrameCount(
+ max(1, ceil(inputFrames * ratio))
+ )
+
+ // Allocate the output buffer in the requested format.
+ guard let outputBuffer = AVAudioPCMBuffer(
+ pcmFormat: outputFormat,
+ frameCapacity: outputFrameCapacity
+ ) else {
+ // Allocation failure leaves no buffer to write into.
+ return nil
+ }
+
+ // Collect conversion errors from AVAudioConverter.
+ var error: NSError?
+ // Track whether we already provided the input buffer.
+ nonisolated(unsafe) var didProvideData = false
+ // Perform conversion with a synchronous input block.
+ let status = self.convert(
+ to: outputBuffer,
+ error: &error
+ ) { _, outStatus in
+ // Provide input only once; afterwards report no more data. NOTE(review): `.noDataNow` means "retry later" — `.endOfStream` is the usual terminal status for a one-shot conversion; confirm the converter drains correctly.
+ if didProvideData {
+ outStatus.pointee = .noDataNow
+ return nil
+ }
+ // Empty input means there is nothing to convert.
+ guard inputBuffer.frameLength > 0 else {
+ outStatus.pointee = .noDataNow
+ return nil
+ }
+ // Mark the input as consumed so we stop supplying it.
+ didProvideData = true
+ // Tell the converter that we supplied data.
+ outStatus.pointee = .haveData
+ // Return the single input buffer to the converter.
+ return inputBuffer
+ }
+
+ // Conversion errors are signaled by status and error.
+ if status == .error {
+ if let error {
+ log.error(error, subsystems: .videoCapturer)
+ }
+ return nil
+ }
+
+ // Zero-length output indicates conversion produced no data.
+ guard outputBuffer.frameLength > 0 else {
+ return nil
+ }
+
+ // The output buffer now contains converted audio frames.
+ return outputBuffer
+ }
+}
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioFormat+Equality.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioFormat+Equality.swift
new file mode 100644
index 000000000..edcd36431
--- /dev/null
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioFormat+Equality.swift
@@ -0,0 +1,22 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension AVAudioFormat {
+
+    /// Routes Objective-C equality checks (e.g. `NSDictionary`/`NSSet`
+    /// lookups) through the relaxed comparison defined below.
+    ///
+    /// - Note: Overriding `isEqual(_:)` from an extension replaces the
+    ///   behaviour for every `AVAudioFormat` in the process, not just the
+    ///   call sites in this module.
+    override open func isEqual(_ object: Any?) -> Bool {
+        guard let other = object as? AVAudioFormat else { return false }
+        return self == other
+    }
+
+    /// Keeps the `hash`/`isEqual` contract intact: objects that compare
+    /// equal must produce the same hash value, otherwise hashed
+    /// collections (`Set`, `NSDictionary` keys) misbehave.
+    override open var hash: Int {
+        var hasher = Hasher()
+        hasher.combine(sampleRate)
+        hasher.combine(channelCount)
+        hasher.combine(commonFormat)
+        hasher.combine(isInterleaved)
+        return hasher.finalize()
+    }
+
+    /// Compares formats by sample rate, channel count, common format, and
+    /// interleaving. Channel layout details are intentionally ignored.
+    public static func == (lhs: AVAudioFormat, rhs: AVAudioFormat) -> Bool {
+        lhs.sampleRate == rhs.sampleRate &&
+            lhs.channelCount == rhs.channelCount &&
+            lhs.commonFormat == rhs.commonFormat &&
+            lhs.isInterleaved == rhs.isInterleaved
+    }
+}
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioPCMBuffer+FromCMSampleBuffer.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioPCMBuffer+FromCMSampleBuffer.swift
new file mode 100644
index 000000000..62c4bd209
--- /dev/null
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioPCMBuffer+FromCMSampleBuffer.swift
@@ -0,0 +1,160 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AudioToolbox
+import AVFoundation
+import Foundation
+
+extension AVAudioPCMBuffer {
+
+ /// Creates a PCM buffer from an audio CMSampleBuffer when possible.
+ static func from(
+ _ source: CMSampleBuffer
+ ) -> AVAudioPCMBuffer? {
+ // Extract format information so we can build a matching PCM buffer.
+ guard
+ let formatDescription = CMSampleBufferGetFormatDescription(source),
+ let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(
+ formatDescription
+ )
+ else {
+ // Format description is required to interpret the sample buffer.
+ return nil
+ }
+
+ // Only linear PCM can be copied into AVAudioPCMBuffer.
+ guard asbd.pointee.mFormatID == kAudioFormatLinearPCM else {
+ return nil
+ }
+
+ // Capture format flags to decide how to interpret sample data.
+ let formatFlags = asbd.pointee.mFormatFlags
+ // Float data can be used directly without scaling.
+ let isFloat = (formatFlags & kAudioFormatFlagIsFloat) != 0
+ // Signed integer data needs conversion and scaling.
+ let isSignedInt = (formatFlags & kAudioFormatFlagIsSignedInteger) != 0
+ // Endianness matters when bytes are not native order.
+ let isBigEndian = (formatFlags & kAudioFormatFlagIsBigEndian) != 0
+ // Interleaving determines channel layout in memory.
+ let isInterleaved = (formatFlags
+ & kAudioFormatFlagIsNonInterleaved
+ ) == 0
+ // Bit depth drives the common PCM format choice.
+ let bitsPerChannel = Int(asbd.pointee.mBitsPerChannel)
+
+ // Choose an AVAudioCommonFormat compatible with the sample format.
+ let commonFormat: AVAudioCommonFormat
+ if isFloat, bitsPerChannel == 32 {
+ // 32-bit float is the standard ReplayKit float format.
+ commonFormat = .pcmFormatFloat32
+ } else if isSignedInt, bitsPerChannel == 16 {
+ // 16-bit signed integers are common for PCM audio.
+ commonFormat = .pcmFormatInt16
+ } else {
+ // Unsupported bit depth or type cannot be represented.
+ return nil
+ }
+
+ // Build a concrete AVAudioFormat matching the CMSampleBuffer.
+ guard
+ let inputFormat = AVAudioFormat(
+ commonFormat: commonFormat,
+ sampleRate: asbd.pointee.mSampleRate,
+ channels: asbd.pointee.mChannelsPerFrame,
+ interleaved: isInterleaved
+ )
+ else {
+ // Format construction failure prevents buffer allocation.
+ return nil
+ }
+
+ // Determine how many frames are in the sample buffer.
+ let frameCount = AVAudioFrameCount(
+ CMSampleBufferGetNumSamples(source)
+ )
+ guard
+ frameCount > 0,
+ let pcmBuffer = AVAudioPCMBuffer(
+ pcmFormat: inputFormat,
+ frameCapacity: frameCount
+ )
+ else {
+ // Frame count must be positive and buffer allocation must succeed.
+ return nil
+ }
+
+ // Update the buffer length to match the number of frames.
+ pcmBuffer.frameLength = frameCount
+
+ // Bytes per frame are needed to calculate copy sizes.
+ let bytesPerFrame = Int(asbd.pointee.mBytesPerFrame)
+ guard bytesPerFrame > 0 else {
+ return nil
+ }
+
+ // Get a mutable view over the destination AudioBufferList.
+ let destinationList = UnsafeMutableAudioBufferListPointer(
+ pcmBuffer.mutableAudioBufferList
+ )
+ // Total byte count to copy into each buffer.
+ let bytesToCopy = Int(frameCount) * bytesPerFrame
+ for index in 0...size
+ // Bind the memory to Int16 for swapping.
+ let intPtr = mData.assumingMemoryBound(to: Int16.self)
+ for index in 0...size
+ // Treat float samples as raw UInt32 for swapping.
+ let intPtr = mData.assumingMemoryBound(to: UInt32.self)
+ for index in 0.. AVAudioPCMBuffer? {
+ // When formats already match, no conversion is needed.
+ if inputBuffer.format == outputFormat {
+ return inputBuffer
+ } else {
+ // Ensure a converter exists for this specific format pair.
+ ensureCorrectConverterExists(
+ from: inputBuffer.format,
+ to: outputFormat
+ )
+ // Use the cached converter to perform the conversion.
+ return audioConverter?.convert(
+ from: inputBuffer,
+ to: outputFormat
+ )
+ }
+ }
+
+ // MARK: - Private Helpers
+
+    /// Lazily builds (or rebuilds) the cached `AVAudioConverter` so that it
+    /// always matches the requested input/output format pair.
+    ///
+    /// - Parameters:
+    ///   - inputFormat: The format of the buffers that will be converted.
+    ///   - outputFormat: The format the converter should produce.
+    ///
+    /// - Note: The format comparison relies on the relaxed `AVAudioFormat`
+    ///   equality defined in this module (sample rate, channel count,
+    ///   common format, interleaving) — TODO confirm this is the intended
+    ///   cache-invalidation granularity.
+    private func ensureCorrectConverterExists(
+        from inputFormat: AVAudioFormat,
+        to outputFormat: AVAudioFormat
+    ) {
+        // Recreate the converter when formats differ or are missing.
+        let needsNewConverter = audioConverter == nil
+            || audioConverterInputFormat != inputFormat
+            || audioConverterOutputFormat != outputFormat
+
+        // If the converter matches the formats, reuse it as-is.
+        guard needsNewConverter else {
+            return
+        }
+
+        // Create a new converter for the requested format pair.
+        // NOTE(review): `AVAudioConverter(from:to:)` is failable and can
+        // return nil for unsupported pairs; the cached formats are still
+        // updated below in that case, but the nil converter makes
+        // `needsNewConverter` true again, so the next call retries.
+        audioConverter = AVAudioConverter(
+            from: inputFormat,
+            to: outputFormat
+        )
+        // Use the highest quality sample rate conversion.
+        audioConverter?.sampleRateConverterQuality = AVAudioQuality.max
+            .rawValue
+        // Choose a high-quality algorithm for resampling.
+        audioConverter?.sampleRateConverterAlgorithm =
+            AVSampleRateConverterAlgorithm_Mastering
+        // Cache the formats that the converter was built for.
+        audioConverterInputFormat = inputFormat
+        audioConverterOutputFormat = outputFormat
+    }
+}
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareCaptureHandler.swift
index f1e465371..95ba27c1a 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareCaptureHandler.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/ScreenShareCaptureHandler.swift
@@ -2,24 +2,41 @@
// Copyright © 2025 Stream.io Inc. All rights reserved.
//
+import AudioToolbox
+import AVFoundation
import Foundation
import ReplayKit
import StreamWebRTC
final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandler, RPScreenRecorderDelegate, @unchecked Sendable {
+ @Injected(\.audioFilterProcessingModule) private var audioFilterProcessingModule
+
@Atomic private var isRecording: Bool = false
private var activeSession: Session?
private let recorder: RPScreenRecorder
+ private let includeAudio: Bool
private let disposableBag = DisposableBag()
+ private let audioProcessingQueue = DispatchQueue(
+ label: "io.getstream.screenshare.audio.processing",
+ qos: .userInitiated
+ )
+ private var audioFilterBeforeScreensharingAudio: AudioFilter?
private struct Session {
var videoCapturer: RTCVideoCapturer
var videoCapturerDelegate: RTCVideoCapturerDelegate
+ var audioDeviceModule: AudioDeviceModule
}
- init(recorder: RPScreenRecorder = .shared()) {
+ /// Creates a screen share capture handler.
+ /// - Parameters:
+ /// - recorder: The ReplayKit recorder to use. Defaults to `.shared()`.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
+ init(recorder: RPScreenRecorder = .shared(), includeAudio: Bool) {
self.recorder = recorder
+ self.includeAudio = includeAudio
super.init()
recorder.delegate = self
}
@@ -54,10 +71,19 @@ final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandle
func handle(_ action: StreamVideoCapturer.Action) async throws {
switch action {
- case let .startCapture(_, _, _, _, videoCapturer, videoCapturerDelegate):
+ case let .startCapture(
+ _,
+ _,
+ _,
+ _,
+ videoCapturer,
+ videoCapturerDelegate,
+ audioDeviceModule
+ ):
try await execute(
videoCapturer: videoCapturer,
- videoCapturerDelegate: videoCapturerDelegate
+ videoCapturerDelegate: videoCapturerDelegate,
+ audioDeviceModule: audioDeviceModule
)
case .stopCapture:
try await stop()
@@ -70,7 +96,8 @@ final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandle
private func execute(
videoCapturer: RTCVideoCapturer,
- videoCapturerDelegate: RTCVideoCapturerDelegate
+ videoCapturerDelegate: RTCVideoCapturerDelegate,
+ audioDeviceModule: AudioDeviceModule
) async throws {
guard recorder.isAvailable else {
@@ -85,21 +112,26 @@ final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandle
recorder.isMicrophoneEnabled = false
recorder.isCameraEnabled = false
- try await recorder.startCapture { [weak self] sampleBuffer, sampleBufferType, error in
- if let error {
- log.error(error, subsystems: .videoCapturer)
- } else {
- self?.didReceive(
- sampleBuffer: sampleBuffer,
- sampleBufferType: sampleBufferType,
- error: error
- )
+ audioFilterBeforeScreensharingAudio = audioFilterProcessingModule.activeAudioFilter
+ audioFilterProcessingModule.setAudioFilter(nil)
+
+ try await Task { @MainActor in
+ try await recorder.startCapture { [weak self] sampleBuffer, sampleBufferType, error in
+ if let error {
+ log.error(error, subsystems: .videoCapturer)
+ } else {
+ self?.didReceive(
+ sampleBuffer: sampleBuffer,
+ sampleBufferType: sampleBufferType
+ )
+ }
}
- }
+ }.value
activeSession = .init(
videoCapturer: videoCapturer,
- videoCapturerDelegate: videoCapturerDelegate
+ videoCapturerDelegate: videoCapturerDelegate,
+ audioDeviceModule: audioDeviceModule
)
isRecording = true
@@ -112,24 +144,41 @@ final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandle
private func didReceive(
sampleBuffer: CMSampleBuffer,
- sampleBufferType: RPSampleBufferType,
- error: Error?
+ sampleBufferType: RPSampleBufferType
) {
- guard
- let activeSession = self.activeSession
- else {
+ switch sampleBufferType {
+ case .video:
+ processVideoBuffer(sampleBuffer: sampleBuffer)
+
+ case .audioMic:
log.warning(
- "\(type(of: self)) received sample buffer but no active session was found.",
+ "\(type(of: self)) only video and appAudio sample buffers are supported. Received \(sampleBufferType).",
+ subsystems: .videoCapturer
+ )
+
+ case .audioApp:
+ if includeAudio {
+ processAudioAppBuffer(sampleBuffer: sampleBuffer)
+ } else {
+ // We don't process any audio buffers for this session.
+ }
+
+ @unknown default:
+ log.warning(
+ "\(type(of: self)) received unknown sample buffer type: \(sampleBufferType).",
subsystems: .videoCapturer
)
- return
}
+ }
+ private func processVideoBuffer(
+ sampleBuffer: CMSampleBuffer
+ ) {
guard
- sampleBufferType == .video
+ let activeSession = self.activeSession
else {
log.warning(
- "\(type(of: self)) only video sample buffers are supported. Received \(sampleBufferType).",
+ "\(type(of: self)) received sample buffer but no active session was found.",
subsystems: .videoCapturer
)
return
@@ -173,6 +222,23 @@ final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandle
)
}
+ private func processAudioAppBuffer(
+ sampleBuffer: CMSampleBuffer
+ ) {
+ guard
+ let audioDeviceModule = activeSession?.audioDeviceModule,
+ isRecording
+ else {
+ log.warning(
+ "\(type(of: self)) received sample buffer but no active session was found.",
+ subsystems: .videoCapturer
+ )
+ return
+ }
+
+ audioDeviceModule.enqueue(sampleBuffer)
+ }
+
private func stop() async throws {
guard
isRecording == true
@@ -180,6 +246,10 @@ final class ScreenShareCaptureHandler: NSObject, StreamVideoCapturerActionHandle
return
}
+ // Restore the previously disabled filter
+ audioFilterProcessingModule.setAudioFilter(audioFilterBeforeScreensharingAudio)
+ audioFilterBeforeScreensharingAudio = nil
+
try await recorder.stopCapture()
activeSession = nil
isRecording = false
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorCaptureHandler.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorCaptureHandler.swift
index 88ff9fab7..f708e6d8a 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorCaptureHandler.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/Simulator/SimulatorCaptureHandler.swift
@@ -8,9 +8,10 @@ final class SimulatorCaptureHandler: StreamVideoCapturerActionHandler, @unchecke
// MARK: - StreamVideoCapturerActionHandler
+ /// Handles simulator capture actions.
func handle(_ action: StreamVideoCapturer.Action) async throws {
switch action {
- case let .startCapture(_, _, _, _, videoCapturer, _):
+ case let .startCapture(_, _, _, _, videoCapturer, _, _):
guard let simulatorCapturer = videoCapturer as? SimulatorScreenCapturer else {
return
}
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift
index e4074b8cd..f21b4781b 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamVideoCapturer.swift
@@ -13,9 +13,15 @@ final class StreamVideoCapturer: StreamVideoCapturing {
// MARK: - Convenience Initialisers
+ /// Creates a camera capturer for the provided video source.
+ /// - Parameters:
+ /// - videoSource: The video source receiving captured frames.
+ /// - videoCaptureSession: The capture session used for camera input.
+ /// - audioDeviceModule: The audio device module for capture coordination.
static func cameraCapturer(
with videoSource: RTCVideoSource,
- videoCaptureSession: AVCaptureSession = .init()
+ videoCaptureSession: AVCaptureSession = .init(),
+ audioDeviceModule: AudioDeviceModule
) -> StreamVideoCapturer {
let videoCapturerDelegate = StreamVideoCaptureHandler(source: videoSource)
@@ -34,6 +40,7 @@ final class StreamVideoCapturer: StreamVideoCapturing {
videoSource: videoSource,
videoCapturer: videoCapturer,
videoCapturerDelegate: videoCapturerDelegate,
+ audioDeviceModule: audioDeviceModule,
actionHandlers: [
SimulatorCaptureHandler()
]
@@ -46,6 +53,7 @@ final class StreamVideoCapturer: StreamVideoCapturing {
captureSession: videoCaptureSession
),
videoCapturerDelegate: videoCapturerDelegate,
+ audioDeviceModule: audioDeviceModule,
actionHandlers: [
CameraBackgroundAccessHandler(),
CameraCaptureHandler(),
@@ -59,26 +67,41 @@ final class StreamVideoCapturer: StreamVideoCapturing {
#endif
}
+ /// Creates a screen sharing capturer for the provided video source.
+ /// - Parameters:
+ /// - videoSource: The video source receiving captured frames.
+ /// - audioDeviceModule: The audio device module for capture coordination.
+ /// - includeAudio: Whether to capture app audio during screen sharing.
+ /// Only valid for `.inApp`; ignored otherwise.
static func screenShareCapturer(
- with videoSource: RTCVideoSource
+ with videoSource: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule,
+ includeAudio: Bool
) -> StreamVideoCapturer {
.init(
videoSource: videoSource,
videoCapturer: RTCVideoCapturer(delegate: videoSource),
videoCapturerDelegate: videoSource,
+ audioDeviceModule: audioDeviceModule,
actionHandlers: [
- ScreenShareCaptureHandler()
+ ScreenShareCaptureHandler(includeAudio: includeAudio)
]
)
}
+ /// Creates a broadcast capturer for the provided video source.
+ /// - Parameters:
+ /// - videoSource: The video source receiving captured frames.
+ /// - audioDeviceModule: The audio device module for capture coordination.
static func broadcastCapturer(
- with videoSource: RTCVideoSource
+ with videoSource: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule
) -> StreamVideoCapturer {
.init(
videoSource: videoSource,
videoCapturer: RTCVideoCapturer(delegate: videoSource),
videoCapturerDelegate: videoSource,
+ audioDeviceModule: audioDeviceModule,
actionHandlers: [
BroadcastCaptureHandler()
]
@@ -95,7 +118,8 @@ final class StreamVideoCapturer: StreamVideoCapturing {
frameRate: Int,
videoSource: RTCVideoSource,
videoCapturer: RTCVideoCapturer,
- videoCapturerDelegate: RTCVideoCapturerDelegate
+ videoCapturerDelegate: RTCVideoCapturerDelegate,
+ audioDeviceModule: AudioDeviceModule
)
case stopCapture(videoCapturer: RTCVideoCapturer)
case setCameraPosition(
@@ -141,8 +165,25 @@ final class StreamVideoCapturer: StreamVideoCapturing {
case let .checkBackgroundCameraAccess(videoCaptureSession):
return ".checkBackgroundCameraAccess(videoCaptureSession:\(customString(for: videoCaptureSession)))"
- case let .startCapture(position, dimensions, frameRate, videoSource, videoCapturer, videoCapturerDelegate):
- return ".startCapture(position:\(position), dimensions:\(dimensions), frameRate:\(frameRate), videoSource:\(customString(for: videoSource)), videoCapturer:\(customString(for: videoCapturer)), videoCapturerDelegate:\(customString(for: videoCapturerDelegate)))"
+ case let .startCapture(
+ position,
+ dimensions,
+ frameRate,
+ videoSource,
+ videoCapturer,
+ videoCapturerDelegate,
+ audioDeviceModule
+ ):
+ var result = ".startCapture {"
+ result += " position:\(position)"
+ result += ", dimensions:\(dimensions)"
+ result += ", frameRate:\(frameRate)"
+ result += ", videoSource:\(customString(for: videoSource))"
+ result += ", videoCapturer:\(customString(for: videoCapturer))"
+ result += ", videoCapturerDelegate:\(customString(for: videoCapturerDelegate))"
+ result += ", audioDeviceModule:\(audioDeviceModule))"
+ result += " }"
+ return result
case let .stopCapture(videoCapturer):
return ".stopCapture(videoCapturer:\(customString(for: videoCapturer)))"
@@ -179,6 +220,7 @@ final class StreamVideoCapturer: StreamVideoCapturing {
private let videoSource: RTCVideoSource
private let videoCapturer: RTCVideoCapturer
private let videoCapturerDelegate: RTCVideoCapturerDelegate
+ private let audioDeviceModule: AudioDeviceModule
private let actionHandlers: [StreamVideoCapturerActionHandler]
private let disposableBag = DisposableBag()
private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
@@ -198,11 +240,13 @@ final class StreamVideoCapturer: StreamVideoCapturing {
videoSource: RTCVideoSource,
videoCapturer: RTCVideoCapturer,
videoCapturerDelegate: RTCVideoCapturerDelegate,
+ audioDeviceModule: AudioDeviceModule,
actionHandlers: [StreamVideoCapturerActionHandler]
) {
self.videoSource = videoSource
self.videoCapturer = videoCapturer
self.videoCapturerDelegate = videoCapturerDelegate
+ self.audioDeviceModule = audioDeviceModule
self.actionHandlers = actionHandlers
}
@@ -240,7 +284,8 @@ final class StreamVideoCapturer: StreamVideoCapturing {
frameRate: frameRate,
videoSource: videoSource,
videoCapturer: videoCapturer,
- videoCapturerDelegate: videoCapturerDelegate
+ videoCapturerDelegate: videoCapturerDelegate,
+ audioDeviceModule: audioDeviceModule
)
)
}
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
index 969cf7d0a..b48deda47 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
@@ -237,13 +237,18 @@ final class WebRTCCoordinator: @unchecked Sendable {
/// Starts screensharing of the specified type.
///
- /// - Parameter type: The type of screensharing.
+ /// - Parameters:
+ /// - type: The type of screensharing.
+ /// - includeAudio: Whether to capture app audio during screensharing.
+ /// Only valid for `.inApp`; ignored otherwise.
func startScreensharing(
- type: ScreensharingType
+ type: ScreensharingType,
+ includeAudio: Bool
) async throws {
try await stateAdapter.publisher?.beginScreenSharing(
of: type,
- ownCapabilities: Array(stateAdapter.ownCapabilities)
+ ownCapabilities: Array(stateAdapter.ownCapabilities),
+ includeAudio: includeAudio
)
}
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
index 88d409d43..256d5e72f 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
@@ -308,7 +308,8 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
sfuAdapter: sfuAdapter,
videoCaptureSessionProvider: videoCaptureSessionProvider,
screenShareSessionProvider: screenShareSessionProvider,
- clientCapabilities: clientCapabilities
+ clientCapabilities: clientCapabilities,
+ audioDeviceModule: peerConnectionFactory.audioDeviceModule
)
let subscriber = rtcPeerConnectionCoordinatorFactory.buildCoordinator(
@@ -327,7 +328,8 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
sfuAdapter: sfuAdapter,
videoCaptureSessionProvider: videoCaptureSessionProvider,
screenShareSessionProvider: screenShareSessionProvider,
- clientCapabilities: clientCapabilities
+ clientCapabilities: clientCapabilities,
+ audioDeviceModule: peerConnectionFactory.audioDeviceModule
)
publisher
@@ -425,6 +427,8 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
/// Restores screen sharing if an active session exists.
///
+ /// Restores app audio capture when the active session requests it.
+ ///
/// - Throws: Throws an error if the screen sharing session cannot be
/// restored.
func restoreScreenSharing() async throws {
@@ -433,7 +437,8 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
}
try await publisher?.beginScreenSharing(
of: activeSession.screenSharingType,
- ownCapabilities: Array(ownCapabilities)
+ ownCapabilities: Array(ownCapabilities),
+ includeAudio: activeSession.includeAudio
)
}
diff --git a/Sources/StreamVideoSwiftUI/CallViewModel.swift b/Sources/StreamVideoSwiftUI/CallViewModel.swift
index 250992bf2..a210739d5 100644
--- a/Sources/StreamVideoSwiftUI/CallViewModel.swift
+++ b/Sources/StreamVideoSwiftUI/CallViewModel.swift
@@ -648,12 +648,17 @@ open class CallViewModel: ObservableObject {
}
}
- public func startScreensharing(type: ScreensharingType) {
+ /// Starts screensharing for the current call.
+ /// - Parameters:
+ /// - type: The screensharing type (in-app or broadcasting).
+ /// - includeAudio: Whether to capture app audio during screensharing.
+ /// Only valid for `.inApp`; ignored otherwise.
+ public func startScreensharing(type: ScreensharingType, includeAudio: Bool = true) {
Task(disposableBag: disposableBag, priority: .userInitiated) { [weak self] in
guard let self else { return }
do {
await disablePictureInPictureIfRequired(type)
- try await call?.startScreensharing(type: type)
+ try await call?.startScreensharing(type: type, includeAudio: includeAudio)
} catch {
log.error(error)
}
diff --git a/Sources/StreamVideoSwiftUI/Info.plist b/Sources/StreamVideoSwiftUI/Info.plist
index ec8879a18..539de2234 100644
--- a/Sources/StreamVideoSwiftUI/Info.plist
+++ b/Sources/StreamVideoSwiftUI/Info.plist
@@ -15,7 +15,7 @@
CFBundlePackageType
$(PRODUCT_BUNDLE_PACKAGE_TYPE)
CFBundleShortVersionString
- 1.38.2
+ 1.39.0
CFBundleVersion
$(CURRENT_PROJECT_VERSION)
NSHumanReadableCopyright
diff --git a/Sources/StreamVideoUIKit/Info.plist b/Sources/StreamVideoUIKit/Info.plist
index ec8879a18..539de2234 100644
--- a/Sources/StreamVideoUIKit/Info.plist
+++ b/Sources/StreamVideoUIKit/Info.plist
@@ -15,7 +15,7 @@
CFBundlePackageType
$(PRODUCT_BUNDLE_PACKAGE_TYPE)
CFBundleShortVersionString
- 1.38.2
+ 1.39.0
CFBundleVersion
$(CURRENT_PROJECT_VERSION)
NSHumanReadableCopyright
diff --git a/StreamVideo-XCFramework.podspec b/StreamVideo-XCFramework.podspec
index 883d694e7..28f83dab6 100644
--- a/StreamVideo-XCFramework.podspec
+++ b/StreamVideo-XCFramework.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideo-XCFramework'
- spec.version = '1.38.2'
+ spec.version = '1.39.0'
spec.summary = 'StreamVideo iOS Video Client'
spec.description = 'StreamVideo is the official Swift client for Stream Video, a service for building video applications.'
diff --git a/StreamVideo.podspec b/StreamVideo.podspec
index 983484c48..56ffc93d1 100644
--- a/StreamVideo.podspec
+++ b/StreamVideo.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideo'
- spec.version = '1.38.2'
+ spec.version = '1.39.0'
spec.summary = 'StreamVideo iOS Video Client'
spec.description = 'StreamVideo is the official Swift client for Stream Video, a service for building video applications.'
diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj
index 7628dedf5..057117667 100644
--- a/StreamVideo.xcodeproj/project.pbxproj
+++ b/StreamVideo.xcodeproj/project.pbxproj
@@ -626,6 +626,29 @@
40B284DF2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B284DE2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift */; };
40B284E12D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B284E02D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift */; };
40B284E32D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B284E22D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift */; };
+ 40B2F3CC2EFA91FF0067BACE /* AVAudioPCMBuffer+FromCMSampleBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3CB2EFA91FF0067BACE /* AVAudioPCMBuffer+FromCMSampleBuffer.swift */; };
+ 40B2F3CE2EFA94170067BACE /* AVAudioConverter+Convert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3CD2EFA94170067BACE /* AVAudioConverter+Convert.swift */; };
+ 40B2F3D12EFA94E20067BACE /* AudioConverter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3D02EFA94E20067BACE /* AudioConverter.swift */; };
+ 40B2F3D32EFA95400067BACE /* AVAudioFormat+Equality.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3D22EFA95400067BACE /* AVAudioFormat+Equality.swift */; };
+ 40B2F3D52EFA97490067BACE /* AudioBufferRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3D42EFA97490067BACE /* AudioBufferRenderer.swift */; };
+ 40B2F3D82EFA97BB0067BACE /* AVAudioEngine+InputContext.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3D72EFA97BB0067BACE /* AVAudioEngine+InputContext.swift */; };
+ 40B2F3DD2EFAA3C00067BACE /* track_2.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 40B2F3DC2EFAA3C00067BACE /* track_2.mp3 */; };
+ 40B2F3DE2EFAA3C00067BACE /* track_1.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 40B2F3DB2EFAA3C00067BACE /* track_1.mp3 */; };
+ 40B2F3E12EFAA3E20067BACE /* AudioTrackPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3E02EFAA3E20067BACE /* AudioTrackPlayer.swift */; };
+ 40B2F3E22EFAA3E20067BACE /* AudioTrackPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3E02EFAA3E20067BACE /* AudioTrackPlayer.swift */; };
+ 40B2F3F42EFAD43C0067BACE /* AVAudioPCMBuffer+Info.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3F32EFAD43C0067BACE /* AVAudioPCMBuffer+Info.swift */; };
+ 40B2F3F72EFAD65F0067BACE /* AVAudioPCMBuffer+CustomStringConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3F62EFAD65F0067BACE /* AVAudioPCMBuffer+CustomStringConvertible.swift */; };
+ 40B2F3FA2EFAD8AB0067BACE /* CMSampleBuffer+Info.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3F92EFAD8AB0067BACE /* CMSampleBuffer+Info.swift */; };
+ 40B2F3FD2EFBF4790067BACE /* DemoAudioTrackButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3FC2EFBF4790067BACE /* DemoAudioTrackButtonView.swift */; };
+ 40B2F3FE2EFBF4790067BACE /* DemoAudioTrackButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3FC2EFBF4790067BACE /* DemoAudioTrackButtonView.swift */; };
+ 40B2F4032EFC0F310067BACE /* CMSampleBuffer_Info_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4012EFC0F310067BACE /* CMSampleBuffer_Info_Tests.swift */; };
+ 40B2F4042EFC0F310067BACE /* AVAudioPCMBuffer_Info_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4002EFC0F310067BACE /* AVAudioPCMBuffer_Info_Tests.swift */; };
+ 40B2F4052EFC0F310067BACE /* AVAudioEngine_InputContext_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F3FF2EFC0F310067BACE /* AVAudioEngine_InputContext_Tests.swift */; };
+ 40B2F40E2EFC0F660067BACE /* AVAudioConverter_Convert_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4072EFC0F660067BACE /* AVAudioConverter_Convert_Tests.swift */; };
+ 40B2F40F2EFC0F660067BACE /* AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4092EFC0F660067BACE /* AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift */; };
+ 40B2F4102EFC0F660067BACE /* AVAudioFormat_Equality_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4082EFC0F660067BACE /* AVAudioFormat_Equality_Tests.swift */; };
+ 40B2F4112EFC0F660067BACE /* AudioConverter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4062EFC0F660067BACE /* AudioConverter_Tests.swift */; };
+ 40B2F4142EFC0FE40067BACE /* AudioBufferRenderer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B2F4122EFC0FE40067BACE /* AudioBufferRenderer_Tests.swift */; };
40B31AA82D10594F005FB448 /* PublishOptions+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B31AA72D10594F005FB448 /* PublishOptions+Dummy.swift */; };
40B31AA92D10594F005FB448 /* PublishOptions+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B31AA72D10594F005FB448 /* PublishOptions+Dummy.swift */; };
40B3E53C2DBBAF9500DE8F50 /* ProximityMonitor_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B3E53B2DBBAF9500DE8F50 /* ProximityMonitor_Tests.swift */; };
@@ -2318,6 +2341,27 @@
40B284DE2D5241FA0064C1FE /* AVAudioSessionMode+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionMode+Convenience.swift"; sourceTree = ""; };
40B284E02D52422A0064C1FE /* AVAudioSessionPortOverride+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionPortOverride+Convenience.swift"; sourceTree = ""; };
40B284E22D52423B0064C1FE /* AVAudioSessionCategory+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionCategory+Convenience.swift"; sourceTree = ""; };
+ 40B2F3CB2EFA91FF0067BACE /* AVAudioPCMBuffer+FromCMSampleBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioPCMBuffer+FromCMSampleBuffer.swift"; sourceTree = ""; };
+ 40B2F3CD2EFA94170067BACE /* AVAudioConverter+Convert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioConverter+Convert.swift"; sourceTree = ""; };
+ 40B2F3D02EFA94E20067BACE /* AudioConverter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = AudioConverter.swift; path = Sources/StreamVideo/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AudioConverter.swift; sourceTree = SOURCE_ROOT; };
+ 40B2F3D22EFA95400067BACE /* AVAudioFormat+Equality.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioFormat+Equality.swift"; sourceTree = ""; };
+ 40B2F3D42EFA97490067BACE /* AudioBufferRenderer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioBufferRenderer.swift; sourceTree = ""; };
+ 40B2F3D72EFA97BB0067BACE /* AVAudioEngine+InputContext.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioEngine+InputContext.swift"; sourceTree = ""; };
+ 40B2F3DB2EFAA3C00067BACE /* track_1.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = track_1.mp3; sourceTree = ""; };
+ 40B2F3DC2EFAA3C00067BACE /* track_2.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = track_2.mp3; sourceTree = ""; };
+ 40B2F3E02EFAA3E20067BACE /* AudioTrackPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioTrackPlayer.swift; sourceTree = ""; };
+ 40B2F3F32EFAD43C0067BACE /* AVAudioPCMBuffer+Info.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioPCMBuffer+Info.swift"; sourceTree = ""; };
+ 40B2F3F62EFAD65F0067BACE /* AVAudioPCMBuffer+CustomStringConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioPCMBuffer+CustomStringConvertible.swift"; sourceTree = ""; };
+ 40B2F3F92EFAD8AB0067BACE /* CMSampleBuffer+Info.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMSampleBuffer+Info.swift"; sourceTree = ""; };
+ 40B2F3FC2EFBF4790067BACE /* DemoAudioTrackButtonView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoAudioTrackButtonView.swift; sourceTree = ""; };
+ 40B2F3FF2EFC0F310067BACE /* AVAudioEngine_InputContext_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioEngine_InputContext_Tests.swift; sourceTree = ""; };
+ 40B2F4002EFC0F310067BACE /* AVAudioPCMBuffer_Info_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioPCMBuffer_Info_Tests.swift; sourceTree = ""; };
+ 40B2F4012EFC0F310067BACE /* CMSampleBuffer_Info_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CMSampleBuffer_Info_Tests.swift; sourceTree = ""; };
+ 40B2F4062EFC0F660067BACE /* AudioConverter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioConverter_Tests.swift; sourceTree = ""; };
+ 40B2F4072EFC0F660067BACE /* AVAudioConverter_Convert_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioConverter_Convert_Tests.swift; sourceTree = ""; };
+ 40B2F4082EFC0F660067BACE /* AVAudioFormat_Equality_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioFormat_Equality_Tests.swift; sourceTree = ""; };
+ 40B2F4092EFC0F660067BACE /* AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift; sourceTree = ""; };
+ 40B2F4122EFC0FE40067BACE /* AudioBufferRenderer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioBufferRenderer_Tests.swift; sourceTree = ""; };
40B31AA72D10594F005FB448 /* PublishOptions+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "PublishOptions+Dummy.swift"; sourceTree = ""; };
40B3E53B2DBBAF9500DE8F50 /* ProximityMonitor_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProximityMonitor_Tests.swift; sourceTree = ""; };
40B3E53F2DBBB6D900DE8F50 /* MockProximityMonitor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockProximityMonitor.swift; sourceTree = ""; };
@@ -3968,6 +4012,7 @@
4030E5912A9DF3E6003E8CBA /* Resources */ = {
isa = PBXGroup;
children = (
+ 40B2F3DA2EFAA3B90067BACE /* audio-tracks */,
406AF2042AF3DE4000ED4D0C /* test.mp4 */,
4030E5932A9DF439003E8CBA /* Assets */,
4030E5922A9DF3ED003E8CBA /* Entitlements */,
@@ -4035,6 +4080,7 @@
4030E5962A9DF48C003E8CBA /* Components */ = {
isa = PBXGroup;
children = (
+ 40B2F3DF2EFAA3D40067BACE /* AudioTrackPlayer */,
4000629E2EDF0CB90086E14B /* VideoFilters */,
4014F1042D8C2EFE004E7EFD /* Gleap */,
845C098F2C0E0B6B00F725B3 /* SessionTimer */,
@@ -4499,6 +4545,7 @@
406B3C072C8F602D00FC93A1 /* v2 */ = {
isa = PBXGroup;
children = (
+ 40B2F40D2EFC0F660067BACE /* VideoCapturing */,
40D36AC12DDDF0F400972D75 /* WebRTCTrackStorage_Tests.swift */,
40FAAC952DDCCFD4007BF93A /* Stats */,
402B34C12DCDF96500574663 /* UpdateSubscriptions */,
@@ -4704,6 +4751,7 @@
409145F12B68FEF0007F3C17 /* MoreControls */ = {
isa = PBXGroup;
children = (
+ 40B2F3FC2EFBF4790067BACE /* DemoAudioTrackButtonView.swift */,
408722382E13CD9D006A68CB /* DemoMoreThermalStateButtonView.swift */,
40BBC47A2C6227DF002AEF92 /* Extensions */,
409145F22B68FEF0007F3C17 /* DemoMoreControlsViewModifier.swift */,
@@ -5236,6 +5284,113 @@
path = Audio;
sourceTree = "";
};
+ 40B2F3CA2EFA91E30067BACE /* Extensions */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3CB2EFA91FF0067BACE /* AVAudioPCMBuffer+FromCMSampleBuffer.swift */,
+ 40B2F3CD2EFA94170067BACE /* AVAudioConverter+Convert.swift */,
+ 40B2F3D22EFA95400067BACE /* AVAudioFormat+Equality.swift */,
+ );
+ path = Extensions;
+ sourceTree = "";
+ };
+ 40B2F3CF2EFA94DB0067BACE /* Components */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3D02EFA94E20067BACE /* AudioConverter.swift */,
+ );
+ path = Components;
+ sourceTree = "";
+ };
+ 40B2F3D62EFA97AB0067BACE /* Extensions */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3D72EFA97BB0067BACE /* AVAudioEngine+InputContext.swift */,
+ 40B2F3F32EFAD43C0067BACE /* AVAudioPCMBuffer+Info.swift */,
+ 40B2F3F62EFAD65F0067BACE /* AVAudioPCMBuffer+CustomStringConvertible.swift */,
+ 40B2F3F92EFAD8AB0067BACE /* CMSampleBuffer+Info.swift */,
+ );
+ path = Extensions;
+ sourceTree = "";
+ };
+ 40B2F3D92EFA9D150067BACE /* Components */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3D42EFA97490067BACE /* AudioBufferRenderer.swift */,
+ );
+ path = Components;
+ sourceTree = "";
+ };
+ 40B2F3DA2EFAA3B90067BACE /* audio-tracks */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3DB2EFAA3C00067BACE /* track_1.mp3 */,
+ 40B2F3DC2EFAA3C00067BACE /* track_2.mp3 */,
+ );
+ path = "audio-tracks";
+ sourceTree = "";
+ };
+ 40B2F3DF2EFAA3D40067BACE /* AudioTrackPlayer */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3E02EFAA3E20067BACE /* AudioTrackPlayer.swift */,
+ );
+ path = AudioTrackPlayer;
+ sourceTree = "";
+ };
+ 40B2F4022EFC0F310067BACE /* Extensions */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F3FF2EFC0F310067BACE /* AVAudioEngine_InputContext_Tests.swift */,
+ 40B2F4002EFC0F310067BACE /* AVAudioPCMBuffer_Info_Tests.swift */,
+ 40B2F4012EFC0F310067BACE /* CMSampleBuffer_Info_Tests.swift */,
+ );
+ path = Extensions;
+ sourceTree = "";
+ };
+ 40B2F40A2EFC0F660067BACE /* Extensions */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F4062EFC0F660067BACE /* AudioConverter_Tests.swift */,
+ 40B2F4072EFC0F660067BACE /* AVAudioConverter_Convert_Tests.swift */,
+ 40B2F4082EFC0F660067BACE /* AVAudioFormat_Equality_Tests.swift */,
+ 40B2F4092EFC0F660067BACE /* AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift */,
+ );
+ path = Extensions;
+ sourceTree = "";
+ };
+ 40B2F40B2EFC0F660067BACE /* ScreenShare */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F40A2EFC0F660067BACE /* Extensions */,
+ );
+ path = ScreenShare;
+ sourceTree = "";
+ };
+ 40B2F40C2EFC0F660067BACE /* ActionHandlers */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F40B2EFC0F660067BACE /* ScreenShare */,
+ );
+ path = ActionHandlers;
+ sourceTree = "";
+ };
+ 40B2F40D2EFC0F660067BACE /* VideoCapturing */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F40C2EFC0F660067BACE /* ActionHandlers */,
+ );
+ path = VideoCapturing;
+ sourceTree = "";
+ };
+ 40B2F4132EFC0FE40067BACE /* Components */ = {
+ isa = PBXGroup;
+ children = (
+ 40B2F4122EFC0FE40067BACE /* AudioBufferRenderer_Tests.swift */,
+ );
+ path = Components;
+ sourceTree = "";
+ };
40B3E5392DBBAF8B00DE8F50 /* Proximity */ = {
isa = PBXGroup;
children = (
@@ -5892,6 +6047,8 @@
40E1C89E2EA1176200AC3647 /* AudioDeviceModule */ = {
isa = PBXGroup;
children = (
+ 40B2F4132EFC0FE40067BACE /* Components */,
+ 40B2F4022EFC0F310067BACE /* Extensions */,
40EF61A22ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift */,
40E1C89F2EA1176C00AC3647 /* AudioDeviceModule_Tests.swift */,
);
@@ -6036,6 +6193,8 @@
40E3636A2D0A24280028C52A /* ScreenShare */ = {
isa = PBXGroup;
children = (
+ 40B2F3CF2EFA94DB0067BACE /* Components */,
+ 40B2F3CA2EFA91E30067BACE /* Extensions */,
40E3636B2D0A24310028C52A /* ScreenShareCaptureHandler.swift */,
);
path = ScreenShare;
@@ -6060,6 +6219,8 @@
40EE9D2A2E969F010000EA92 /* AudioDeviceModule */ = {
isa = PBXGroup;
children = (
+ 40B2F3D92EFA9D150067BACE /* Components */,
+ 40B2F3D62EFA97AB0067BACE /* Extensions */,
40E1C89A2EA1156600AC3647 /* RTCAudioDeviceModuleControlling.swift */,
40EE9D282E969F010000EA92 /* AudioDeviceModule.swift */,
40EE9D292E969F010000EA92 /* AudioEngineLevelNodeAdapter.swift */,
@@ -8310,6 +8471,8 @@
842C7EBF28A2B3FA00C2AB7F /* Assets.xcassets in Resources */,
844ADA672AD3F21000769F6A /* GoogleSignIn.plist in Resources */,
4046DEE92A9E381F00CA6D2F /* AppIntentVocabulary.plist in Resources */,
+ 40B2F3DD2EFAA3C00067BACE /* track_2.mp3 in Resources */,
+ 40B2F3DE2EFAA3C00067BACE /* track_1.mp3 in Resources */,
842D8BDB2865B37800801910 /* Assets.xcassets in Resources */,
40ED6D4B2B14F0E600FB5F69 /* Launch Screen.storyboard in Resources */,
);
@@ -8472,6 +8635,7 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
+ 40B2F3E22EFAA3E20067BACE /* AudioTrackPlayer.swift in Sources */,
40AB353B2B738B5700E465CC /* DemoSnapshotViewModel.swift in Sources */,
4091460D2B690AC9007F3C17 /* DemoParticipantOptionsButton.swift in Sources */,
8456E6C4287EB43A004E180E /* LoginViewModel.swift in Sources */,
@@ -8527,6 +8691,7 @@
409145FD2B68FEF0007F3C17 /* DemoLocalViewModifier.swift in Sources */,
409145FF2B68FEF0007F3C17 /* ChangeEnvironmentViewModifier.swift in Sources */,
401A64B12A9DF83200534ED1 /* TokenResponse.swift in Sources */,
+ 40B2F3FE2EFBF4790067BACE /* DemoAudioTrackButtonView.swift in Sources */,
409145FA2B68FEF0007F3C17 /* ThermalStateViewModifier.swift in Sources */,
40F445DB2A9E2276004BE3DA /* DemoCallContentView.swift in Sources */,
4091460B2B690AA4007F3C17 /* CallButtonView.swift in Sources */,
@@ -8600,6 +8765,7 @@
405687AE2D78A0E700093B98 /* QRCodeView.swift in Sources */,
4029A6272AB069320065DAFB /* DemoChatViewFactory.swift in Sources */,
40AB354A2B738C5100E465CC /* ThermalStateViewModifier.swift in Sources */,
+ 40B2F3E12EFAA3E20067BACE /* AudioTrackPlayer.swift in Sources */,
406A8E952AA1D7CB001F598A /* AddUserView.swift in Sources */,
406A8EB02AA1D80C001F598A /* DeeplinkAdapter.swift in Sources */,
40AB354E2B738C5A00E465CC /* DemoLocalViewModifier.swift in Sources */,
@@ -8610,6 +8776,7 @@
845C09952C10A7D700F725B3 /* SessionTimer.swift in Sources */,
8493224F2908378A0013C029 /* AppDelegate.swift in Sources */,
40AB354D2B738C5A00E465CC /* DemoVideoCallParticipantModifier.swift in Sources */,
+ 40B2F3FD2EFBF4790067BACE /* DemoAudioTrackButtonView.swift in Sources */,
84E4F7922947476400DD4CE3 /* CallViewHelper.swift in Sources */,
406A8EA92AA1D80C001F598A /* ReactionsAdapter.swift in Sources */,
4029A6262AB0692C0065DAFB /* DemoChatViewModel+Injection.swift in Sources */,
@@ -8706,6 +8873,7 @@
40C2B5B62C2B605A00EC2C2D /* DisposableBag.swift in Sources */,
4159F1802C86FA41002B94D3 /* PushNotificationSettingsResponse.swift in Sources */,
84F73858287C1A3400A363F4 /* ConnectionState.swift in Sources */,
+ 40B2F3D52EFA97490067BACE /* AudioBufferRenderer.swift in Sources */,
841BAA3F2BD15CDE000C73E4 /* QueryCallStatsRequest.swift in Sources */,
8478EB13288A054B00525538 /* VideoConfig.swift in Sources */,
841BAA372BD15CDE000C73E4 /* Coordinates.swift in Sources */,
@@ -8793,6 +8961,7 @@
40C9E4482C94743800802B28 /* Stream_Video_Sfu_Signal_TrackSubscriptionDetails+Convenience.swift in Sources */,
4045D9D92DAD4BD50077A660 /* CallSettingsResponse+SettingsPriority.swift in Sources */,
40034C282CFE156800A318B1 /* CallKitAvailabilityPolicy.swift in Sources */,
+ 40B2F3F72EFAD65F0067BACE /* AVAudioPCMBuffer+CustomStringConvertible.swift in Sources */,
40F1016C2D5A654300C49481 /* DefaultAudioSessionPolicy.swift in Sources */,
840042C92A6FF9A200917B30 /* BroadcastConstants.swift in Sources */,
84F73854287C1A2D00A363F4 /* InjectedValuesExtensions.swift in Sources */,
@@ -8810,6 +8979,7 @@
8456E6D6287EC343004E180E /* LogFormatter.swift in Sources */,
43217A0C2A44A28B002B5857 /* ConnectionErrorEvent.swift in Sources */,
40151F932E74410400326540 /* AudioProcessingStore+Namespace.swift in Sources */,
+ 40B2F3D32EFA95400067BACE /* AVAudioFormat+Equality.swift in Sources */,
40BBC49E2C623D03002AEF92 /* RTCIceConnectionState+CustomStringConvertible.swift in Sources */,
84A7E18C288363AC00526C98 /* EventNotificationCenter.swift in Sources */,
40E363492D09F6BB0028C52A /* StreamVideoCapturer.swift in Sources */,
@@ -8821,6 +8991,7 @@
842E70D02B91BE1700D2D68B /* ClosedCaptionEvent.swift in Sources */,
40E3632E2D09DBFA0028C52A /* Int+DefaultValues.swift in Sources */,
4039088D2EC2311A00B19FA1 /* StoreEffect.swift in Sources */,
+ 40B2F3CC2EFA91FF0067BACE /* AVAudioPCMBuffer+FromCMSampleBuffer.swift in Sources */,
40E363382D09E6560028C52A /* Array+Prepare.swift in Sources */,
842D3B5829F667660051698A /* CreateDeviceRequest.swift in Sources */,
84BBF62B28AFC24000387A02 /* PeerConnectionFactory.swift in Sources */,
@@ -8984,6 +9155,8 @@
8458872E28A4EC1F002A81BF /* CallSettings.swift in Sources */,
84DC389829ADFCFD00946713 /* VideoSettingsRequest.swift in Sources */,
404098C92DDF45ED00D7BEC5 /* Protobuf+SelectiveEncodable.swift in Sources */,
+ 40B2F3FA2EFAD8AB0067BACE /* CMSampleBuffer+Info.swift in Sources */,
+ 40B2F3CE2EFA94170067BACE /* AVAudioConverter+Convert.swift in Sources */,
846E4AE429CDE0B0003733AB /* OwnCapability.swift in Sources */,
840042C72A6FF20B00917B30 /* BroadcastObserver.swift in Sources */,
40944D292E532D4100088AF0 /* StoreDelay.swift in Sources */,
@@ -9040,6 +9213,7 @@
40E18AAD2CD51E5900A65C9F /* RecursiveQueue.swift in Sources */,
842B8E242A2DFED900863A87 /* CallSessionParticipantJoinedEvent.swift in Sources */,
40AD64B82DC16AB10077AE15 /* WebRTCTracesAdapter.swift in Sources */,
+ 40B2F3D12EFA94E20067BACE /* AudioConverter.swift in Sources */,
40BBC4D42C639371002AEF92 /* WebRTCCoordinator+Connected.swift in Sources */,
40D36AC02DDDB88200972D75 /* WebRTCStatsAdapting.swift in Sources */,
848CCCE62AB8ED8F002E83A2 /* BroadcastSettingsResponse.swift in Sources */,
@@ -9115,6 +9289,7 @@
842B8E2F2A2DFED900863A87 /* StopRecordingResponse.swift in Sources */,
40BBC4C92C639027002AEF92 /* WebRTCCoordinator+StateMachine.swift in Sources */,
84D6494329E9AD08002CA428 /* RTMPIngress.swift in Sources */,
+ 40B2F3D82EFA97BB0067BACE /* AVAudioEngine+InputContext.swift in Sources */,
84F73853287C1A2900A363F4 /* InjectedValues.swift in Sources */,
8406266B2A379CC5004B8748 /* WSEventsMiddleware.swift in Sources */,
4159F1842C86FA41002B94D3 /* Count.swift in Sources */,
@@ -9281,6 +9456,7 @@
844982492C738A830029734D /* StartRTMPBroadcastsRequest.swift in Sources */,
401A0F032AB1C1B600BE2DBD /* ThermalStateObserver.swift in Sources */,
84D2E37929DC856D001D2118 /* CallMemberAddedEvent.swift in Sources */,
+ 40B2F3F42EFAD43C0067BACE /* AVAudioPCMBuffer+Info.swift in Sources */,
40D36AD02DDDF97500972D75 /* AVAudioRecorder+Sendable.swift in Sources */,
40FB8FFA2D661F3F00F4390A /* CustomStringConvertible+Retroactive.swift in Sources */,
403CA9B42CC7BAF0001A88C2 /* VideoCodec.swift in Sources */,
@@ -9422,6 +9598,9 @@
84F58B7629EE92BF00010C4C /* UniqueValues.swift in Sources */,
40B48C512D14F7AE002C4EAB /* SDPParser_Tests.swift in Sources */,
40C4E83F2E65B6E300FC29BC /* MockDefaultAPIEndpoints.swift in Sources */,
+ 40B2F4032EFC0F310067BACE /* CMSampleBuffer_Info_Tests.swift in Sources */,
+ 40B2F4042EFC0F310067BACE /* AVAudioPCMBuffer_Info_Tests.swift in Sources */,
+ 40B2F4052EFC0F310067BACE /* AVAudioEngine_InputContext_Tests.swift in Sources */,
400062952EDEFD2D0086E14B /* Moderation+ManagerTests.swift in Sources */,
84F58B9529EEBA3900010C4C /* EquatableEvent.swift in Sources */,
40A0E9682B88E04D0089E8D3 /* CIImage_Resize_Tests.swift in Sources */,
@@ -9510,6 +9689,7 @@
84F58B8129EE9C4900010C4C /* WebSocketPingController_Delegate.swift in Sources */,
400C9FCD2D9D648100DB26DC /* RTCConfiguration_DefaultsTests.swift in Sources */,
40AB34D02C5D443F00B5B6B3 /* MockWebSocketEngine.swift in Sources */,
+ 40B2F4142EFC0FE40067BACE /* AudioBufferRenderer_Tests.swift in Sources */,
40B48C322D14D361002C4EAB /* StreamVideoSfuModelsVideoLayer_ConvenienceTests.swift in Sources */,
406B3C0F2C8F866100FC93A1 /* MockStreamCallAudioRecorder.swift in Sources */,
40C71B702E53644300733BF6 /* StreamCallAudioRecorder_ShouldRecordMiddlewareTests.swift in Sources */,
@@ -9687,6 +9867,10 @@
8469593229BB3D7500134EA0 /* SignalServer_Tests.swift in Sources */,
409CA7992BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift in Sources */,
40FAAC982DDCCFDC007BF93A /* WebRTCStatsCollector_Tests.swift in Sources */,
+ 40B2F40E2EFC0F660067BACE /* AVAudioConverter_Convert_Tests.swift in Sources */,
+ 40B2F40F2EFC0F660067BACE /* AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift in Sources */,
+ 40B2F4102EFC0F660067BACE /* AVAudioFormat_Equality_Tests.swift in Sources */,
+ 40B2F4112EFC0F660067BACE /* AudioConverter_Tests.swift in Sources */,
40D2873D2DB12E46006AD8C7 /* OwnCapabilitiesAudioSessionPolicyTests.swift in Sources */,
40AAD1832D2816ED00D10330 /* Stream_Video_Sfu_Event_ChangePublishQuality+Dummy.swift in Sources */,
40D36ACE2DDDF6BB00972D75 /* WebRTCTrace_Tests.swift in Sources */,
diff --git a/StreamVideoArtifacts.json b/StreamVideoArtifacts.json
index 703feccf7..c5cbf774d 100644
--- a/StreamVideoArtifacts.json
+++ b/StreamVideoArtifacts.json
@@ -1 +1 @@
-{"0.4.2":"https://github.com/GetStream/stream-video-swift/releases/download/0.4.2/StreamVideo-All.zip","0.5.0":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.0/StreamVideo-All.zip","0.5.1":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.1/StreamVideo-All.zip","0.5.2":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.2/StreamVideo-All.zip","0.5.3":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.3/StreamVideo-All.zip","1.0.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.0/StreamVideo-All.zip","1.0.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.1/StreamVideo-All.zip","1.0.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.2/StreamVideo-All.zip","1.0.3":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.3/StreamVideo-All.zip","1.0.4":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.4/StreamVideo-All.zip","1.0.5":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.5/StreamVideo-All.zip","1.0.6":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.6/StreamVideo-All.zip","1.0.7":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.7/StreamVideo-All.zip","1.0.8":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.8/StreamVideo-All.zip","1.0.9":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.9/StreamVideo-All.zip","1.10.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.10.0/StreamVideo-All.zip","1.11.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.11.0/StreamVideo-All.zip","1.12.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.12.0/StreamVideo-All.zip","1.13.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.13.0/StreamVideo-All.zip","1.14.0":"https://github.com/GetStream/stream-video-
swift/releases/download/1.14.0/StreamVideo-All.zip","1.14.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.14.1/StreamVideo-All.zip","1.15.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.15.0/StreamVideo-All.zip","1.16.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.16.0/StreamVideo-All.zip","1.17.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.17.0/StreamVideo-All.zip","1.18.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.18.0/StreamVideo-All.zip","1.19.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.19.0/StreamVideo-All.zip","1.19.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.19.1/StreamVideo-All.zip","1.19.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.19.2/StreamVideo-All.zip","1.20.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.20.0/StreamVideo-All.zip","1.21.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.21.0/StreamVideo-All.zip","1.21.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.21.1/StreamVideo-All.zip","1.22.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.22.0/StreamVideo-All.zip","1.22.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.22.1/StreamVideo-All.zip","1.22.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.22.2/StreamVideo-All.zip","1.24.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.24.0/StreamVideo-All.zip","1.25.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.25.0/StreamVideo-All.zip","1.26.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.26.0/StreamVideo-All.zip","1.27.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.27.0/StreamVideo-All.zip","1.27.1":"https://github.com/GetStream/stream-video-swift/releases/download/
1.27.1/StreamVideo-All.zip","1.27.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.27.2/StreamVideo-All.zip","1.28.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.28.0/StreamVideo-All.zip","1.28.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.28.1/StreamVideo-All.zip","1.29.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.29.0/StreamVideo-All.zip","1.29.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.29.1/StreamVideo-All.zip","1.30.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.30.0/StreamVideo-All.zip","1.31.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.31.0/StreamVideo-All.zip","1.32.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.32.0/StreamVideo-All.zip","1.33.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.33.0/StreamVideo-All.zip","1.34.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.34.0/StreamVideo-All.zip","1.34.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.34.1/StreamVideo-All.zip","1.34.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.34.2/StreamVideo-All.zip","1.35.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.35.0/StreamVideo-All.zip","1.36.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.36.0/StreamVideo-All.zip","1.37.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.37.0/StreamVideo-All.zip","1.38.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.38.0/StreamVideo-All.zip","1.38.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.38.1/StreamVideo-All.zip","1.38.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.38.2/StreamVideo-All.zip"}
\ No newline at end of file
+{"0.4.2":"https://github.com/GetStream/stream-video-swift/releases/download/0.4.2/StreamVideo-All.zip","0.5.0":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.0/StreamVideo-All.zip","0.5.1":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.1/StreamVideo-All.zip","0.5.2":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.2/StreamVideo-All.zip","0.5.3":"https://github.com/GetStream/stream-video-swift/releases/download/0.5.3/StreamVideo-All.zip","1.0.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.0/StreamVideo-All.zip","1.0.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.1/StreamVideo-All.zip","1.0.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.2/StreamVideo-All.zip","1.0.3":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.3/StreamVideo-All.zip","1.0.4":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.4/StreamVideo-All.zip","1.0.5":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.5/StreamVideo-All.zip","1.0.6":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.6/StreamVideo-All.zip","1.0.7":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.7/StreamVideo-All.zip","1.0.8":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.8/StreamVideo-All.zip","1.0.9":"https://github.com/GetStream/stream-video-swift/releases/download/1.0.9/StreamVideo-All.zip","1.10.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.10.0/StreamVideo-All.zip","1.11.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.11.0/StreamVideo-All.zip","1.12.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.12.0/StreamVideo-All.zip","1.13.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.13.0/StreamVideo-All.zip","1.14.0":"https://github.com/GetStream/stream-video-
swift/releases/download/1.14.0/StreamVideo-All.zip","1.14.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.14.1/StreamVideo-All.zip","1.15.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.15.0/StreamVideo-All.zip","1.16.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.16.0/StreamVideo-All.zip","1.17.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.17.0/StreamVideo-All.zip","1.18.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.18.0/StreamVideo-All.zip","1.19.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.19.0/StreamVideo-All.zip","1.19.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.19.1/StreamVideo-All.zip","1.19.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.19.2/StreamVideo-All.zip","1.20.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.20.0/StreamVideo-All.zip","1.21.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.21.0/StreamVideo-All.zip","1.21.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.21.1/StreamVideo-All.zip","1.22.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.22.0/StreamVideo-All.zip","1.22.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.22.1/StreamVideo-All.zip","1.22.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.22.2/StreamVideo-All.zip","1.24.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.24.0/StreamVideo-All.zip","1.25.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.25.0/StreamVideo-All.zip","1.26.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.26.0/StreamVideo-All.zip","1.27.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.27.0/StreamVideo-All.zip","1.27.1":"https://github.com/GetStream/stream-video-swift/releases/download/
1.27.1/StreamVideo-All.zip","1.27.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.27.2/StreamVideo-All.zip","1.28.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.28.0/StreamVideo-All.zip","1.28.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.28.1/StreamVideo-All.zip","1.29.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.29.0/StreamVideo-All.zip","1.29.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.29.1/StreamVideo-All.zip","1.30.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.30.0/StreamVideo-All.zip","1.31.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.31.0/StreamVideo-All.zip","1.32.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.32.0/StreamVideo-All.zip","1.33.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.33.0/StreamVideo-All.zip","1.34.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.34.0/StreamVideo-All.zip","1.34.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.34.1/StreamVideo-All.zip","1.34.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.34.2/StreamVideo-All.zip","1.35.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.35.0/StreamVideo-All.zip","1.36.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.36.0/StreamVideo-All.zip","1.37.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.37.0/StreamVideo-All.zip","1.38.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.38.0/StreamVideo-All.zip","1.38.1":"https://github.com/GetStream/stream-video-swift/releases/download/1.38.1/StreamVideo-All.zip","1.38.2":"https://github.com/GetStream/stream-video-swift/releases/download/1.38.2/StreamVideo-All.zip","1.39.0":"https://github.com/GetStream/stream-video-swift/releases/download/1.39.0/StreamVideo-All.z
ip"}
\ No newline at end of file
diff --git a/StreamVideoSwiftUI-XCFramework.podspec b/StreamVideoSwiftUI-XCFramework.podspec
index f44e3e9d5..503c9b2d1 100644
--- a/StreamVideoSwiftUI-XCFramework.podspec
+++ b/StreamVideoSwiftUI-XCFramework.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideoSwiftUI-XCFramework'
- spec.version = '1.38.2'
+ spec.version = '1.39.0'
spec.summary = 'StreamVideo SwiftUI Video Components'
spec.description = 'StreamVideoSwiftUI SDK offers flexible SwiftUI components able to display data provided by StreamVideo SDK.'
diff --git a/StreamVideoSwiftUI.podspec b/StreamVideoSwiftUI.podspec
index f6f99fa62..9a741522a 100644
--- a/StreamVideoSwiftUI.podspec
+++ b/StreamVideoSwiftUI.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideoSwiftUI'
- spec.version = '1.38.2'
+ spec.version = '1.39.0'
spec.summary = 'StreamVideo SwiftUI Video Components'
spec.description = 'StreamVideoSwiftUI SDK offers flexible SwiftUI components able to display data provided by StreamVideo SDK.'
diff --git a/StreamVideoTests/Controllers/CallController_Tests.swift b/StreamVideoTests/Controllers/CallController_Tests.swift
index 62676d747..aba0c92eb 100644
--- a/StreamVideoTests/Controllers/CallController_Tests.swift
+++ b/StreamVideoTests/Controllers/CallController_Tests.swift
@@ -221,7 +221,7 @@ final class CallController_Tests: StreamVideoTestCase, @unchecked Sendable {
// MARK: - startScreensharing
- func test_startScreensharing_typeIsInApp_shouldBeginScreenSharing() async throws {
+ func test_startScreensharing_typeIsInApp_includeAudioTrue_shouldBeginScreenSharing() async throws {
try await prepareAsConnected()
let ownCapabilities = [OwnCapability.createReaction]
await mockWebRTCCoordinatorFactory.mockCoordinatorStack.coordinator.stateAdapter.set(ownCapabilities: Set(ownCapabilities))
@@ -233,19 +233,20 @@ final class CallController_Tests: StreamVideoTestCase, @unchecked Sendable {
.publisher as? MockRTCPeerConnectionCoordinator
)
- try await subject.startScreensharing(type: .inApp)
+ try await subject.startScreensharing(type: .inApp, includeAudio: true)
let actual = try XCTUnwrap(
mockPublisher.recordedInputPayload(
- (ScreensharingType, [OwnCapability]).self,
+ (ScreensharingType, [OwnCapability], Bool).self,
for: .beginScreenSharing
)?.first
)
XCTAssertEqual(actual.0, .inApp)
XCTAssertEqual(actual.1, ownCapabilities)
+ XCTAssertTrue(actual.2)
}
- func test_startScreensharing_typeIsBroadcast_shouldBeginScreenSharing() async throws {
+ func test_startScreensharing_typeIsInApp_includeAudioFalse_shouldBeginScreenSharing() async throws {
try await prepareAsConnected()
let ownCapabilities = [OwnCapability.createReaction]
await mockWebRTCCoordinatorFactory.mockCoordinatorStack.coordinator.stateAdapter.set(ownCapabilities: Set(ownCapabilities))
@@ -257,16 +258,67 @@ final class CallController_Tests: StreamVideoTestCase, @unchecked Sendable {
.publisher as? MockRTCPeerConnectionCoordinator
)
- try await subject.startScreensharing(type: .broadcast)
+ try await subject.startScreensharing(type: .inApp, includeAudio: false)
let actual = try XCTUnwrap(
mockPublisher.recordedInputPayload(
- (ScreensharingType, [OwnCapability]).self,
+ (ScreensharingType, [OwnCapability], Bool).self,
+ for: .beginScreenSharing
+ )?.first
+ )
+ XCTAssertEqual(actual.0, .inApp)
+ XCTAssertEqual(actual.1, ownCapabilities)
+ XCTAssertFalse(actual.2)
+ }
+
+ func test_startScreensharing_typeIsBroadcast_includeAudioTrue_shouldBeginScreenSharing() async throws {
+ try await prepareAsConnected()
+ let ownCapabilities = [OwnCapability.createReaction]
+ await mockWebRTCCoordinatorFactory.mockCoordinatorStack.coordinator.stateAdapter.set(ownCapabilities: Set(ownCapabilities))
+ let mockPublisher = try await XCTAsyncUnwrap(
+ await mockWebRTCCoordinatorFactory
+ .mockCoordinatorStack
+ .coordinator
+ .stateAdapter
+ .publisher as? MockRTCPeerConnectionCoordinator
+ )
+
+ try await subject.startScreensharing(type: .broadcast, includeAudio: true)
+
+ let actual = try XCTUnwrap(
+ mockPublisher.recordedInputPayload(
+ (ScreensharingType, [OwnCapability], Bool).self,
+ for: .beginScreenSharing
+ )?.first
+ )
+ XCTAssertEqual(actual.0, .broadcast)
+ XCTAssertEqual(actual.1, ownCapabilities)
+ XCTAssertTrue(actual.2)
+ }
+
+ func test_startScreensharing_typeIsBroadcast_includeAudioFalse_shouldBeginScreenSharing() async throws {
+ try await prepareAsConnected()
+ let ownCapabilities = [OwnCapability.createReaction]
+ await mockWebRTCCoordinatorFactory.mockCoordinatorStack.coordinator.stateAdapter.set(ownCapabilities: Set(ownCapabilities))
+ let mockPublisher = try await XCTAsyncUnwrap(
+ await mockWebRTCCoordinatorFactory
+ .mockCoordinatorStack
+ .coordinator
+ .stateAdapter
+ .publisher as? MockRTCPeerConnectionCoordinator
+ )
+
+ try await subject.startScreensharing(type: .broadcast, includeAudio: false)
+
+ let actual = try XCTUnwrap(
+ mockPublisher.recordedInputPayload(
+ (ScreensharingType, [OwnCapability], Bool).self,
for: .beginScreenSharing
)?.first
)
XCTAssertEqual(actual.0, .broadcast)
XCTAssertEqual(actual.1, ownCapabilities)
+ XCTAssertFalse(actual.2)
}
// MARK: - stopScreensharing
diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift
index c09082dc5..11c3b5910 100644
--- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift
+++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinator.swift
@@ -49,7 +49,7 @@ final class MockRTCPeerConnectionCoordinator:
case setVideoFilter(videoFilter: VideoFilter?)
case ensureSetUpHasBeenCompleted
case setUp(settings: CallSettings, ownCapabilities: [OwnCapability])
- case beginScreenSharing(type: ScreensharingType, ownCapabilities: [OwnCapability])
+ case beginScreenSharing(type: ScreensharingType, ownCapabilities: [OwnCapability], includeAudio: Bool)
case stopScreenSharing
case focus(point: CGPoint)
case addCapturePhotoOutput(capturePhotoOutput: AVCapturePhotoOutput)
@@ -82,8 +82,8 @@ final class MockRTCPeerConnectionCoordinator:
return ()
case let .setUp(settings, ownCapabilities):
return (settings, ownCapabilities)
- case let .beginScreenSharing(type, ownCapabilities):
- return (type, ownCapabilities)
+ case let .beginScreenSharing(type, ownCapabilities, includeAudio):
+ return (type, ownCapabilities, includeAudio)
case .stopScreenSharing:
return ()
case let .focus(point):
@@ -155,7 +155,8 @@ final class MockRTCPeerConnectionCoordinator:
videoCaptureSessionProvider: VideoCaptureSessionProvider = .init(),
screenShareSessionProvider: ScreenShareSessionProvider = .init(),
iceAdapter: ICEAdapter? = nil,
- iceConnectionStateAdapter: ICEConnectionStateAdapter? = nil
+ iceConnectionStateAdapter: ICEConnectionStateAdapter? = nil,
+ audioDeviceModule: AudioDeviceModule = .init(MockRTCAudioDeviceModule())
) throws {
let peerConnectionFactory = PeerConnectionFactory.build(
audioProcessingModule: MockAudioProcessingModule.shared
@@ -182,7 +183,8 @@ final class MockRTCPeerConnectionCoordinator:
videoConfig: videoConfig,
publishOptions: publishOptions,
videoCaptureSessionProvider: videoCaptureSessionProvider,
- screenShareSessionProvider: screenShareSessionProvider
+ screenShareSessionProvider: screenShareSessionProvider,
+ audioDeviceModule: audioDeviceModule
),
iceAdapter: iceAdapter ?? .init(
sessionID: sessionId,
@@ -261,10 +263,15 @@ final class MockRTCPeerConnectionCoordinator:
override func beginScreenSharing(
of type: ScreensharingType,
- ownCapabilities: [OwnCapability]
+ ownCapabilities: [OwnCapability],
+ includeAudio: Bool
) async throws {
stubbedFunctionInput[.beginScreenSharing]?.append(
- .beginScreenSharing(type: type, ownCapabilities: ownCapabilities)
+ .beginScreenSharing(
+ type: type,
+ ownCapabilities: ownCapabilities,
+ includeAudio: includeAudio
+ )
)
}
diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift
index cbfcecd2c..6b1677e90 100644
--- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift
+++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorFactory.swift
@@ -30,7 +30,8 @@ final class MockRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinato
sfuAdapter: SFUAdapter,
videoCaptureSessionProvider: VideoCaptureSessionProvider,
screenShareSessionProvider: ScreenShareSessionProvider,
- clientCapabilities: Set<ClientCapability>
+ clientCapabilities: Set<ClientCapability>,
+ audioDeviceModule: AudioDeviceModule
) -> RTCPeerConnectionCoordinator {
stubbedBuildCoordinatorResult[peerType] ?? MockRTCPeerConnectionCoordinator(
sessionId: sessionId,
@@ -45,7 +46,8 @@ final class MockRTCPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinato
sfuAdapter: sfuAdapter,
videoCaptureSessionProvider: videoCaptureSessionProvider,
screenShareSessionProvider: screenShareSessionProvider,
- clientCapabilities: clientCapabilities
+ clientCapabilities: clientCapabilities,
+ audioDeviceModule: audioDeviceModule
)
}
}
diff --git a/StreamVideoTests/Mock/MockVideoCapturerFactory.swift b/StreamVideoTests/Mock/MockVideoCapturerFactory.swift
index 7126e5d62..dc5d52bac 100644
--- a/StreamVideoTests/Mock/MockVideoCapturerFactory.swift
+++ b/StreamVideoTests/Mock/MockVideoCapturerFactory.swift
@@ -27,19 +27,22 @@ final class MockVideoCapturerFactory: VideoCapturerProviding, Mockable, @unchecked Sendable {
enum MockFunctionInputKey: Payloadable {
case buildCameraCapturer(
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule
)
case buildScreenCapturer(
type: ScreensharingType,
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule,
+ includeAudio: Bool
)
var payload: Any {
switch self {
- case let .buildCameraCapturer(source):
- return (source)
- case let .buildScreenCapturer(type, source):
- return (type, source)
+ case let .buildCameraCapturer(source, audioDeviceModule):
+ return (source, audioDeviceModule)
+ case let .buildScreenCapturer(type, source, audioDeviceModule, includeAudio):
+ return (type, source, audioDeviceModule, includeAudio)
}
}
}
@@ -52,12 +55,14 @@ final class MockVideoCapturerFactory: VideoCapturerProviding, Mockable, @uncheck
// MARK: - VideoCapturerProviding
func buildCameraCapturer(
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule
) -> StreamVideoCapturing {
stubbedFunctionInput[.buildCameraCapturer]?
.append(
.buildCameraCapturer(
- source: source
+ source: source,
+ audioDeviceModule: audioDeviceModule
)
)
return stubbedFunction[.buildCameraCapturer] as! StreamVideoCapturing
@@ -65,13 +70,17 @@ final class MockVideoCapturerFactory: VideoCapturerProviding, Mockable, @uncheck
func buildScreenCapturer(
_ type: ScreensharingType,
- source: RTCVideoSource
+ source: RTCVideoSource,
+ audioDeviceModule: AudioDeviceModule,
+ includeAudio: Bool
) -> StreamVideoCapturing {
stubbedFunctionInput[.buildScreenCapturer]?
.append(
.buildScreenCapturer(
type: type,
- source: source
+ source: source,
+ audioDeviceModule: audioDeviceModule,
+ includeAudio: includeAudio
)
)
return stubbedFunction[.buildScreenCapturer] as! StreamVideoCapturing
diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift
index d4cad8315..013294fab 100644
--- a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift
+++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule_Tests.swift
@@ -277,6 +277,36 @@ final class AudioDeviceModule_Tests: XCTestCase, @unchecked Sendable {
XCTAssertEqual(payload?.1, 1024)
}
+ func test_configureInputFromSource_emitsEvent() async {
+ makeSubject()
+ let engine = AVAudioEngine()
+ let sourceNode = AVAudioPlayerNode()
+ let destination = AVAudioMixerNode()
+ let format = AVAudioFormat(
+ commonFormat: .pcmFormatFloat32,
+ sampleRate: 48000,
+ channels: 1,
+ interleaved: false
+ )!
+ let expectedEvent = AudioDeviceModule.Event.configureInputFromSource(
+ engine,
+ source: sourceNode,
+ destination: destination,
+ format: format
+ )
+
+ await expectEvent(expectedEvent) {
+ _ = subject.audioDeviceModule(
+ $0,
+ engine: engine,
+ configureInputFromSource: sourceNode,
+ toDestination: destination,
+ format: format,
+ context: [:]
+ )
+ }
+ }
+
func test_configureOutputFromSource_emitsEvent() async {
makeSubject()
let engine = AVAudioEngine()
diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Components/AudioBufferRenderer_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Components/AudioBufferRenderer_Tests.swift
new file mode 100644
index 000000000..f3b73526e
--- /dev/null
+++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Components/AudioBufferRenderer_Tests.swift
@@ -0,0 +1,75 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AudioBufferRenderer_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: AudioBufferRenderer!
+
+ override func setUp() {
+ super.setUp()
+ subject = AudioBufferRenderer()
+ }
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_configure_withContext_updatesStoredContext() {
+ let engine = AVAudioEngine()
+ let destination = AVAudioMixerNode()
+ let format = makeFormat()
+ let context = AVAudioEngine.InputContext(
+ engine: engine,
+ source: nil,
+ destination: destination,
+ format: format
+ )
+
+ subject.configure(with: context)
+
+ let storedContext = rendererContext(subject)
+ XCTAssertNotNil(storedContext)
+ XCTAssertTrue(storedContext?.engine === engine)
+ XCTAssertTrue(storedContext?.destination === destination)
+ }
+
+ func test_reset_clearsStoredContext() {
+ let engine = AVAudioEngine()
+ let destination = AVAudioMixerNode()
+ let context = AVAudioEngine.InputContext(
+ engine: engine,
+ source: nil,
+ destination: destination,
+ format: makeFormat()
+ )
+ subject.configure(with: context)
+
+ subject.reset()
+
+ XCTAssertNil(rendererContext(subject))
+ }
+
+ // MARK: - Helpers
+
+ private func makeFormat() -> AVAudioFormat {
+ AVAudioFormat(
+ commonFormat: .pcmFormatFloat32,
+ sampleRate: 48000,
+ channels: 1,
+ interleaved: false
+ )!
+ }
+
+ private func rendererContext(
+ _ renderer: AudioBufferRenderer
+ ) -> AVAudioEngine.InputContext? {
+ Mirror(reflecting: renderer)
+ .descendant("context") as? AVAudioEngine.InputContext
+ }
+}
diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioEngine_InputContext_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioEngine_InputContext_Tests.swift
new file mode 100644
index 000000000..6e7dd32f5
--- /dev/null
+++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioEngine_InputContext_Tests.swift
@@ -0,0 +1,91 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AVAudioEngine_InputContext_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: AVAudioEngine.InputContext!
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_equatable_withMatchingValues_returnsTrue() {
+ let engine = AVAudioEngine()
+ let source = AVAudioPlayerNode()
+ let destination = AVAudioMixerNode()
+ let format = makeFormat(sampleRate: 48000)
+
+ subject = .init(
+ engine: engine,
+ source: source,
+ destination: destination,
+ format: format
+ )
+ let other = AVAudioEngine.InputContext(
+ engine: engine,
+ source: source,
+ destination: destination,
+ format: format
+ )
+
+ XCTAssertEqual(subject, other)
+ }
+
+ func test_equatable_withDifferentEngine_returnsFalse() {
+ let engine = AVAudioEngine()
+ let destination = AVAudioMixerNode()
+ let format = makeFormat(sampleRate: 48000)
+
+ subject = .init(
+ engine: engine,
+ source: nil,
+ destination: destination,
+ format: format
+ )
+ let other = AVAudioEngine.InputContext(
+ engine: AVAudioEngine(),
+ source: nil,
+ destination: destination,
+ format: format
+ )
+
+ XCTAssertNotEqual(subject, other)
+ }
+
+ func test_equatable_withDifferentFormat_returnsFalse() {
+ let engine = AVAudioEngine()
+ let destination = AVAudioMixerNode()
+
+ subject = .init(
+ engine: engine,
+ source: nil,
+ destination: destination,
+ format: makeFormat(sampleRate: 48000)
+ )
+ let other = AVAudioEngine.InputContext(
+ engine: engine,
+ source: nil,
+ destination: destination,
+ format: makeFormat(sampleRate: 44100)
+ )
+
+ XCTAssertNotEqual(subject, other)
+ }
+
+ // MARK: - Helpers
+
+ private func makeFormat(sampleRate: Double) -> AVAudioFormat {
+ AVAudioFormat(
+ commonFormat: .pcmFormatFloat32,
+ sampleRate: sampleRate,
+ channels: 1,
+ interleaved: false
+ )!
+ }
+}
diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer_Info_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer_Info_Tests.swift
new file mode 100644
index 000000000..8e84a743c
--- /dev/null
+++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/AVAudioPCMBuffer_Info_Tests.swift
@@ -0,0 +1,110 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AVAudioPCMBuffer_Info_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: AVAudioPCMBuffer!
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_rmsAndPeak_withFloatSamples_returnsExpectedValues() {
+ subject = makeFloatBuffer(samples: [0, 0.5, -0.5, 1])
+
+ let info = subject.rmsAndPeak
+
+ XCTAssertEqual(info.peak, 1, accuracy: 0.001)
+ XCTAssertEqual(info.rms, 0.612_372, accuracy: 0.001)
+ XCTAssertFalse(info.isSilent)
+ }
+
+ func test_rmsAndPeak_withInt16Samples_returnsExpectedValues() {
+ subject = makeInt16Buffer(samples: [0, Int16.max])
+
+ let info = subject.rmsAndPeak
+
+ XCTAssertEqual(info.peak, 1, accuracy: 0.001)
+ XCTAssertEqual(info.rms, 0.707_106, accuracy: 0.001)
+ XCTAssertFalse(info.isSilent)
+ }
+
+ func test_rmsAndPeak_withZeroFrames_returnsEmpty() {
+ subject = makeFloatBuffer(samples: [0])
+ subject.frameLength = 0
+
+ let info = subject.rmsAndPeak
+
+ XCTAssertEqual(info, .empty)
+ }
+
+ func test_rmsAndPeakInfo_withLowRms_marksSilent() {
+ let info = AVAudioPCMBuffer.RMSAndPeakInfo(rms: 0.000_1, peak: 0)
+
+ XCTAssertTrue(info.isSilent)
+ }
+
+ func test_rmsAndPeakInfo_withHigherRms_marksNotSilent() {
+ let info = AVAudioPCMBuffer.RMSAndPeakInfo(rms: 0.1, peak: 0)
+
+ XCTAssertFalse(info.isSilent)
+ }
+
+ func test_description_includesFormatDetails() {
+ subject = makeFloatBuffer(samples: [0.1, -0.1])
+
+ let description = subject.description
+
+ XCTAssertTrue(description.contains("channelCount:1"))
+ XCTAssertTrue(description.contains("commonFormat:"))
+ XCTAssertTrue(description.contains("isInterleaved:"))
+ }
+
+ // MARK: - Helpers
+
+ private func makeFloatBuffer(samples: [Float]) -> AVAudioPCMBuffer {
+ let format = AVAudioFormat(
+ commonFormat: .pcmFormatFloat32,
+ sampleRate: 48000,
+ channels: 1,
+ interleaved: false
+ )!
+ let buffer = AVAudioPCMBuffer(
+ pcmFormat: format,
+ frameCapacity: AVAudioFrameCount(samples.count)
+ )!
+ buffer.frameLength = AVAudioFrameCount(samples.count)
+ if let channel = buffer.floatChannelData?.pointee {
+ for (index, sample) in samples.enumerated() {
+ channel[index] = sample
+ }
+ }
+ return buffer
+ }
+
+ private func makeInt16Buffer(samples: [Int16]) -> AVAudioPCMBuffer {
+ let format = AVAudioFormat(
+ commonFormat: .pcmFormatInt16,
+ sampleRate: 48000,
+ channels: 1,
+ interleaved: false
+ )!
+ let buffer = AVAudioPCMBuffer(
+ pcmFormat: format,
+ frameCapacity: AVAudioFrameCount(samples.count)
+ )!
+ buffer.frameLength = AVAudioFrameCount(samples.count)
+ if let channel = buffer.int16ChannelData?.pointee {
+ for (index, sample) in samples.enumerated() {
+ channel[index] = sample
+ }
+ }
+ return buffer
+ }
+}
diff --git a/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/CMSampleBuffer_Info_Tests.swift b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/CMSampleBuffer_Info_Tests.swift
new file mode 100644
index 000000000..ada3d1d4d
--- /dev/null
+++ b/StreamVideoTests/Utils/AudioSession/AudioDeviceModule/Extensions/CMSampleBuffer_Info_Tests.swift
@@ -0,0 +1,117 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AudioToolbox
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class CMSampleBuffer_Info_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: CMSampleBuffer!
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_rmsAndPeak_withFloatSamples_returnsExpectedValues() {
+ subject = makeSampleBuffer(samples: [0, 0.5, -0.5, 1])
+
+ let info = subject.rmsAndPeak
+
+ XCTAssertEqual(info.peak, 1, accuracy: 0.001)
+ XCTAssertEqual(info.rms, 0.612_372, accuracy: 0.001)
+ XCTAssertFalse(info.isSilent)
+ }
+
+ func test_format_withValidBuffer_returnsStreamDescription() {
+ subject = makeSampleBuffer(samples: [0.25, -0.25])
+
+ let format = subject.format
+
+ XCTAssertEqual(format?.mFormatID, kAudioFormatLinearPCM)
+ XCTAssertEqual(format?.mBitsPerChannel, 32)
+ XCTAssertEqual(format?.mChannelsPerFrame, 1)
+ }
+
+ func test_description_includesFormatDetails() {
+ subject = makeSampleBuffer(samples: [0.25, -0.25])
+
+ let description = subject.description
+
+ XCTAssertTrue(description.contains("channelCount:1"))
+ XCTAssertTrue(description.contains("format:"))
+ }
+
+ // MARK: - Helpers
+
+ private func makeSampleBuffer(samples: [Float]) -> CMSampleBuffer {
+ var streamDescription = AudioStreamBasicDescription(
+ mSampleRate: 48000,
+ mFormatID: kAudioFormatLinearPCM,
+ mFormatFlags: kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked,
+ mBytesPerPacket: UInt32(MemoryLayout<Float>.size),
+ mFramesPerPacket: 1,
+ mBytesPerFrame: UInt32(MemoryLayout<Float>.size),
+ mChannelsPerFrame: 1,
+ mBitsPerChannel: 32,
+ mReserved: 0
+ )
+
+ var formatDescription: CMAudioFormatDescription?
+ let formatStatus = CMAudioFormatDescriptionCreate(
+ allocator: kCFAllocatorDefault,
+ asbd: &streamDescription,
+ layoutSize: 0,
+ layout: nil,
+ magicCookieSize: 0,
+ magicCookie: nil,
+ extensions: nil,
+ formatDescriptionOut: &formatDescription
+ )
+ XCTAssertEqual(formatStatus, noErr)
+ let format = formatDescription!
+
+ let dataSize = samples.count * MemoryLayout<Float>.size
+ var blockBuffer: CMBlockBuffer?
+ let blockStatus = CMBlockBufferCreateWithMemoryBlock(
+ allocator: kCFAllocatorDefault,
+ memoryBlock: nil,
+ blockLength: dataSize,
+ blockAllocator: kCFAllocatorDefault,
+ customBlockSource: nil,
+ offsetToData: 0,
+ dataLength: dataSize,
+ flags: 0,
+ blockBufferOut: &blockBuffer
+ )
+ XCTAssertEqual(blockStatus, noErr)
+ let block = blockBuffer!
+
+ samples.withUnsafeBytes { buffer in
+ guard let baseAddress = buffer.baseAddress else { return }
+ let replaceStatus = CMBlockBufferReplaceDataBytes(
+ with: baseAddress,
+ blockBuffer: block,
+ offsetIntoDestination: 0,
+ dataLength: dataSize
+ )
+ XCTAssertEqual(replaceStatus, noErr)
+ }
+
+ var sampleBuffer: CMSampleBuffer?
+ let sampleStatus = CMAudioSampleBufferCreateReadyWithPacketDescriptions(
+ allocator: kCFAllocatorDefault,
+ dataBuffer: block,
+ formatDescription: format,
+ sampleCount: samples.count,
+ presentationTimeStamp: .zero,
+ packetDescriptions: nil,
+ sampleBufferOut: &sampleBuffer
+ )
+ XCTAssertEqual(sampleStatus, noErr)
+ return sampleBuffer!
+ }
+}
diff --git a/StreamVideoTests/Utils/IdleTimerAdapter/IdleTimerAdapter_Tests.swift b/StreamVideoTests/Utils/IdleTimerAdapter/IdleTimerAdapter_Tests.swift
index 249944ef6..8d8708dd6 100644
--- a/StreamVideoTests/Utils/IdleTimerAdapter/IdleTimerAdapter_Tests.swift
+++ b/StreamVideoTests/Utils/IdleTimerAdapter/IdleTimerAdapter_Tests.swift
@@ -10,6 +10,7 @@ import XCTest
final class IdleTimerAdapter_Tests: XCTestCase, @unchecked Sendable {
private var activeCallSubject: CurrentValueSubject<Call?, Never>! = .init(nil)
+ private var mockStreamVideo: MockStreamVideo! = .init()
private lazy var subject: IdleTimerAdapter! = .init(activeCallSubject.eraseToAnyPublisher())
override func setUp() {
@@ -19,6 +20,7 @@ final class IdleTimerAdapter_Tests: XCTestCase, @unchecked Sendable {
override func tearDown() {
subject = nil
+ mockStreamVideo = nil
activeCallSubject.send(nil)
activeCallSubject = nil
super.tearDown()
diff --git a/StreamVideoTests/Utils/Store/Store_PerformanceTests.swift b/StreamVideoTests/Utils/Store/Store_PerformanceTests.swift
index bdd1d1d22..555680a15 100644
--- a/StreamVideoTests/Utils/Store/Store_PerformanceTests.swift
+++ b/StreamVideoTests/Utils/Store/Store_PerformanceTests.swift
@@ -232,8 +232,8 @@ final class Store_PerformanceTests: XCTestCase, @unchecked Sendable {
let iterations = 10000
measure(
- baseline: .init(local: 13, ci: 24, stringTransformer: { String(format: "%.4fs", $0) }),
- allowedRegression: .init(local: 0.25, ci: 0.35),
+ baseline: .init(24, stringTransformer: { String(format: "%.4fs", $0) }),
+ allowedRegression: .init(local: 0.1, ci: 0.35),
iterations: 2
) {
// Wait for completion
diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter_Tests.swift
index 5db3918e2..8a005962a 100644
--- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalScreenShareMediaAdapter_Tests.swift
@@ -18,6 +18,7 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
private lazy var spySubject: PassthroughSubject<TrackEvent, Never>! = .init()
private lazy var publishOptions: [PublishOptions.VideoPublishOptions] = [.dummy(codec: .h264)]
private lazy var screenShareSessionProvider: ScreenShareSessionProvider! = .init()
+ private lazy var mockAudioDeviceModule: MockRTCAudioDeviceModule! = .init()
private var temporaryPeerConnection: RTCPeerConnection?
private lazy var subject: LocalScreenShareMediaAdapter! = .init(
sessionID: sessionId,
@@ -27,7 +28,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
publishOptions: publishOptions,
subject: spySubject,
screenShareSessionProvider: screenShareSessionProvider,
- capturerFactory: mockCapturerFactory
+ capturerFactory: mockCapturerFactory,
+ audioDeviceModule: .init(mockAudioDeviceModule)
)
override func tearDown() {
@@ -40,6 +42,7 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider = nil
temporaryPeerConnection = nil
disposableBag = nil
+ mockAudioDeviceModule = nil
super.tearDown()
}
@@ -52,8 +55,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenSharingType: .broadcast,
capturer: mockCapturerFactory.buildScreenCapturer(
.broadcast,
- source: track.source
- )
+ source: track.source,
+ audioDeviceModule: .init(mockAudioDeviceModule),
+ includeAudio: true
+ ),
+ includeAudio: true
)
XCTAssertTrue(subject.primaryTrack.source === track.source)
@@ -83,7 +89,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: .inApp,
- capturer: MockStreamVideoCapturer()
+ capturer: MockStreamVideoCapturer(),
+ includeAudio: true
)
subject.primaryTrack.isEnabled = true
@@ -99,7 +106,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: .inApp,
- capturer: MockStreamVideoCapturer()
+ capturer: MockStreamVideoCapturer(),
+ includeAudio: true
)
subject.primaryTrack.isEnabled = true
@@ -119,7 +127,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: .inApp,
- capturer: MockStreamVideoCapturer()
+ capturer: MockStreamVideoCapturer(),
+ includeAudio: true
)
subject.primaryTrack.isEnabled = false
subject.publish()
@@ -142,7 +151,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: .inApp,
- capturer: MockStreamVideoCapturer()
+ capturer: MockStreamVideoCapturer(),
+ includeAudio: true
)
subject.primaryTrack.isEnabled = false
subject.publish()
@@ -171,7 +181,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
} operation: { subject in
try await subject.beginScreenSharing(
of: .inApp,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
} validation: { [sessionId] id, trackType, track in
XCTAssertEqual(id, sessionId)
@@ -182,7 +193,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
func test_beginScreenSharing_withoutCapabilityWithActiveSession_stopsCapturingAndSession() async throws {
try await assertStopCapturing {
- try await subject.beginScreenSharing(of: .inApp, ownCapabilities: [])
+ try await subject.beginScreenSharing(
+ of: .inApp,
+ ownCapabilities: [],
+ includeAudio: true
+ )
}
}
@@ -201,11 +216,13 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
try await subject.beginScreenSharing(
of: screensharingType,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
try await subject.beginScreenSharing(
of: screensharingType,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
XCTAssertEqual(mockCapturerFactory.timesCalled(.buildScreenCapturer), 1)
@@ -219,7 +236,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
try await subject.beginScreenSharing(
of: screensharingType,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
await fulfillment { capturer.timesCalled(.startCapture) == 1 }
@@ -228,7 +246,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
func test_beginScreenSharing_withCapability_updateMuteStateOnSFU() async throws {
try await subject.beginScreenSharing(
of: .inApp,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
let request = try XCTUnwrap(mockSFUStack.service.updateMuteStatesWasCalledWithRequest)
@@ -244,7 +263,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
try await subject.beginScreenSharing(
of: .inApp,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
await fulfillment { capturer.timesCalled(.startCapture) == 1 }
@@ -267,7 +287,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
mockCapturerFactory.stub(for: .buildScreenCapturer, with: capturer)
try await subject.beginScreenSharing(
of: .inApp,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
await fulfillment { capturer.timesCalled(.startCapture) == 1 }
@@ -293,7 +314,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
.dummy(codec: .h264, fmtp: "a"),
.dummy(codec: .av1, fmtp: "b")
]
- try await subject.beginScreenSharing(of: .inApp, ownCapabilities: [.screenshare])
+ try await subject.beginScreenSharing(
+ of: .inApp,
+ ownCapabilities: [.screenshare],
+ includeAudio: true
+ )
await fulfillment { self.mockPeerConnection.timesCalled(.addTransceiver) == 2 }
let trackInfo = subject.trackInfo(for: .allAvailable)
@@ -319,7 +344,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
$0 == 1 ? h264Transceiver : av1Transceiver
})
publishOptions = [.dummy(codec: .h264, fmtp: "a")]
- try await subject.beginScreenSharing(of: .inApp, ownCapabilities: [.screenshare])
+ try await subject.beginScreenSharing(
+ of: .inApp,
+ ownCapabilities: [.screenshare],
+ includeAudio: true
+ )
await fulfillment { self.mockPeerConnection.timesCalled(.addTransceiver) == 1 }
try await subject.didUpdatePublishOptions(
@@ -350,7 +379,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
$0 == 1 ? h264Transceiver : av1Transceiver
})
publishOptions = [.dummy(codec: .h264)]
- try await subject.beginScreenSharing(of: .inApp, ownCapabilities: [.screenshare])
+ try await subject.beginScreenSharing(
+ of: .inApp,
+ ownCapabilities: [.screenshare],
+ includeAudio: true
+ )
await fulfillment { self.mockPeerConnection.timesCalled(.addTransceiver) == 1 }
try await subject.didUpdatePublishOptions(
.dummy(screenShare: [.dummy(codec: .av1, fmtp: "b")])
@@ -380,7 +413,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: .inApp,
- capturer: capturer
+ capturer: capturer,
+ includeAudio: true
)
subject.publish()
@@ -397,7 +431,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: .inApp,
- capturer: capturer
+ capturer: capturer,
+ includeAudio: true
)
subject.publish()
@@ -415,7 +450,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
mockCapturerFactory.stub(for: .buildScreenCapturer, with: capturer)
let mockTransceiver = try makeTransceiver(of: .video, videoOptions: .dummy(codec: .h264))
mockPeerConnection.stub(for: .addTransceiver, with: mockTransceiver)
- try await subject.beginScreenSharing(of: .inApp, ownCapabilities: [.screenshare])
+ try await subject.beginScreenSharing(
+ of: .inApp,
+ ownCapabilities: [.screenshare],
+ includeAudio: true
+ )
await fulfillment { capturer.timesCalled(.startCapture) == 1 }
XCTAssertTrue(subject.primaryTrack.isEnabled)
@@ -433,7 +472,11 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
for: .addTransceiver,
with: try makeTransceiver(of: .video, videoOptions: .dummy(codec: .h264))
)
- try await subject.beginScreenSharing(of: .inApp, ownCapabilities: [.screenshare])
+ try await subject.beginScreenSharing(
+ of: .inApp,
+ ownCapabilities: [.screenshare],
+ includeAudio: true
+ )
await fulfillment { capturer.timesCalled(.startCapture) == 1 }
subject.unpublish()
@@ -519,7 +562,8 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: peerConnectionFactory.mockVideoTrack(forScreenShare: true),
screenSharingType: .inApp,
- capturer: capturer
+ capturer: capturer,
+ includeAudio: true
)
try await operation()
@@ -542,13 +586,15 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
screenShareSessionProvider.activeSession = .init(
localTrack: subject.primaryTrack,
screenSharingType: screensharingType == .inApp ? .broadcast : .inApp,
- capturer: capturerA
+ capturer: capturerA,
+ includeAudio: true
)
}
try await subject.beginScreenSharing(
of: screensharingType,
- ownCapabilities: [.screenshare]
+ ownCapabilities: [.screenshare],
+ includeAudio: true
)
await fulfillment(file: file, line: line) {
@@ -559,7 +605,7 @@ final class LocalScreenShareMediaAdapter_Tests: XCTestCase, @unchecked Sendable
let input = try XCTUnwrap(
mockCapturerFactory
.recordedInputPayload(
- (ScreensharingType, RTCVideoSource).self,
+ (ScreensharingType, RTCVideoSource, AudioDeviceModule, Bool).self,
for: .buildScreenCapturer
)?.first
)
diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter_Tests.swift
index 53a1c0be7..98e7813a6 100644
--- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalVideoMediaAdapter_Tests.swift
@@ -18,6 +18,7 @@ final class LocalVideoMediaAdapter_Tests: XCTestCase, @unchecked Sendable {
private lazy var videoCaptureSessionProvider: VideoCaptureSessionProvider! = .init()
private lazy var mockVideoCapturer: MockStreamVideoCapturer! = .init()
private lazy var mockCaptureDeviceProvider: MockCaptureDeviceProvider! = .init()
+ private lazy var mockAudioDeviceModule: MockRTCAudioDeviceModule! = .init()
private lazy var subject: LocalVideoMediaAdapter! = .init(
sessionID: sessionId,
peerConnection: mockPeerConnection,
@@ -28,7 +29,8 @@ final class LocalVideoMediaAdapter_Tests: XCTestCase, @unchecked Sendable {
publishOptions: publishOptions,
subject: spySubject,
capturerFactory: mockCapturerFactory,
- videoCaptureSessionProvider: videoCaptureSessionProvider
+ videoCaptureSessionProvider: videoCaptureSessionProvider,
+ audioDeviceModule: .init(mockAudioDeviceModule)
)
private var temporaryPeerConnection: RTCPeerConnection?
private var disposableBag: DisposableBag! = .init()
@@ -58,6 +60,7 @@ final class LocalVideoMediaAdapter_Tests: XCTestCase, @unchecked Sendable {
videoCaptureSessionProvider = nil
mockVideoCapturer = nil
mockCaptureDeviceProvider = nil
+ mockAudioDeviceModule = nil
super.tearDown()
}
diff --git a/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioConverter_Convert_Tests.swift b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioConverter_Convert_Tests.swift
new file mode 100644
index 000000000..e8aa0a29a
--- /dev/null
+++ b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioConverter_Convert_Tests.swift
@@ -0,0 +1,72 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AVAudioConverter_Convert_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: AVAudioConverter!
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_convert_withValidInput_returnsOutputBuffer() {
+ let inputBuffer = makeFloatBuffer(samples: Array(repeating: 0.2, count: 4800))
+ let outputFormat = makeFormat(sampleRate: 16000)
+ subject = AVAudioConverter(from: inputBuffer.format, to: outputFormat)
+
+ let outputBuffer = subject.convert(
+ from: inputBuffer,
+ to: outputFormat
+ )
+
+ XCTAssertNotNil(outputBuffer)
+ XCTAssertTrue(outputBuffer?.frameLength ?? 0 > 0)
+ XCTAssertTrue(outputBuffer?.format == outputFormat)
+ }
+
+ func test_convert_withEmptyInput_returnsNil() {
+ let inputBuffer = makeFloatBuffer(samples: [0])
+ inputBuffer.frameLength = 0
+ let outputFormat = makeFormat(sampleRate: 16000)
+ subject = AVAudioConverter(from: inputBuffer.format, to: outputFormat)
+
+ let outputBuffer = subject.convert(
+ from: inputBuffer,
+ to: outputFormat
+ )
+
+ XCTAssertNil(outputBuffer)
+ }
+
+ // MARK: - Helpers
+
+ private func makeFloatBuffer(samples: [Float]) -> AVAudioPCMBuffer {
+ let format = makeFormat(sampleRate: 48000)
+ let buffer = AVAudioPCMBuffer(
+ pcmFormat: format,
+ frameCapacity: AVAudioFrameCount(samples.count)
+ )!
+ buffer.frameLength = AVAudioFrameCount(samples.count)
+ if let channel = buffer.floatChannelData?.pointee {
+ for (index, sample) in samples.enumerated() {
+ channel[index] = sample
+ }
+ }
+ return buffer
+ }
+
+ private func makeFormat(sampleRate: Double) -> AVAudioFormat {
+ AVAudioFormat(
+ commonFormat: .pcmFormatFloat32,
+ sampleRate: sampleRate,
+ channels: 1,
+ interleaved: false
+ )!
+ }
+}
diff --git a/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioFormat_Equality_Tests.swift b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioFormat_Equality_Tests.swift
new file mode 100644
index 000000000..e2caaf684
--- /dev/null
+++ b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioFormat_Equality_Tests.swift
@@ -0,0 +1,118 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AVAudioFormat_Equality_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: AVAudioFormat!
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_equatable_withMatchingValues_returnsTrue() {
+ subject = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+ let other = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+
+ XCTAssertTrue(subject == other)
+ }
+
+ func test_equatable_withDifferentSampleRate_returnsFalse() {
+ subject = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+ let other = makeFormat(
+ sampleRate: 44100,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+
+ XCTAssertFalse(subject == other)
+ }
+
+ func test_equatable_withDifferentChannels_returnsFalse() {
+ subject = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+ let other = makeFormat(
+ sampleRate: 48000,
+ channels: 2,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+
+ XCTAssertFalse(subject == other)
+ }
+
+ func test_equatable_withDifferentCommonFormat_returnsFalse() {
+ subject = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+ let other = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatInt16,
+ interleaved: false
+ )
+
+ XCTAssertFalse(subject == other)
+ }
+
+ func test_equatable_withDifferentInterleaving_returnsFalse() {
+ subject = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: false
+ )
+ let other = makeFormat(
+ sampleRate: 48000,
+ channels: 1,
+ commonFormat: .pcmFormatFloat32,
+ interleaved: true
+ )
+
+ XCTAssertNotEqual(subject, other)
+ }
+
+ // MARK: - Helpers
+
+ private func makeFormat(
+ sampleRate: Double,
+ channels: AVAudioChannelCount,
+ commonFormat: AVAudioCommonFormat,
+ interleaved: Bool
+ ) -> AVAudioFormat {
+ AVAudioFormat(
+ commonFormat: commonFormat,
+ sampleRate: sampleRate,
+ channels: channels,
+ interleaved: interleaved
+ )!
+ }
+}
diff --git a/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift
new file mode 100644
index 000000000..e49a4778b
--- /dev/null
+++ b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AVAudioPCMBuffer_FromCMSampleBuffer_Tests.swift
@@ -0,0 +1,108 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AudioToolbox
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AVAudioPCMBuffer_FromCMSampleBuffer_Tests: XCTestCase,
+ @unchecked Sendable {
+
+ private var subject: AVAudioPCMBuffer!
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_from_withFloatSampleBuffer_returnsPCMBuffer() {
+ let samples: [Float] = [0.25, -0.25, 0.5, -0.5]
+ let sampleBuffer = makeSampleBuffer(samples: samples)
+
+ subject = AVAudioPCMBuffer.from(sampleBuffer)
+
+ XCTAssertNotNil(subject)
+ XCTAssertEqual(subject.frameLength, AVAudioFrameCount(samples.count))
+ XCTAssertEqual(subject.format.sampleRate, 48000)
+ XCTAssertEqual(subject.format.channelCount, 1)
+ if let channel = subject.floatChannelData?.pointee {
+ for (index, sample) in samples.enumerated() {
+ XCTAssertEqual(channel[index], sample, accuracy: 0.000_1)
+ }
+ } else {
+ XCTFail("Expected floatChannelData for float format.")
+ }
+ }
+
+ // MARK: - Helpers
+
+ private func makeSampleBuffer(samples: [Float]) -> CMSampleBuffer {
+ var streamDescription = AudioStreamBasicDescription(
+ mSampleRate: 48000,
+ mFormatID: kAudioFormatLinearPCM,
+ mFormatFlags: kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked,
+ mBytesPerPacket: UInt32(MemoryLayout<Float>.size),
+ mFramesPerPacket: 1,
+ mBytesPerFrame: UInt32(MemoryLayout<Float>.size),
+ mChannelsPerFrame: 1,
+ mBitsPerChannel: 32,
+ mReserved: 0
+ )
+
+ var formatDescription: CMAudioFormatDescription?
+ let formatStatus = CMAudioFormatDescriptionCreate(
+ allocator: kCFAllocatorDefault,
+ asbd: &streamDescription,
+ layoutSize: 0,
+ layout: nil,
+ magicCookieSize: 0,
+ magicCookie: nil,
+ extensions: nil,
+ formatDescriptionOut: &formatDescription
+ )
+ XCTAssertEqual(formatStatus, noErr)
+ let format = formatDescription!
+
+ let dataSize = samples.count * MemoryLayout<Float>.size
+ var blockBuffer: CMBlockBuffer?
+ let blockStatus = CMBlockBufferCreateWithMemoryBlock(
+ allocator: kCFAllocatorDefault,
+ memoryBlock: nil,
+ blockLength: dataSize,
+ blockAllocator: kCFAllocatorDefault,
+ customBlockSource: nil,
+ offsetToData: 0,
+ dataLength: dataSize,
+ flags: 0,
+ blockBufferOut: &blockBuffer
+ )
+ XCTAssertEqual(blockStatus, noErr)
+ let block = blockBuffer!
+
+ samples.withUnsafeBytes { buffer in
+ guard let baseAddress = buffer.baseAddress else { return }
+ let replaceStatus = CMBlockBufferReplaceDataBytes(
+ with: baseAddress,
+ blockBuffer: block,
+ offsetIntoDestination: 0,
+ dataLength: dataSize
+ )
+ XCTAssertEqual(replaceStatus, noErr)
+ }
+
+ var sampleBuffer: CMSampleBuffer?
+ let sampleStatus = CMAudioSampleBufferCreateReadyWithPacketDescriptions(
+ allocator: kCFAllocatorDefault,
+ dataBuffer: block,
+ formatDescription: format,
+ sampleCount: samples.count,
+ presentationTimeStamp: .zero,
+ packetDescriptions: nil,
+ sampleBufferOut: &sampleBuffer
+ )
+ XCTAssertEqual(sampleStatus, noErr)
+ return sampleBuffer!
+ }
+}
diff --git a/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AudioConverter_Tests.swift b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AudioConverter_Tests.swift
new file mode 100644
index 000000000..2cec2aeb0
--- /dev/null
+++ b/StreamVideoTests/WebRTC/v2/VideoCapturing/ActionHandlers/ScreenShare/Extensions/AudioConverter_Tests.swift
@@ -0,0 +1,73 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+@testable import StreamVideo
+import XCTest
+
+final class AudioConverter_Tests: XCTestCase, @unchecked Sendable {
+
+ private var subject: AudioConverter!
+
+ override func setUp() {
+ super.setUp()
+ subject = AudioConverter()
+ }
+
+ override func tearDown() {
+ subject = nil
+ super.tearDown()
+ }
+
+ func test_convertIfRequired_withMatchingFormat_returnsInput() {
+ let inputBuffer = makeFloatBuffer(samples: [0.1, 0.2])
+
+ let outputBuffer = subject.convertIfRequired(
+ inputBuffer,
+ to: inputBuffer.format
+ )
+
+ XCTAssertTrue(outputBuffer === inputBuffer)
+ }
+
+ func test_convertIfRequired_withDifferentFormat_returnsConvertedBuffer() {
+ let inputBuffer = makeFloatBuffer(samples: Array(repeating: 0.2, count: 4800))
+ let outputFormat = makeFormat(sampleRate: 16000)
+
+ let outputBuffer = subject.convertIfRequired(
+ inputBuffer,
+ to: outputFormat
+ )
+
+ XCTAssertNotNil(outputBuffer)
+ XCTAssertTrue(outputBuffer?.frameLength ?? 0 > 0)
+ XCTAssertTrue(outputBuffer?.format == outputFormat)
+ }
+
+ // MARK: - Helpers
+
+ private func makeFloatBuffer(samples: [Float]) -> AVAudioPCMBuffer {
+ let format = makeFormat(sampleRate: 48000)
+ let buffer = AVAudioPCMBuffer(
+ pcmFormat: format,
+ frameCapacity: AVAudioFrameCount(samples.count)
+ )!
+ buffer.frameLength = AVAudioFrameCount(samples.count)
+ if let channel = buffer.floatChannelData?.pointee {
+ for (index, sample) in samples.enumerated() {
+ channel[index] = sample
+ }
+ }
+ return buffer
+ }
+
+ private func makeFormat(sampleRate: Double) -> AVAudioFormat {
+ AVAudioFormat(
+ commonFormat: .pcmFormatFloat32,
+ sampleRate: sampleRate,
+ channels: 1,
+ interleaved: false
+ )!
+ }
+}
diff --git a/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift
index ab9be5403..121382efa 100644
--- a/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift
@@ -293,12 +293,12 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable {
.publisher as? MockRTCPeerConnectionCoordinator
)
- try await subject.startScreensharing(type: .inApp)
+ try await subject.startScreensharing(type: .inApp, includeAudio: true)
await fulfillment { mockPublisher.timesCalled(.beginScreenSharing) == 1 }
let actual = try XCTUnwrap(
mockPublisher.recordedInputPayload(
- (ScreensharingType, [OwnCapability]).self,
+ (ScreensharingType, [OwnCapability], Bool).self,
for: .beginScreenSharing
)?.first
)
@@ -316,12 +316,12 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable {
.publisher as? MockRTCPeerConnectionCoordinator
)
- try await subject.startScreensharing(type: .broadcast)
+ try await subject.startScreensharing(type: .broadcast, includeAudio: true)
await fulfillment { mockPublisher.timesCalled(.beginScreenSharing) == 1 }
let actual = try XCTUnwrap(
mockPublisher.recordedInputPayload(
- (ScreensharingType, [OwnCapability]).self,
+ (ScreensharingType, [OwnCapability], Bool).self,
for: .beginScreenSharing
)?.first
)
diff --git a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift
index edea62f15..7c5ca14dc 100644
--- a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift
+++ b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift
@@ -446,7 +446,8 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable {
screenShareSessionProvider.activeSession = .init(
localTrack: await subject.peerConnectionFactory.mockVideoTrack(forScreenShare: true),
screenSharingType: .inApp,
- capturer: MockStreamVideoCapturer()
+ capturer: MockStreamVideoCapturer(),
+ includeAudio: true
)
let ownCapabilities = Set([OwnCapability.blockUsers])
await subject.set(ownCapabilities: ownCapabilities)
@@ -456,18 +457,24 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable {
XCTAssertEqual(
mockPublisher.recordedInputPayload(
- (ScreensharingType, [OwnCapability]).self,
+ (ScreensharingType, [OwnCapability], Bool).self,
for: .beginScreenSharing
)?.first?.0,
.inApp
)
XCTAssertEqual(
mockPublisher.recordedInputPayload(
- (ScreensharingType, [OwnCapability]).self,
+ (ScreensharingType, [OwnCapability], Bool).self,
for: .beginScreenSharing
)?.first?.1,
[.blockUsers]
)
+ XCTAssertTrue(
+ mockPublisher.recordedInputPayload(
+ (ScreensharingType, [OwnCapability], Bool).self,
+ for: .beginScreenSharing
+ )?.first?.2 ?? false
+ )
}
func test_configurePeerConnections_withoutActiveSession_shouldNotBeginScreenSharing() async throws {
@@ -566,7 +573,8 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable {
screenShareSessionProvider.activeSession = .init(
localTrack: PeerConnectionFactory.mock().mockVideoTrack(forScreenShare: true),
screenSharingType: .inApp,
- capturer: mockVideoCapturer
+ capturer: mockVideoCapturer,
+ includeAudio: true
)
await subject.cleanUp()
diff --git a/StreamVideoUIKit-XCFramework.podspec b/StreamVideoUIKit-XCFramework.podspec
index 75e1a59cc..f09ece22b 100644
--- a/StreamVideoUIKit-XCFramework.podspec
+++ b/StreamVideoUIKit-XCFramework.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideoUIKit-XCFramework'
- spec.version = '1.38.2'
+ spec.version = '1.39.0'
spec.summary = 'StreamVideo UIKit Video Components'
spec.description = 'StreamVideoUIKit SDK offers flexible UIKit components able to display data provided by StreamVideo SDK.'
diff --git a/StreamVideoUIKit.podspec b/StreamVideoUIKit.podspec
index 5e1bef56f..18b0ebae2 100644
--- a/StreamVideoUIKit.podspec
+++ b/StreamVideoUIKit.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideoUIKit'
- spec.version = '1.38.2'
+ spec.version = '1.39.0'
spec.summary = 'StreamVideo UIKit Video Components'
spec.description = 'StreamVideoUIKit SDK offers flexible UIKit components able to display data provided by StreamVideo SDK.'