6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

### 🔄 Changed

# [1.39.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.39.0)
_December 24, 2025_

### ✅ Added
- Support for capturing in-app audio during screen sharing sessions. [#1020](https://github.com/GetStream/stream-video-swift/pull/1020)

# [1.38.2](https://github.com/GetStream/stream-video-swift/releases/tag/1.38.2)
_December 22, 2025_

Binary file added DemoApp/Resources/audio-tracks/track_1.mp3
Binary file added DemoApp/Resources/audio-tracks/track_2.mp3
@@ -0,0 +1,83 @@ New file (DemoApp): AudioTrackPlayer.swift
//
// Copyright © 2025 Stream.io Inc. All rights reserved.
//

import AVFoundation
import Foundation
import StreamVideo

final class AudioTrackPlayer: NSObject, AVAudioPlayerDelegate {
enum Track: String, Equatable, CaseIterable {
case track1 = "track_1"
case track2 = "track_2"

        // All demo tracks ship as MP3 files.
        var fileExtension: String { ".mp3" }
}

@Published private(set) var isPlaying: Bool = false
@Published private(set) var track: Track?

private var audioPlayer: AVAudioPlayer?
    /// Serial queue that keeps play/stop operations ordered.
    private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)

func play(_ track: Track) {
processingQueue.addTaskOperation { @MainActor [weak self] in
guard
let self,
self.track != track,
let url = Bundle.main.url(forResource: track.rawValue, withExtension: track.fileExtension),
let audioPlayer = try? AVAudioPlayer(contentsOf: url)
else {
return
}

            self.audioPlayer = audioPlayer
            // Without a delegate, audioPlayerDidFinishPlaying would never fire.
            audioPlayer.delegate = self
            // Configure looping before starting playback.
            audioPlayer.numberOfLoops = 1000
            audioPlayer.play()
            self.track = track
            self.isPlaying = true
}
}

func stop() {
processingQueue.addTaskOperation { @MainActor [weak self] in
guard
let self
else {
return
}

audioPlayer?.stop()
audioPlayer = nil
isPlaying = false
track = nil
}
}

// MARK: - AVAudioPlayerDelegate

func audioPlayerDidFinishPlaying(
_ player: AVAudioPlayer,
successfully flag: Bool
) {
stop()
}
}

extension AudioTrackPlayer: InjectionKey {
    /// The shared player instance used across the demo app.
    static var currentValue: AudioTrackPlayer = .init()
}

extension InjectedValues {
    /// Accessor for the shared player. The setter discards the new value,
    /// keeping the shared instance in place.
    var audioPlayer: AudioTrackPlayer {
        get { Self[AudioTrackPlayer.self] }
        set { _ = newValue }
    }
}
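For orientation, any demo view can reach this player through StreamVideo's injection container; below is a minimal sketch of the same pattern `DemoAudioTrackButtonView` (further down) uses. The view name here is hypothetical.

```swift
import StreamVideo
import SwiftUI

struct SomePlaybackView: View {
    // Resolved through the InjectionKey declared above.
    @Injected(\.audioPlayer) var audioPlayer: AudioTrackPlayer

    var body: some View {
        VStack {
            Button("Loop track 1") { audioPlayer.play(.track1) }
            Button("Stop") { audioPlayer.stop() }
        }
    }
}
```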
@@ -40,15 +40,29 @@ struct DemoBroadcastMoreControlsListButtonView: View

@ViewBuilder
private var inAppScreenshareButtonView: some View {
-        DemoMoreControlListButtonView(
-            action: {
-                viewModel.startScreensharing(type: .inApp)
-                selection = .inApp
-            },
-            label: "Screenshare"
-        ) {
-            Image(systemName: "record.circle")
-                .foregroundColor(appearance.colors.text)
-        }
+        Menu {
+            Button {
+                viewModel.startScreensharing(type: .inApp, includeAudio: false)
+                selection = .inApp
+            } label: {
+                Text("Without audio")
+            }
+
+            Button {
+                viewModel.startScreensharing(type: .inApp, includeAudio: true)
+                selection = .inApp
+            } label: {
+                Text("With audio")
+            }
+        } label: {
+            DemoMoreControlListButtonView(
+                action: {},
+                label: "Screenshare"
+            ) {
+                Image(systemName: "record.circle")
+                    .foregroundColor(appearance.colors.text)
+            }
+        }
}

@@ -0,0 +1,61 @@ New file (DemoApp): DemoAudioTrackButtonView.swift
//
// Copyright © 2025 Stream.io Inc. All rights reserved.
//

import Foundation
import StreamVideo
import SwiftUI

struct DemoAudioTrackButtonView: View {
@Injected(\.audioPlayer) var audioPlayer: AudioTrackPlayer

@State private var isPlaying: Bool = AudioTrackPlayer.currentValue.isPlaying
@State private var track: AudioTrackPlayer.Track? = AudioTrackPlayer.currentValue.track

var body: some View {
Menu {
Button {
audioPlayer.stop()
} label: {
Label {
Text("None")
} icon: {
if track == nil {
Image(systemName: "checkmark")
}
}
}

Divider()

ForEach(AudioTrackPlayer.Track.allCases, id: \.self) { track in
Button {
if self.track == track {
audioPlayer.stop()
} else {
audioPlayer.play(track)
}
} label: {
Label {
Text(track.rawValue)
} icon: {
if self.track == track {
Image(systemName: "checkmark")
}
}
}
}
} label: {
DemoMoreControlListButtonView(
action: {},
label: "In-App audio"
) {
Image(
systemName: isPlaying ? "pause.circle" : "play.circle"
)
}
}
        // AudioTrackPlayer is not an ObservableObject, so its @Published
        // publishers are bridged into local @State via onReceive.
        .onReceive(audioPlayer.$isPlaying.receive(on: DispatchQueue.main)) { isPlaying = $0 }
        .onReceive(audioPlayer.$track.receive(on: DispatchQueue.main)) { track = $0 }
}
}
@@ -77,6 +77,8 @@ struct DemoMoreControlsViewModifier: ViewModifier
VStack {
Divider()

DemoAudioTrackButtonView()

DemoMoreLogsAndGleapButtonView()

DemoBroadcastMoreControlsListButtonView(
@@ -17,6 +17,14 @@ private func content() {
}
}

asyncContainer {
Task {
let call = streamVideo.call(callType: "default", callId: "123")
try await call.join()
try await call.startScreensharing(type: .inApp, includeAudio: true)
}
}

asyncContainer {
Task {
try await call.stopScreensharing()
2 changes: 1 addition & 1 deletion README.md
@@ -9,7 +9,7 @@
<a href="https://swift.org"><img src="https://img.shields.io/badge/Swift-5.9%2B-orange.svg" /></a>
</p>
<p align="center">
-  <img id="stream-video-label" alt="StreamVideo" src="https://img.shields.io/badge/StreamVideo-9.0%20MB-blue"/>
+  <img id="stream-video-label" alt="StreamVideo" src="https://img.shields.io/badge/StreamVideo-9.07%20MB-blue"/>
<img id="stream-video-swiftui-label" alt="StreamVideoSwiftUI" src="https://img.shields.io/badge/StreamVideoSwiftUI-2.38%20MB-blue"/>
<img id="stream-video-uikit-label" alt="StreamVideoUIKit" src="https://img.shields.io/badge/StreamVideoUIKit-2.5%20MB-blue"/>
<img id="stream-web-rtc-label" alt="StreamWebRTC" src="https://img.shields.io/badge/StreamWebRTC-11.02%20MB-blue"/>
15 changes: 12 additions & 3 deletions Sources/StreamVideo/Call.swift
@@ -546,9 +546,18 @@ public class Call: @unchecked Sendable, WSEventsSubscriber {
}

/// Starts screensharing from the device.
-    /// - Parameter type: The screensharing type (in-app or broadcasting).
-    public func startScreensharing(type: ScreensharingType) async throws {
-        try await callController.startScreensharing(type: type)
+    /// - Parameters:
+    ///   - type: The screensharing type (in-app or broadcasting).
+    ///   - includeAudio: Whether to capture app audio during screensharing.
+    ///     Only valid for `.inApp`; ignored otherwise.
+    public func startScreensharing(
+        type: ScreensharingType,
+        includeAudio: Bool = true
+    ) async throws {
+        try await callController.startScreensharing(
+            type: type,
+            includeAudio: includeAudio
+        )
}

/// Stops screensharing from the current device.
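A quick sketch of what the new default parameter means at call sites — assuming a joined `call` as in the docs snippet above, and `.broadcast` as the other `ScreensharingType` case (named here from the doc comment, so treat it as an assumption):

```swift
// includeAudio defaults to true, so existing call sites compile unchanged
// and now capture app audio during in-app screen sharing:
try await call.startScreensharing(type: .inApp)

// Opt out of app-audio capture explicitly:
try await call.startScreensharing(type: .inApp, includeAudio: false)

// For broadcast screen sharing the flag is ignored (see the doc comment above):
try await call.startScreensharing(type: .broadcast)
```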
15 changes: 13 additions & 2 deletions Sources/StreamVideo/Controllers/CallController.swift
@@ -211,8 +211,19 @@
}
}

-    func startScreensharing(type: ScreensharingType) async throws {
-        try await webRTCCoordinator.startScreensharing(type: type)
+    /// Starts screensharing for the current call.
+    /// - Parameters:
+    ///   - type: The screensharing type (in-app or broadcasting).
+    ///   - includeAudio: Whether to capture app audio during screensharing.
+    ///     Only valid for `.inApp`; ignored otherwise.
+    func startScreensharing(
+        type: ScreensharingType,
+        includeAudio: Bool
+    ) async throws {
+        try await webRTCCoordinator.startScreensharing(
+            type: type,
+            includeAudio: includeAudio
+        )
}

func stopScreensharing() async throws {
@@ -7,7 +7,7 @@ import Foundation

extension SystemEnvironment {
/// A Stream Video version.
-    public static let version: String = "1.38.2"
+    public static let version: String = "1.39.0"
/// The WebRTC version.
public static let webRTCVersion: String = "137.0.54"
}
2 changes: 1 addition & 1 deletion Sources/StreamVideo/Info.plist
@@ -15,7 +15,7 @@
<key>CFBundlePackageType</key>
<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
<key>CFBundleShortVersionString</key>
-	<string>1.38.2</string>
+	<string>1.39.0</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
<key>NSHumanReadableCopyright</key>
@@ -96,6 +96,14 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
}
}

    /// Snapshot of the voice-processing and ducking settings captured before
    /// audio buffer injection begins.
    private struct AudioBufferInjectionPreState {
var isAdvancedDuckingEnabled: Bool
var duckingLevel: Int
var isVoiceProcessingBypassed: Bool
var isVoiceProcessingEnabled: Bool
var isVoiceProcessingAGCEnabled: Bool
}

/// Tracks whether WebRTC is currently playing back audio.
private let isPlayingSubject: CurrentValueSubject<Bool, Never>
/// `true` while audio playout is active.
@@ -168,6 +176,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable

/// Strong reference to the current engine so we can introspect it if needed.
private var engine: AVAudioEngine?
    /// Input-node context captured when WebRTC configures the engine; the
    /// buffer renderer is (re)configured whenever it changes.
    @Atomic private var engineInputContext: AVAudioEngine.InputContext? {
        didSet { audioBufferRenderer.configure(with: engineInputContext) }
    }

    /// Renders enqueued screen-share audio sample buffers into the engine.
    private let audioBufferRenderer: AudioBufferRenderer = .init()

    /// Settings snapshot taken before audio buffer injection starts.
    private var preAudioBufferInjectionSnapshot: AudioBufferInjectionPreState?

/// Textual diagnostics for logging and debugging.
override var description: String {
@@ -317,6 +332,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
source.refreshStereoPlayoutState()
}

// MARK: - Audio Buffer injection

/// Enqueues a screen share audio sample buffer for playback.
func enqueue(_ sampleBuffer: CMSampleBuffer) {
audioBufferRenderer.enqueue(sampleBuffer)
}

// MARK: - RTCAudioDeviceModuleDelegate

/// Receives speech activity notifications emitted by WebRTC VAD.
@@ -403,6 +425,7 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
)
isPlayingSubject.send(isPlayoutEnabled)
isRecordingSubject.send(isRecordingEnabled)
audioBufferRenderer.reset()
return Constant.successResult
}

@@ -423,6 +446,8 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
)
isPlayingSubject.send(isPlayoutEnabled)
isRecordingSubject.send(isRecordingEnabled)
audioBufferRenderer.reset()
engineInputContext = nil
return Constant.successResult
}

@@ -434,6 +459,8 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
self.engine = nil
subject.send(.willReleaseAudioEngine(engine))
audioLevelsAdapter.uninstall(on: 0)
audioBufferRenderer.reset()
engineInputContext = nil
return Constant.successResult
}

@@ -447,6 +474,13 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
format: AVAudioFormat,
context: [AnyHashable: Any]
) -> Int {
engineInputContext = .init(
engine: engine,
source: source,
destination: destination,
format: format
)

subject.send(
.configureInputFromSource(
engine,
@@ -455,12 +489,14 @@ final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable
format: format
)
)

audioLevelsAdapter.installInputTap(
on: destination,
format: format,
bus: 0,
bufferSize: 1024
)

return Constant.successResult
}
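To place `enqueue(_:)` in context: below is a minimal, hypothetical sketch of how in-app screen sharing could feed app-audio buffers into the module via ReplayKit. The `module` handle and the wiring around it are assumptions, not the SDK's actual plumbing; only `enqueue(_:)` comes from the diff above.

```swift
import ReplayKit

/// Hypothetical wiring: forward ReplayKit's in-app capture buffers to the module.
func startInAppCapture(into module: AudioDeviceModule) {
    RPScreenRecorder.shared().startCapture(
        handler: { sampleBuffer, bufferType, error in
            guard error == nil else { return }
            switch bufferType {
            case .video:
                // Video frames feed the screen-share video track.
                break
            case .audioApp:
                // App audio is handed to the audio device module for rendering.
                module.enqueue(sampleBuffer)
            default:
                // Microphone audio is handled by the regular audio pipeline.
                break
            }
        },
        completionHandler: { error in
            if let error { print("Screen capture failed to start: \(error)") }
        }
    )
}
```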
