 import AVFoundation
 
 /// Controls audio playback and recording.
-class AudioController {
+actor AudioController {
     /// Data processed from the microphone.
     private let microphoneData: AsyncStream<AVAudioPCMBuffer>
     private let microphoneDataQueue: AsyncStream<AVAudioPCMBuffer>.Continuation
     private var audioPlayer: AudioPlayer?
     private var audioEngine: AVAudioEngine?
     private var microphone: Microphone?
     private var listenTask: Task<Void, Never>?
+    private var routeTask: Task<Void, Never>?
 
     /// Port types that are considered "headphones" for our use-case.
     ///
@@ -40,7 +41,7 @@ class AudioController {
 
     private var stopped = false
 
-    public init() throws {
+    public init() async throws {
         let session = AVAudioSession.sharedInstance()
         try session.setCategory(
             .playAndRecord,
@@ -80,7 +81,25 @@ class AudioController {
     }
 
     deinit {
-        stop()
+        stopped = true
+        listenTask?.cancel()
+        // audio engine needs to be stopped before disconnecting nodes
+        audioEngine?.pause()
+        audioEngine?.stop()
+        if let audioEngine {
+            do {
+                // the VP IO leaves behind artifacts, so we need to disable it to properly clean up
+                if audioEngine.inputNode.isVoiceProcessingEnabled {
+                    try audioEngine.inputNode.setVoiceProcessingEnabled(false)
+                }
+            } catch {
+                print("Failed to disable voice processing: \(error.localizedDescription)")
+            }
+        }
+        microphone?.stop()
+        audioPlayer?.stop()
+        microphoneDataQueue.finish()
+        routeTask?.cancel()
     }
 
     /// Kicks off audio processing, and returns a stream of recorded microphone audio data.
@@ -96,11 +115,7 @@ class AudioController {
         stopped = true
         stopListeningAndPlayback()
         microphoneDataQueue.finish()
-        NotificationCenter.default.removeObserver(
-            self,
-            name: AVAudioSession.routeChangeNotification,
-            object: nil
-        )
+        routeTask?.cancel()
     }
 
     /// Queues audio for playback.
@@ -206,15 +221,17 @@ class AudioController {
 
     /// When the output device changes, ensure the audio playback and recording classes are properly restarted.
     private func listenForRouteChange() {
-        NotificationCenter.default.addObserver(
-            self,
-            selector: #selector(handleRouteChange),
-            name: AVAudioSession.routeChangeNotification,
-            object: nil
-        )
+        routeTask?.cancel()
+        routeTask = Task { [weak self] in
+            for await notification in NotificationCenter.default.notifications(
+                named: AVAudioSession.routeChangeNotification
+            ) {
+                await self?.handleRouteChange(notification: notification)
+            }
+        }
     }
 
-    @objc private func handleRouteChange(notification: Notification) {
+    private func handleRouteChange(notification: Notification) {
         guard let userInfo = notification.userInfo,
               let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
               let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
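Note on the route-change rework above: the selector-based addObserver call (which required the @objc handler and an explicit removeObserver in stop()) is replaced with NotificationCenter's async notification sequence, consumed inside a cancellable Task stored in routeTask. The sketch below shows that observation pattern in isolation; RouteWatcher, start(), stop(), and handle(_:) are hypothetical names used only for illustration and are not part of this codebase.

import AVFoundation

// Standalone illustration (not from the commit): observe route changes via
// NotificationCenter's AsyncSequence inside a stored, cancellable Task.
actor RouteWatcher {
    private var routeTask: Task<Void, Never>?

    func start() {
        routeTask?.cancel()
        // [weak self] keeps the long-running task from retaining the actor.
        routeTask = Task { [weak self] in
            for await notification in NotificationCenter.default.notifications(
                named: AVAudioSession.routeChangeNotification
            ) {
                await self?.handle(notification)
            }
        }
    }

    func stop() {
        routeTask?.cancel()
    }

    private func handle(_ notification: Notification) {
        guard let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt,
              let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
            return
        }
        print("Audio route changed: \(reason)")
    }
}

Cancelling the stored task (in stop() or deinit) ends the for-await loop, which is why the explicit removeObserver call is no longer needed in the diff.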