
Commit b569bf0

Implement an iOS native (Objective-C) plugin for reading and setting the iOS AudioSession configuration, and add a temporary debug option to print the current settings to the console
1 parent e289d9a commit b569bf0

File tree

11 files changed: +1299 −209 lines


Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/MainScene.unity

Lines changed: 537 additions & 209 deletions
Large diffs are not rendered by default.

Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Plugins.meta

Lines changed: 8 additions & 0 deletions
Some generated files are not rendered by default.

Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Plugins/iOS.meta

Lines changed: 8 additions & 0 deletions
Some generated files are not rendered by default.
Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Plugins/iOS/AudioSessionMonitor.h

Lines changed: 28 additions & 0 deletions

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface AudioSessionMonitor : NSObject

- (NSDictionary *)getCurrentAudioSettings;
- (void)startMonitoring;
- (void)stopMonitoring;

@end

NS_ASSUME_NONNULL_END

// C interface exported to Unity. The extern "C" guard keeps the symbols unmangled
// when this header is included from an Objective-C++ (.mm) translation unit.
#ifdef __cplusplus
extern "C" {
#endif

const char* AudioMonitor_GetCurrentSettings();
void AudioMonitor_StartMonitoring();
void AudioMonitor_StopMonitoring();
void AudioMonitor_PrepareAudioSessionForRecording();

#ifdef __cplusplus
}
#endif
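
The Unity-side bindings for these exports are not shown in this diff. A minimal sketch of how C# code could call them via P/Invoke might look like the following (the StreamAudioSessionBridge wrapper name is a placeholder, not part of this commit):

using System;
using System.Runtime.InteropServices;

// Hypothetical C# wrapper around the native exports declared in AudioSessionMonitor.h.
public static class StreamAudioSessionBridge
{
#if UNITY_IOS && !UNITY_EDITOR
    [DllImport("__Internal")] private static extern IntPtr AudioMonitor_GetCurrentSettings();
    [DllImport("__Internal")] private static extern void AudioMonitor_StartMonitoring();
    [DllImport("__Internal")] private static extern void AudioMonitor_StopMonitoring();
    [DllImport("__Internal")] private static extern void AudioMonitor_PrepareAudioSessionForRecording();

    // Copy the strdup'ed native buffer into a managed string
    // (the small native allocation is not freed here).
    public static string GetCurrentSettingsJson() => Marshal.PtrToStringAnsi(AudioMonitor_GetCurrentSettings());
    public static void StartMonitoring() => AudioMonitor_StartMonitoring();
    public static void StopMonitoring() => AudioMonitor_StopMonitoring();
    public static void PrepareAudioSessionForRecording() => AudioMonitor_PrepareAudioSessionForRecording();
#else
    // No-op fallbacks so the same code compiles in the editor and on other platforms.
    public static string GetCurrentSettingsJson() => "{}";
    public static void StartMonitoring() { }
    public static void StopMonitoring() { }
    public static void PrepareAudioSessionForRecording() { }
#endif
}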

Assets/Samples/Stream Video & Audio Chat SDK/0.7.0/Video & Audio Chat Example Project/Plugins/iOS/AudioSessionMonitor.h.meta

Lines changed: 80 additions & 0 deletions
Some generated files are not rendered by default.
Lines changed: 265 additions & 0 deletions

#import "AudioSessionMonitor.h"
#import <AVFoundation/AVFoundation.h>
#include <string.h> // strdup

static AudioSessionMonitor *sharedInstance = nil;

@interface AudioSessionMonitor ()

@property (nonatomic, strong) NSNotificationCenter *notificationCenter;
// Tokens returned by the block-based observers, kept so they can be removed again.
@property (nonatomic, strong) NSMutableArray *observerTokens;

@end

// Forward declaration; the real UnitySendMessage is provided by the Unity-generated Xcode project.
#if !defined(UnitySendMessage)
void UnitySendMessage(const char* gameObjectName, const char* methodName, const char* message);
#endif

@implementation AudioSessionMonitor

+ (instancetype)sharedInstance {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedInstance = [[self alloc] init];
    });
    return sharedInstance;
}

- (instancetype)init {
    self = [super init];
    if (self) {
        _notificationCenter = [NSNotificationCenter defaultCenter];
        _observerTokens = [NSMutableArray array];
    }
    return self;
}

- (void)prepareAudioSessionForRecording {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    NSError *error = nil;

    // Category: play and record, default to the speaker, allow Bluetooth devices.
    AVAudioSessionCategoryOptions options = AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetooth;

    if (@available(iOS 10.0, *)) {
        options |= AVAudioSessionCategoryOptionAllowBluetoothA2DP;
    }

    BOOL success = [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:options error:&error];
    if (!success) {
        NSLog(@"Error setting audio session category: %@", error.localizedDescription);
    }

    // Mode: optimized for two-way voice/video chat.
    success = [audioSession setMode:AVAudioSessionModeVideoChat error:&error];
    if (!success) {
        NSLog(@"Error setting audio session mode: %@", error.localizedDescription);
    }

    // Preferred sample rate and IO buffer duration for low-latency capture.
    success = [audioSession setPreferredSampleRate:16000 error:&error];
    if (!success) {
        NSLog(@"Error setting preferred sample rate: %@", error.localizedDescription);
    }

    success = [audioSession setPreferredIOBufferDuration:0.01 error:&error];
    if (!success) {
        NSLog(@"Error setting preferred IO buffer duration: %@", error.localizedDescription);
    }

    // Activate the audio session.
    success = [audioSession setActive:YES error:&error];
    if (!success) {
        NSLog(@"Error activating audio session: %@", error.localizedDescription);
    } else {
        NSLog(@"Audio session prepared successfully for recording with low latency.");
    }
}

- (NSDictionary *)getCurrentAudioSettings {
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSMutableDictionary *settings = [NSMutableDictionary dictionary];

    @try {
        // Basic session properties
        settings[@"category"] = session.category ?: @"Unknown";
        settings[@"mode"] = session.mode ?: @"Unknown";

        // Category options
        AVAudioSessionCategoryOptions options = session.categoryOptions;
        settings[@"categoryOptions"] = @{
            @"allowBluetooth": @((options & AVAudioSessionCategoryOptionAllowBluetooth) != 0),
            @"allowBluetoothA2DP": @((options & AVAudioSessionCategoryOptionAllowBluetoothA2DP) != 0),
            @"allowAirPlay": @((options & AVAudioSessionCategoryOptionAllowAirPlay) != 0),
            @"defaultToSpeaker": @((options & AVAudioSessionCategoryOptionDefaultToSpeaker) != 0),
            @"mixWithOthers": @((options & AVAudioSessionCategoryOptionMixWithOthers) != 0),
            @"interruptSpokenAudioAndMixWithOthers": @((options & AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers) != 0),
        };

        // Routing information
        AVAudioSessionRouteDescription *route = session.currentRoute;
        NSMutableArray *inputs = [NSMutableArray array];
        NSMutableArray *outputs = [NSMutableArray array];

        for (AVAudioSessionPortDescription *port in route.inputs) {
            [inputs addObject:@{
                @"portType": port.portType,
                @"portName": port.portName,
                @"channels": @(port.channels.count),
            }];
        }

        for (AVAudioSessionPortDescription *port in route.outputs) {
            [outputs addObject:@{
                @"portType": port.portType,
                @"portName": port.portName,
                @"channels": @(port.channels.count),
            }];
        }

        settings[@"routing"] = @{
            @"inputs": inputs,
            @"outputs": outputs
        };

        // Technical settings
        settings[@"sampleRate"] = @{
            @"preferred": @(session.preferredSampleRate),
            @"current": @(session.sampleRate)
        };

        settings[@"IOBufferDuration"] = @{
            @"preferred": @(session.preferredIOBufferDuration),
            @"current": @(session.IOBufferDuration)
        };

        settings[@"latency"] = @{
            @"input": @(session.inputLatency),
            @"output": @(session.outputLatency)
        };

        // Hardware status
        settings[@"hardware"] = @{
            @"inputAvailable": @(session.inputAvailable),
            @"otherAudioPlaying": @(session.otherAudioPlaying),
            @"inputGain": @(session.inputGain),
            @"outputVolume": @(session.outputVolume),
        };

    } @catch (NSException *exception) {
        NSLog(@"Error getting audio settings: %@", exception);
        settings[@"error"] = exception.description;
    }

    return settings;
}

- (void)startMonitoring {
    [self registerNotifications];
}

- (void)stopMonitoring {
    [self unregisterNotifications];
}

- (void)registerNotifications {
    NSNotificationCenter *center = self.notificationCenter;

    // Route changes (e.g. a wired or Bluetooth headset connected or disconnected).
    id routeObserver = [center addObserverForName:AVAudioSessionRouteChangeNotification
                                            object:nil
                                             queue:[NSOperationQueue mainQueue]
                                        usingBlock:^(NSNotification *notification) {
        NSDictionary *settings = [self getCurrentAudioSettings];
        NSLog(@"Audio Route Changed: %@", settings);

        NSDictionary *eventInfo = @{
            @"type": @"routeChange",
            @"settings": settings
        };

        // To JSON and send to Unity
        NSError *error;
        NSData *jsonData = [NSJSONSerialization dataWithJSONObject:eventInfo options:0 error:&error];
        if (jsonData) {
            NSString *jsonString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding];
            UnitySendMessage("AudioMonitor", "OnAudioSessionEvent", [jsonString UTF8String]);
        }
    }];

    // Interruptions (e.g. an incoming phone call).
    id interruptionObserver = [center addObserverForName:AVAudioSessionInterruptionNotification
                                                   object:nil
                                                    queue:[NSOperationQueue mainQueue]
                                               usingBlock:^(NSNotification *notification) {
        NSInteger type = [notification.userInfo[AVAudioSessionInterruptionTypeKey] integerValue];
        NSLog(@"Audio Session Interrupted: %ld", (long)type);

        NSDictionary *eventInfo = @{
            @"type": @"interruption",
        };

        // To JSON and send to Unity
        NSError *error;
        NSData *jsonData = [NSJSONSerialization dataWithJSONObject:eventInfo options:0 error:&error];
        if (jsonData) {
            NSString *jsonString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding];
            UnitySendMessage("AudioMonitor", "OnAudioSessionEvent", [jsonString UTF8String]);
        }
    }];

    // Media services reset (the system audio daemon restarted; the session must be reconfigured).
    id resetObserver = [center addObserverForName:AVAudioSessionMediaServicesWereResetNotification
                                            object:nil
                                             queue:[NSOperationQueue mainQueue]
                                        usingBlock:^(NSNotification *notification) {
        NSLog(@"Audio Session Media Services Reset");

        NSDictionary *eventInfo = @{
            @"type": @"reset",
        };

        // To JSON and send to Unity
        NSError *error;
        NSData *jsonData = [NSJSONSerialization dataWithJSONObject:eventInfo options:0 error:&error];
        if (jsonData) {
            NSString *jsonString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding];
            UnitySendMessage("AudioMonitor", "OnAudioSessionEvent", [jsonString UTF8String]);
        }
    }];

    [self.observerTokens addObjectsFromArray:@[routeObserver, interruptionObserver, resetObserver]];
}

- (void)unregisterNotifications {
    // Block-based observers must be removed via the tokens returned by addObserverForName:,
    // not via removeObserver:self.
    for (id token in self.observerTokens) {
        [self.notificationCenter removeObserver:token];
    }
    [self.observerTokens removeAllObjects];
}

@end


// Unity C interface implementation
const char* AudioMonitor_GetCurrentSettings() {
    NSDictionary *settings = [[AudioSessionMonitor sharedInstance] getCurrentAudioSettings];
    NSError *error;
    NSData *jsonData = [NSJSONSerialization dataWithJSONObject:settings options:0 error:&error];

    if (jsonData) {
        NSString *jsonString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding];
        // strdup so the buffer stays valid after the autorelease pool drains; the managed side copies it.
        return strdup([jsonString UTF8String]);
    }

    return strdup("{}");
}

void AudioMonitor_StartMonitoring() {
    [[AudioSessionMonitor sharedInstance] startMonitoring];
}

void AudioMonitor_StopMonitoring() {
    [[AudioSessionMonitor sharedInstance] stopMonitoring];
}

void AudioMonitor_PrepareAudioSessionForRecording() {
    [[AudioSessionMonitor sharedInstance] prepareAudioSessionForRecording];
}
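
The native monitor forwards events with UnitySendMessage("AudioMonitor", "OnAudioSessionEvent", json), so the scene needs a GameObject named AudioMonitor with a component exposing that method. The receiving component is not part of this diff; a minimal sketch (with the temporary console logging the commit message mentions) could look like:

using UnityEngine;

// Hypothetical receiver component; it must live on a GameObject named "AudioMonitor"
// so UnitySendMessage calls from the native plugin can reach it.
public class AudioMonitor : MonoBehaviour
{
    // Called by the iOS plugin with a JSON payload describing the audio session event.
    public void OnAudioSessionEvent(string json)
    {
        // Temporary debug option: print the current settings/event to the console.
        Debug.Log($"[AudioMonitor] Audio session event: {json}");
    }
}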

0 commit comments
