diff --git a/.github/workflows/ios_spm.yaml b/.github/workflows/ios_spm.yaml new file mode 100644 index 0000000000..9b34c8f4e9 --- /dev/null +++ b/.github/workflows/ios_spm.yaml @@ -0,0 +1,56 @@ +name: iOS Build CI (CocoaPods + SPM) + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + build-ios: + runs-on: macos-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup Flutter + uses: subosito/flutter-action@v2 + with: + channel: "stable" + + - name: Flutter pub get + run: flutter pub get + + # 🧪 Build iOS example using CocoaPods + - name: Build iOS example with CocoaPods + working-directory: example + run: | + echo "=== Building with CocoaPods ===" + cd ios + pod repo update + pod install + cd .. + flutter config --no-enable-swift-package-manager + flutter build ios --no-codesign --verbose + + # 🧹 Clean build + prepare for SPM test + - name: Clean and prepare for SPM test + working-directory: example + run: | + echo "=== Cleaning Pods ===" + rm -rf ios/Pods ios/Podfile.lock + flutter clean + + # 🧪 Step 7: Build iOS example using Swift Package Manager + - name: Build iOS example with Swift Package Manager + working-directory: example + run: | + echo "=== Building with SPM ===" + flutter config --enable-swift-package-manager + flutter build ios --no-codesign --verbose + + # ✅ Step 8: Verify output + - name: Verify build artifacts + run: | + echo "✅ Both CocoaPods and SPM builds succeeded." diff --git a/.gitignore b/.gitignore index a92255e2b9..f1b13aff52 100644 --- a/.gitignore +++ b/.gitignore @@ -56,4 +56,8 @@ android/.settings/org.eclipse.buildship.core.prefs !webrtc.iml # vs -*.pdb \ No newline at end of file +*.pdb + +# Swift +.build/ +.swiftpm/ \ No newline at end of file diff --git a/Package.resolved b/Package.resolved new file mode 100644 index 0000000000..d8dfeaaf49 --- /dev/null +++ b/Package.resolved @@ -0,0 +1,14 @@ +{ + "pins" : [ + { + "identity" : "stream-video-swift-webrtc", + "kind" : "remoteSourceControl", + "location" : "https://github.com/GetStream/stream-video-swift-webrtc.git", + "state" : { + "revision" : "4d58b1cdc35e2689968579078555779f2bb3a405", + "version" : "125.6422.70" + } + } + ], + "version" : 2 +} diff --git a/Package.swift b/Package.swift new file mode 100644 index 0000000000..3d3768f26a --- /dev/null +++ b/Package.swift @@ -0,0 +1,31 @@ +// swift-tools-version:5.9 +import PackageDescription + +let package = Package( + name: "stream_webrtc_flutter", + platforms: [ + .iOS("13.0") // update as needed + ], + products: [ + .library(name: "stream-webrtc-flutter", type: .static, targets: ["stream_webrtc_flutter"]) + ], + dependencies: [ + .package( + url: "https://github.com/GetStream/stream-video-swift-webrtc.git", exact: "125.6422.070" + ) + ], + targets: [ + .target( + name: "stream_webrtc_flutter", + dependencies: [ + .product(name: "StreamWebRTC", package: "stream-video-swift-webrtc") + ], + path: "ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter", + resources: [ + // If you have PrivacyInfo.xcprivacy or other resources: + // .process("PrivacyInfo.xcprivacy") + ], + publicHeadersPath: "include" + ) + ] +) diff --git a/example/ios/Runner.xcodeproj/project.pbxproj b/example/ios/Runner.xcodeproj/project.pbxproj index 3bbe384c33..66ff5cc614 100644 --- a/example/ios/Runner.xcodeproj/project.pbxproj +++ b/example/ios/Runner.xcodeproj/project.pbxproj @@ -17,6 +17,7 @@ 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* 
Main.storyboard */; }; 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; + 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */ = {isa = PBXBuildFile; productRef = 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -67,6 +68,7 @@ BA896DE0E3457D7F8E7B874C /* Pods-RunnerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.release.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.release.xcconfig"; sourceTree = ""; }; EBDE73226368E7C4C301B721 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; F5CFB2CC6A32D774511849CE /* Pods-RunnerTests.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-RunnerTests.profile.xcconfig"; path = "Target Support Files/Pods-RunnerTests/Pods-RunnerTests.profile.xcconfig"; sourceTree = ""; }; + 78E0A7A72DC9AD7400C4905E /* FlutterGeneratedPluginSwiftPackage */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = FlutterGeneratedPluginSwiftPackage; path = Flutter/ephemeral/Packages/FlutterGeneratedPluginSwiftPackage; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -82,6 +84,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 78A318202AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage in Frameworks */, 5CDFF0275A08E50A6164A3F8 /* libPods-Runner.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -100,6 +103,7 @@ 9740EEB11CF90186004384FC /* Flutter */ = { isa = PBXGroup; children = ( + 78E0A7A72DC9AD7400C4905E /* FlutterGeneratedPluginSwiftPackage */, 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, 9740EEB21CF90195004384FC /* Debug.xcconfig */, 7AFA3C8E1D35360C0083082E /* Release.xcconfig */, @@ -199,6 +203,9 @@ productType = "com.apple.product-type.bundle.unit-test"; }; 97C146ED1CF9000F007C117D /* Runner */ = { + packageProductDependencies = ( + 78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */, + ); isa = PBXNativeTarget; buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; buildPhases = ( @@ -225,6 +232,9 @@ /* Begin PBXProject section */ 97C146E61CF9000F007C117D /* Project object */ = { + packageReferences = ( + 781AD8BC2B33823900A9FFBB /* XCLocalSwiftPackageReference "Flutter/ephemeral/Packages/FlutterGeneratedPluginSwiftPackage" */, + ); isa = PBXProject; attributes = { LastUpgradeCheck = 1510; @@ -481,7 +491,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -597,7 +607,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + 
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
 				MTL_ENABLE_DEBUG_INFO = YES;
 				ONLY_ACTIVE_ARCH = YES;
 				SDKROOT = iphoneos;
@@ -646,7 +656,7 @@
 				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
-				IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+				IPHONEOS_DEPLOYMENT_TARGET = 13.0;
 				MTL_ENABLE_DEBUG_INFO = NO;
 				SDKROOT = iphoneos;
 				SUPPORTED_PLATFORMS = iphoneos;
@@ -725,6 +735,18 @@
 		defaultConfigurationName = Release;
 	};
 /* End XCConfigurationList section */
+/* Begin XCLocalSwiftPackageReference section */
+		781AD8BC2B33823900A9FFBB /* XCLocalSwiftPackageReference "Flutter/ephemeral/Packages/FlutterGeneratedPluginSwiftPackage" */ = {
+			isa = XCLocalSwiftPackageReference;
+			relativePath = Flutter/ephemeral/Packages/FlutterGeneratedPluginSwiftPackage;
+		};
+/* End XCLocalSwiftPackageReference section */
+/* Begin XCSwiftPackageProductDependency section */
+		78A3181F2AECB46A00862997 /* FlutterGeneratedPluginSwiftPackage */ = {
+			isa = XCSwiftPackageProductDependency;
+			productName = FlutterGeneratedPluginSwiftPackage;
+		};
+/* End XCSwiftPackageProductDependency section */
 	};
 	rootObject = 97C146E61CF9000F007C117D /* Project object */;
 }
diff --git a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
index 7bdc624433..1f70c354f7 100644
--- a/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
+++ b/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme
@@ -5,6 +5,24 @@
[The 18 lines of scheme XML added by this hunk, together with the diff header and preamble fields of the ios/stream_webrtc_flutter.podspec diff that follows, were lost to markup stripping during extraction; the surviving podspec hunk resumes below.]
   s.license          = { :file => '../LICENSE' }
   s.author           = { 'getstream.io' => 'support@getstream.io' }
   s.source           = { :path => '.' }
-  s.source_files = 'Classes/**/*'
-  s.public_header_files = 'Classes/**/*.h'
+  s.source_files = 'stream_webrtc_flutter/Sources/stream_webrtc_flutter/**/*.{h,hpp,m,mm,c,cpp}'
+  s.public_header_files = 'stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/**/*.h'
   s.dependency 'Flutter'
-  s.dependency 'StreamWebRTC', '125.6422.070'
   s.ios.deployment_target = '13.0'
   s.static_framework = true
+  s.vendored_frameworks = 'Frameworks/StreamWebRTC.xcframework'
+  s.prepare_command = <<-CMD
+    mkdir -p Frameworks/
+    curl -sL "https://github.com/GetStream/stream-video-swift-webrtc/releases/download/125.6422.070/StreamWebRTC.xcframework.zip" -o Frameworks/StreamWebRTC.zip
+    unzip -o Frameworks/StreamWebRTC.zip -d Frameworks/
+    rm Frameworks/StreamWebRTC.zip
+  CMD
   s.pod_target_xcconfig = {
     'CLANG_CXX_LANGUAGE_STANDARD' => 'c++14',
     'USER_HEADER_SEARCH_PATHS' => 'Classes/**/*.h'
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioManager.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioManager.m
new file mode 100644
index 0000000000..86a5a05104
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioManager.m
@@ -0,0 +1,50 @@
+#import "./include/stream_webrtc_flutter/AudioManager.h"
+#import "./include/stream_webrtc_flutter/AudioProcessingAdapter.h"
+
+@implementation AudioManager {
+  RTCDefaultAudioProcessingModule* _audioProcessingModule;
+  AudioProcessingAdapter* _capturePostProcessingAdapter;
+  AudioProcessingAdapter* _renderPreProcessingAdapter;
+}
+
+@synthesize capturePostProcessingAdapter = _capturePostProcessingAdapter;
+@synthesize renderPreProcessingAdapter = _renderPreProcessingAdapter;
+@synthesize audioProcessingModule = _audioProcessingModule;
+
++ (instancetype)sharedInstance {
+  static dispatch_once_t onceToken;
+  static AudioManager* sharedInstance = nil;
+  dispatch_once(&onceToken, ^{
+    sharedInstance = [[self alloc] init];
+  });
+  return sharedInstance;
+}
+
+- (instancetype)init {
+  if (self = [super init]) {
+    _audioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init];
+    _capturePostProcessingAdapter = [[AudioProcessingAdapter alloc] init];
+    _renderPreProcessingAdapter = [[AudioProcessingAdapter alloc] init];
+    _audioProcessingModule.capturePostProcessingDelegate = _capturePostProcessingAdapter;
+    _audioProcessingModule.renderPreProcessingDelegate = _renderPreProcessingAdapter;
+  }
+  return self;
+}
+
+- (void)addLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  [_capturePostProcessingAdapter addAudioRenderer:renderer];
+}
+
+- (void)removeLocalAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  [_capturePostProcessingAdapter removeAudioRenderer:renderer];
+}
+
+- (void)addRemoteAudioSink:(nonnull id<RTCAudioRenderer>)sink {
+  [_renderPreProcessingAdapter addAudioRenderer:sink];
+}
+
+- (void)removeRemoteAudioSink:(nonnull id<RTCAudioRenderer>)sink {
+  [_renderPreProcessingAdapter removeAudioRenderer:sink];
+}
+
+@end
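AudioManager above is a process-wide singleton that wires two AudioProcessingAdapter instances into WebRTC's RTCDefaultAudioProcessingModule: one on the capture (microphone) path, one on the render (remote audio) path. As a rough sketch of how app code sitting on top of this plugin could tap the local microphone feed — assuming the plugin's Objective-C headers are exposed to Swift, that the RTCAudioRenderer protocol delivers AVAudioPCMBuffer chunks via renderPCMBuffer as the adapter in the next file suggests, and with MicLevelMeter being a hypothetical class:

import AVFoundation

// Hypothetical renderer: a crude peak meter over the local mic feed.
final class MicLevelMeter: NSObject, RTCAudioRenderer {
  func renderPCMBuffer(_ buffer: AVAudioPCMBuffer) {
    guard let channel = buffer.int16ChannelData?[0] else { return }
    var peak: Int16 = 0
    for i in 0..<Int(buffer.frameLength) {
      // Guard Int16.min before abs() to avoid overflow on the edge case.
      peak = max(peak, channel[i] == Int16.min ? Int16.max : abs(channel[i]))
    }
    print("mic peak: \(peak)")
  }
}

let meter = MicLevelMeter()
AudioManager.sharedInstance().addLocalAudioRenderer(meter)
// ... later, to stop observing:
AudioManager.sharedInstance().removeLocalAudioRenderer(meter)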
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioProcessingAdapter.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioProcessingAdapter.m
new file mode 100644
index 0000000000..ac898aa760
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioProcessingAdapter.m
@@ -0,0 +1,105 @@
+#import <AVFoundation/AVFoundation.h>
+#import <os/lock.h>
+#import "./include/stream_webrtc_flutter/AudioProcessingAdapter.h"
+
+@implementation AudioProcessingAdapter {
+  NSMutableArray<id<RTCAudioRenderer>>* _renderers;
+  NSMutableArray<id<RTCAudioCustomProcessingDelegate>>* _processors;
+  os_unfair_lock _lock;
+}
+
+- (instancetype)init {
+  self = [super init];
+  if (self) {
+    _lock = OS_UNFAIR_LOCK_INIT;
+    _renderers = [[NSMutableArray<id<RTCAudioRenderer>> alloc] init];
+    _processors = [[NSMutableArray<id<RTCAudioCustomProcessingDelegate>> alloc] init];
+  }
+  return self;
+}
+
+- (void)addProcessing:(id<RTCAudioCustomProcessingDelegate> _Nonnull)processor {
+  os_unfair_lock_lock(&_lock);
+  [_processors addObject:processor];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)removeProcessing:(id<RTCAudioCustomProcessingDelegate> _Nonnull)processor {
+  os_unfair_lock_lock(&_lock);
+  _processors = [[_processors
+      filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject,
+                                                                        NSDictionary* bindings) {
+        return evaluatedObject != processor;
+      }]] mutableCopy];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)addAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  os_unfair_lock_lock(&_lock);
+  [_renderers addObject:renderer];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)removeAudioRenderer:(nonnull id<RTCAudioRenderer>)renderer {
+  os_unfair_lock_lock(&_lock);
+  _renderers = [[_renderers
+      filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject,
+                                                                        NSDictionary* bindings) {
+        return evaluatedObject != renderer;
+      }]] mutableCopy];
+  os_unfair_lock_unlock(&_lock);
+}
+
+- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels {
+  os_unfair_lock_lock(&_lock);
+  for (id<RTCAudioCustomProcessingDelegate> processor in _processors) {
+    [processor audioProcessingInitializeWithSampleRate:sampleRateHz channels:channels];
+  }
+  os_unfair_lock_unlock(&_lock);
+}
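The toPCMBuffer: helper that follows infers the sample rate from the buffer length — WebRTC hands audio to the processing chain in 10 ms chunks, so frames × 100 recovers samples per second — and then copies float samples into an Int16 buffer with a plain C cast, without rescaling from the float range. A minimal Swift sketch of the same arithmetic; the ±32767 scaling line is an assumption about intent, not what the Objective-C code below actually does:

import AVFoundation

// Frames per 10 ms chunk -> sample rate (e.g. 480 frames -> 48000 Hz).
func sampleRate(forFramesPer10ms frames: Int) -> Double {
  Double(frames) * 100.0
}

// Convert one channel of float samples to Int16 PCM.
func toInt16(_ source: [Float]) -> [Int16] {
  source.map { sample in
    // Assumed intent: map [-1.0, 1.0] floats onto the Int16 range.
    // Note: the Objective-C code casts without this scaling step.
    Int16(max(-1.0, min(1.0, sample)) * 32767.0)
  }
}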
AVAudioPCMBuffer"); + return nil; + } + pcmBuffer.frameLength = (AVAudioFrameCount)audioBuffer.frames; + for (int i = 0; i < audioBuffer.channels; i++) { + float* sourceBuffer = [audioBuffer rawBufferForChannel:i]; + int16_t* targetBuffer = (int16_t*)pcmBuffer.int16ChannelData[i]; + for (int frame = 0; frame < audioBuffer.frames; frame++) { + targetBuffer[frame] = sourceBuffer[frame]; + } + } + return pcmBuffer; +} + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingProcess:audioBuffer]; + } + + for (id renderer in _renderers) { + [renderer renderPCMBuffer:[self toPCMBuffer:audioBuffer]]; + } + os_unfair_lock_unlock(&_lock); +} + +- (void)audioProcessingRelease { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + [processor audioProcessingRelease]; + } + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioUtils.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioUtils.m new file mode 100644 index 0000000000..6fc47bebaf --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/AudioUtils.m @@ -0,0 +1,229 @@ +#if TARGET_OS_IPHONE +#import +#import "./include/stream_webrtc_flutter/AudioUtils.h" + +@implementation AudioUtils + ++ (void)ensureAudioSessionWithRecording:(BOOL)recording { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + // we also need to set default WebRTC audio configuration, since it may be activated after + // this method is called + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + // require audio session to be either PlayAndRecord or MultiRoute + if (recording && session.category != AVAudioSessionCategoryPlayAndRecord && + session.category != AVAudioSessionCategoryMultiRoute) { + config.category = AVAudioSessionCategoryPlayAndRecord; + config.categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth | + AVAudioSessionCategoryOptionAllowBluetoothA2DP | + AVAudioSessionCategoryOptionAllowAirPlay; + + [session lockForConfiguration]; + NSError* error = nil; + bool success = [session setCategory:config.category + withOptions:config.categoryOptions + error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[true]: setCategory failed due to: %@", error); + success = [session setMode:config.mode error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[true]: setMode failed due to: %@", error); + [session unlockForConfiguration]; + } else if (!recording && (session.category == AVAudioSessionCategoryAmbient || + session.category == AVAudioSessionCategorySoloAmbient)) { + config.mode = AVAudioSessionModeDefault; + [session lockForConfiguration]; + NSError* error = nil; + bool success = [session setMode:config.mode error:&error]; + if (!success) + NSLog(@"ensureAudioSessionWithRecording[false]: setMode failed due to: %@", error); + [session unlockForConfiguration]; + } +} + ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type { + RTCAudioSession* rtcSession = [RTCAudioSession sharedInstance]; + AVAudioSessionPortDescription* inputPort = nil; + for (AVAudioSessionPortDescription* port in rtcSession.session.availableInputs) { + if ([port.portType isEqualToString:type]) { + inputPort = port; + break; + } + } + if (inputPort != nil) { + NSError* errOut = nil; + [rtcSession lockForConfiguration]; + [rtcSession setPreferredInput:inputPort error:&errOut]; + 
++ (BOOL)selectAudioInput:(AVAudioSessionPort)type {
+  RTCAudioSession* rtcSession = [RTCAudioSession sharedInstance];
+  AVAudioSessionPortDescription* inputPort = nil;
+  for (AVAudioSessionPortDescription* port in rtcSession.session.availableInputs) {
+    if ([port.portType isEqualToString:type]) {
+      inputPort = port;
+      break;
+    }
+  }
+  if (inputPort != nil) {
+    NSError* errOut = nil;
+    [rtcSession lockForConfiguration];
+    [rtcSession setPreferredInput:inputPort error:&errOut];
+    [rtcSession unlockForConfiguration];
+    if (errOut != nil) {
+      return NO;
+    }
+    return YES;
+  }
+  return NO;
+}
+
++ (void)setSpeakerphoneOn:(BOOL)enable {
+  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
+
+  if (enable && config.category != AVAudioSessionCategoryPlayAndRecord) {
+    NSLog(@"setSpeakerphoneOn: Category option 'defaultToSpeaker' is only applicable with category "
+          @"'playAndRecord', ignore.");
+    return;
+  }
+
+  [session lockForConfiguration];
+  NSError* error = nil;
+  if (!enable) {
+    [session setMode:config.mode error:&error];
+    BOOL success = [session setCategory:config.category
+                            withOptions:AVAudioSessionCategoryOptionAllowAirPlay |
+                                        AVAudioSessionCategoryOptionAllowBluetoothA2DP |
+                                        AVAudioSessionCategoryOptionAllowBluetooth
+                                  error:&error];
+
+    success = [session.session overrideOutputAudioPort:AVAudioSessionPortOverrideNone
+                                                 error:&error];
+    if (!success)
+      NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error);
+  } else {
+    [session setMode:config.mode error:&error];
+    BOOL success = [session setCategory:config.category
+                            withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker |
+                                        AVAudioSessionCategoryOptionAllowAirPlay |
+                                        AVAudioSessionCategoryOptionAllowBluetoothA2DP |
+                                        AVAudioSessionCategoryOptionAllowBluetooth
+                                  error:&error];
+
+    success = [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
+                                         error:&error];
+    if (!success)
+      NSLog(@"setSpeakerphoneOn: Port override failed due to: %@", error);
+  }
+  [session unlockForConfiguration];
+}
+
++ (void)setSpeakerphoneOnButPreferBluetooth {
+  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
+  [session lockForConfiguration];
+  NSError* error = nil;
+  [session setMode:config.mode error:&error];
+  BOOL success = [session setCategory:config.category
+                          withOptions:AVAudioSessionCategoryOptionAllowAirPlay |
+                                      AVAudioSessionCategoryOptionAllowBluetoothA2DP |
+                                      AVAudioSessionCategoryOptionAllowBluetooth |
+                                      AVAudioSessionCategoryOptionDefaultToSpeaker
+                                error:&error];
+
+  success = [session overrideOutputAudioPort:AVAudioSessionPortOverrideNone error:&error];
+  if (!success)
+    NSLog(@"setSpeakerphoneOnButPreferBluetooth: Port override failed due to: %@", error);
+
+  success = [session setActive:YES error:&error];
+  if (!success)
+    NSLog(@"setSpeakerphoneOnButPreferBluetooth: Audio session override failed: %@", error);
+  else
+    NSLog(@"AudioSession override with bluetooth preference via "
+          @"setSpeakerphoneOnButPreferBluetooth successful");
+  [session unlockForConfiguration];
+}
+
++ (void)deactiveRtcAudioSession {
+  NSError* error = nil;
+  RTCAudioSession* session = [RTCAudioSession sharedInstance];
+  [session lockForConfiguration];
+  if ([session isActive]) {
+    BOOL success = [session setActive:NO error:&error];
+    if (!success)
+      NSLog(@"RTC audio session deactivation failed: %@", error);
+    else
+      NSLog(@"RTC audio session deactivated successfully");
+  }
+  [session unlockForConfiguration];
+}
+
++ (AVAudioSessionMode)audioSessionModeFromString:(NSString*)mode {
+  if ([@"default_" isEqualToString:mode]) {
+    return AVAudioSessionModeDefault;
+  } else if ([@"voicePrompt" isEqualToString:mode]) {
+    return AVAudioSessionModeVoicePrompt;
+  } else if ([@"videoRecording" isEqualToString:mode]) {
+    return AVAudioSessionModeVideoRecording;
+  } else if ([@"videoChat" 
isEqualToString:mode]) { + return AVAudioSessionModeVideoChat; + } else if ([@"voiceChat" isEqualToString:mode]) { + return AVAudioSessionModeVoiceChat; + } else if ([@"gameChat" isEqualToString:mode]) { + return AVAudioSessionModeGameChat; + } else if ([@"measurement" isEqualToString:mode]) { + return AVAudioSessionModeMeasurement; + } else if ([@"moviePlayback" isEqualToString:mode]) { + return AVAudioSessionModeMoviePlayback; + } else if ([@"spokenAudio" isEqualToString:mode]) { + return AVAudioSessionModeSpokenAudio; + } + return AVAudioSessionModeDefault; +} + ++ (AVAudioSessionCategory)audioSessionCategoryFromString:(NSString*)category { + if ([@"ambient" isEqualToString:category]) { + return AVAudioSessionCategoryAmbient; + } else if ([@"soloAmbient" isEqualToString:category]) { + return AVAudioSessionCategorySoloAmbient; + } else if ([@"playback" isEqualToString:category]) { + return AVAudioSessionCategoryPlayback; + } else if ([@"record" isEqualToString:category]) { + return AVAudioSessionCategoryRecord; + } else if ([@"playAndRecord" isEqualToString:category]) { + return AVAudioSessionCategoryPlayAndRecord; + } else if ([@"multiRoute" isEqualToString:category]) { + return AVAudioSessionCategoryMultiRoute; + } + return AVAudioSessionCategoryAmbient; +} + ++ (void)setAppleAudioConfiguration:(NSDictionary*)configuration { + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration]; + + NSString* appleAudioCategory = configuration[@"appleAudioCategory"]; + NSArray* appleAudioCategoryOptions = configuration[@"appleAudioCategoryOptions"]; + NSString* appleAudioMode = configuration[@"appleAudioMode"]; + + [session lockForConfiguration]; + + if (appleAudioCategoryOptions != nil) { + config.categoryOptions = 0; + for (NSString* option in appleAudioCategoryOptions) { + if ([@"mixWithOthers" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers; + } else if ([@"duckOthers" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionDuckOthers; + } else if ([@"allowBluetooth" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetooth; + } else if ([@"allowBluetoothA2DP" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetoothA2DP; + } else if ([@"allowAirPlay" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionAllowAirPlay; + } else if ([@"defaultToSpeaker" isEqualToString:option]) { + config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker; + } + } + } + + if (appleAudioCategory != nil) { + config.category = [AudioUtils audioSessionCategoryFromString:appleAudioCategory]; + [session setCategory:config.category withOptions:config.categoryOptions error:nil]; + } + + if (appleAudioMode != nil) { + config.mode = [AudioUtils audioSessionModeFromString:appleAudioMode]; + [session setMode:config.mode error:nil]; + } + + [session unlockForConfiguration]; +} + +@end +#endif diff --git a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.m similarity index 88% rename from ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.m index 147207a1eb..6fef27305d 100644 --- 
a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.m +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.m @@ -5,9 +5,9 @@ // Created by Alex-Dan Bumbu on 06/01/2021. // -#import "FlutterBroadcastScreenCapturer.h" -#import "FlutterSocketConnection.h" -#import "FlutterSocketConnectionFrameReader.h" +#import "../include/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.h" +#import "../include/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.h" +#import "../include/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.h" NSString* const kRTCScreensharingSocketFD = @"rtc_SSFD"; NSString* const kRTCAppGroupIdentifier = @"RTCAppGroupIdentifier"; diff --git a/ios/Classes/Broadcast/FlutterSocketConnection.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.m similarity index 98% rename from ios/Classes/Broadcast/FlutterSocketConnection.m rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.m index 4a0cbe3797..063c175a2c 100644 --- a/ios/Classes/Broadcast/FlutterSocketConnection.m +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.m @@ -8,7 +8,7 @@ #include #include -#import "FlutterSocketConnection.h" +#import "../include/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.h" @interface FlutterSocketConnection () diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.m similarity index 96% rename from ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.m index 7485a3492a..1b0073c1d2 100644 --- a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.m +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.m @@ -8,11 +8,11 @@ #include #import -#import -#import +#import +#import -#import "FlutterSocketConnection.h" -#import "FlutterSocketConnectionFrameReader.h" +#import "../include/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.h" +#import "../include/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.h" const NSUInteger kMaxReadLength = 10 * 1024; diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/CameraUtils.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/CameraUtils.m new file mode 100644 index 0000000000..9090802e33 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/CameraUtils.m @@ -0,0 +1,379 @@ +#import "./include/stream_webrtc_flutter/CameraUtils.h" + +@implementation FlutterWebRTCPlugin (CameraUtils) + +- (AVCaptureDevice*)currentDevice { + if (!self.videoCapturer) { + return nil; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + return nil; + } + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + return deviceInput.device; +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't check torch"); + result(@NO); + return; + } + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +#else + NSLog(@"Not supported on macOS. 
Can't check torch"); + result(@NO); +#endif +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:@"device is nil" + details:nil]); + return; + } + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:@"device does not support torch" + details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to aquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetTorchFailed" + message:error.localizedDescription + details:nil]); + return; + } + + device.torchMode = torch ? AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" + message:@"device is nil" + details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" + message:error.localizedDescription + details:nil]); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#else + NSLog(@"Not supported on macOS. Can't set zoom"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetZoomFailed" + message:@"Not supported on macOS" + details:nil]); +#endif +} + +- (void)applyFocusMode:(NSString*)focusMode onDevice:(AVCaptureDevice*)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if ([@"locked" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } else if ([@"auto" isEqualToString:focusMode]) { + if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; + } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { + [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track + focusMode:(nonnull NSString*)focusMode + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" + message:@"device is nil" + details:nil]); + return; + } + self.focusMode = focusMode; + [self applyFocusMode:focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't focusMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusModeFailed" + message:@"Not supported on macOS" + details:nil]); +#endif +} + +- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track + focusPoint:(nonnull NSDictionary*)focusPoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" + message:@"device is nil" + details:nil]); + return; + } + BOOL reset = ((NSNumber*)focusPoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber*)focusPoint[@"x"]).doubleValue; + y = ((NSNumber*)focusPoint[@"y"]).doubleValue; + } + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" + message:@"Focus point of interest is not supported" + details:nil]); + return; + } + + if (!device.isFocusPointOfInterestSupported) { + NSLog(@"Focus point of interest is not supported. Can't set focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" + message:@"Focus point of interest is not supported" + details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + + [device setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation x:x y:y]]; + [device unlockForConfiguration]; + + [self applyFocusMode:self.focusMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't focusPoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetFocusPointFailed" + message:@"Not supported on macOS" + details:nil]); +#endif +} + +- (void)applyExposureMode:(NSString*)exposureMode onDevice:(AVCaptureDevice*)captureDevice { +#if TARGET_OS_IPHONE + [captureDevice lockForConfiguration:nil]; + if ([@"locked" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } else if ([@"auto" isEqualToString:exposureMode]) { + if ([captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) { + [captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure]; + } else if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) { + [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose]; + } + } + [captureDevice unlockForConfiguration]; +#endif +} + +- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track + exposureMode:(nonnull NSString*)exposureMode + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + if (!device) { + NSLog(@"Video capturer is null. Can't set exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" + message:@"device is nil" + details:nil]); + return; + } + self.exposureMode = exposureMode; + [self applyExposureMode:exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. 
Can't exposureMode"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposureModeFailed" + message:@"Not supported on macOS" + details:nil]); +#endif +} + +#if TARGET_OS_IPHONE +- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation + x:(double)x + y:(double)y { + double oldX = x, oldY = y; + switch (orientation) { + case UIDeviceOrientationPortrait: // 90 ccw + y = 1 - oldX; + x = oldY; + break; + case UIDeviceOrientationPortraitUpsideDown: // 90 cw + x = 1 - oldY; + y = oldX; + break; + case UIDeviceOrientationLandscapeRight: // 180 + x = 1 - x; + y = 1 - y; + break; + case UIDeviceOrientationLandscapeLeft: + default: + // No rotation required + break; + } + return CGPointMake(x, y); +} +#endif + +- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track + exposurePoint:(nonnull NSDictionary*)exposurePoint + result:(nonnull FlutterResult)result { +#if TARGET_OS_IPHONE + AVCaptureDevice* device = [self currentDevice]; + + if (!device) { + NSLog(@"Video capturer is null. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" + message:@"device is nil" + details:nil]); + return; + } + + BOOL reset = ((NSNumber*)exposurePoint[@"reset"]).boolValue; + double x = 0.5; + double y = 0.5; + if (!reset) { + x = ((NSNumber*)exposurePoint[@"x"]).doubleValue; + y = ((NSNumber*)exposurePoint[@"y"]).doubleValue; + } + if (!device.isExposurePointOfInterestSupported) { + NSLog(@"Exposure point of interest is not supported. Can't set exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" + message:@"Exposure point of interest is not supported" + details:nil]); + return; + } + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + [device lockForConfiguration:nil]; + [device setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation x:x y:y]]; + [device unlockForConfiguration]; + + [self applyExposureMode:self.exposureMode onDevice:device]; + result(nil); +#else + NSLog(@"Not supported on macOS. Can't exposurePoint"); + result([FlutterError errorWithCode:@"mediaStreamTrackSetExposurePointFailed" + message:@"Not supported on macOS" + details:nil]); +#endif +} + +- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't switch camera"); + return; + } +#if TARGET_OS_IPHONE + [self.videoCapturer stopCapture]; +#endif + self._usingFrontCamera = !self._usingFrontCamera; + AVCaptureDevicePosition position = + self._usingFrontCamera ? 
AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
+  AVCaptureDevice* videoDevice = [self findDeviceForPosition:position];
+  AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice
+                                                          targetWidth:self._lastTargetWidth
+                                                         targetHeight:self._lastTargetHeight];
+  [self.videoCapturer startCaptureWithDevice:videoDevice
+                                      format:selectedFormat
+                                         fps:[self selectFpsForFormat:selectedFormat
+                                                            targetFps:self._lastTargetFps]
+                           completionHandler:^(NSError* error) {
+                             if (error != nil) {
+                               result([FlutterError errorWithCode:@"Error while switching camera"
+                                                          message:@"Error while switching camera"
+                                                          details:error]);
+                             } else {
+                               result([NSNumber numberWithBool:self._usingFrontCamera]);
+                             }
+                           }];
+}
+
+- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position {
+  if (position == AVCaptureDevicePositionUnspecified) {
+    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+  }
+  NSArray<AVCaptureDevice*>* captureDevices = [RTCCameraVideoCapturer captureDevices];
+  for (AVCaptureDevice* device in captureDevices) {
+    if (device.position == position) {
+      return device;
+    }
+  }
+  return captureDevices[0];
+}
+
+- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device
+                                    targetWidth:(NSInteger)targetWidth
+                                   targetHeight:(NSInteger)targetHeight {
+  NSArray<AVCaptureDeviceFormat*>* formats =
+      [RTCCameraVideoCapturer supportedFormatsForDevice:device];
+  AVCaptureDeviceFormat* selectedFormat = nil;
+  long currentDiff = INT_MAX;
+  for (AVCaptureDeviceFormat* format in formats) {
+    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+    FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+    // NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d",
+    //       format.videoSupportedFrameRateRanges, dimension.width, dimension.height);
+    long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height);
+    if (diff < currentDiff) {
+      selectedFormat = format;
+      currentDiff = diff;
+    } else if (diff == currentDiff &&
+               pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) {
+      selectedFormat = format;
+    }
+  }
+  return selectedFormat;
+}
+
+- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps {
+  Float64 maxSupportedFramerate = 0;
+  for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) {
+    maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate);
+  }
+  return fmin(maxSupportedFramerate, targetFps);
+}
+
+@end
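selectFormatForDevice: above picks the capture format whose dimensions are closest to the requested size (sum of absolute width/height differences), breaking ties in favor of the capturer's preferred pixel format. A rough Swift equivalent of the core selection, using AVFoundation only and omitting the pixel-format tie-break:

import AVFoundation

// Pick the device format whose resolution is closest to the target size.
func closestFormat(of device: AVCaptureDevice,
                   targetWidth: Int32, targetHeight: Int32) -> AVCaptureDevice.Format? {
  var best: AVCaptureDevice.Format?
  var bestDiff = Int32.max
  for format in device.formats {
    let dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
    let diff = abs(targetWidth - dims.width) + abs(targetHeight - dims.height)
    if diff < bestDiff {
      best = format
      bestDiff = diff
    }
  }
  return best
}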
available!"); + return; + } + + if (@available(iOS 11.0, *)) { + [screenRecorder + startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, + RPSampleBufferType bufferType, NSError* _Nullable error) { + if (bufferType == RPSampleBufferTypeVideo) { // We want video only now + [self handleSourceBuffer:sampleBuffer sampleType:bufferType]; + } + } + completionHandler:^(NSError* _Nullable error) { + if (error != nil) + NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error); + }]; + } else { + // Fallback on earlier versions + NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available in versions " + @"lower than iOS 11 !"); + } +} + +- (void)stopCapture { + if (@available(iOS 11.0, *)) { + [screenRecorder stopCaptureWithHandler:^(NSError* _Nullable error) { + if (error != nil) + NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error); + }]; + } else { + // Fallback on earlier versions + NSLog(@"FlutterRPScreenRecorder.stopCapture: Screen recorder is not available in versions " + @"lower than iOS 11 !"); + } +} + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if (completionHandler != nil) { + completionHandler(); + } +} + +- (void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer + sampleType:(RPSampleBufferType)sampleType { + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || + !CMSampleBufferDataIsReady(sampleBuffer)) { + return; + } + + CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + if (pixelBuffer == nil) { + return; + } + + size_t width = CVPixelBufferGetWidth(pixelBuffer); + size_t height = CVPixelBufferGetHeight(pixelBuffer); + + [source adaptOutputFormatToWidth:(int)(width / 2) height:(int)(height / 2) fps:8]; + + RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer]; + int64_t timeStampNs = + CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC; + RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + [self.delegate capturer:self didCaptureVideoFrame:videoFrame]; +} + +@end +#endif diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCAudioSink.mm b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCAudioSink.mm new file mode 100644 index 0000000000..dfb300a01e --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCAudioSink.mm @@ -0,0 +1,75 @@ +#import +#import "./include/stream_webrtc_flutter/FlutterRTCAudioSink.h" +#import "./include/stream_webrtc_flutter/RTCAudioSource+Private.h" +#include "audio_sink_bridge.cpp" +#include "./include/stream_webrtc_flutter/media_stream_interface.h" + +@implementation FlutterRTCAudioSink { + AudioSinkBridge* _bridge; + webrtc::AudioSourceInterface* _audioSource; +} + +- (instancetype)initWithAudioTrack:(RTCAudioTrack*)audio { + self = [super init]; + rtc::scoped_refptr audioSourcePtr = audio.source.nativeAudioSource; + _audioSource = audioSourcePtr.get(); + _bridge = new AudioSinkBridge((void*)CFBridgingRetain(self)); + _audioSource->AddSink(_bridge); + return self; +} + +- (void)close { + _audioSource->RemoveSink(_bridge); + delete _bridge; + _bridge = nil; + _audioSource = nil; +} + +void RTCAudioSinkCallback(void* object, + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t 
+void RTCAudioSinkCallback(void* object,
+                          const void* audio_data,
+                          int bits_per_sample,
+                          int sample_rate,
+                          size_t number_of_channels,
+                          size_t number_of_frames) {
+  AudioBufferList audioBufferList;
+  AudioBuffer audioBuffer;
+  audioBuffer.mData = (void*)audio_data;
+  audioBuffer.mDataByteSize = bits_per_sample / 8 * number_of_channels * number_of_frames;
+  audioBuffer.mNumberChannels = number_of_channels;
+  audioBufferList.mNumberBuffers = 1;
+  audioBufferList.mBuffers[0] = audioBuffer;
+  AudioStreamBasicDescription audioDescription;
+  audioDescription.mBytesPerFrame = bits_per_sample / 8 * number_of_channels;
+  audioDescription.mBitsPerChannel = bits_per_sample;
+  audioDescription.mBytesPerPacket = bits_per_sample / 8 * number_of_channels;
+  audioDescription.mChannelsPerFrame = number_of_channels;
+  audioDescription.mFormatID = kAudioFormatLinearPCM;
+  audioDescription.mFormatFlags =
+      kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
+  audioDescription.mFramesPerPacket = 1;
+  audioDescription.mReserved = 0;
+  audioDescription.mSampleRate = sample_rate;
+  CMAudioFormatDescriptionRef formatDesc;
+  CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioDescription, 0, nil, 0, nil, nil,
+                                 &formatDesc);
+  CMSampleBufferRef buffer;
+  CMSampleTimingInfo timing;
+  timing.decodeTimeStamp = kCMTimeInvalid;
+  timing.presentationTimeStamp = CMTimeMake(0, sample_rate);
+  timing.duration = CMTimeMake(1, sample_rate);
+  CMSampleBufferCreate(kCFAllocatorDefault, nil, false, nil, nil, formatDesc,
+                       number_of_frames * number_of_channels, 1, &timing, 0, nil, &buffer);
+  CMSampleBufferSetDataBufferFromAudioBufferList(buffer, kCFAllocatorDefault, kCFAllocatorDefault,
+                                                 0, &audioBufferList);
+  @autoreleasepool {
+    FlutterRTCAudioSink* sink = (__bridge FlutterRTCAudioSink*)(object);
+    sink.format = formatDesc;
+    if (sink.bufferCallback != nil) {
+      sink.bufferCallback(buffer);
+    } else {
+      NSLog(@"Buffer callback is nil");
+    }
+  }
+}
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCDataChannel.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCDataChannel.m
new file mode 100644
index 0000000000..08b93b981c
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCDataChannel.m
@@ -0,0 +1,219 @@
+#import <Foundation/Foundation.h>
+#import <objc/runtime.h>
+#import "./include/stream_webrtc_flutter/FlutterRTCDataChannel.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCPeerConnection.h"
+
+@implementation RTCDataChannel (Flutter)
+
+- (NSString*)peerConnectionId {
+  return objc_getAssociatedObject(self, _cmd);
+}
+
+- (void)setPeerConnectionId:(NSString*)peerConnectionId {
+  objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId,
+                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+- (FlutterEventSink)eventSink {
+  return objc_getAssociatedObject(self, _cmd);
+}
+
+- (void)setEventSink:(FlutterEventSink)eventSink {
+  objc_setAssociatedObject(self, @selector(eventSink), eventSink,
+                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+- (NSArray*)eventQueue {
+  return objc_getAssociatedObject(self, _cmd);
+}
+
+- (void)setEventQueue:(NSArray*)eventQueue {
+  objc_setAssociatedObject(self, @selector(eventQueue), eventQueue,
+                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+- (NSNumber*)flutterChannelId {
+  return objc_getAssociatedObject(self, _cmd);
+}
+
+- (void)setFlutterChannelId:(NSNumber*)flutterChannelId {
+  objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId,
+                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+- (FlutterEventChannel*)eventChannel {
+  return objc_getAssociatedObject(self, _cmd);
+}
+
+- 
(void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + NSEnumerator* enumerator = [self.eventQueue objectEnumerator]; + id event; + while ((event = enumerator.nextObject) != nil) { + postEvent(sink, event); + }; + self.eventQueue = nil; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (RTCDataChannel) + +- (void)createDataChannel:(nonnull NSString*)peerConnectionId + label:(NSString*)label + config:(RTCDataChannelConfiguration*)config + messenger:(NSObject*)messenger + result:(nonnull FlutterResult)result { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = [peerConnection dataChannelForLabel:label configuration:config]; + + if (nil != dataChannel) { + dataChannel.peerConnectionId = peerConnectionId; + NSString* flutterId = [[NSUUID UUID] UUIDString]; + peerConnection.dataChannels[flutterId] = dataChannel; + dataChannel.flutterChannelId = flutterId; + dataChannel.delegate = self; + dataChannel.eventQueue = nil; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", + peerConnectionId, flutterId] + binaryMessenger:messenger]; + + dataChannel.eventChannel = eventChannel; + [eventChannel setStreamHandler:dataChannel]; + + result(@{ + @"label" : label, + @"id" : [NSNumber numberWithInt:dataChannel.channelId], + @"flutterId" : flutterId + }); + } +} + +- (void)dataChannelClose:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + RTCDataChannel* dataChannel = dataChannels[dataChannelId]; + if (dataChannel) { + FlutterEventChannel* eventChannel = dataChannel.eventChannel; + [dataChannel close]; + [dataChannels removeObjectForKey:dataChannelId]; + [eventChannel setStreamHandler:nil]; + dataChannel.eventChannel = nil; + } +} + +- (void)dataChannelGetBufferedAmount:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + result:(nonnull FlutterResult)result { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId]; + if (dataChannel == NULL || dataChannel.readyState != RTCDataChannelStateOpen) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", @"dataChannelGetBufferedAmount"] + message:[NSString stringWithFormat:@"Error: dataChannel not found or not opened!"] + details:nil]); + } else { + result(@{@"bufferedAmount" : @(dataChannel.bufferedAmount)}); + } +} + +- (void)dataChannelSend:(nonnull NSString*)peerConnectionId + dataChannelId:(nonnull NSString*)dataChannelId + data:(id)data + type:(NSString*)type { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId]; + + NSData* bytes = [type isEqualToString:@"binary"] ? 
((FlutterStandardTypedData*)data).data + : [data dataUsingEncoding:NSUTF8StringEncoding]; + + RTCDataBuffer* buffer = [[RTCDataBuffer alloc] initWithData:bytes + isBinary:[type isEqualToString:@"binary"]]; + [dataChannel sendData:buffer]; +} + +- (NSString*)stringForDataChannelState:(RTCDataChannelState)state { + switch (state) { + case RTCDataChannelStateConnecting: + return @"connecting"; + case RTCDataChannelStateOpen: + return @"open"; + case RTCDataChannelStateClosing: + return @"closing"; + case RTCDataChannelStateClosed: + return @"closed"; + } + return nil; +} + +- (void)sendEvent:(id)event withChannel:(RTCDataChannel*)channel { + if (channel.eventSink) { + postEvent(channel.eventSink, event); + } else { + if (!channel.eventQueue) { + channel.eventQueue = [NSMutableArray array]; + } + channel.eventQueue = [channel.eventQueue arrayByAddingObject:event]; + } +} + +#pragma mark - RTCDataChannelDelegate methods + +// Called when the data channel state has changed. +- (void)dataChannelDidChangeState:(RTCDataChannel*)channel { + [self sendEvent:@{ + @"event" : @"dataChannelStateChanged", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"state" : [self stringForDataChannelState:channel.readyState] + } + withChannel:channel]; +} + +// Called when a data buffer was successfully received. +- (void)dataChannel:(RTCDataChannel*)channel didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer { + NSString* type; + id data; + if (buffer.isBinary) { + type = @"binary"; + data = [FlutterStandardTypedData typedDataWithBytes:buffer.data]; + } else { + type = @"text"; + data = [[NSString alloc] initWithData:buffer.data encoding:NSUTF8StringEncoding]; + } + + [self sendEvent:@{ + @"event" : @"dataChannelReceiveMessage", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"type" : type, + @"data" : (data ? 
data : [NSNull null]) + } + withChannel:channel]; +} + +- (void)dataChannel:(RTCDataChannel*)channel didChangeBufferedAmount:(uint64_t)amount { + [self sendEvent:@{ + @"event" : @"dataChannelBufferedAmountChange", + @"id" : [NSNumber numberWithInt:channel.channelId], + @"bufferedAmount" : [NSNumber numberWithLongLong:channel.bufferedAmount], + @"changedAmount" : [NSNumber numberWithLongLong:amount] + } + withChannel:channel]; +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCDesktopCapturer.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCDesktopCapturer.m new file mode 100644 index 0000000000..0a9696df33 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCDesktopCapturer.m @@ -0,0 +1,434 @@ +#import + +#import "./include/stream_webrtc_flutter/FlutterRTCDesktopCapturer.h" + +#if TARGET_OS_IPHONE +#import +#import "./include/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.h" +#import "./include/stream_webrtc_flutter/FlutterRPScreenRecorder.h" +#endif + +#import "./include/stream_webrtc_flutter/LocalVideoTrack.h" +#import "./include/stream_webrtc_flutter/VideoProcessingAdapter.h" + +#if TARGET_OS_OSX +RTCDesktopMediaList* _screen = nil; +RTCDesktopMediaList* _window = nil; +NSArray* _captureSources; +#endif + +@implementation FlutterWebRTCPlugin (DesktopCapturer) + +- (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result { + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + RTCVideoSource* videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES]; + NSString* trackUUID = [[NSUUID UUID] UUIDString]; + VideoProcessingAdapter* videoProcessingAdapter = + [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource]; + +#if TARGET_OS_IPHONE + BOOL useBroadcastExtension = false; + BOOL presentBroadcastPicker = false; + + id videoConstraints = constraints[@"video"]; + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.deviceId + useBroadcastExtension = [((NSDictionary*)videoConstraints)[@"deviceId"] hasPrefix:@"broadcast"]; + presentBroadcastPicker = useBroadcastExtension && + ![((NSDictionary*)videoConstraints)[@"deviceId"] hasSuffix:@"-manual"]; + } + + id screenCapturer; + + if (useBroadcastExtension) { + screenCapturer = + [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoProcessingAdapter]; + } else { + screenCapturer = + [[FlutterRPScreenRecorder alloc] initWithDelegate:[videoProcessingAdapter source]]; + } + + [screenCapturer startCapture]; + NSLog(@"start %@ capture", useBroadcastExtension ? @"broadcast" : @"replykit"); + + self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) { + NSLog(@"stop %@ capture, trackID %@", useBroadcastExtension ? 
@"broadcast" : @"replykit", + trackUUID); + [screenCapturer stopCaptureWithCompletionHandler:handler]; + }; + + if (presentBroadcastPicker) { + NSString* extension = + [[[NSBundle mainBundle] infoDictionary] valueForKey:kRTCScreenSharingExtension]; + + RPSystemBroadcastPickerView* picker = [[RPSystemBroadcastPickerView alloc] init]; + picker.showsMicrophoneButton = false; + if (extension) { + picker.preferredExtension = extension; + } else { + NSLog(@"Not able to find the %@ key", kRTCScreenSharingExtension); + } + SEL selector = NSSelectorFromString(@"buttonPressed:"); + if ([picker respondsToSelector:selector]) { + [picker performSelector:selector withObject:nil]; + } + } +#endif + +#if TARGET_OS_OSX + /* example for constraints: + { + 'audio': false, + 'video": { + 'deviceId': {'exact': sourceId}, + 'mandatory': { + 'frameRate': 30.0 + }, + } + } + */ + NSString* sourceId = nil; + BOOL useDefaultScreen = NO; + NSInteger fps = 30; + id videoConstraints = constraints[@"video"]; + if ([videoConstraints isKindOfClass:[NSNumber class]] && [videoConstraints boolValue] == YES) { + useDefaultScreen = YES; + } else if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + NSDictionary* deviceId = videoConstraints[@"deviceId"]; + if (deviceId != nil && [deviceId isKindOfClass:[NSDictionary class]]) { + if (deviceId[@"exact"] != nil) { + sourceId = deviceId[@"exact"]; + if (sourceId == nil) { + result(@{@"error" : @"No deviceId.exact found"}); + return; + } + } + } else { + // fall back to default screen if no deviceId is specified + useDefaultScreen = YES; + } + id mandatory = videoConstraints[@"mandatory"]; + if (mandatory != nil && [mandatory isKindOfClass:[NSDictionary class]]) { + id frameRate = mandatory[@"frameRate"]; + if (frameRate != nil && [frameRate isKindOfClass:[NSNumber class]]) { + fps = [frameRate integerValue]; + } + } + } + RTCDesktopCapturer* desktopCapturer; + RTCDesktopSource* source = nil; + + if (useDefaultScreen) { + desktopCapturer = [[RTCDesktopCapturer alloc] initWithDefaultScreen:self + captureDelegate:videoProcessingAdapter]; + } else { + source = [self getSourceById:sourceId]; + if (source == nil) { + result(@{@"error" : [NSString stringWithFormat:@"No source found for id: %@", sourceId]}); + return; + } + desktopCapturer = [[RTCDesktopCapturer alloc] initWithSource:source + delegate:self + captureDelegate:videoProcessingAdapter]; + } + [desktopCapturer startCaptureWithFPS:fps]; + NSLog(@"start desktop capture: sourceId: %@, type: %@, fps: %lu", sourceId, + source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", fps); + + self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) { + NSLog(@"stop desktop capture: sourceId: %@, type: %@, trackID %@", sourceId, + source.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", trackUUID); + [desktopCapturer stopCapture]; + handler(); + }; +#endif + + RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource + trackId:trackUUID]; + [mediaStream addVideoTrack:videoTrack]; + + LocalVideoTrack* localVideoTrack = [[LocalVideoTrack alloc] initWithTrack:videoTrack + videoProcessing:videoProcessingAdapter]; + + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; + + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCVideoTrack* track in mediaStream.videoTracks) { + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result( + @{@"streamId" : mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks}); +} + +- (void)getDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"getDesktopSources"); + + NSArray* types = [argsMap objectForKey:@"types"]; + if (types == nil) { + result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]); + return; + } + + if (![self buildDesktopSourcesListWithTypes:types forceReload:YES result:result]) { + NSLog(@"getDesktopSources failed."); + return; + } + + NSMutableArray* sources = [NSMutableArray array]; + NSEnumerator* enumerator = [_captureSources objectEnumerator]; + RTCDesktopSource* object; + while ((object = enumerator.nextObject) != nil) { + /*NSData *data = nil; + if([object thumbnail]) { + data = [[NSData alloc] init]; + NSImage *resizedImg = [self resizeImage:[object thumbnail] forSize:NSMakeSize(320, 180)]; + data = [resizedImg TIFFRepresentation]; + }*/ + [sources addObject:@{ + @"id" : object.sourceId, + @"name" : object.name, + @"thumbnailSize" : @{@"width" : @0, @"height" : @0}, + @"type" : object.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", + //@"thumbnail": data, + }]; + } + result(@{@"sources" : sources}); +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +- (void)getDesktopSourceThumbnail:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"getDesktopSourceThumbnail"); + NSString* sourceId = argsMap[@"sourceId"]; + RTCDesktopSource* object = [self getSourceById:sourceId]; + if (object == nil) { + result(@{@"error" : @"No source found"}); + return; + } + NSImage* image = [object UpdateThumbnail]; + if (image != nil) { + NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; + NSData* data = [resizedImg TIFFRepresentation]; + result(data); + } else { + result(@{@"error" : @"No thumbnail found"}); + } + +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +- (void)updateDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"updateDesktopSources"); + NSArray* types = [argsMap objectForKey:@"types"]; + if (types == nil) { + result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]); + return; + } + if (![self buildDesktopSourcesListWithTypes:types forceReload:NO result:result]) { + NSLog(@"updateDesktopSources failed."); + return; + } + result(@{@"result" : @YES}); +#else + result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]); +#endif +} + +#if TARGET_OS_OSX +- (NSImage*)resizeImage:(NSImage*)sourceImage forSize:(CGSize)targetSize { + CGSize imageSize = sourceImage.size; + CGFloat width = imageSize.width; + CGFloat height = imageSize.height; + CGFloat targetWidth = targetSize.width; + CGFloat targetHeight = targetSize.height; + CGFloat scaleFactor = 0.0; + CGFloat scaledWidth = targetWidth; + CGFloat scaledHeight = targetHeight; + CGPoint thumbnailPoint = CGPointMake(0.0, 0.0); + + if (CGSizeEqualToSize(imageSize, targetSize) == NO) { + CGFloat widthFactor = targetWidth / width; + CGFloat heightFactor = targetHeight / height; + + // scale to fit the longer + scaleFactor = (widthFactor > heightFactor) ? 
widthFactor : heightFactor; + scaledWidth = ceil(width * scaleFactor); + scaledHeight = ceil(height * scaleFactor); + + // center the image + if (widthFactor > heightFactor) { + thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5; + } else if (widthFactor < heightFactor) { + thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5; + } + } + + NSImage* newImage = [[NSImage alloc] initWithSize:NSMakeSize(scaledWidth, scaledHeight)]; + CGRect thumbnailRect = {thumbnailPoint, {scaledWidth, scaledHeight}}; + NSRect imageRect = NSMakeRect(0.0, 0.0, width, height); + + [newImage lockFocus]; + [sourceImage drawInRect:thumbnailRect + fromRect:imageRect + operation:NSCompositingOperationCopy + fraction:1.0]; + [newImage unlockFocus]; + + return newImage; +} + +- (RTCDesktopSource*)getSourceById:(NSString*)sourceId { + NSEnumerator* enumerator = [_captureSources objectEnumerator]; + RTCDesktopSource* object; + while ((object = enumerator.nextObject) != nil) { + if ([sourceId isEqualToString:object.sourceId]) { + return object; + } + } + return nil; +} + +- (BOOL)buildDesktopSourcesListWithTypes:(NSArray*)types + forceReload:(BOOL)forceReload + result:(FlutterResult)result { + BOOL captureWindow = NO; + BOOL captureScreen = NO; + _captureSources = [NSMutableArray array]; + + NSEnumerator* typesEnumerator = [types objectEnumerator]; + NSString* type; + while ((type = typesEnumerator.nextObject) != nil) { + if ([type isEqualToString:@"screen"]) { + captureScreen = YES; + } else if ([type isEqualToString:@"window"]) { + captureWindow = YES; + } else { + result([FlutterError errorWithCode:@"ERROR" message:@"Invalid type" details:nil]); + return NO; + } + } + + if (!captureWindow && !captureScreen) { + result([FlutterError errorWithCode:@"ERROR" + message:@"At least one type is required" + details:nil]); + return NO; + } + + if (captureWindow) { + if (!_window) + _window = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeWindow delegate:self]; + [_window UpdateSourceList:forceReload updateAllThumbnails:YES]; + NSArray* sources = [_window getSources]; + _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources]; + } + if (captureScreen) { + if (!_screen) + _screen = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeScreen delegate:self]; + [_screen UpdateSourceList:forceReload updateAllThumbnails:YES]; + NSArray* sources = [_screen getSources]; + _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources]; + } + NSLog(@"captureSources: %lu", [_captureSources count]); + return YES; +} + +#pragma mark - RTCDesktopMediaListDelegate delegate + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceAdded: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + NSImage* image = [source UpdateThumbnail]; + NSData* data = [[NSData alloc] init]; + if (image != nil) { + NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)]; + data = [resizedImg TIFFRepresentation]; + } + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceAdded", + @"id" : source.sourceId, + @"name" : source.name, + @"thumbnailSize" : @{@"width" : @0, @"height" : @0}, + @"type" : source.sourceType == RTCDesktopSourceTypeScreen ? 
@"screen" : @"window", + @"thumbnail" : data + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceRemoved: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceRemoved", + @"id" : source.sourceId, + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceNameChanged: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceNameChanged", + @"id" : source.sourceId, + @"name" : source.name, + }); + } +} + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source { + // NSLog(@"didDesktopSourceThumbnailChanged: %@, id %@", source.name, source.sourceId); + if (self.eventSink) { + NSImage* resizedImg = [self resizeImage:[source thumbnail] forSize:NSMakeSize(320, 180)]; + NSData* data = [resizedImg TIFFRepresentation]; + postEvent(self.eventSink, @{ + @"event" : @"desktopSourceThumbnailChanged", + @"id" : source.sourceId, + @"thumbnail" : data + }); + } +} + +#pragma mark - RTCDesktopCapturerDelegate delegate + +- (void)didSourceCaptureStart:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureStart"); +} + +- (void)didSourceCapturePaused:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCapturePaused"); +} + +- (void)didSourceCaptureStop:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureStop"); +} + +- (void)didSourceCaptureError:(RTCDesktopCapturer*)capturer { + NSLog(@"didSourceCaptureError"); +} + +#endif + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCFrameCapturer.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCFrameCapturer.m new file mode 100644 index 0000000000..3a6e8d3bbd --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCFrameCapturer.m @@ -0,0 +1,175 @@ +#if TARGET_OS_IPHONE +#import +#elif TARGET_OS_OSX +#import +#endif + +#import "./include/stream_webrtc_flutter/FlutterRTCFrameCapturer.h" + +@import CoreImage; +@import CoreVideo; + +@implementation FlutterRTCFrameCapturer { + RTCVideoTrack* _track; + NSString* _path; + FlutterResult _result; + bool _gotFrame; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result { + self = [super init]; + if (self) { + _gotFrame = false; + _track = track; + _path = path; + _result = result; + [track addRenderer:self]; + } + return self; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame*)frame { + if (_gotFrame || frame == nil) + return; + _gotFrame = true; + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + bool shouldRelease; + if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } else { + pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer; + shouldRelease = false; + } + CIImage* ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef]; + CGRect outputSize; + if (@available(iOS 11, macOS 10.13, *)) { + switch (frame.rotation) { + case RTCVideoRotation_90: + ciImage = [ciImage 
imageByApplyingCGOrientation:kCGImagePropertyOrientationRight]; + outputSize = CGRectMake(0, 0, frame.height, frame.width); + break; + case RTCVideoRotation_180: + ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationDown]; + outputSize = CGRectMake(0, 0, frame.width, frame.height); + break; + case RTCVideoRotation_270: + ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft]; + outputSize = CGRectMake(0, 0, frame.height, frame.width); + break; + default: + outputSize = CGRectMake(0, 0, frame.width, frame.height); + break; + } + } else { + outputSize = CGRectMake(0, 0, frame.width, frame.height); + } + CIContext* tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData* imageData; +#if TARGET_OS_IPHONE + UIImage* uiImage = [UIImage imageWithCGImage:cgImage]; + if ([[_path pathExtension] isEqualToString:@"jpg"]) { + imageData = UIImageJPEGRepresentation(uiImage, 1.0f); + } else { + imageData = UIImagePNGRepresentation(uiImage); + } +#else + NSBitmapImageRep* newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + NSDictionary* quality = @{NSImageCompressionFactor : @1.0f}; + if ([[_path pathExtension] isEqualToString:@"jpg"]) { + imageData = [newRep representationUsingType:NSBitmapImageFileTypeJPEG properties:quality]; + } else { + imageData = [newRep representationUsingType:NSBitmapImageFileTypePNG properties:quality]; + } +#endif + CGImageRelease(cgImage); + if (shouldRelease) + CVPixelBufferRelease(pixelBufferRef); + if (imageData && [imageData writeToFile:_path atomically:NO]) { + NSLog(@"File written successfully to %@", _path); + _result(nil); + } else { + NSLog(@"Failed to write to file"); + _result([FlutterError errorWithCode:@"CaptureFrameFailed" + message:@"Failed to write image data to file" + details:nil]); + } + dispatch_async(dispatch_get_main_queue(), ^{ + [self->_track removeRenderer:self]; + self->_track = nil; + }); +} + ++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame { + id i420Buffer = [frame.buffer toI420]; + CVPixelBufferRef outputPixelBuffer; + size_t w = (size_t)roundf(i420Buffer.width); + size_t h = (size_t)roundf(i420Buffer.height); + NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVPixelBufferCreate(kCFAllocatorDefault, w, h, kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer); + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + [RTCYUVHelper I420ToARGB:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstARGB:dst + dstStrideARGB:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + [RTCYUVHelper I420ToBGRA:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstBGRA:dst + dstStrideBGRA:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } + } + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); + return outputPixelBuffer; +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCFrameCryptor.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCFrameCryptor.m new file mode 100644 index 0000000000..2262ee7b30 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCFrameCryptor.m @@ -0,0 +1,614 @@ +#import "./include/stream_webrtc_flutter/FlutterRTCFrameCryptor.h" + +#import <objc/runtime.h> + +@implementation RTCFrameCryptor (Flutter) + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} +@end + +@implementation FlutterWebRTCPlugin (FrameCryptor) + +- (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call + result:(nonnull FlutterResult)result { + NSDictionary* constraints = call.arguments; + NSString* method = call.method; + if ([method isEqualToString:@"frameCryptorFactoryCreateFrameCryptor"]) { + [self frameCryptorFactoryCreateFrameCryptor:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorSetKeyIndex"]) { + [self frameCryptorSetKeyIndex:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorGetKeyIndex"]) { + [self frameCryptorGetKeyIndex:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorSetEnabled"]) { + [self frameCryptorSetEnabled:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorGetEnabled"]) { + [self frameCryptorGetEnabled:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorDispose"]) { + [self frameCryptorDispose:constraints result:result]; + } else if ([method isEqualToString:@"frameCryptorFactoryCreateKeyProvider"]) { + [self frameCryptorFactoryCreateKeyProvider:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetSharedKey"]) { + [self keyProviderSetSharedKey:constraints
result:result]; + } else if ([method isEqualToString:@"keyProviderRatchetSharedKey"]) { + [self keyProviderRatchetSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderExportSharedKey"]) { + [self keyProviderExportSharedKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetKey"]) { + [self keyProviderSetKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderRatchetKey"]) { + [self keyProviderRatchetKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderExportKey"]) { + [self keyProviderExportKey:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderSetSifTrailer"]) { + [self keyProviderSetSifTrailer:constraints result:result]; + } else if ([method isEqualToString:@"keyProviderDispose"]) { + [self keyProviderDispose:constraints result:result]; + } else { + result(FlutterMethodNotImplemented); + } +} + +- (RTCCryptorAlgorithm)getAlgorithm:(NSNumber*)algorithm { + switch ([algorithm intValue]) { + case 0: + return RTCCryptorAlgorithmAesGcm; + default: + return RTCCryptorAlgorithmAesGcm; + } +} + +- (void)frameCryptorFactoryCreateFrameCryptor:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* peerConnectionId = constraints[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSNumber* algorithm = constraints[@"algorithm"]; + if (algorithm == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid algorithm" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSString* keyProviderId = constraints[@"keyProviderId"]; + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid keyProviderId" + details:nil]); + return; + } + + RTCFrameCryptorKeyProvider* keyProvider = self.keyProviders[keyProviderId]; + if (keyProvider == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:@"Invalid keyProvider" + details:nil]); + return; + } + + NSString* type = constraints[@"type"]; + NSString* rtpSenderId = constraints[@"rtpSenderId"]; + NSString* rtpReceiverId = constraints[@"rtpReceiverId"]; + + if ([type isEqualToString:@"sender"]) { + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:rtpSenderId]; + if (sender == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + + RTCFrameCryptor* frameCryptor = + [[RTCFrameCryptor alloc] initWithFactory:self.peerConnectionFactory + rtpSender:sender + participantId:participantId + algorithm:[self getAlgorithm:algorithm] + keyProvider:keyProvider]; + NSString* frameCryptorId = [[NSUUID UUID] UUIDString]; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/frameCryptorEvent%@", + frameCryptorId] + 
binaryMessenger:self.messenger]; + + frameCryptor.eventChannel = eventChannel; + [eventChannel setStreamHandler:frameCryptor]; + frameCryptor.delegate = self; + + self.frameCryptors[frameCryptorId] = frameCryptor; + result(@{@"frameCryptorId" : frameCryptorId}); + } else if ([type isEqualToString:@"receiver"]) { + RTCRtpReceiver* receiver = [self getRtpReceiverById:peerConnection Id:rtpReceiverId]; + if (receiver == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateFrameCryptorFailed" + message:[NSString stringWithFormat:@"Error: receiver not found!"] + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = + [[RTCFrameCryptor alloc] initWithFactory:self.peerConnectionFactory + rtpReceiver:receiver + participantId:participantId + algorithm:[self getAlgorithm:algorithm] + keyProvider:keyProvider]; + NSString* frameCryptorId = [[NSUUID UUID] UUIDString]; + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/frameCryptorEvent%@", + frameCryptorId] + binaryMessenger:self.messenger]; + + frameCryptor.eventChannel = eventChannel; + [eventChannel setStreamHandler:frameCryptor]; + frameCryptor.delegate = self; + self.frameCryptors[frameCryptorId] = frameCryptor; + result(@{@"frameCryptorId" : frameCryptorId}); + } else { + result([FlutterError errorWithCode:@"InvalidArgument" message:@"Invalid type" details:nil]); + return; + } +} + +- (void)frameCryptorSetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetKeyIndexFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + [frameCryptor setKeyIndex:[keyIndex intValue]]; + result(@{@"result" : @YES}); +} + +- (void)frameCryptorGetKeyIndex:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetKeyIndexFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetKeyIndexFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + result(@{@"keyIndex" : [NSNumber numberWithInt:frameCryptor.keyIndex]}); +} + +- (void)frameCryptorSetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + + NSNumber* enabled 
= constraints[@"enabled"]; + if (enabled == nil) { + result([FlutterError errorWithCode:@"frameCryptorSetEnabledFailed" + message:@"Invalid enabled" + details:nil]); + return; + } + frameCryptor.enabled = [enabled boolValue]; + result(@{@"result" : enabled}); +} + +- (void)frameCryptorGetEnabled:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetEnabledFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorGetEnabledFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + result(@{@"enabled" : [NSNumber numberWithBool:frameCryptor.enabled]}); +} + +- (void)frameCryptorDispose:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* frameCryptorId = constraints[@"frameCryptorId"]; + if (frameCryptorId == nil) { + result([FlutterError errorWithCode:@"frameCryptorDisposeFailed" + message:@"Invalid frameCryptorId" + details:nil]); + return; + } + RTCFrameCryptor* frameCryptor = self.frameCryptors[frameCryptorId]; + if (frameCryptor == nil) { + result([FlutterError errorWithCode:@"frameCryptorDisposeFailed" + message:@"Invalid frameCryptor" + details:nil]); + return; + } + [self.frameCryptors removeObjectForKey:frameCryptorId]; + frameCryptor.enabled = NO; + result(@{@"result" : @"success"}); +} + +- (void)frameCryptorFactoryCreateKeyProvider:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + NSString* keyProviderId = [[NSUUID UUID] UUIDString]; + + id keyProviderOptions = constraints[@"keyProviderOptions"]; + if (keyProviderOptions == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid keyProviderOptions" + details:nil]); + return; + } + + NSNumber* sharedKey = keyProviderOptions[@"sharedKey"]; + if (sharedKey == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid sharedKey" + details:nil]); + return; + } + + FlutterStandardTypedData* ratchetSalt = keyProviderOptions[@"ratchetSalt"]; + if (ratchetSalt == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid ratchetSalt" + details:nil]); + return; + } + + NSNumber* ratchetWindowSize = keyProviderOptions[@"ratchetWindowSize"]; + if (ratchetWindowSize == nil) { + result([FlutterError errorWithCode:@"frameCryptorFactoryCreateKeyProviderFailed" + message:@"Invalid ratchetWindowSize" + details:nil]); + return; + } + + NSNumber* failureTolerance = keyProviderOptions[@"failureTolerance"]; + + FlutterStandardTypedData* uncryptedMagicBytes = keyProviderOptions[@"uncryptedMagicBytes"]; + + NSNumber* keyRingSize = keyProviderOptions[@"keyRingSize"]; + + NSNumber* discardFrameWhenCryptorNotReady = + keyProviderOptions[@"discardFrameWhenCryptorNotReady"]; + + RTCFrameCryptorKeyProvider* keyProvider = [[RTCFrameCryptorKeyProvider alloc] + initWithRatchetSalt:ratchetSalt.data + ratchetWindowSize:[ratchetWindowSize intValue] + sharedKeyMode:[sharedKey boolValue] + uncryptedMagicBytes:uncryptedMagicBytes != nil ? uncryptedMagicBytes.data : nil + failureTolerance:failureTolerance != nil ? [failureTolerance intValue] : -1 + keyRingSize:keyRingSize != nil ? 
[keyRingSize intValue] : 0 + discardFrameWhenCryptorNotReady:discardFrameWhenCryptorNotReady != nil + ? [discardFrameWhenCryptorNotReady boolValue] + : NO]; + self.keyProviders[keyProviderId] = keyProvider; + result(@{@"keyProviderId" : keyProviderId}); +} + +- (nullable RTCFrameCryptorKeyProvider*)getKeyProviderForId:(NSString*)keyProviderId + result:(nonnull FlutterResult)result { + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProviderId" + details:nil]); + return nil; + } + RTCFrameCryptorKeyProvider* keyProvider = self.keyProviders[keyProviderId]; + if (keyProvider == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProvider" + details:nil]); + return nil; + } + return keyProvider; +} + +- (void)keyProviderSetSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + FlutterStandardTypedData* key = constraints[@"key"]; + if (key == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid key" + details:nil]); + return; + } + + [keyProvider setSharedKey:key.data withIndex:[keyIndex intValue]]; + result(@{@"result" : @YES}); +} + +- (void)keyProviderRatchetSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetSharedKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSData* newKey = [keyProvider ratchetSharedKey:[keyIndex intValue]]; + result(@{@"result" : newKey}); +} + +- (void)keyProviderExportSharedKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderExportSharedKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSData* key = [keyProvider exportSharedKey:[keyIndex intValue]]; + result(@{@"result" : key}); +} + +- (void)keyProviderSetKey:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + FlutterStandardTypedData* key = constraints[@"key"]; + if (key == nil) { + result([FlutterError errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid key" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError 
errorWithCode:@"keyProviderSetKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + [keyProvider setKey:key.data withIndex:[keyIndex intValue] forParticipant:participantId]; + result(@{@"result" : @YES}); +} + +- (void)keyProviderRatchetKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderRatchetKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSData* newKey = [keyProvider ratchetKey:participantId withIndex:[keyIndex intValue]]; + result(@{@"result" : newKey}); +} + +- (void)keyProviderExportKey:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + NSNumber* keyIndex = constraints[@"keyIndex"]; + if (keyIndex == nil) { + result([FlutterError errorWithCode:@"keyProviderExportKeyFailed" + message:@"Invalid keyIndex" + details:nil]); + return; + } + + NSString* participantId = constraints[@"participantId"]; + if (participantId == nil) { + result([FlutterError errorWithCode:@"keyProviderExportKeyFailed" + message:@"Invalid participantId" + details:nil]); + return; + } + + NSData* key = [keyProvider exportKey:participantId withIndex:[keyIndex intValue]]; + result(@{@"result" : key}); +} + +- (void)keyProviderSetSifTrailer:(nonnull NSDictionary*)constraints + result:(nonnull FlutterResult)result { + RTCFrameCryptorKeyProvider* keyProvider = [self getKeyProviderForId:constraints[@"keyProviderId"] + result:result]; + if (keyProvider == nil) { + return; + } + + FlutterStandardTypedData* sifTrailer = constraints[@"sifTrailer"]; + if (sifTrailer == nil) { + result([FlutterError errorWithCode:@"keyProviderSetSifTrailerFailed" + message:@"Invalid key" + details:nil]); + return; + } + + [keyProvider setSifTrailer:sifTrailer.data]; + result(nil); +} + +- (void)keyProviderDispose:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result { + NSString* keyProviderId = constraints[@"keyProviderId"]; + if (keyProviderId == nil) { + result([FlutterError errorWithCode:@"getKeyProviderForIdFailed" + message:@"Invalid keyProviderId" + details:nil]); + return; + } + [self.keyProviders removeObjectForKey:keyProviderId]; + result(@{@"result" : @"success"}); +} + +- (NSString*)stringFromState:(FrameCryptionState)state { + switch (state) { + case FrameCryptionStateNew: + return @"new"; + case FrameCryptionStateOk: + return @"ok"; + case FrameCryptionStateEncryptionFailed: + return @"encryptionFailed"; + case FrameCryptionStateDecryptionFailed: + return @"decryptionFailed"; + case FrameCryptionStateMissingKey: + return @"missingKey"; + case FrameCryptionStateKeyRatcheted: + return @"keyRatcheted"; + case FrameCryptionStateInternalError: + return @"internalError"; + default: + return @"unknown"; + } +} + +#pragma mark - RTCFrameCryptorDelegate methods + +- (void)frameCryptor:(RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor + 
didStateChangeWithParticipantId:(NSString*)participantId + withState:(FrameCryptionState)stateChanged { + if (frameCryptor.eventSink) { + postEvent(frameCryptor.eventSink, @{ + @"event" : @"frameCryptionStateChanged", + @"participantId" : participantId, + @"state" : [self stringFromState:stateChanged] + }); + } +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCMediaRecorder.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCMediaRecorder.m new file mode 100644 index 0000000000..70eb9c99fe --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCMediaRecorder.m @@ -0,0 +1,163 @@ +#import <WebRTC/WebRTC.h> +#import "./include/stream_webrtc_flutter/FlutterRTCAudioSink.h" +#import "./include/stream_webrtc_flutter/FlutterRTCFrameCapturer.h" +#import "./include/stream_webrtc_flutter/FlutterRTCMediaRecorder.h" + +@import AVFoundation; + +@implementation FlutterRTCMediaRecorder { + int framesCount; + bool isInitialized; + CGSize _renderSize; + FlutterRTCAudioSink* _audioSink; + AVAssetWriterInput* _audioWriter; + int64_t _startTime; +} + +- (instancetype)initWithVideoTrack:(RTCVideoTrack*)video + audioTrack:(RTCAudioTrack*)audio + outputFile:(NSURL*)out { + self = [super init]; + isInitialized = false; + self.videoTrack = video; + self.output = out; + [video addRenderer:self]; + framesCount = 0; + if (audio != nil) + _audioSink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:audio]; + else + NSLog(@"Audio track is nil"); + _startTime = -1; + return self; +} + +- (void)initialize:(CGSize)size { + _renderSize = size; + NSDictionary* videoSettings = @{ + AVVideoCompressionPropertiesKey : @{AVVideoAverageBitRateKey : @(6 * 1024 * 1024)}, + AVVideoCodecKey : AVVideoCodecTypeH264, + AVVideoHeightKey : @(size.height), + AVVideoWidthKey : @(size.width), + }; + self.writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo + outputSettings:videoSettings]; + self.writerInput.expectsMediaDataInRealTime = true; + self.writerInput.mediaTimeScale = 30; + + if (_audioSink != nil) { + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; + NSDictionary* audioSettings = @{ + AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC], + AVNumberOfChannelsKey : @1, + AVSampleRateKey : @44100.0, + AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)], + AVEncoderBitRateKey : @64000, + }; + _audioWriter = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio + outputSettings:audioSettings + sourceFormatHint:_audioSink.format]; + _audioWriter.expectsMediaDataInRealTime = true; + } + + NSError* error; + self.assetWriter = [[AVAssetWriter alloc] initWithURL:self.output + fileType:AVFileTypeMPEG4 + error:&error]; + if (error != nil) + NSLog(@"%@", [error localizedDescription]); + self.assetWriter.shouldOptimizeForNetworkUse = true; + [self.assetWriter addInput:self.writerInput]; + if (_audioWriter != nil) { + [self.assetWriter addInput:_audioWriter]; + _audioSink.bufferCallback = ^(CMSampleBufferRef buffer) { + if (self->_audioWriter.readyForMoreMediaData) { + if ([self->_audioWriter appendSampleBuffer:buffer]) + NSLog(@"Audio frame appended"); + else + NSLog(@"Audio frame not appended %@", self.assetWriter.error); + } + }; + } + [self.assetWriter startWriting]; + [self.assetWriter startSessionAtSourceTime:kCMTimeZero]; + + isInitialized = true; +} + +- (void)setSize:(CGSize)size { +} + +- (void)renderFrame:(nullable RTCVideoFrame*)frame { +
if (frame == nil) { + return; + } + if (!isInitialized) { + [self initialize:CGSizeMake((CGFloat)frame.width, (CGFloat)frame.height)]; + } + if (!self.writerInput.readyForMoreMediaData) { + NSLog(@"Drop frame, not ready"); + return; + } + id buffer = frame.buffer; + CVPixelBufferRef pixelBufferRef; + BOOL shouldRelease = false; + if ([buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer; + } else { + pixelBufferRef = [FlutterRTCFrameCapturer convertToCVPixelBuffer:frame]; + shouldRelease = true; + } + CMVideoFormatDescriptionRef formatDescription; + OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer( + kCFAllocatorDefault, pixelBufferRef, &formatDescription); + + CMSampleTimingInfo timingInfo; + + timingInfo.decodeTimeStamp = kCMTimeInvalid; + if (_startTime == -1) { + _startTime = frame.timeStampNs / 1000; + } + int64_t frameTime = (frame.timeStampNs / 1000) - _startTime; + timingInfo.presentationTimeStamp = CMTimeMake(frameTime, 1000000); + framesCount++; + + CMSampleBufferRef outBuffer; + + status = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBufferRef, + formatDescription, &timingInfo, &outBuffer); + + if (![self.writerInput appendSampleBuffer:outBuffer]) { + NSLog(@"Frame not appended %@", self.assetWriter.error); + } +#if TARGET_OS_IPHONE + if (shouldRelease) { + CVPixelBufferRelease(pixelBufferRef); + } +#endif +} + +- (void)stop:(FlutterResult _Nonnull)result { + if (_audioSink != nil) { + _audioSink.bufferCallback = nil; + [_audioSink close]; + } + [self.videoTrack removeRenderer:self]; + [self.writerInput markAsFinished]; + [_audioWriter markAsFinished]; + dispatch_async(dispatch_get_main_queue(), ^{ + [self.assetWriter finishWritingWithCompletionHandler:^{ + NSError* error = self.assetWriter.error; + if (error == nil) { + result(nil); + } else { + result([FlutterError errorWithCode:@"Failed to save recording" + message:[error localizedDescription] + details:nil]); + } + }]; + }); +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCMediaStream.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCMediaStream.m new file mode 100644 index 0000000000..b07d3f64ad --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCMediaStream.m @@ -0,0 +1,1107 @@ +#import <objc/runtime.h> +#import "./include/stream_webrtc_flutter/AudioUtils.h" +#import "./include/stream_webrtc_flutter/CameraUtils.h" +#import "./include/stream_webrtc_flutter/FlutterRTCFrameCapturer.h" +#import "./include/stream_webrtc_flutter/FlutterRTCMediaStream.h" +#import "./include/stream_webrtc_flutter/FlutterRTCPeerConnection.h" +#import "./include/stream_webrtc_flutter/LocalAudioTrack.h" +#import "./include/stream_webrtc_flutter/LocalVideoTrack.h" +#import "./include/stream_webrtc_flutter/VideoProcessingAdapter.h" +#import "AVKit/AVKit.h" + +@implementation RTCMediaStreamTrack (Flutter) + +- (id)settings { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setSettings:(id)settings { + objc_setAssociatedObject(self, @selector(settings), settings, OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} +@end + +@implementation AVCaptureDevice (Flutter) + +- (NSString*)positionString { + switch (self.position) { + case AVCaptureDevicePositionUnspecified: + return @"unspecified"; + case AVCaptureDevicePositionBack: + return @"back"; + case AVCaptureDevicePositionFront: + return @"front"; + } + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCMediaStream) + +/**
+ * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback} + */ +typedef void (^NavigatorUserMediaErrorCallback)(NSString* errorType, NSString* errorMessage); + +/** + * {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback} + */ +typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream* mediaStream); + +- (NSDictionary*)defaultVideoConstraints { + return @{@"minWidth" : @"1280", @"minHeight" : @"720", @"minFrameRate" : @"30"}; +} + +- (NSDictionary*)defaultAudioConstraints { + return @{}; +} + +- (RTCMediaConstraints*)defaultMediaStreamConstraints { + RTCMediaConstraints* constraints = + [[RTCMediaConstraints alloc] initWithMandatoryConstraints:[self defaultVideoConstraints] + optionalConstraints:nil]; + return constraints; +} + +- (NSArray*)captureDevices { + if (@available(iOS 13.0, macOS 10.15, macCatalyst 14.0, tvOS 17.0, *)) { + NSArray* deviceTypes = @[ +#if TARGET_OS_IPHONE + AVCaptureDeviceTypeBuiltInTripleCamera, + AVCaptureDeviceTypeBuiltInDualCamera, + AVCaptureDeviceTypeBuiltInDualWideCamera, + AVCaptureDeviceTypeBuiltInWideAngleCamera, + AVCaptureDeviceTypeBuiltInTelephotoCamera, + AVCaptureDeviceTypeBuiltInUltraWideCamera, +#else + AVCaptureDeviceTypeBuiltInWideAngleCamera, +#endif + ]; + +#if !defined(TARGET_OS_IPHONE) + if (@available(macOS 13.0, *)) { + deviceTypes = [deviceTypes arrayByAddingObject:AVCaptureDeviceTypeDeskViewCamera]; + } +#endif + + if (@available(iOS 17.0, macOS 14.0, tvOS 17.0, *)) { + deviceTypes = [deviceTypes arrayByAddingObjectsFromArray:@[ + AVCaptureDeviceTypeContinuityCamera, + AVCaptureDeviceTypeExternal, + ]]; + } + + return [AVCaptureDeviceDiscoverySession + discoverySessionWithDeviceTypes:deviceTypes + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified] + .devices; + } + return @[]; +} + +/** + * Initializes a new {@link RTCAudioTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the audio-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCAudioTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCAudioTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. 
+ */ +- (void)getUserAudio:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + id audioConstraints = constraints[@"audio"]; + NSString* audioDeviceId = @""; + RTCMediaConstraints* rtcConstraints; + if ([audioConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.audio.deviceId + NSString* deviceId = audioConstraints[@"deviceId"]; + + if (deviceId) { + audioDeviceId = deviceId; + } + + rtcConstraints = [self parseMediaConstraints:audioConstraints]; + // constraints.audio.optional.sourceId + id optionalConstraints = audioConstraints[@"optional"]; + if (optionalConstraints && [optionalConstraints isKindOfClass:[NSArray class]] && !deviceId) { + NSArray* options = optionalConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; + if (sourceId) { + audioDeviceId = sourceId; + } + } + } + } + } else { + rtcConstraints = [self parseMediaConstraints:[self defaultAudioConstraints]]; + } + +#if !defined(TARGET_OS_IPHONE) + if (audioDeviceId != nil) { + [self selectAudioInput:audioDeviceId result:nil]; + } +#endif + + NSString* trackId = [[NSUUID UUID] UUIDString]; + RTCAudioSource* audioSource = + [self.peerConnectionFactory audioSourceWithConstraints:rtcConstraints]; + RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithSource:audioSource + trackId:trackId]; + LocalAudioTrack* localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack]; + + audioTrack.settings = @{ + @"deviceId" : audioDeviceId, + @"kind" : @"audioinput", + @"autoGainControl" : @YES, + @"echoCancellation" : @YES, + @"noiseSuppression" : @YES, + @"channelCount" : @1, + @"latency" : @0, + }; + + [mediaStream addAudioTrack:audioTrack]; + + [self.localTracks setObject:localAudioTrack forKey:trackId]; + + [self ensureAudioSession]; + + successCallback(mediaStream); +} + +// TODO: Use RCTConvert for constraints ... +- (void)getUserMedia:(NSDictionary*)constraints result:(FlutterResult)result { + // Initialize RTCMediaStream with a unique label in order to allow multiple + // RTCMediaStream instances initialized by multiple getUserMedia calls to be + // added to 1 RTCPeerConnection instance. As suggested by + // https://www.w3.org/TR/mediacapture-streams/#mediastream to be a good + // practice, use a UUID (conforming to RFC4122). 
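+  // The UUID chosen below becomes the MediaStream.id seen on the Dart side; the + // success callback then serializes each track's metadata back to Dart through + // the FlutterResult.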
+ NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + [self getUserMedia:constraints + successCallback:^(RTCMediaStream* mediaStream) { + NSString* mediaStreamId = mediaStream.streamId; + + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCAudioTrack* track in mediaStream.audioTracks) { + [audioTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live", + @"settings" : track.settings + }]; + } + + for (RTCVideoTrack* track in mediaStream.videoTracks) { + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live", + @"settings" : track.settings + }]; + } + + self.localStreams[mediaStreamId] = mediaStream; + result(@{ + @"streamId" : mediaStreamId, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks + }); + } + errorCallback:^(NSString* errorType, NSString* errorMessage) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType] + message:errorMessage + details:nil]); + } + mediaStream:mediaStream]; +} + +/** + * Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which + * satisfies specific constraints and adds it to a specific + * {@link RTCMediaStream} if the specified {@code mediaStream} contains no track + * of the respective media type and the specified {@code constraints} specify + * that a track of the respective media type is required; otherwise, reports + * success for the specified {@code mediaStream} to a specific + * {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media + * type-specific iteration of or successfully concludes the + * {@code getUserMedia()} algorithm. The method will be recursively invoked to + * conclude the whole {@code getUserMedia()} algorithm either with (successful) + * satisfaction of the specified {@code constraints} or with failure. + * + * @param constraints The {@code MediaStreamConstraints} which specifies the + * requested media types and which the new {@code RTCAudioTrack} or + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm. + */ +- (void)getUserMedia:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + // If mediaStream contains no audioTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local audio content. 
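+  // Note: as described in the doc comment above, each satisfied media type is + // expected to re-invoke this method, so the audio branch is handled first, + // then video, until the final successCallback(mediaStream) below concludes + // the algorithm.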
+ if (mediaStream.audioTracks.count == 0) { + // constraints.audio + id audioConstraints = constraints[@"audio"]; + BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]]; + if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) { + [self requestAccessForMediaType:AVMediaTypeAudio + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } + } + + // If mediaStream contains no videoTracks and the constraints request such a + // track, then run an iteration of the getUserMedia() algorithm to obtain + // local video content. + if (mediaStream.videoTracks.count == 0) { + // constraints.video + id videoConstraints = constraints[@"video"]; + if (videoConstraints) { + BOOL requestAccessForVideo = [videoConstraints isKindOfClass:[NSNumber class]] + ? [videoConstraints boolValue] + : [videoConstraints isKindOfClass:[NSDictionary class]]; +#if !TARGET_IPHONE_SIMULATOR + if (requestAccessForVideo) { + [self requestAccessForMediaType:AVMediaTypeVideo + constraints:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + return; + } +#endif + } + } + + // There are audioTracks and/or videoTracks in mediaStream as requested by + // constraints so the getUserMedia() is to conclude with success. + successCallback(mediaStream); +} + +- (int)getConstrainInt:(NSDictionary*)constraints forKey:(NSString*)key { + if (![constraints isKindOfClass:[NSDictionary class]]) { + return 0; + } + + id constraint = constraints[key]; + if ([constraint isKindOfClass:[NSNumber class]]) { + return [constraint intValue]; + } else if ([constraint isKindOfClass:[NSString class]]) { + int possibleValue = [constraint intValue]; + if (possibleValue != 0) { + return possibleValue; + } + } else if ([constraint isKindOfClass:[NSDictionary class]]) { + id idealConstraint = constraint[@"ideal"]; + if ([idealConstraint isKindOfClass:[NSString class]]) { + int possibleValue = [idealConstraint intValue]; + if (possibleValue != 0) { + return possibleValue; + } + } + } + + return 0; +} + +- (RTCMediaStreamTrack*)cloneTrack:(nonnull NSString*)trackId { + NSString* newTrackId = [[NSUUID UUID] UUIDString]; + + RTCMediaStreamTrack* originalTrack = [self trackForId:trackId peerConnectionId:nil]; + LocalVideoTrack* originalLocalTrack = self.localTracks[trackId]; + + if (originalTrack != nil && [originalTrack.kind isEqualToString:@"audio"]) { + RTCAudioTrack* originalAudioTrack = (RTCAudioTrack*)originalTrack; + RTCAudioSource* originalAudioSource = originalAudioTrack.source; + + RTCAudioTrack* audioTrack = [self.peerConnectionFactory audioTrackWithSource:originalAudioSource + trackId:newTrackId]; + LocalAudioTrack* localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack]; + + audioTrack.settings = originalAudioTrack.settings; + [self.localTracks setObject:localAudioTrack forKey:newTrackId]; + + for (NSString* streamId in self.localStreams) { + RTCMediaStream* stream = [self.localStreams objectForKey:streamId]; + for (RTCAudioTrack* track in stream.audioTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream addAudioTrack:audioTrack]; + } + } + } + + return audioTrack; + } else if (originalTrack != nil && [originalTrack.kind isEqualToString:@"video"]) { + RTCVideoTrack* originalVideoTrack = (RTCVideoTrack*)originalTrack; + RTCVideoSource* videoSource = originalVideoTrack.source; + + RTCVideoTrack* videoTrack = [self.peerConnectionFactory 
videoTrackWithSource:videoSource + trackId:newTrackId]; + LocalVideoTrack* localVideoTrack = + [[LocalVideoTrack alloc] initWithTrack:videoTrack + videoProcessing:originalLocalTrack.processing]; + + videoTrack.settings = originalVideoTrack.settings; + [self.localTracks setObject:localVideoTrack forKey:newTrackId]; + + for (NSString* streamId in self.localStreams) { + RTCMediaStream* stream = [self.localStreams objectForKey:streamId]; + for (RTCVideoTrack* track in stream.videoTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream addVideoTrack:videoTrack]; + } + } + } + + return videoTrack; + } + + return originalTrack; +} + +/** + * Initializes a new {@link RTCVideoTrack} which satisfies specific constraints, + * adds it to a specific {@link RTCMediaStream}, and reports success to a + * specific callback. Implements the video-specific counterpart of the + * {@code getUserMedia()} algorithm. + * + * @param constraints The {@code MediaStreamConstraints} which the new + * {@code RTCVideoTrack} instance is to satisfy. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is being initialized as + * part of the execution of the {@code getUserMedia()} algorithm, to which a + * new {@code RTCVideoTrack} is to be added, and which is to be reported to + * {@code successCallback} upon success. + */ +- (void)getUserVideo:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + id videoConstraints = constraints[@"video"]; + AVCaptureDevice* videoDevice; + NSString* videoDeviceId = nil; + NSString* facingMode = nil; + NSArray* captureDevices = [self captureDevices]; + + if ([videoConstraints isKindOfClass:[NSDictionary class]]) { + // constraints.video.deviceId + NSString* deviceId = videoConstraints[@"deviceId"]; + + if (deviceId) { + for (AVCaptureDevice* device in captureDevices) { + if ([deviceId isEqualToString:device.uniqueID]) { + videoDevice = device; + videoDeviceId = deviceId; + } + } + } + + // constraints.video.optional + id optionalVideoConstraints = videoConstraints[@"optional"]; + if (optionalVideoConstraints && [optionalVideoConstraints isKindOfClass:[NSArray class]] && + !videoDevice) { + NSArray* options = optionalVideoConstraints; + for (id item in options) { + if ([item isKindOfClass:[NSDictionary class]]) { + NSString* sourceId = ((NSDictionary*)item)[@"sourceId"]; + if (sourceId) { + for (AVCaptureDevice* device in captureDevices) { + if ([sourceId isEqualToString:device.uniqueID]) { + videoDevice = device; + videoDeviceId = sourceId; + } + } + if (videoDevice) { + break; + } + } + } + } + } + + if (!videoDevice) { + // constraints.video.facingMode + // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode + facingMode = videoConstraints[@"facingMode"]; + if (facingMode && [facingMode isKindOfClass:[NSString class]]) { + AVCaptureDevicePosition position; + if ([facingMode isEqualToString:@"environment"]) { + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionBack; + } else if ([facingMode isEqualToString:@"user"]) { + self._usingFrontCamera = YES; + position = AVCaptureDevicePositionFront; + } else { + // If the specified facingMode value is not supported, fall back to + //
the default video device. + self._usingFrontCamera = NO; + position = AVCaptureDevicePositionUnspecified; + } + videoDevice = [self findDeviceForPosition:position]; + } + } + } + + if ([videoConstraints isKindOfClass:[NSNumber class]]) { + videoConstraints = @{@"mandatory" : [self defaultVideoConstraints]}; + } + + NSInteger targetWidth = 0; + NSInteger targetHeight = 0; + NSInteger targetFps = 0; + + if (!videoDevice) { + videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + + int possibleWidth = [self getConstrainInt:videoConstraints forKey:@"width"]; + if (possibleWidth != 0) { + targetWidth = possibleWidth; + } + + int possibleHeight = [self getConstrainInt:videoConstraints forKey:@"height"]; + if (possibleHeight != 0) { + targetHeight = possibleHeight; + } + + int possibleFps = [self getConstrainInt:videoConstraints forKey:@"frameRate"]; + if (possibleFps != 0) { + targetFps = possibleFps; + } + + id mandatory = + [videoConstraints isKindOfClass:[NSDictionary class]] ? videoConstraints[@"mandatory"] : nil; + + // constraints.video.mandatory + if (mandatory && [mandatory isKindOfClass:[NSDictionary class]]) { + id widthConstraint = mandatory[@"minWidth"]; + if ([widthConstraint isKindOfClass:[NSString class]] || + [widthConstraint isKindOfClass:[NSNumber class]]) { + int possibleWidth = [widthConstraint intValue]; + if (possibleWidth != 0) { + targetWidth = possibleWidth; + } + } + id heightConstraint = mandatory[@"minHeight"]; + if ([heightConstraint isKindOfClass:[NSString class]] || + [heightConstraint isKindOfClass:[NSNumber class]]) { + int possibleHeight = [heightConstraint intValue]; + if (possibleHeight != 0) { + targetHeight = possibleHeight; + } + } + id fpsConstraint = mandatory[@"minFrameRate"]; + if ([fpsConstraint isKindOfClass:[NSString class]] || + [fpsConstraint isKindOfClass:[NSNumber class]]) { + int possibleFps = [fpsConstraint intValue]; + if (possibleFps != 0) { + targetFps = possibleFps; + } + } + } + + if (videoDevice) { + RTCVideoSource* videoSource = [self.peerConnectionFactory videoSource]; +#if TARGET_OS_OSX + if (self.videoCapturer) { + [self.videoCapturer stopCapture]; + } +#endif + + VideoProcessingAdapter* videoProcessingAdapter = + [[VideoProcessingAdapter alloc] initWithRTCVideoSource:videoSource]; + self.videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoProcessingAdapter]; + + AVCaptureDeviceFormat* selectedFormat = [self selectFormatForDevice:videoDevice + targetWidth:targetWidth + targetHeight:targetHeight]; + + CMVideoDimensions selectedDimension = + CMVideoFormatDescriptionGetDimensions(selectedFormat.formatDescription); + NSInteger selectedWidth = (NSInteger)selectedDimension.width; + NSInteger selectedHeight = (NSInteger)selectedDimension.height; + NSInteger selectedFps = [self selectFpsForFormat:selectedFormat targetFps:targetFps]; + + self._lastTargetFps = selectedFps; + self._lastTargetWidth = targetWidth; + self._lastTargetHeight = targetHeight; + + NSLog(@"target format %ldx%ld, targetFps: %ld, selected format: %ldx%ld, selected fps %ld", + targetWidth, targetHeight, targetFps, selectedWidth, selectedHeight, selectedFps); + + if ([videoDevice lockForConfiguration:NULL]) { + @try { + videoDevice.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)selectedFps); + videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)selectedFps); + } @catch (NSException* exception) { + NSLog(@"Failed to set active frame rate!\n User info:%@", exception.userInfo); + } + [videoDevice 
unlockForConfiguration]; + } + + [self.videoCapturer startCaptureWithDevice:videoDevice + format:selectedFormat + fps:selectedFps + completionHandler:^(NSError* error) { + if (error) { + NSLog(@"Start capture error: %@", [error localizedDescription]); + } + }]; + + NSString* trackUUID = [[NSUUID UUID] UUIDString]; + RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource + trackId:trackUUID]; + LocalVideoTrack* localVideoTrack = + [[LocalVideoTrack alloc] initWithTrack:videoTrack videoProcessing:videoProcessingAdapter]; + + __weak RTCCameraVideoCapturer* capturer = self.videoCapturer; + self.videoCapturerStopHandlers[videoTrack.trackId] = ^(CompletionHandler handler) { + NSLog(@"Stop video capturer, trackID %@", videoTrack.trackId); + [capturer stopCaptureWithCompletionHandler:handler]; + }; + + if (!videoDeviceId) { + videoDeviceId = videoDevice.uniqueID; + } + + if (!facingMode) { + facingMode = videoDevice.position == AVCaptureDevicePositionBack ? @"environment" + : videoDevice.position == AVCaptureDevicePositionFront ? @"user" + : @"unspecified"; + } + + videoTrack.settings = @{ + @"deviceId" : videoDeviceId, + @"kind" : @"videoinput", + @"width" : [NSNumber numberWithInteger:selectedWidth], + @"height" : [NSNumber numberWithInteger:selectedHeight], + @"frameRate" : [NSNumber numberWithInteger:selectedFps], + @"facingMode" : facingMode, + }; + + [mediaStream addVideoTrack:videoTrack]; + + [self.localTracks setObject:localVideoTrack forKey:trackUUID]; + + successCallback(mediaStream); + } else { + // According to step 6.2.3 of the getUserMedia() algorithm, if there is no + // source, fail with a new OverconstrainedError. + errorCallback(@"OverconstrainedError", /* errorMessage */ nil); + } +} + +- (void)mediaStreamRelease:(RTCMediaStream*)stream { + if (stream) { + for (RTCVideoTrack* track in stream.videoTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack* track in stream.audioTracks) { + [self.localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:stream.streamId]; + } +} + +/** + * Obtains local media content of a specific type. Requests access for the + * specified {@code mediaType} if necessary. In other words, implements a media + * type-specific iteration of the {@code getUserMedia()} algorithm. + * + * @param mediaType Either {@link AVMediaTypeAudio} or {@link AVMediaTypeVideo} + * which specifies the type of the local media content to obtain. + * @param constraints The {@code MediaStreamConstraints} which are to be + * satisfied by the obtained local media content. + * @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which + * success is to be reported. + * @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which + * failure is to be reported. + * @param mediaStream The {@link RTCMediaStream} which is to collect the + * obtained local media content of the specified {@code mediaType}. + */ +- (void)requestAccessForMediaType:(NSString*)mediaType + constraints:(NSDictionary*)constraints + successCallback:(NavigatorUserMediaSuccessCallback)successCallback + errorCallback:(NavigatorUserMediaErrorCallback)errorCallback + mediaStream:(RTCMediaStream*)mediaStream { + // According to step 6.2.1 of the getUserMedia() algorithm, if there is no + // source, fail "with a new DOMException object whose name attribute has the + // value NotFoundError." + // XXX The following approach does not work for audio in Simulator. 
That is + // because audio capture is done using AVAudioSession which does not use + // AVCaptureDevice there. Anyway, Simulator will not (visually) request access + // for audio. + if (mediaType == AVMediaTypeVideo && [self captureDevices].count == 0) { + // Since successCallback and errorCallback are asynchronously invoked + // elsewhere, make sure that the invocation here is consistent. + dispatch_async(dispatch_get_main_queue(), ^{ + errorCallback(@"DOMException", @"NotFoundError"); + }); + return; + } + +#if TARGET_OS_OSX + if (@available(macOS 10.14, *)) { +#endif + [AVCaptureDevice requestAccessForMediaType:mediaType + completionHandler:^(BOOL granted) { + dispatch_async(dispatch_get_main_queue(), ^{ + if (granted) { + NavigatorUserMediaSuccessCallback scb = + ^(RTCMediaStream* mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } else { + // According to step 10 Permission Failure of the getUserMedia() + // algorithm, if the user has denied permission, fail "with a new + // DOMException object whose name attribute has the value + // NotAllowedError." + errorCallback(@"DOMException", @"NotAllowedError"); + } + }); + }]; +#if TARGET_OS_OSX + } else { + // Fallback on earlier versions + NavigatorUserMediaSuccessCallback scb = ^(RTCMediaStream* mediaStream) { + [self getUserMedia:constraints + successCallback:successCallback + errorCallback:errorCallback + mediaStream:mediaStream]; + }; + if (mediaType == AVMediaTypeAudio) { + [self getUserAudio:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } else if (mediaType == AVMediaTypeVideo) { + [self getUserVideo:constraints + successCallback:scb + errorCallback:errorCallback + mediaStream:mediaStream]; + } + } +#endif +} + +- (void)createLocalMediaStream:(FlutterResult)result { + NSString* mediaStreamId = [[NSUUID UUID] UUIDString]; + RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId]; + + self.localStreams[mediaStreamId] = mediaStream; + result(@{@"streamId" : [mediaStream streamId]}); +} + +- (void)getSources:(FlutterResult)result { + NSMutableArray* sources = [NSMutableArray array]; + NSArray* videoDevices = [self captureDevices]; + for (AVCaptureDevice* device in videoDevices) { + [sources addObject:@{ + @"facing" : device.positionString, + @"deviceId" : device.uniqueID, + @"label" : device.localizedName, + @"kind" : @"videoinput", + }]; + } +#if TARGET_OS_IPHONE + + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + for (AVAudioSessionPortDescription* port in session.session.availableInputs) { + // NSLog(@"input portName: %@, type %@", port.portName,port.portType); + [sources addObject:@{ + @"deviceId" : port.UID, + @"label" : port.portName, + @"groupId" : port.portType, + @"kind" : @"audioinput", + }]; + } + + for (AVAudioSessionPortDescription* port in session.currentRoute.outputs) { + // NSLog(@"output portName: %@, type %@", port.portName,port.portType); + if (session.currentRoute.outputs.count == 1 && ![port.UID isEqualToString:@"Speaker"]) { + [sources addObject:@{ + @"deviceId" : @"Speaker", + @"label" : @"Speaker", + 
@"groupId" : @"Speaker", + @"kind" : @"audiooutput", + }]; + } + [sources addObject:@{ + @"deviceId" : port.UID, + @"label" : port.portName, + @"groupId" : port.portType, + @"kind" : @"audiooutput", + }]; + } +#endif +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + + NSArray* inputDevices = [audioDeviceModule inputDevices]; + for (RTCIODevice* device in inputDevices) { + [sources addObject:@{ + @"deviceId" : device.deviceId, + @"label" : device.name, + @"kind" : @"audioinput", + }]; + } + + NSArray* outputDevices = [audioDeviceModule outputDevices]; + for (RTCIODevice* device in outputDevices) { + [sources addObject:@{ + @"deviceId" : device.deviceId, + @"label" : device.name, + @"kind" : @"audiooutput", + }]; + } +#endif + result(@{@"sources" : sources}); +} + +- (void)selectAudioInput:(NSString*)deviceId result:(FlutterResult)result { +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + NSArray* inputDevices = [audioDeviceModule inputDevices]; + for (RTCIODevice* device in inputDevices) { + if ([deviceId isEqualToString:device.deviceId]) { + [audioDeviceModule setInputDevice:device]; + if (result) + result(nil); + return; + } + } +#endif +#if TARGET_OS_IPHONE + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + for (AVAudioSessionPortDescription* port in session.session.availableInputs) { + if ([port.UID isEqualToString:deviceId]) { + if (self.preferredInput != port.portType) { + self.preferredInput = port.portType; + [AudioUtils selectAudioInput:self.preferredInput]; + } + break; + } + } + if (result) + result(nil); +#endif + if (result) + result([FlutterError errorWithCode:@"selectAudioInputFailed" + message:[NSString stringWithFormat:@"Error: deviceId not found!"] + details:nil]); +} + +- (void)selectAudioOutput:(NSString*)deviceId result:(FlutterResult)result { +#if TARGET_OS_OSX + RTCAudioDeviceModule* audioDeviceModule = [self.peerConnectionFactory audioDeviceModule]; + NSArray* outputDevices = [audioDeviceModule outputDevices]; + for (RTCIODevice* device in outputDevices) { + if ([deviceId isEqualToString:device.deviceId]) { + [audioDeviceModule setOutputDevice:device]; + result(nil); + return; + } + } +#endif +#if TARGET_OS_IPHONE + RTCAudioSession* session = [RTCAudioSession sharedInstance]; + NSError* setCategoryError = nil; + + if ([deviceId isEqualToString:@"Speaker"]) { + [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker + error:&setCategoryError]; + } else { + [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None + error:&setCategoryError]; + } + + if (setCategoryError == nil) { + result(nil); + return; + } + + result([FlutterError + errorWithCode:@"selectAudioOutputFailed" + message:[NSString + stringWithFormat:@"Error: %@", [setCategoryError localizedFailureReason]] + details:nil]); + +#endif + result([FlutterError errorWithCode:@"selectAudioOutputFailed" + message:[NSString stringWithFormat:@"Error: deviceId not found!"] + details:nil]); +} + +- (void)triggeriOSAudioRouteSelectionUI:(FlutterResult)result { +#if TARGET_OS_IPHONE + if (@available(iOS 11.0, *)) { + AVRoutePickerView* routePicker = [[AVRoutePickerView alloc] init]; + routePicker.frame = CGRectMake(0, 0, 44, 44); + + // Add the route picker to a temporary window to ensure it's in the view hierarchy + UIWindow* window = [[UIApplication sharedApplication] keyWindow]; + if (!window) { + // Fallback for iOS 13+ where 
keyWindow is deprecated + for (UIWindowScene* windowScene in [UIApplication sharedApplication].connectedScenes) { + if (windowScene.activationState == UISceneActivationStateForegroundActive) { + window = windowScene.windows.firstObject; + break; + } + } + } + + if (window) { + [window addSubview:routePicker]; + + // Trigger the route picker programmatically + for (UIView* view in routePicker.subviews) { + if ([view isKindOfClass:[UIButton class]]) { + UIButton* button = (UIButton*)view; + [button sendActionsForControlEvents:UIControlEventTouchUpInside]; + break; // Only trigger the first button found + } + } + + // Remove the route picker after a short delay + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), + dispatch_get_main_queue(), ^{ + [routePicker removeFromSuperview]; + }); + + result(nil); + } else { + result([FlutterError errorWithCode:@"NoWindowError" + message:@"Could not find a window to present the route picker" + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"UnsupportedVersionError" + message:@"AVRoutePickerView is only available on iOS 11.0 or later" + details:nil]); + } +#else + // macOS doesn't support iOS audio route selection UI + result([FlutterError errorWithCode:@"UnsupportedPlatformError" + message:@"triggeriOSAudioRouteSelectionUI is only supported on iOS" + details:nil]); +#endif +} + +- (void)mediaStreamTrackRelease:(RTCMediaStream*)mediaStream track:(RTCMediaStreamTrack*)track { + // Unlike mediaStreamTrackStop, this removes the track from the given + // mediaStream but keeps it registered in localTracks. + if (mediaStream && track) { + track.isEnabled = NO; + // FIXME this is called when track is removed from the MediaStream, + // but it doesn't mean it can not be added back using MediaStream.addTrack + // TODO: [self.localTracks removeObjectForKey:trackID]; + if ([track.kind isEqualToString:@"audio"]) { + [mediaStream removeAudioTrack:(RTCAudioTrack*)track]; + } else if ([track.kind isEqualToString:@"video"]) { + [mediaStream removeVideoTrack:(RTCVideoTrack*)track]; + } + } +} + +- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result { + if (!self.videoCapturer) { + result(@NO); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + result(@NO); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + result(@([device isTorchModeSupported:AVCaptureTorchModeOn])); +} + +- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(FlutterResult)result { + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set torch"); + // Complete the Dart future even on failure so callers don't hang. + result([FlutterError errorWithCode:@"setTorchFailed" + message:@"Video capturer is null. Can't set torch" + details:nil]); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set torch"); + result([FlutterError errorWithCode:@"setTorchFailed" + message:@"Video capturer is missing an input. Can't set torch" + details:nil]); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + if (![device isTorchModeSupported:AVCaptureTorchModeOn]) { + NSLog(@"Current capture device does not support torch. Can't set torch"); + result([FlutterError errorWithCode:@"setTorchFailed" + message:@"Current capture device does not support torch. Can't set torch" + details:nil]); + return; + } + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"setTorchFailed" + message:error.localizedDescription + details:nil]); + return; + } + + device.torchMode = torch ? 
AVCaptureTorchModeOn : AVCaptureTorchModeOff; + [device unlockForConfiguration]; + + result(nil); +} + +- (void)mediaStreamTrackSetZoom:(RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(FlutterResult)result { +#if TARGET_OS_OSX + NSLog(@"Not supported on macOS. Can't set zoom"); + // Complete the Dart future even on unsupported platforms. + result([FlutterError errorWithCode:@"setZoomFailed" + message:@"Not supported on macOS. Can't set zoom" + details:nil]); + return; +#endif +#if TARGET_OS_IPHONE + if (!self.videoCapturer) { + NSLog(@"Video capturer is null. Can't set zoom"); + result([FlutterError errorWithCode:@"setZoomFailed" + message:@"Video capturer is null. Can't set zoom" + details:nil]); + return; + } + if (self.videoCapturer.captureSession.inputs.count == 0) { + NSLog(@"Video capturer is missing an input. Can't set zoom"); + result([FlutterError errorWithCode:@"setZoomFailed" + message:@"Video capturer is missing an input. Can't set zoom" + details:nil]); + return; + } + + AVCaptureDeviceInput* deviceInput = [self.videoCapturer.captureSession.inputs objectAtIndex:0]; + AVCaptureDevice* device = deviceInput.device; + + NSError* error; + if ([device lockForConfiguration:&error] == NO) { + NSLog(@"Failed to acquire configuration lock. %@", error.localizedDescription); + result([FlutterError errorWithCode:@"setZoomFailed" + message:error.localizedDescription + details:nil]); + return; + } + + CGFloat desiredZoomFactor = (CGFloat)zoomLevel; + device.videoZoomFactor = MAX(1.0, MIN(desiredZoomFactor, device.activeFormat.videoMaxZoomFactor)); + [device unlockForConfiguration]; + + result(nil); +#endif +} + +- (void)mediaStreamTrackCaptureFrame:(RTCVideoTrack*)track + toPath:(NSString*)path + result:(FlutterResult)result { + self.frameCapturer = [[FlutterRTCFrameCapturer alloc] initWithTrack:track + toPath:path + result:result]; +} + +- (void)mediaStreamTrackStop:(RTCMediaStreamTrack*)track { + if (track) { + track.isEnabled = NO; + [self.localTracks removeObjectForKey:track.trackId]; + } +} + +- (AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position { + if (position == AVCaptureDevicePositionUnspecified) { + return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + } + NSArray* captureDevices = [RTCCameraVideoCapturer captureDevices]; + for (AVCaptureDevice* device in captureDevices) { + if (device.position == position) { + return device; + } + } + if (captureDevices.count > 0) { + return captureDevices[0]; + } + return nil; +} + +- (AVCaptureDeviceFormat*)selectFormatForDevice:(AVCaptureDevice*)device + targetWidth:(NSInteger)targetWidth + targetHeight:(NSInteger)targetHeight { + NSArray* formats = + [RTCCameraVideoCapturer supportedFormatsForDevice:device]; + AVCaptureDeviceFormat* selectedFormat = nil; + long currentDiff = INT_MAX; + for (AVCaptureDeviceFormat* format in formats) { + CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription); +#if TARGET_OS_IPHONE + if (@available(iOS 13.0, *)) { + if (format.isMultiCamSupported != AVCaptureMultiCamSession.multiCamSupported) { + continue; + } + } +#endif + // NSLog(@"AVCaptureDeviceFormats,fps %d, dimension: %dx%d", + // format.videoSupportedFrameRateRanges, dimension.width, dimension.height); + long diff = labs(targetWidth - dimension.width) + labs(targetHeight - dimension.height); + if (diff < currentDiff) { + selectedFormat = format; + currentDiff = diff; + } else if (diff == currentDiff && + pixelFormat == [self.videoCapturer preferredOutputPixelFormat]) { + selectedFormat = format; + } + } + return selectedFormat; +} + +- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps { + Float64 maxSupportedFramerate = 0; + for (AVFrameRateRange* fpsRange in format.videoSupportedFrameRateRanges) { + maxSupportedFramerate = fmax(maxSupportedFramerate, fpsRange.maxFrameRate); + } + return fmin(maxSupportedFramerate, targetFps); +} + +@end diff --git 
a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCPeerConnection.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCPeerConnection.m new file mode 100644 index 0000000000..9934c95d68 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCPeerConnection.m @@ -0,0 +1,856 @@ +#import +#import "./include/stream_webrtc_flutter/FlutterRTCDataChannel.h" +#import "./include/stream_webrtc_flutter/FlutterRTCPeerConnection.h" +#import "./include/stream_webrtc_flutter/FlutterWebRTCPlugin.h" +#import "./include/stream_webrtc_flutter/AudioUtils.h" + +#import + +@implementation RTCPeerConnection (Flutter) + +@dynamic eventSink; + +- (NSString*)flutterId { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setFlutterId:(NSString*)flutterId { + objc_setAssociatedObject(self, @selector(flutterId), flutterId, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventSink)eventSink { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventSink:(FlutterEventSink)eventSink { + objc_setAssociatedObject(self, @selector(eventSink), eventSink, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (FlutterEventChannel*)eventChannel { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setEventChannel:(FlutterEventChannel*)eventChannel { + objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)dataChannels { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setDataChannels:(NSMutableDictionary*)dataChannels { + objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)remoteStreams { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteStreams:(NSMutableDictionary*)remoteStreams { + objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +- (NSMutableDictionary*)remoteTracks { + return objc_getAssociatedObject(self, _cmd); +} + +- (void)setRemoteTracks:(NSMutableDictionary*)remoteTracks { + objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, + OBJC_ASSOCIATION_RETAIN_NONATOMIC); +} + +#pragma mark - FlutterStreamHandler methods + +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + self.eventSink = nil; + return nil; +} + +- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments + eventSink:(nonnull FlutterEventSink)sink { + self.eventSink = sink; + return nil; +} + +@end + +@implementation FlutterWebRTCPlugin (RTCPeerConnection) + +- (void)peerConnectionSetConfiguration:(RTCConfiguration*)configuration + peerConnection:(RTCPeerConnection*)peerConnection { + [peerConnection setConfiguration:configuration]; +} + +- (void)peerConnectionCreateOffer:(NSDictionary*)constraints + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + offerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription* sdp, NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"CreateOfferFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + }]; +} + +- (void)peerConnectionCreateAnswer:(NSDictionary*)constraints + peerConnection:(RTCPeerConnection*)peerConnection + 
result:(FlutterResult)result { + [peerConnection + answerForConstraints:[self parseMediaConstraints:constraints] + completionHandler:^(RTCSessionDescription* sdp, NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"CreateAnswerFailed" + message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]] + details:nil]); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + }]; +} + +- (void)peerConnectionSetLocalDescription:(RTCSessionDescription*)sdp + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + setLocalDescription:sdp + completionHandler:^(NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"SetLocalDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionSetRemoteDescription:(RTCSessionDescription*)sdp + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + setRemoteDescription:sdp + completionHandler:^(NSError* error) { + if (error) { + result([FlutterError + errorWithCode:@"SetRemoteDescriptionFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionAddICECandidate:(RTCIceCandidate*)candidate + peerConnection:(RTCPeerConnection*)peerConnection + result:(FlutterResult)result { + [peerConnection + addIceCandidate:candidate + completionHandler:^(NSError* _Nullable error) { + if (error) { + result([FlutterError + errorWithCode:@"AddIceCandidateFailed" + message:[NSString stringWithFormat:@"Error %@", error.localizedDescription] + details:nil]); + } else { + result(nil); + } + }]; +} + +- (void)peerConnectionClose:(RTCPeerConnection*)peerConnection { + [peerConnection close]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + for (NSString* dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. 
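+ // Nilling each delegate above also guarantees that no late data-channel + // callbacks reach Flutter while the native peer connection finishes + // tearing the channels down.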
+ } + [dataChannels removeAllObjects]; +} + +- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID + peerConnection:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result { + RTCRtpSender* sender = nil; + RTCRtpReceiver* receiver = nil; + + for (RTCRtpSender* s in peerConnection.senders) { + if (s.track != nil && [s.track.trackId isEqualToString:trackID]) { + sender = s; + } + } + + for (RTCRtpReceiver* r in peerConnection.receivers) { + if (r.track != nil && [r.track.trackId isEqualToString:trackID]) { + receiver = r; + } + } + + if (sender != nil) { + [peerConnection statisticsForSender:sender + completionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; + } else if (receiver != nil) { + [peerConnection statisticsForReceiver:receiver + completionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; + } else { + result([FlutterError errorWithCode:@"GetStatsFailed" + message:[NSString stringWithFormat:@"Error %@", @""] + details:nil]); + } +} + +- (void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection + result:(nonnull FlutterResult)result { + [peerConnection statisticsWithCompletionHandler:^(RTCStatisticsReport* statsReport) { + NSMutableArray* stats = [NSMutableArray array]; + for (id key in statsReport.statistics) { + RTCStatistics* report = [statsReport.statistics objectForKey:key]; + [stats addObject:@{ + @"id" : report.id, + @"type" : report.type, + @"timestamp" : @(report.timestamp_us), + @"values" : report.values + }]; + } + result(@{@"stats" : stats}); + }]; +} + +- (NSString*)stringForICEConnectionState:(RTCIceConnectionState)state { + switch (state) { + case RTCIceConnectionStateNew: + return @"new"; + case RTCIceConnectionStateChecking: + return @"checking"; + case RTCIceConnectionStateConnected: + return @"connected"; + case RTCIceConnectionStateCompleted: + return @"completed"; + case RTCIceConnectionStateFailed: + return @"failed"; + case RTCIceConnectionStateDisconnected: + return @"disconnected"; + case RTCIceConnectionStateClosed: + return @"closed"; + case RTCIceConnectionStateCount: + return @"count"; + } + return nil; +} + +- (NSString*)stringForICEGatheringState:(RTCIceGatheringState)state { + switch (state) { + case RTCIceGatheringStateNew: + return @"new"; + case RTCIceGatheringStateGathering: + return @"gathering"; + case RTCIceGatheringStateComplete: + return @"complete"; + } + return nil; +} + +- (NSString*)stringForSignalingState:(RTCSignalingState)state { + switch (state) { + case RTCSignalingStateStable: + return @"stable"; + case RTCSignalingStateHaveLocalOffer: + return @"have-local-offer"; + case RTCSignalingStateHaveLocalPrAnswer: + return @"have-local-pranswer"; + case RTCSignalingStateHaveRemoteOffer: + return @"have-remote-offer"; + case RTCSignalingStateHaveRemotePrAnswer: + return @"have-remote-pranswer"; + case 
RTCSignalingStateClosed: + return @"closed"; + } + return nil; +} + +- (NSString*)stringForPeerConnectionState:(RTCPeerConnectionState)state { + switch (state) { + case RTCPeerConnectionStateNew: + return @"new"; + case RTCPeerConnectionStateConnecting: + return @"connecting"; + case RTCPeerConnectionStateConnected: + return @"connected"; + case RTCPeerConnectionStateDisconnected: + return @"disconnected"; + case RTCPeerConnectionStateFailed: + return @"failed"; + case RTCPeerConnectionStateClosed: + return @"closed"; + } + return nil; +} + +/** + * Parses the constraint keys and values of a specific JavaScript object into + * a specific NSMutableDictionary in a format suitable for the + * initialization of a RTCMediaConstraints instance. + * + * @param src The JavaScript object which defines constraint keys and values and + * which is to be parsed into the specified dst. + * @param dst The NSMutableDictionary into which the constraint keys + * and values defined by src are to be written in a format suitable for + * the initialization of a RTCMediaConstraints instance. + */ +- (void)parseJavaScriptConstraints:(NSDictionary*)src + intoWebRTCConstraints:(NSMutableDictionary*)dst { + for (id srcKey in src) { + id srcValue = src[srcKey]; + NSString* dstValue; + + if ([srcValue isKindOfClass:[NSNumber class]]) { + dstValue = [srcValue boolValue] ? @"true" : @"false"; + } else { + dstValue = [srcValue description]; + } + dst[[srcKey description]] = dstValue; + } +} + +/** + * Parses a JavaScript object into a new RTCMediaConstraints instance. + * + * @param constraints The JavaScript object to parse into a new + * RTCMediaConstraints instance. + * @returns A new RTCMediaConstraints instance initialized with the + * mandatory and optional constraint keys and values specified by + * constraints. 
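+ * + * Example (illustrative): the Dart-side map + * {"mandatory": {"OfferToReceiveAudio": true}, + * "optional": [{"DtlsSrtpKeyAgreement": true}]} + * is parsed into RTCMediaConstraints with mandatory + * {"OfferToReceiveAudio": "true"} and optional {"DtlsSrtpKeyAgreement": "true"}, + * since boolean NSNumbers are stringified by + * parseJavaScriptConstraints:intoWebRTCConstraints:.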
+ */ +- (RTCMediaConstraints*)parseMediaConstraints:(NSDictionary*)constraints { + id mandatory = constraints[@"mandatory"]; + NSMutableDictionary* mandatory_ = [NSMutableDictionary new]; + + if ([mandatory isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary*)mandatory intoWebRTCConstraints:mandatory_]; + } + + id optional = constraints[@"optional"]; + NSMutableDictionary* optional_ = [NSMutableDictionary new]; + + if ([optional isKindOfClass:[NSArray class]]) { + for (id o in (NSArray*)optional) { + if ([o isKindOfClass:[NSDictionary class]]) { + [self parseJavaScriptConstraints:(NSDictionary*)o intoWebRTCConstraints:optional_]; + } + } + } + + return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory_ + optionalConstraints:optional_]; +} + +#pragma mark - RTCPeerConnectionDelegate methods +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeSignalingState:(RTCSignalingState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, + @{@"event" : @"signalingState", @"state" : [self stringForSignalingState:newState]}); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + mediaStream:(RTCMediaStream*)stream + didAddTrack:(RTCVideoTrack*)track { + peerConnection.remoteTracks[track.trackId] = track; + NSString* streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onAddTrack", + @"streamId" : streamId, + @"trackId" : track.trackId, + @"track" : @{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + mediaStream:(RTCMediaStream*)stream + didRemoveTrack:(RTCVideoTrack*)track { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + NSString* streamId = stream.streamId; + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRemoveTrack", + @"streamId" : streamId, + @"trackId" : track.trackId, + @"track" : @{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didAddStream:(RTCMediaStream*)stream { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + BOOL hasAudio = NO; + for (RTCAudioTrack* track in stream.audioTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [audioTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + hasAudio = YES; + } + + for (RTCVideoTrack* track in stream.videoTracks) { + peerConnection.remoteTracks[track.trackId] = track; + [videoTracks addObject:@{ + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"enabled" : @(track.isEnabled), + @"remote" : @(YES), + @"readyState" : @"live" + }]; + } + + NSString* streamId = stream.streamId; + peerConnection.remoteStreams[streamId] = stream; + + if (hasAudio) { + [self 
ensureAudioSession]; + } + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onAddStream", + @"streamId" : streamId, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks, + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection didRemoveStream:(RTCMediaStream*)stream { + NSArray* keysArray = [peerConnection.remoteStreams allKeysForObject:stream]; + // We assume there can be only one object for 1 key + if (keysArray.count > 1) { + NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", + stream.streamId); + } + NSString* streamId = stream.streamId; + + for (RTCVideoTrack* track in stream.videoTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + for (RTCAudioTrack* track in stream.audioTracks) { + [peerConnection.remoteTracks removeObjectForKey:track.trackId]; + } + + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRemoveStream", + @"streamId" : streamId, + }); + } +} + +- (void)peerConnectionShouldNegotiate:(RTCPeerConnection*)peerConnection { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onRenegotiationNeeded", + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeIceConnectionState:(RTCIceConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"iceConnectionState", + @"state" : [self stringForICEConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeIceGatheringState:(RTCIceGatheringState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent( + eventSink, + @{@"event" : @"iceGatheringState", @"state" : [self stringForICEGatheringState:newState]}); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didGenerateIceCandidate:(RTCIceCandidate*)candidate { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onCandidate", + @"candidate" : @{ + @"candidate" : candidate.sdp, + @"sdpMLineIndex" : @(candidate.sdpMLineIndex), + @"sdpMid" : candidate.sdpMid + } + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didOpenDataChannel:(RTCDataChannel*)dataChannel { + if (-1 == dataChannel.channelId) { + return; + } + + NSString* flutterChannelId = [[NSUUID UUID] UUIDString]; + NSNumber* dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId]; + dataChannel.peerConnectionId = peerConnection.flutterId; + dataChannel.delegate = self; + peerConnection.dataChannels[flutterChannelId] = dataChannel; + + FlutterEventChannel* eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", + peerConnection.flutterId, flutterChannelId] + binaryMessenger:self.messenger]; + + dataChannel.eventChannel = eventChannel; + dataChannel.flutterChannelId = flutterChannelId; + dataChannel.eventQueue = nil; + + dispatch_async(dispatch_get_main_queue(), ^{ + // setStreamHandler on main thread + [eventChannel setStreamHandler:dataChannel]; + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"didOpenDataChannel", + @"id" : dataChannelId, + @"label" : dataChannel.label, + 
@"flutterId" : flutterChannelId + }); + } + }); +} + +/** Called any time the PeerConnectionState changes. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeConnectionState:(RTCPeerConnectionState)newState { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"peerConnectionState", + @"state" : [self stringForPeerConnectionState:newState] + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didStartReceivingOnTransceiver:(RTCRtpTransceiver*)transceiver { +} + +/** Called when a receiver and its track are created. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didAddReceiver:(RTCRtpReceiver*)rtpReceiver + streams:(NSArray*)mediaStreams { + // For unified-plan + NSMutableArray* streams = [NSMutableArray array]; + for (RTCMediaStream* stream in mediaStreams) { + [streams addObject:[self mediaStreamToMap:stream ownerTag:peerConnection.flutterId]]; + } + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + NSMutableDictionary* event = [NSMutableDictionary dictionary]; + [event addEntriesFromDictionary:@{ + @"event" : @"onTrack", + @"track" : [self mediaTrackToMap:rtpReceiver.track], + @"receiver" : [self receiverToMap:rtpReceiver], + @"streams" : streams, + }]; + + if (peerConnection.configuration.sdpSemantics == RTCSdpSemanticsUnifiedPlan) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver != nil && + [transceiver.receiver.receiverId isEqualToString:rtpReceiver.receiverId]) { + [event setValue:[self transceiverToMap:transceiver] forKey:@"transceiver"]; + } + } + } + + peerConnection.remoteTracks[rtpReceiver.track.trackId] = rtpReceiver.track; + if (mediaStreams.count > 0) { + peerConnection.remoteStreams[mediaStreams[0].streamId] = mediaStreams[0]; + } + + if ([rtpReceiver.track.kind isEqualToString:@"audio"]) { + [self ensureAudioSession]; + } + postEvent(eventSink, event); + } +} + +/** Called when the receiver and its track are removed. */ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didRemoveReceiver:(RTCRtpReceiver*)rtpReceiver { +} + +/** Called when the selected ICE candidate pair is changed. 
*/ +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didChangeLocalCandidate:(RTCIceCandidate*)local + remoteCandidate:(RTCIceCandidate*)remote + lastReceivedMs:(int)lastDataReceivedMs + changeReason:(NSString*)reason { + FlutterEventSink eventSink = peerConnection.eventSink; + if (eventSink) { + postEvent(eventSink, @{ + @"event" : @"onSelectedCandidatePairChanged", + @"local" : @{ + @"candidate" : local.sdp, + @"sdpMLineIndex" : @(local.sdpMLineIndex), + @"sdpMid" : local.sdpMid + }, + @"remote" : @{ + @"candidate" : remote.sdp, + @"sdpMLineIndex" : @(remote.sdpMLineIndex), + @"sdpMid" : remote.sdpMid + }, + @"reason" : reason, + @"lastDataReceivedMs" : @(lastDataReceivedMs) + }); + } +} + +- (void)peerConnection:(RTCPeerConnection*)peerConnection + didRemoveIceCandidates:(NSArray*)candidates { +} + +NSString* mediaTypeFromString(NSString* kind) { + NSString* mediaType = kRTCMediaStreamTrackKindAudio; + if ([kind isEqualToString:@"audio"]) { + mediaType = kRTCMediaStreamTrackKindAudio; + } else if ([kind isEqualToString:@"video"]) { + mediaType = kRTCMediaStreamTrackKindVideo; + } + return mediaType; +} + +NSString* parametersToString(NSDictionary* parameters) { + NSMutableArray* kvs = [NSMutableArray array]; + for (NSString* key in parameters) { + if (key.length > 0) { + [kvs addObject:[NSString stringWithFormat:@"%@=%@", key, parameters[key]]]; + } else { + [kvs addObject:parameters[key]]; + } + } + return [kvs componentsJoinedByString:@";"]; +} + +NSDictionary* stringToParameters(NSString* str) { + NSMutableDictionary* parameters = [NSMutableDictionary dictionary]; + NSArray* kvs = [str componentsSeparatedByString:@";"]; + for (NSString* kv in kvs) { + NSArray* kvArr = [kv componentsSeparatedByString:@"="]; + if (kvArr.count == 2) { + parameters[kvArr[0]] = kvArr[1]; + } else if (kvArr.count == 1) { + parameters[@""] = kvArr[0]; + } + } + return parameters; +} + +- (void)peerConnectionGetRtpReceiverCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* kind = argsMap[@"kind"]; + RTCRtpCapabilities* caps = + [self.peerConnectionFactory rtpReceiverCapabilitiesForKind:mediaTypeFromString(kind)]; + NSMutableArray* codecsMap = [NSMutableArray array]; + for (RTCRtpCodecCapability* c in caps.codecs) { + if ([kind isEqualToString:@"audio"]) { + [codecsMap addObject:@{ + @"channels" : c.numChannels, + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } else if ([kind isEqualToString:@"video"]) { + [codecsMap addObject:@{ + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } + } + result(@{ + @"codecs" : codecsMap, + @"headerExtensions" : @[], + @"fecMechanisms" : @[], + }); +} + +- (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* kind = argsMap[@"kind"]; + RTCRtpCapabilities* caps = + [self.peerConnectionFactory rtpSenderCapabilitiesForKind:mediaTypeFromString(kind)]; + NSMutableArray* codecsMap = [NSMutableArray array]; + for (RTCRtpCodecCapability* c in caps.codecs) { + if ([kind isEqualToString:@"audio"]) { + [codecsMap addObject:@{ + @"channels" : c.numChannels, + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : parametersToString(c.parameters), + }]; + } else if ([kind isEqualToString:@"video"]) { + [codecsMap addObject:@{ + @"clockRate" : c.clockRate, + @"mimeType" : c.mimeType, + @"sdpFmtpLine" : 
parametersToString(c.parameters), + }]; + } + } + result(@{ + @"codecs" : codecsMap, + @"headerExtensions" : @[], + @"fecMechanisms" : @[], + }); +} + +- (RTC_OBJC_TYPE(RTCRtpCodecCapability) *)findCodecCapability:(NSString*)kind + codec:(NSString*)codec + parameters:(NSDictionary*) + parameters { + RTCRtpCapabilities* caps = [self.peerConnectionFactory + rtpSenderCapabilitiesForKind:[kind isEqualToString:@"video"] ? kRTCMediaStreamTrackKindVideo + : kRTCMediaStreamTrackKindAudio]; + for (RTCRtpCodecCapability* capCodec in caps.codecs) { + if ([capCodec.name isEqualToString:codec] && [capCodec.kind isEqualToString:kind]) { + BOOL matched = YES; + for (NSString* key in capCodec.parameters) { + NSString* value = [capCodec.parameters objectForKey:key]; + NSString* value2 = [parameters objectForKey:key]; + if (![value isEqualToString:value2]) { + matched = NO; + } + } + if (matched) { + return capCodec; + } + } + } + return nil; +} + +- (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap + result:(nonnull FlutterResult)result { + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:@"transceiverSetCodecPreferencesFailed" + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + NSString* transceiverId = argsMap[@"transceiverId"]; + RTCRtpTransceiver* transceiver = [self getRtpTransceiverById:peerConnection Id:transceiverId]; + if (transceiver == nil) { + result([FlutterError errorWithCode:@"transceiverSetCodecPreferencesFailed" + message:[NSString stringWithFormat:@"Error: transceiver not found!"] + details:nil]); + return; + } + id codecs = argsMap[@"codecs"]; + NSMutableArray* codecCaps = [NSMutableArray array]; + for (id c in codecs) { + NSArray* kindAndName = [c[@"mimeType"] componentsSeparatedByString:@"/"]; + NSString* kind = [kindAndName[0] lowercaseString]; + NSString* name = kindAndName[1]; + NSLog(@"codec %@/%@", kind, name); + NSDictionary* parameters = nil; + if (c[@"sdpFmtpLine"] != nil && ![((NSString*)c[@"sdpFmtpLine"]) isEqualToString:@""]) { + parameters = stringToParameters((NSString*)c[@"sdpFmtpLine"]); + } + RTCRtpCodecCapability* codec = [self findCodecCapability:kind codec:name parameters:parameters]; + if (codec != nil) { + [codecCaps addObject:codec]; + } + } + [transceiver setCodecPreferences:codecCaps]; + result(nil); +} + +@end diff --git a/ios/Classes/FlutterRTCVideoPlatformView.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformView.m similarity index 93% rename from ios/Classes/FlutterRTCVideoPlatformView.m rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformView.m index 2f44ea47d2..b8c41b6d44 100644 --- a/ios/Classes/FlutterRTCVideoPlatformView.m +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformView.m @@ -1,4 +1,4 @@ -#import "FlutterRTCVideoPlatformView.h" +#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformView.h" @implementation FlutterRTCVideoPlatformView { CGSize _videoSize; @@ -27,11 +27,10 @@ - (void)layoutSubviews { } - (void)setSize:(CGSize)size { - _remoteVideoSize = size; + _remoteVideoSize = size; } - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - CVPixelBufferRef pixelBuffer = nil; if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { pixelBuffer = 
((RTCCVPixelBuffer*)frame.buffer).pixelBuffer; @@ -49,13 +48,13 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { CMSampleBufferRef sampleBuffer = [self sampleBufferFromPixelBuffer:pixelBuffer]; if (sampleBuffer) { - if (@available(iOS 14.0, *)) { - if([_videoLayer requiresFlushToResumeDecoding]) { - [_videoLayer flushAndRemoveImage]; - } - } else { - // Fallback on earlier versions + if (@available(iOS 14.0, *)) { + if ([_videoLayer requiresFlushToResumeDecoding]) { + [_videoLayer flushAndRemoveImage]; } + } else { + // Fallback on earlier versions + } [_videoLayer enqueueSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); } diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.m similarity index 92% rename from ios/Classes/FlutterRTCVideoPlatformViewController.m rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.m index 1f227ee1d5..9778ac30a9 100644 --- a/ios/Classes/FlutterRTCVideoPlatformViewController.m +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.m @@ -1,6 +1,6 @@ -#import "FlutterRTCVideoPlatformViewController.h" -#import "FlutterRTCVideoPlatformView.h" -#import "FlutterWebRTCPlugin.h" +#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformView.h" +#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.h" +#import "./include/stream_webrtc_flutter/FlutterWebRTCPlugin.h" @implementation FlutterRTCVideoPlatformViewController { FlutterRTCVideoPlatformView* _videoView; @@ -87,7 +87,7 @@ - (void)renderFrame:(RTCVideoFrame*)frame { } self->_isFirstFrameRendered = true; } - + [_videoView renderFrame:frame]; } @@ -97,7 +97,7 @@ - (void)renderFrame:(RTCVideoFrame*)frame { * @param size The size of the video frame to render. 
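 * Forwards the size to the embedded FlutterRTCVideoPlatformView.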
*/ - (void)setSize:(CGSize)size { - [_videoView setSize:size]; + [_videoView setSize:size]; } #pragma mark - FlutterStreamHandler methods diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.m similarity index 85% rename from ios/Classes/FlutterRTCVideoPlatformViewFactory.m rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.m index c70bd68ab4..1f5415edc2 100644 --- a/ios/Classes/FlutterRTCVideoPlatformViewFactory.m +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.m @@ -1,5 +1,5 @@ -#import "FlutterRTCVideoPlatformViewFactory.h" -#import "FlutterRTCVideoPlatformViewController.h" +#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.h" +#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.h" @implementation FLutterRTCVideoPlatformViewFactory { } diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoRenderer.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoRenderer.m new file mode 100644 index 0000000000..8c28a33a3b --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterRTCVideoRenderer.m @@ -0,0 +1,296 @@ +#import "./include/stream_webrtc_flutter/FlutterRTCVideoRenderer.h" + +#import +#import +#import +#import +#import + +#import + +#import +#import "./include/stream_webrtc_flutter/FlutterWebRTCPlugin.h" + +@implementation FlutterRTCVideoRenderer { + CGSize _frameSize; + CGSize _renderSize; + CVPixelBufferRef _pixelBufferRef; + RTCVideoRotation _rotation; + FlutterEventChannel* _eventChannel; + bool _isFirstFrameRendered; + bool _frameAvailable; + os_unfair_lock _lock; +} + +@synthesize textureId = _textureId; +@synthesize registry = _registry; +@synthesize eventSink = _eventSink; +@synthesize videoTrack = _videoTrack; + +- (instancetype)initWithTextureRegistry:(id)registry + messenger:(NSObject*)messenger { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _isFirstFrameRendered = false; + _frameAvailable = false; + _frameSize = CGSizeZero; + _renderSize = CGSizeZero; + _rotation = -1; + _registry = registry; + _pixelBufferRef = nil; + _eventSink = nil; + _rotation = -1; + _textureId = [registry registerTexture:self]; + /*Create Event Channel.*/ + _eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId] + binaryMessenger:messenger]; + [_eventChannel setStreamHandler:self]; + } + return self; +} + +- (CVPixelBufferRef)copyPixelBuffer { + CVPixelBufferRef buffer = nil; + os_unfair_lock_lock(&_lock); + if (_pixelBufferRef != nil && _frameAvailable) { + buffer = CVBufferRetain(_pixelBufferRef); + _frameAvailable = false; + } + os_unfair_lock_unlock(&_lock); + return buffer; +} + +- (void)dispose { + os_unfair_lock_lock(&_lock); + [_registry unregisterTexture:_textureId]; + _textureId = -1; + if (_pixelBufferRef) { + CVBufferRelease(_pixelBufferRef); + _pixelBufferRef = nil; + } + _frameAvailable = false; + os_unfair_lock_unlock(&_lock); +} + +- (void)setVideoTrack:(RTCVideoTrack*)videoTrack { + RTCVideoTrack* oldValue = self.videoTrack; + if (oldValue != videoTrack) { + os_unfair_lock_lock(&_lock); + _videoTrack = videoTrack; + os_unfair_lock_unlock(&_lock); + _isFirstFrameRendered = false; + if (oldValue) { + [oldValue removeRenderer:self]; + } + _frameSize = CGSizeZero; + 
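// Resetting the cached geometry below forces renderFrame: to re-emit the + // size and rotation events once the new track delivers its first frame. +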
_renderSize = CGSizeZero; + _rotation = -1; + if (videoTrack) { + [videoTrack addRenderer:self]; + } + } +} + +- (id)correctRotation:(const id)src + withRotation:(RTCVideoRotation)rotation { + int rotated_width = src.width; + int rotated_height = src.height; + + if (rotation == RTCVideoRotation_90 || rotation == RTCVideoRotation_270) { + int temp = rotated_width; + rotated_width = rotated_height; + rotated_height = temp; + } + + id buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width + height:rotated_height]; + + [RTCYUVHelper I420Rotate:src.dataY + srcStrideY:src.strideY + srcU:src.dataU + srcStrideU:src.strideU + srcV:src.dataV + srcStrideV:src.strideV + dstY:(uint8_t*)buffer.dataY + dstStrideY:buffer.strideY + dstU:(uint8_t*)buffer.dataU + dstStrideU:buffer.strideU + dstV:(uint8_t*)buffer.dataV + dstStrideV:buffer.strideV + width:src.width + height:src.height + mode:rotation]; + + return buffer; +} + +- (void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer + withFrame:(RTCVideoFrame*)frame { + id i420Buffer = [self correctRotation:[frame.buffer toI420] + withRotation:frame.rotation]; + CVPixelBufferLockBaseAddress(outputPixelBuffer, 0); + + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer); + if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { + // NV12 + uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0); + const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0); + uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1); + const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1); + + [RTCYUVHelper I420ToNV12:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstY:dstY + dstStrideY:(int)dstYStride + dstUV:dstUV + dstStrideUV:(int)dstUVStride + width:i420Buffer.width + height:i420Buffer.height]; + + } else { + uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer); + const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer); + + if (pixelFormat == kCVPixelFormatType_32BGRA) { + // Corresponds to libyuv::FOURCC_ARGB + + [RTCYUVHelper I420ToARGB:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstARGB:dst + dstStrideARGB:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + + } else if (pixelFormat == kCVPixelFormatType_32ARGB) { + // Corresponds to libyuv::FOURCC_BGRA + [RTCYUVHelper I420ToBGRA:i420Buffer.dataY + srcStrideY:i420Buffer.strideY + srcU:i420Buffer.dataU + srcStrideU:i420Buffer.strideU + srcV:i420Buffer.dataV + srcStrideV:i420Buffer.strideV + dstBGRA:dst + dstStrideBGRA:(int)bytesPerRow + width:i420Buffer.width + height:i420Buffer.height]; + } + } + + CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0); +} + +#pragma mark - RTCVideoRenderer methods +- (void)renderFrame:(RTCVideoFrame*)frame { + os_unfair_lock_lock(&_lock); + if (_videoTrack == nil) { + os_unfair_lock_unlock(&_lock); + return; + } + if (!_frameAvailable && _pixelBufferRef) { + [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame]; + if (_textureId != -1) { + [_registry textureFrameAvailable:_textureId]; + } + _frameAvailable = true; + } + os_unfair_lock_unlock(&_lock); + + __weak FlutterRTCVideoRenderer* weakSelf = 
self;
+  if (_renderSize.width != frame.width || _renderSize.height != frame.height) {
+    dispatch_async(dispatch_get_main_queue(), ^{
+      FlutterRTCVideoRenderer* strongSelf = weakSelf;
+      if (strongSelf.eventSink) {
+        strongSelf.eventSink(@{
+          @"event" : @"didTextureChangeVideoSize",
+          @"id" : @(strongSelf.textureId),
+          @"width" : @(frame.width),
+          @"height" : @(frame.height),
+        });
+      }
+    });
+    _renderSize = CGSizeMake(frame.width, frame.height);
+  }
+
+  if (frame.rotation != _rotation) {
+    dispatch_async(dispatch_get_main_queue(), ^{
+      FlutterRTCVideoRenderer* strongSelf = weakSelf;
+      if (strongSelf.eventSink) {
+        strongSelf.eventSink(@{
+          @"event" : @"didTextureChangeRotation",
+          @"id" : @(strongSelf.textureId),
+          @"rotation" : @(frame.rotation),
+        });
+      }
+    });
+
+    _rotation = frame.rotation;
+  }
+
+  // Notify Flutter that the first frame is ready. Guard strongSelf before the
+  // direct ivar access below, which would crash if the renderer has been
+  // deallocated by the time this block runs on the main queue.
+  dispatch_async(dispatch_get_main_queue(), ^{
+    FlutterRTCVideoRenderer* strongSelf = weakSelf;
+    if (strongSelf && !strongSelf->_isFirstFrameRendered) {
+      if (strongSelf.eventSink) {
+        strongSelf.eventSink(@{@"event" : @"didFirstFrameRendered"});
+        strongSelf->_isFirstFrameRendered = true;
+      }
+    }
+  });
+}
+
+/**
+ * Sets the size of the video frame to render.
+ *
+ * @param size The size of the video frame to render.
+ */
+- (void)setSize:(CGSize)size {
+  os_unfair_lock_lock(&_lock);
+  if (size.width != _frameSize.width || size.height != _frameSize.height) {
+    if (_pixelBufferRef) {
+      CVBufferRelease(_pixelBufferRef);
+    }
+    NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
+    CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA,
+                        (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef);
+    _frameAvailable = false;
+    _frameSize = size;
+  }
+  os_unfair_lock_unlock(&_lock);
+}
+
+#pragma mark - FlutterStreamHandler methods
+
+- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  _eventSink = nil;
+  return nil;
+}
+
+- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
+                                       eventSink:(nonnull FlutterEventSink)sink {
+  _eventSink = sink;
+  return nil;
+}
+@end
+
+@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager)
+
+- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id)registry
+                                            messenger:(NSObject*)messenger {
+  return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger];
+}
+
+- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack {
+  renderer.videoTrack = videoTrack;
+}
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterWebRTCPlugin.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterWebRTCPlugin.m
new file mode 100644
index 0000000000..de2ac0845b
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterWebRTCPlugin.m
@@ -0,0 +1,2505 @@
+#import "./include/stream_webrtc_flutter/AudioUtils.h"
+#import "./include/stream_webrtc_flutter/CameraUtils.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCDataChannel.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCDesktopCapturer.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCFrameCryptor.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCMediaStream.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCPeerConnection.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCVideoRenderer.h"
+#import "./include/stream_webrtc_flutter/FlutterWebRTCPlugin.h"
+#import "./include/stream_webrtc_flutter/ProcessorProvider.h"
+#import "./include/stream_webrtc_flutter/VideoEffectProcessor.h"
+#import "./include/stream_webrtc_flutter/VideoFrameProcessor.h"
+#if TARGET_OS_IPHONE
+#import "./include/stream_webrtc_flutter/FlutterRTCMediaRecorder.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.h"
+#import "./include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.h"
+#endif
+#import "./include/stream_webrtc_flutter/AudioManager.h"
+
+#import
+#import
+#import
+#import
+
+#import "./include/stream_webrtc_flutter/LocalAudioTrack.h"
+#import "./include/stream_webrtc_flutter/LocalTrack.h"
+#import "./include/stream_webrtc_flutter/LocalVideoTrack.h"
+
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wprotocol"
+
+@interface VideoEncoderFactory : RTCDefaultVideoEncoderFactory
+@end
+
+@interface VideoDecoderFactory : RTCDefaultVideoDecoderFactory
+@end
+
+@interface VideoEncoderFactorySimulcast : RTCVideoEncoderFactorySimulcast
+@end
+
+NSArray* modifyH264ProfileLevelId(
+    NSArray* codecs) {
+  NSMutableArray* newCodecs = [[NSMutableArray alloc] init];
+  NSInteger count = codecs.count;
+  for (NSInteger i = 0; i < count; i++) {
+    RTC_OBJC_TYPE(RTCVideoCodecInfo)* info = [codecs objectAtIndex:i];
+    if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
+      NSString* hexString = info.parameters[@"profile-level-id"];
+      RTCH264ProfileLevelId* profileLevelId =
+          [[RTCH264ProfileLevelId alloc] initWithHexString:hexString];
+      if (profileLevelId.level < RTCH264Level5_1) {
+        RTCH264ProfileLevelId* newProfileLevelId =
+            [[RTCH264ProfileLevelId alloc] initWithProfile:profileLevelId.profile
+                                                     level:RTCH264Level5_1];
+        // NSLog(@"profile-level-id: %@ => %@", hexString, [newProfileLevelId hexString]);
+        NSMutableDictionary* parametersCopy = [[NSMutableDictionary alloc] init];
+        [parametersCopy addEntriesFromDictionary:info.parameters];
+        [parametersCopy setObject:[newProfileLevelId hexString] forKey:@"profile-level-id"];
+        [newCodecs insertObject:[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
+                                                             parameters:parametersCopy]
+                        atIndex:i];
+      } else {
+        [newCodecs insertObject:info atIndex:i];
+      }
+    } else {
+      [newCodecs insertObject:info atIndex:i];
+    }
+  }
+  return newCodecs;
+}
+
+@implementation VideoEncoderFactory
+- (NSArray*)supportedCodecs {
+  NSArray* codecs = [super supportedCodecs];
+  return modifyH264ProfileLevelId(codecs);
+}
+@end
+
+@implementation VideoDecoderFactory
+- (NSArray*)supportedCodecs {
+  NSArray* codecs = [super supportedCodecs];
+  return modifyH264ProfileLevelId(codecs);
+}
+@end
+
+@implementation VideoEncoderFactorySimulcast
+- (NSArray*)supportedCodecs {
+  NSArray* codecs = [super supportedCodecs];
+  return modifyH264ProfileLevelId(codecs);
+}
+@end
+
+void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event) {
+  dispatch_async(dispatch_get_main_queue(), ^{
+    sink(event);
+  });
+}
+
+@implementation FlutterWebRTCPlugin {
+#pragma clang diagnostic pop
+  FlutterMethodChannel* _methodChannel;
+  FlutterEventSink _eventSink;
+  FlutterEventChannel* _eventChannel;
+  id _registry;
+  id _messenger;
+  id _textures;
+  BOOL _speakerOn;
+  BOOL _speakerOnButPreferBluetooth;
+#if TARGET_OS_IPHONE
+  AVAudioSessionPort _preferredInput;
+#endif
+  AudioManager* _audioManager;
+#if TARGET_OS_IPHONE
+  FLutterRTCVideoPlatformViewFactory* _platformViewFactory;
+#endif
+}
+
+static FlutterWebRTCPlugin* sharedSingleton;
+
++ (FlutterWebRTCPlugin*)sharedSingleton {
+  @synchronized(self) {
+    return
sharedSingleton; + } +} + +@synthesize messenger = _messenger; +@synthesize eventSink = _eventSink; +#if TARGET_OS_IPHONE +@synthesize preferredInput = _preferredInput; +#endif +@synthesize audioManager = _audioManager; + ++ (void)registerWithRegistrar:(NSObject*)registrar { + FlutterMethodChannel* channel = + [FlutterMethodChannel methodChannelWithName:@"FlutterWebRTC.Method" + binaryMessenger:[registrar messenger]]; +#if TARGET_OS_IPHONE + UIViewController* viewController = (UIViewController*)registrar.messenger; +#endif + FlutterWebRTCPlugin* instance = + [[FlutterWebRTCPlugin alloc] initWithChannel:channel + registrar:registrar + messenger:[registrar messenger] +#if TARGET_OS_IPHONE + viewController:viewController +#endif + withTextures:[registrar textures]]; + [registrar addMethodCallDelegate:instance channel:channel]; +} + +- (instancetype)initWithChannel:(FlutterMethodChannel*)channel + registrar:(NSObject*)registrar + messenger:(NSObject*)messenger +#if TARGET_OS_IPHONE + viewController:(UIViewController*)viewController +#endif + withTextures:(NSObject*)textures { + + self = [super init]; + sharedSingleton = self; + + FlutterEventChannel* eventChannel = + [FlutterEventChannel eventChannelWithName:@"FlutterWebRTC.Event" binaryMessenger:messenger]; + [eventChannel setStreamHandler:self]; + + if (self) { + _methodChannel = channel; + _registry = registrar; + _textures = textures; + _messenger = messenger; + _speakerOn = NO; + _speakerOnButPreferBluetooth = NO; + _eventChannel = eventChannel; + _audioManager = AudioManager.sharedInstance; + +#if TARGET_OS_IPHONE + _preferredInput = AVAudioSessionPortHeadphones; + self.viewController = viewController; + _platformViewFactory = [[FLutterRTCVideoPlatformViewFactory alloc] initWithMessenger:messenger]; + [registrar registerViewFactory:_platformViewFactory + withId:FLutterRTCVideoPlatformViewFactoryID]; +#endif + } + + NSDictionary* fieldTrials = @{kRTCFieldTrialUseNWPathMonitor : kRTCFieldTrialEnabledValue}; + RTCInitFieldTrialDictionary(fieldTrials); + + self.peerConnections = [NSMutableDictionary new]; + self.localStreams = [NSMutableDictionary new]; + self.localTracks = [NSMutableDictionary new]; + self.renders = [NSMutableDictionary new]; + self.frameCryptors = [NSMutableDictionary new]; + self.keyProviders = [NSMutableDictionary new]; + self.videoCapturerStopHandlers = [NSMutableDictionary new]; + self.recorders = [NSMutableDictionary new]; +#if TARGET_OS_IPHONE + self.focusMode = @"locked"; + self.exposureMode = @"locked"; + AVAudioSession* session = [AVAudioSession sharedInstance]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(didSessionRouteChange:) + name:AVAudioSessionRouteChangeNotification + object:session]; +#endif +#if TARGET_OS_OSX + [_peerConnectionFactory.audioDeviceModule setDevicesUpdatedHandler:^(void) { + NSLog(@"Handle Devices Updated!"); + if (self.eventSink) { + postEvent(self.eventSink, @{@"event" : @"onDeviceChange"}); + } + }]; +#endif + return self; +} + +- (void)detachFromEngineForRegistrar:(NSObject*)registrar { + for (RTCPeerConnection* peerConnection in _peerConnections.allValues) { + for (RTCDataChannel* dataChannel in peerConnection.dataChannels) { + dataChannel.eventSink = nil; + } + peerConnection.eventSink = nil; + } + _eventSink = nil; +} + +#pragma mark - FlutterStreamHandler methods + +#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation" +- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments { + _eventSink = nil; + return nil; +} + 
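+// All plugin-level events funnel through this one stream handler: Dart attaches a
+// listener to the "FlutterWebRTC.Event" channel, onListenWithArguments: stores the
+// sink, and native code forwards payloads through postEvent (defined above), which
+// hops to the main queue as Flutter requires. A minimal sketch of a hypothetical
+// caller -- illustrative only, not part of this change:
+//
+//   if (self.eventSink) {
+//     postEvent(self.eventSink, @{@"event" : @"onDeviceChange"});
+//   }
+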
+#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation"
+- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
+                                       eventSink:(nonnull FlutterEventSink)sink {
+  _eventSink = sink;
+  return nil;
+}
+
+- (void)didSessionRouteChange:(NSNotification*)notification {
+#if TARGET_OS_IPHONE
+  NSDictionary* interruptionDict = notification.userInfo;
+  NSInteger routeChangeReason =
+      [[interruptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue];
+  if (self.eventSink && (routeChangeReason == AVAudioSessionRouteChangeReasonNewDeviceAvailable ||
+                         routeChangeReason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable ||
+                         routeChangeReason == AVAudioSessionRouteChangeReasonCategoryChange ||
+                         routeChangeReason == AVAudioSessionRouteChangeReasonOverride)) {
+    postEvent(self.eventSink, @{@"event" : @"onDeviceChange"});
+  }
+#endif
+}
+
+- (void)handleInterruption:(NSNotification*)notification {
+#if TARGET_OS_IPHONE
+  NSDictionary* info = notification.userInfo;
+  AVAudioSessionInterruptionType type =
+      [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
+
+  // postEvent expects a non-nil sink, so drop the event if no listener is attached.
+  if (self.eventSink == nil) {
+    return;
+  }
+  if (type == AVAudioSessionInterruptionTypeBegan) {
+    postEvent(self.eventSink, @{@"event" : @"onInterruptionStart"});
+  } else if (type == AVAudioSessionInterruptionTypeEnded) {
+    postEvent(self.eventSink, @{@"event" : @"onInterruptionEnd"});
+  }
+#endif
+}
+
+- (void)initialize:(NSArray*)networkIgnoreMask bypassVoiceProcessing:(BOOL)bypassVoiceProcessing {
+  // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose);
+  if (!_peerConnectionFactory) {
+    VideoDecoderFactory* decoderFactory = [[VideoDecoderFactory alloc] init];
+    VideoEncoderFactory* encoderFactory = [[VideoEncoderFactory alloc] init];
+
+    VideoEncoderFactorySimulcast* simulcastFactory =
+        [[VideoEncoderFactorySimulcast alloc] initWithPrimary:encoderFactory
+                                                     fallback:encoderFactory];
+
+    _peerConnectionFactory = [[RTCPeerConnectionFactory alloc]
+        initWithBypassVoiceProcessing:bypassVoiceProcessing
+                       encoderFactory:simulcastFactory
+                       decoderFactory:decoderFactory
+                audioProcessingModule:_audioManager.audioProcessingModule];
+
+    RTCPeerConnectionFactoryOptions* options = [[RTCPeerConnectionFactoryOptions alloc] init];
+    for (NSString* adapter in networkIgnoreMask) {
+      if ([@"adapterTypeEthernet" isEqualToString:adapter]) {
+        options.ignoreEthernetNetworkAdapter = YES;
+      } else if ([@"adapterTypeWifi" isEqualToString:adapter]) {
+        options.ignoreWiFiNetworkAdapter = YES;
+      } else if ([@"adapterTypeCellular" isEqualToString:adapter]) {
+        options.ignoreCellularNetworkAdapter = YES;
+      } else if ([@"adapterTypeVpn" isEqualToString:adapter]) {
+        options.ignoreVPNNetworkAdapter = YES;
+      } else if ([@"adapterTypeLoopback" isEqualToString:adapter]) {
+        options.ignoreLoopbackNetworkAdapter = YES;
+      } else if ([@"adapterTypeAny" isEqualToString:adapter]) {
+        options.ignoreEthernetNetworkAdapter = YES;
+        options.ignoreWiFiNetworkAdapter = YES;
+        options.ignoreCellularNetworkAdapter = YES;
+        options.ignoreVPNNetworkAdapter = YES;
+        options.ignoreLoopbackNetworkAdapter = YES;
+      }
+    }
+
+    [_peerConnectionFactory setOptions:options];
+  }
+}
+
+- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
+  if ([@"initialize" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSDictionary* options = argsMap[@"options"];
+    BOOL enableBypassVoiceProcessing = NO;
+    if (options[@"bypassVoiceProcessing"] != nil) {
+      enableBypassVoiceProcessing = ((NSNumber*)options[@"bypassVoiceProcessing"]).boolValue;
+    }
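+    // An empty ignore mask (the default) leaves every network adapter type usable.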
NSArray* networkIgnoreMask = [NSArray new]; + if (options[@"networkIgnoreMask"] != nil) { + networkIgnoreMask = ((NSArray*)options[@"networkIgnoreMask"]); + } + [self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing]; + result(@""); + } else if ([@"setVideoEffects" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSArray* names = argsMap[@"names"]; + + [self mediaStreamTrackSetVideoEffects:trackId names:names]; + } else if ([@"handleCallInterruptionCallbacks" isEqualToString:call.method]) { +#if TARGET_OS_IPHONE + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(handleInterruption:) + name:AVAudioSessionInterruptionNotification + object:[AVAudioSession sharedInstance]]; +#endif + result(@""); + } else if ([@"createPeerConnection" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + NSDictionary* constraints = argsMap[@"constraints"]; + + RTCPeerConnection* peerConnection = [self.peerConnectionFactory + peerConnectionWithConfiguration:[self RTCConfiguration:configuration] + constraints:[self parseMediaConstraints:constraints] + delegate:self]; + + peerConnection.remoteStreams = [NSMutableDictionary new]; + peerConnection.remoteTracks = [NSMutableDictionary new]; + peerConnection.dataChannels = [NSMutableDictionary new]; + + NSString* peerConnectionId = [[NSUUID UUID] UUIDString]; + peerConnection.flutterId = peerConnectionId; + + /*Create Event Channel.*/ + peerConnection.eventChannel = [FlutterEventChannel + eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/peerConnectionEvent%@", + peerConnectionId] + binaryMessenger:_messenger]; + [peerConnection.eventChannel setStreamHandler:peerConnection]; + + self.peerConnections[peerConnectionId] = peerConnection; + result(@{@"peerConnectionId" : peerConnectionId}); + } else if ([@"getUserMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getUserMedia:constraints result:result]; + } else if ([@"getDisplayMedia" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + [self getDisplayMedia:constraints result:result]; + } else if ([@"createLocalMediaStream" isEqualToString:call.method]) { + [self createLocalMediaStream:result]; + } else if ([@"getSources" isEqualToString:call.method]) { + [self getSources:result]; + } else if ([@"selectAudioInput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId = argsMap[@"deviceId"]; + [self selectAudioInput:deviceId result:result]; + } else if ([@"selectAudioOutput" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* deviceId = argsMap[@"deviceId"]; + [self selectAudioOutput:deviceId result:result]; + } else if ([@"triggeriOSAudioRouteSelectionUI" isEqualToString:call.method]) { + [self triggeriOSAudioRouteSelectionUI:result]; + } else if ([@"mediaStreamGetTracks" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + [self mediaStreamGetTracks:streamId result:result]; + } else if ([@"createOffer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + 
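+    // Look up the native connection that createPeerConnection registered under this id.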
RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createAnswer" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* constraints = argsMap[@"constraints"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionCreateAnswer:constraints peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream* stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection && stream) { + [peerConnection addStream:stream]; + result(@""); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString + stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"removeStream" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + + NSString* streamId = ((NSString*)argsMap[@"streamId"]); + RTCMediaStream* stream = self.localStreams[streamId]; + + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection && stream) { + [peerConnection removeStream:stream]; + result(nil); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString + stringWithFormat:@"Error: peerConnection or mediaStream not found!"] + details:nil]); + } + } else if ([@"captureFrame" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"trackId"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + [self mediaStreamTrackCaptureFrame:videoTrack toPath:path result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"setLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary* descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription 
typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType + sdp:sdp]; + if (peerConnection) { + [self peerConnectionSetLocalDescription:description + peerConnection:peerConnection + result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + NSDictionary* descriptionMap = argsMap[@"description"]; + NSString* sdp = descriptionMap[@"sdp"]; + RTCSdpType sdpType = [RTCSessionDescription typeForString:descriptionMap[@"type"]]; + RTCSessionDescription* description = [[RTCSessionDescription alloc] initWithType:sdpType + sdp:sdp]; + + if (peerConnection) { + [self peerConnectionSetRemoteDescription:description + peerConnection:peerConnection + result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"sendDtmf" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* tone = argsMap[@"tone"]; + int duration = ((NSNumber*)argsMap[@"duration"]).intValue; + int interToneGap = ((NSNumber*)argsMap[@"gap"]).intValue; + + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCRtpSender* audioSender = nil; + for (RTCRtpSender* rtpSender in peerConnection.senders) { + if ([[[rtpSender track] kind] isEqualToString:@"audio"]) { + audioSender = rtpSender; + } + } + if (audioSender) { + NSOperationQueue* queue = [[NSOperationQueue alloc] init]; + [queue addOperationWithBlock:^{ + double durationMs = duration / 1000.0; + double interToneGapMs = interToneGap / 1000.0; + [audioSender.dtmfSender insertDtmf:(NSString*)tone + duration:(NSTimeInterval)durationMs + interToneGap:(NSTimeInterval)interToneGapMs]; + NSLog(@"DTMF Tone played "); + }]; + } + + result(@{@"result" : @"success"}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addCandidate" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* candMap = argsMap[@"candidate"]; + NSString* sdp = candMap[@"candidate"]; + id sdpMLineIndexValue = candMap[@"sdpMLineIndex"]; + int sdpMLineIndex = 0; + if (![sdpMLineIndexValue isKindOfClass:[NSNull class]]) { + sdpMLineIndex = ((NSNumber*)candMap[@"sdpMLineIndex"]).intValue; + } + NSString* sdpMid = candMap[@"sdpMid"]; + + RTCIceCandidate* candidate = [[RTCIceCandidate alloc] initWithSdp:sdp + sdpMLineIndex:sdpMLineIndex + sdpMid:sdpMid]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + if (peerConnection) { + [self peerConnectionAddICECandidate:candidate peerConnection:peerConnection result:result]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: 
peerConnection not found!"] + details:nil]); + } + } else if ([@"getStats" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + id trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + if (trackId != nil && trackId != [NSNull null]) { + return [self peerConnectionGetStatsForTrackId:trackId + peerConnection:peerConnection + result:result]; + } else { + return [self peerConnectionGetStats:peerConnection result:result]; + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"createDataChannel" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* label = argsMap[@"label"]; + NSDictionary* dataChannelDict = (NSDictionary*)argsMap[@"dataChannelDict"]; + [self createDataChannel:peerConnectionId + label:label + config:[self RTCDataChannelConfiguration:dataChannelDict] + messenger:_messenger + result:result]; + } else if ([@"dataChannelSend" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + NSString* type = argsMap[@"type"]; + id data = argsMap[@"data"]; + + [self dataChannelSend:peerConnectionId dataChannelId:dataChannelId data:data type:type]; + result(nil); + } else if ([@"dataChannelGetBufferedAmount" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + + [self dataChannelGetBufferedAmount:peerConnectionId dataChannelId:dataChannelId result:result]; + } else if ([@"dataChannelClose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* dataChannelId = argsMap[@"dataChannelId"]; + [self dataChannelClose:peerConnectionId dataChannelId:dataChannelId]; + result(nil); + } else if ([@"streamDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + RTCMediaStream* stream = self.localStreams[streamId]; + BOOL shouldCallResult = YES; + if (stream) { + for (RTCVideoTrack* track in stream.videoTracks) { + [_localTracks removeObjectForKey:track.trackId]; + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + FlutterRTCVideoRenderer* renderer = [self findRendererByTrackId:videoTrack.trackId]; + if (renderer != nil) { + renderer.videoTrack = nil; + } + CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[videoTrack.trackId]; + if (stopHandler) { + shouldCallResult = NO; + stopHandler(^{ + NSLog(@"video capturer stopped, trackID = %@", videoTrack.trackId); + self.videoCapturer = nil; + result(nil); + }); + [self.videoCapturerStopHandlers removeObjectForKey:videoTrack.trackId]; + } + } + for (RTCAudioTrack* track in stream.audioTracks) { + [_localTracks removeObjectForKey:track.trackId]; + } + [self.localStreams removeObjectForKey:streamId]; + [self deactiveRtcAudioSession]; + } + if (shouldCallResult) { + // do not call if will be called in stopCapturer above. 
+ result(nil); + } + } else if ([@"mediaStreamTrackSetEnable" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* enabled = argsMap[@"enabled"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil) { + track.isEnabled = enabled.boolValue; + } + result(nil); + } else if ([@"mediaStreamAddTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + + RTCMediaStream* stream = self.localStreams[streamId]; + if (stream) { + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (track != nil) { + if ([track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + [stream addAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[RTCVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + [stream addVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Track is nil" + message:nil + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamAddTrack: Stream is nil" + message:nil + details:nil]); + } + result(nil); + } else if ([@"mediaStreamRemoveTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* streamId = argsMap[@"streamId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStream* stream = self.localStreams[streamId]; + if (stream) { + id track = self.localTracks[trackId]; + if (track != nil) { + if ([track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; + [stream removeAudioTrack:audioTrack]; + } else if ([track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [stream removeVideoTrack:videoTrack]; + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Track is nil" + message:nil + details:nil]); + } + } else { + result([FlutterError errorWithCode:@"mediaStreamRemoveTrack: Stream is nil" + message:nil + details:nil]); + } + result(nil); + } else if ([@"trackDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + BOOL audioTrack = NO; + for (NSString* streamId in self.localStreams) { + RTCMediaStream* stream = [self.localStreams objectForKey:streamId]; + for (RTCAudioTrack* track in stream.audioTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream removeAudioTrack:track]; + audioTrack = YES; + } + } + for (RTCVideoTrack* track in stream.videoTracks) { + if ([trackId isEqualToString:track.trackId]) { + [stream removeVideoTrack:track]; + CapturerStopHandler stopHandler = self.videoCapturerStopHandlers[track.trackId]; + if (stopHandler) { + stopHandler(^{ + NSLog(@"video capturer stopped, trackID = %@", track.trackId); + }); + [self.videoCapturerStopHandlers removeObjectForKey:track.trackId]; + } + } + } + } + // [_localTracks removeObjectForKey:trackId]; + if (audioTrack) { + [self ensureAudioSession]; + } + FlutterRTCVideoRenderer* renderer = [self findRendererByTrackId:trackId]; + if (renderer != nil) { + renderer.videoTrack = nil; + } + result(nil); + } else if ([@"restartIce" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* 
peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (!peerConnection) { + result([FlutterError errorWithCode:@"restartIce: peerConnection is nil" + message:nil + details:nil]); + } else { + [peerConnection restartIce]; + result(nil); + } + } else if ([@"peerConnectionClose" isEqualToString:call.method] || + [@"peerConnectionDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [peerConnection close]; + [self.peerConnections removeObjectForKey:peerConnectionId]; + + // Clean up peerConnection's streams and tracks + [peerConnection.remoteStreams removeAllObjects]; + [peerConnection.remoteTracks removeAllObjects]; + + // Clean up peerConnection's dataChannels. + NSMutableDictionary* dataChannels = peerConnection.dataChannels; + for (NSString* dataChannelId in dataChannels) { + dataChannels[dataChannelId].delegate = nil; + // There is no need to close the RTCDataChannel because it is owned by the + // RTCPeerConnection and the latter will close the former. + } + [dataChannels removeAllObjects]; + } + [self deactiveRtcAudioSession]; + result(nil); + } else if ([@"createVideoRenderer" isEqualToString:call.method]) { + FlutterRTCVideoRenderer* render = [self createWithTextureRegistry:_textures + messenger:_messenger]; + self.renders[@(render.textureId)] = render; + result(@{@"textureId" : @(render.textureId)}); + } else if ([@"videoRendererDispose" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer* render = self.renders[textureId]; + if (render != nil) { + render.videoTrack = nil; + [render dispose]; + [self.renders removeObjectForKey:textureId]; + } + result(nil); + } else if ([@"videoRendererSetSrcObject" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* textureId = argsMap[@"textureId"]; + FlutterRTCVideoRenderer* render = self.renders[textureId]; + NSString* streamId = argsMap[@"streamId"]; + NSString* ownerTag = argsMap[@"ownerTag"]; + NSString* trackId = argsMap[@"trackId"]; + if (!render) { + result([FlutterError errorWithCode:@"videoRendererSetSrcObject: render is nil" + message:nil + details:nil]); + return; + } + RTCMediaStream* stream = nil; + RTCVideoTrack* videoTrack = nil; + if ([ownerTag isEqualToString:@"local"]) { + stream = _localStreams[streamId]; + } + if (!stream) { + stream = [self streamForId:streamId peerConnectionId:ownerTag]; + } + if (stream) { + NSArray* videoTracks = stream ? stream.videoTracks : nil; + videoTrack = videoTracks && videoTracks.count ? 
videoTracks[0] : nil;
+      for (RTCVideoTrack* track in videoTracks) {
+        if ([track.trackId isEqualToString:trackId]) {
+          videoTrack = track;
+        }
+      }
+      if (!videoTrack) {
+        NSLog(@"No video track found for RTCMediaStream: %@", streamId);
+      }
+    }
+    [self rendererSetSrcObject:render stream:videoTrack];
+    result(nil);
+  }
+#if TARGET_OS_IPHONE
+  else if ([@"videoPlatformViewRendererSetSrcObject" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSNumber* viewId = argsMap[@"viewId"];
+    FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId];
+    NSString* streamId = argsMap[@"streamId"];
+    NSString* ownerTag = argsMap[@"ownerTag"];
+    NSString* trackId = argsMap[@"trackId"];
+    if (!render) {
+      result([FlutterError errorWithCode:@"videoPlatformViewRendererSetSrcObject: render is nil"
+                                 message:nil
+                                 details:nil]);
+      return;
+    }
+    RTCMediaStream* stream = nil;
+    RTCVideoTrack* videoTrack = nil;
+    if ([ownerTag isEqualToString:@"local"]) {
+      stream = _localStreams[streamId];
+    }
+    if (!stream) {
+      stream = [self streamForId:streamId peerConnectionId:ownerTag];
+    }
+    if (stream) {
+      NSArray* videoTracks = stream ? stream.videoTracks : nil;
+      videoTrack = videoTracks && videoTracks.count ? videoTracks[0] : nil;
+      for (RTCVideoTrack* track in videoTracks) {
+        if ([track.trackId isEqualToString:trackId]) {
+          videoTrack = track;
+        }
+      }
+      if (!videoTrack) {
+        NSLog(@"No video track found for RTCMediaStream: %@", streamId);
+      }
+    }
+    render.videoTrack = videoTrack;
+    result(nil);
+  } else if ([@"videoPlatformViewRendererDispose" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSNumber* viewId = argsMap[@"viewId"];
+    FlutterRTCVideoPlatformViewController* render = _platformViewFactory.renders[viewId];
+    if (render != nil) {
+      render.videoTrack = nil;
+      [_platformViewFactory.renders removeObjectForKey:viewId];
+    }
+    result(nil);
+  }
+#endif
+  else if ([@"enableIOSMultitaskingCameraAccess" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    BOOL enable = [argsMap[@"enable"] boolValue];
+
+    [self enableMultitaskingCameraAccess:enable result:result];
+  } else if ([@"mediaStreamTrackHasTorch" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* trackId = argsMap[@"trackId"];
+    id track = self.localTracks[trackId];
+    if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) {
+      RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack;
+      [self mediaStreamTrackHasTorch:videoTrack result:result];
+    } else {
+      if (track == nil) {
+        result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]);
+      } else {
+        result([FlutterError errorWithCode:[@"Track is class of "
+                                               stringByAppendingString:[[track class] description]]
+                                   message:nil
+                                   details:nil]);
+      }
+    }
+  } else if ([@"mediaStreamTrackSetTorch" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* trackId = argsMap[@"trackId"];
+    BOOL torch = [argsMap[@"torch"] boolValue];
+    id track = self.localTracks[trackId];
+    if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) {
+      RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack;
+      [self mediaStreamTrackSetTorch:videoTrack torch:torch result:result];
+    } else {
+      if (track == nil) {
+        result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]);
+      } else {
+        result([FlutterError errorWithCode:[@"Track is class of "
+                                               stringByAppendingString:[[track class] description]]
+                                   message:nil
details:nil]); + } + } + } else if ([@"mediaStreamTrackSetZoom" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + double zoomLevel = [argsMap[@"zoomLevel"] doubleValue]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = ((LocalVideoTrack*)track).videoTrack; + [self mediaStreamTrackSetZoom:videoTrack zoomLevel:zoomLevel result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* focusMode = argsMap[@"focusMode"]; + id track = self.localTracks[trackId]; + if (track != nil && focusMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusMode:videoTrack focusMode:focusMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetFocusPoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* focusPoint = argsMap[@"focusPoint"]; + id track = self.localTracks[trackId]; + if (track != nil && focusPoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetFocusPoint:videoTrack focusPoint:focusPoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposureMode" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSString* exposureMode = argsMap[@"exposureMode"]; + id track = self.localTracks[trackId]; + if (track != nil && exposureMode != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSetExposureMode:videoTrack exposureMode:exposureMode result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSetExposurePoint" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSDictionary* exposurePoint = argsMap[@"exposurePoint"]; + id track = self.localTracks[trackId]; + if (track != nil && exposurePoint != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self 
mediaStreamTrackSetExposurePoint:videoTrack exposurePoint:exposurePoint result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"mediaStreamTrackSwitchCamera" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalVideoTrack class]]) { + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track.track; + [self mediaStreamTrackSwitchCamera:videoTrack result:result]; + } else { + if (track == nil) { + result([FlutterError errorWithCode:@"Track is nil" message:nil details:nil]); + } else { + result([FlutterError errorWithCode:[@"Track is class of " + stringByAppendingString:[[track class] description]] + message:nil + details:nil]); + } + } + } else if ([@"setVolume" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* volume = argsMap[@"volume"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCAudioTrack class]]) { + RTCAudioTrack* audioTrack = (RTCAudioTrack*)track; + RTCAudioSource* audioSource = audioTrack.source; + audioSource.volume = [volume doubleValue]; + } + result(nil); + } else if ([@"trackClone" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + RTCMediaStreamTrack* track = [self cloneTrack:trackId]; + + result([self mediaTrackToMap:track]); + } else if ([@"setMicrophoneMute" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* trackId = argsMap[@"trackId"]; + NSNumber* mute = argsMap[@"mute"]; + id track = self.localTracks[trackId]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { + RTCAudioTrack* audioTrack = ((LocalAudioTrack*)track).audioTrack; + audioTrack.isEnabled = !mute.boolValue; + } + result(nil); + } +#if TARGET_OS_IPHONE + else if ([@"enableSpeakerphone" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* enable = argsMap[@"enable"]; + _speakerOn = enable.boolValue; + _speakerOnButPreferBluetooth = NO; + [AudioUtils setSpeakerphoneOn:_speakerOn]; + postEvent(self.eventSink, @{@"event" : @"onDeviceChange"}); + result(nil); + } else if ([@"ensureAudioSession" isEqualToString:call.method]) { + [self ensureAudioSession]; + result(nil); + } else if ([@"enableSpeakerphoneButPreferBluetooth" isEqualToString:call.method]) { + _speakerOn = YES; + _speakerOnButPreferBluetooth = YES; + [AudioUtils setSpeakerphoneOnButPreferBluetooth]; + result(nil); + } else if ([@"setAppleAudioConfiguration" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSDictionary* configuration = argsMap[@"configuration"]; + [AudioUtils setAppleAudioConfiguration:configuration]; + result(nil); + } +#endif + else if ([@"getLocalDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCSessionDescription* sdp = 
peerConnection.localDescription; + if (nil == sdp) { + result(nil); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getRemoteDescription" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + RTCSessionDescription* sdp = peerConnection.remoteDescription; + if (nil == sdp) { + result(nil); + } else { + NSString* type = [RTCSessionDescription stringForType:sdp.type]; + result(@{@"sdp" : sdp.sdp, @"type" : type}); + } + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"setConfiguration" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSDictionary* configuration = argsMap[@"configuration"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + [self peerConnectionSetConfiguration:[self RTCConfiguration:configuration] + peerConnection:peerConnection]; + result(nil); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"addTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* trackId = argsMap[@"trackId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [peerConnection addTrack:track streamIds:streamIds]; + if (sender == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection.addTrack failed!"] + details:nil]); + return; + } + + result([self rtpSenderToMap:sender]); + } else if ([@"removeTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"senderId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) 
{
+      result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                                 message:[NSString stringWithFormat:@"Error: sender not found!"]
+                                 details:nil]);
+      return;
+    }
+    result(@{@"result" : @([peerConnection removeTrack:sender])});
+  } else if ([@"addTransceiver" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* peerConnectionId = argsMap[@"peerConnectionId"];
+    NSDictionary* transceiverInit = argsMap[@"transceiverInit"];
+    NSString* trackId = argsMap[@"trackId"];
+    NSString* mediaType = argsMap[@"mediaType"];
+    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
+    if (peerConnection == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
+                details:nil]);
+      return;
+    }
+    RTCRtpTransceiver* transceiver = nil;
+    BOOL hasAudio = NO;
+    if (trackId != nil) {
+      RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil];
+      if (transceiverInit != nil) {
+        RTCRtpTransceiverInit* init = [self mapToTransceiverInit:transceiverInit];
+        transceiver = [peerConnection addTransceiverWithTrack:track init:init];
+      } else {
+        transceiver = [peerConnection addTransceiverWithTrack:track];
+      }
+      if ([track.kind isEqualToString:@"audio"]) {
+        hasAudio = YES;
+      }
+    } else if (mediaType != nil) {
+      RTCRtpMediaType rtpMediaType = [self stringToRtpMediaType:mediaType];
+      if (transceiverInit != nil) {
+        RTCRtpTransceiverInit* init = [self mapToTransceiverInit:transceiverInit];
+        transceiver = [peerConnection addTransceiverOfType:(rtpMediaType) init:init];
+      } else {
+        transceiver = [peerConnection addTransceiverOfType:rtpMediaType];
+      }
+      if (rtpMediaType == RTCRtpMediaTypeAudio) {
+        hasAudio = YES;
+      }
+    } else {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: Incomplete parameters!"]
+                details:nil]);
+      return;
+    }
+
+    if (transceiver == nil) {
+      result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                                 message:[NSString stringWithFormat:@"Error: can't addTransceiver!"]
+                                 details:nil]);
+      return;
+    }
+
+    result([self transceiverToMap:transceiver]);
+  } else if ([@"rtpTransceiverSetDirection" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* peerConnectionId = argsMap[@"peerConnectionId"];
+    NSString* direction = argsMap[@"direction"];
+    NSString* transceiverId = argsMap[@"transceiverId"];
+    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
+    if (peerConnection == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
+                details:nil]);
+      return;
+    }
+    RTCRtpTransceiver* transceiver = [self getRtpTransceiverById:peerConnection Id:transceiverId];
+    if (transceiver == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: transceiver not found!"]
+                details:nil]);
+      return;
+    }
+    [transceiver setDirection:[self stringToTransceiverDirection:direction] error:nil];
+    result(nil);
+  } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method] ||
+             [@"rtpTransceiverGetDirection" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* peerConnectionId = argsMap[@"peerConnectionId"];
+    NSString* transceiverId = argsMap[@"transceiverId"];
+    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
+    if (peerConnection == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
+                details:nil]);
+      return;
+    }
+    RTCRtpTransceiver* transceiver = [self getRtpTransceiverById:peerConnection Id:transceiverId];
+    if (transceiver == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: transceiver not found!"]
+                details:nil]);
+      return;
+    }
+
+    if ([@"rtpTransceiverGetDirection" isEqualToString:call.method]) {
+      result(@{@"result" : [self transceiverDirectionString:transceiver.direction]});
+    } else if ([@"rtpTransceiverGetCurrentDirection" isEqualToString:call.method]) {
+      RTCRtpTransceiverDirection directionOut = transceiver.direction;
+      if ([transceiver currentDirection:&directionOut]) {
+        result(@{@"result" : [self transceiverDirectionString:directionOut]});
+      } else {
+        result(nil);
+      }
+    }
+  } else if ([@"rtpTransceiverStop" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* peerConnectionId = argsMap[@"peerConnectionId"];
+    NSString* transceiverId = argsMap[@"transceiverId"];
+    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
+    if (peerConnection == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
+                details:nil]);
+      return;
+    }
+    RTCRtpTransceiver* transceiver = [self getRtpTransceiverById:peerConnection Id:transceiverId];
+    if (transceiver == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: transceiver not found!"]
+                details:nil]);
+      return;
+    }
+    [transceiver stopInternal];
+    result(nil);
+  } else if ([@"rtpSenderSetParameters" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* peerConnectionId = argsMap[@"peerConnectionId"];
+    NSString* senderId = argsMap[@"rtpSenderId"];
+    NSDictionary* parameters = argsMap[@"parameters"];
+    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
+    if (peerConnection == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
+                details:nil]);
+      return;
+    }
+    RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId];
+    if (sender == nil) {
+      result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                                 message:[NSString stringWithFormat:@"Error: sender not found!"]
+                                 details:nil]);
+      return;
+    }
+    [sender setParameters:[self updateRtpParameters:sender.parameters with:parameters]];
+
+    result(@{@"result" : @(YES)});
+  } else if ([@"rtpSenderReplaceTrack" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* peerConnectionId = argsMap[@"peerConnectionId"];
+    NSString* senderId = argsMap[@"rtpSenderId"];
+    NSString* trackId = argsMap[@"trackId"];
+    RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
+    if (peerConnection == nil) {
+      result([FlutterError
+          errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method]
+                message:[NSString stringWithFormat:@"Error:
peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack* track = nil; + if ([trackId length] > 0) { + track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetTrack" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSString* trackId = argsMap[@"trackId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + RTCMediaStreamTrack* track = nil; + if ([trackId length] > 0) { + track = [self trackForId:trackId peerConnectionId:nil]; + if (track == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: track not found!"] + details:nil]); + return; + } + } + [sender setTrack:track]; + result(nil); + } else if ([@"rtpSenderSetStreams" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* senderId = argsMap[@"rtpSenderId"]; + NSArray* streamIds = argsMap[@"streamIds"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + RTCRtpSender* sender = [self getRtpSenderById:peerConnection Id:senderId]; + if (sender == nil) { + result([FlutterError errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: sender not found!"] + details:nil]); + return; + } + [sender setStreamIds:streamIds]; + result(nil); + } else if ([@"getSenders" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* senders = [NSMutableArray array]; + for (RTCRtpSender* sender in peerConnection.senders) { + [senders addObject:[self rtpSenderToMap:sender]]; + } + + result(@{@"senders" : senders}); + } else if 
([@"getReceivers" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* receivers = [NSMutableArray array]; + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + [receivers addObject:[self receiverToMap:receiver]]; + } + + result(@{@"receivers" : receivers}); + } else if ([@"getTransceivers" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection == nil) { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + return; + } + + NSMutableArray* transceivers = [NSMutableArray array]; + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + [transceivers addObject:[self transceiverToMap:transceiver]]; + } + + result(@{@"transceivers" : transceivers}); + } else if ([@"getDesktopSources" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self getDesktopSources:argsMap result:result]; + } else if ([@"updateDesktopSources" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self updateDesktopSources:argsMap result:result]; + } else if ([@"getDesktopSourceThumbnail" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self getDesktopSourceThumbnail:argsMap result:result]; + } else if ([@"setCodecPreferences" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self transceiverSetCodecPreferences:argsMap result:result]; + } else if ([@"getRtpReceiverCapabilities" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self peerConnectionGetRtpReceiverCapabilities:argsMap result:result]; + } else if ([@"getRtpSenderCapabilities" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + [self peerConnectionGetRtpSenderCapabilities:argsMap result:result]; + } else if ([@"getSignalingState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForSignalingState:peerConnection.signalingState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getIceGatheringState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForICEGatheringState:peerConnection.iceGatheringState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + 
details:nil]); + } + } else if ([@"getIceConnectionState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForICEConnectionState:peerConnection.iceConnectionState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } + } else if ([@"getConnectionState" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + if (peerConnection) { + result(@{@"state" : [self stringForPeerConnectionState:peerConnection.connectionState]}); + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@Failed", call.method] + message:[NSString stringWithFormat:@"Error: peerConnection not found!"] + details:nil]); + } +#if TARGET_OS_IOS + } else if ([@"startRecordToFile" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* recorderId = argsMap[@"recorderId"]; + NSString* path = argsMap[@"path"]; + NSString* trackId = argsMap[@"videoTrackId"]; + NSString* peerConnectionId = argsMap[@"peerConnectionId"]; + NSString* audioTrackId = [self audioTrackIdForVideoTrackId:trackId]; + + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:peerConnectionId]; + RTCMediaStreamTrack* audioTrack = [self trackForId:audioTrackId + peerConnectionId:peerConnectionId]; + if (track != nil && [track isKindOfClass:[RTCVideoTrack class]]) { + NSURL* pathUrl = [NSURL fileURLWithPath:path]; + self.recorders[recorderId] = + [[FlutterRTCMediaRecorder alloc] initWithVideoTrack:(RTCVideoTrack*)track + audioTrack:(RTCAudioTrack*)audioTrack + outputFile:pathUrl]; + } + result(nil); + } else if ([@"stopRecordToFile" isEqualToString:call.method]) { + NSDictionary* argsMap = call.arguments; + NSNumber* recorderId = argsMap[@"recorderId"]; + FlutterRTCMediaRecorder* recorder = self.recorders[recorderId]; + if (recorder != nil) { + [recorder stop:result]; + [self.recorders removeObjectForKey:recorderId]; + } else { + result([FlutterError + errorWithCode:[NSString stringWithFormat:@"%@ failed", call.method] + message:[NSString + stringWithFormat:@"Error: recorder with id %@ not found!", recorderId] + details:nil]); + } +#endif + } else { + [self handleFrameCryptorMethodCall:call result:result]; + } +} + +- (void)dealloc { + [_localTracks removeAllObjects]; + _localTracks = nil; + [_localStreams removeAllObjects]; + _localStreams = nil; + + for (NSString* peerConnectionId in _peerConnections) { + RTCPeerConnection* peerConnection = _peerConnections[peerConnectionId]; + peerConnection.delegate = nil; + [peerConnection close]; + } + [_peerConnections removeAllObjects]; + _peerConnectionFactory = nil; +} + +- (BOOL)hasLocalAudioTrack { + for (id key in _localTracks.allKeys) { + id track = [_localTracks objectForKey:key]; + if (track != nil && [track isKindOfClass:[LocalAudioTrack class]]) { + return YES; + } + } + return NO; +} + +- (void)ensureAudioSession { +#if TARGET_OS_IPHONE + [AudioUtils ensureAudioSessionWithRecording:[self hasLocalAudioTrack]]; +#endif +} + +- (void)deactiveRtcAudioSession { +#if TARGET_OS_IPHONE + if (![self hasLocalAudioTrack] && 
self.peerConnections.count == 0) { + [AudioUtils deactiveRtcAudioSession]; + } +#endif +} + +- (void)mediaStreamTrackSetVideoEffects:(nonnull NSString*)trackId + names:(nonnull NSArray*)names { + RTCMediaStreamTrack* track = [self trackForId:trackId peerConnectionId:nil]; + + if (track) { + NSLog(@"mediaStreamTrackSetVideoEffects: track found"); + + RTCVideoTrack* videoTrack = (RTCVideoTrack*)track; + RTCVideoSource* videoSource = videoTrack.source; + + NSMutableArray* processors = [[NSMutableArray alloc] init]; + for (NSString* name in names) { + NSObject* processor = [ProcessorProvider getProcessor:name]; + if (processor != nil) { + [processors addObject:processor]; + } + } + + self.videoEffectProcessor = [[VideoEffectProcessor alloc] initWithProcessors:processors + videoSource:videoSource]; + + self.videoCapturer.delegate = self.videoEffectProcessor; + } else { + NSLog(@"mediaStreamTrackSetVideoEffects: track not found"); + } +} + +- (void)enableMultitaskingCameraAccess:(BOOL)enable result:(FlutterResult)result { + @try { + AVCaptureSession* session = self.videoCapturer.captureSession; + if (session == nil) { + NSLog(@"enableMultitaskingCameraAccess: Capture session is nil."); + result(@NO); + return; + } + +#if TARGET_OS_OSX + NSLog(@"enableMultitaskingCameraAccess: Multitasking camera access is not available on macOS."); + result(@NO); + return; +#else + if (@available(iOS 16.0, *)) { + BOOL shouldChange = session.multitaskingCameraAccessEnabled != enable; + BOOL canChange = !enable || (enable && session.isMultitaskingCameraAccessSupported); + + if (shouldChange && canChange) { + [session beginConfiguration]; + [session setMultitaskingCameraAccessEnabled:enable]; + [session commitConfiguration]; + + result(enable ? @YES : @NO); + } else { + if (!canChange) { + NSLog(@"enableMultitaskingCameraAccess: Multitasking camera access is not supported on " + @"this device."); + result(@NO); + } else { + NSLog(@"enableMultitaskingCameraAccess: Multitasking camera access is already %@.", + enable ? @"enabled" : @"disabled"); + result(enable ? 
@YES : @NO); + } + } + } else { + NSLog( + @"enableMultitaskingCameraAccess: Multitasking camera access requires iOS 16 or later."); + result(@NO); + } +#endif + } @catch (NSException* exception) { + NSLog(@"enableMultitaskingCameraAccess: Exception occurred: %@ - %@", exception.name, + exception.reason); + result(@NO); + } +} + +- (void)mediaStreamGetTracks:(NSString*)streamId result:(FlutterResult)result { + RTCMediaStream* stream = [self streamForId:streamId peerConnectionId:@""]; + if (stream) { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + NSString* trackId = track.trackId; + [self.localTracks setObject:[[LocalAudioTrack alloc] initWithTrack:(RTCAudioTrack*)track] + forKey:trackId]; + [audioTracks addObject:@{ + @"enabled" : @(track.isEnabled), + @"id" : trackId, + @"kind" : track.kind, + @"label" : trackId, + @"readyState" : @"live", + @"remote" : @(NO) + }]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + NSString* trackId = track.trackId; + [_localTracks setObject:[[LocalVideoTrack alloc] initWithTrack:(RTCVideoTrack*)track] + forKey:trackId]; + [videoTracks addObject:@{ + @"enabled" : @(track.isEnabled), + @"id" : trackId, + @"kind" : track.kind, + @"label" : trackId, + @"readyState" : @"live", + @"remote" : @(NO) + }]; + } + + result(@{@"audioTracks" : audioTracks, @"videoTracks" : videoTracks}); + } else { + result(nil); + } +} + +- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString*)peerConnectionId { + RTCMediaStream* stream = nil; + if (peerConnectionId.length > 0) { + RTCPeerConnection* peerConnection = [_peerConnections objectForKey:peerConnectionId]; + stream = peerConnection.remoteStreams[streamId]; + } else { + for (RTCPeerConnection* peerConnection in _peerConnections.allValues) { + stream = peerConnection.remoteStreams[streamId]; + if (stream) { + break; + } + } + } + if (!stream) { + stream = _localStreams[streamId]; + } + return stream; +} + +- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId { + RTCMediaStreamTrack* mediaStreamTrack = nil; + for (NSString* currentId in _peerConnections.allKeys) { + RTCPeerConnection* peerConnection = _peerConnections[currentId]; + mediaStreamTrack = peerConnection.remoteTracks[trackId]; + if (!mediaStreamTrack) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver.track != nil && + [transceiver.receiver.track.trackId isEqual:trackId]) { + mediaStreamTrack = transceiver.receiver.track; + break; + } + } + } + if (mediaStreamTrack) { + break; + } + } + + return mediaStreamTrack; +} + +- (NSString*)audioTrackIdForVideoTrackId:(NSString*)videoTrackId { + NSString* audioTrackId = nil; + + // Iterate through all peerConnections + for (NSString* peerConnectionId in self.peerConnections) { + RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId]; + + // Iterate through the receivers to find the video track + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + RTCMediaStreamTrack* track = [receiver valueForKey:@"track"]; + if ([track.kind isEqualToString:@"video"] && [track.trackId isEqualToString:videoTrackId]) { + // Found the video track, now look for the audio track in the same peerConnection + for (RTCRtpReceiver* audioReceiver in peerConnection.receivers) { + RTCMediaStreamTrack* audioTrack = [audioReceiver valueForKey:@"track"]; + if ([audioTrack.kind 
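+ // NOTE: pairs the recording's video track with the *first* audio receiver found
+ // on the same peer connection; if several audio tracks exist, the choice is arbitrary.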
isEqualToString:@"audio"]) { + audioTrackId = audioTrack.trackId; + break; + } + } + break; + } + } + + // If the audioTrackId is found, break out of the loop + if (audioTrackId != nil) { + break; + } + } + + return audioTrackId; +} + +- (RTCMediaStreamTrack*)trackForId:(NSString*)trackId peerConnectionId:(NSString*)peerConnectionId { + id track = _localTracks[trackId]; + RTCMediaStreamTrack* mediaStreamTrack = nil; + if (!track) { + for (NSString* currentId in _peerConnections.allKeys) { + if (peerConnectionId && [currentId isEqualToString:peerConnectionId] == false) { + continue; + } + RTCPeerConnection* peerConnection = _peerConnections[currentId]; + mediaStreamTrack = peerConnection.remoteTracks[trackId]; + if (!mediaStreamTrack) { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + if (transceiver.receiver.track != nil && + [transceiver.receiver.track.trackId isEqual:trackId]) { + mediaStreamTrack = transceiver.receiver.track; + break; + } + } + } + if (mediaStreamTrack) { + break; + } + } + } else { + mediaStreamTrack = [track track]; + } + return mediaStreamTrack; +} + +- (RTCIceServer*)RTCIceServer:(id)json { + if (!json) { + NSLog(@"a valid iceServer value"); + return nil; + } + + if (![json isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return nil; + } + + NSArray* urls; + if ([json[@"url"] isKindOfClass:[NSString class]]) { + // TODO: 'url' is non-standard + urls = @[ json[@"url"] ]; + } else if ([json[@"urls"] isKindOfClass:[NSString class]]) { + urls = @[ json[@"urls"] ]; + } else { + urls = (NSArray*)json[@"urls"]; + } + + if (json[@"username"] != nil || json[@"credential"] != nil) { + return [[RTCIceServer alloc] initWithURLStrings:urls + username:json[@"username"] + credential:json[@"credential"]]; + } + + return [[RTCIceServer alloc] initWithURLStrings:urls]; +} + +- (nonnull RTCConfiguration*)RTCConfiguration:(id)json { + RTCConfiguration* config = [[RTCConfiguration alloc] init]; + + if (!json) { + return config; + } + + if (![json isKindOfClass:[NSDictionary class]]) { + NSLog(@"must be an object"); + return config; + } + + if (json[@"audioJitterBufferMaxPackets"] != nil && + [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { + config.audioJitterBufferMaxPackets = [json[@"audioJitterBufferMaxPackets"] intValue]; + } + + if (json[@"bundlePolicy"] != nil && [json[@"bundlePolicy"] isKindOfClass:[NSString class]]) { + NSString* bundlePolicy = json[@"bundlePolicy"]; + if ([bundlePolicy isEqualToString:@"balanced"]) { + config.bundlePolicy = RTCBundlePolicyBalanced; + } else if ([bundlePolicy isEqualToString:@"max-compat"]) { + config.bundlePolicy = RTCBundlePolicyMaxCompat; + } else if ([bundlePolicy isEqualToString:@"max-bundle"]) { + config.bundlePolicy = RTCBundlePolicyMaxBundle; + } + } + + if (json[@"iceBackupCandidatePairPingInterval"] != nil && + [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { + config.iceBackupCandidatePairPingInterval = + [json[@"iceBackupCandidatePairPingInterval"] intValue]; + } + + if (json[@"iceConnectionReceivingTimeout"] != nil && + [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { + config.iceConnectionReceivingTimeout = [json[@"iceConnectionReceivingTimeout"] intValue]; + } + + if (json[@"iceServers"] != nil && [json[@"iceServers"] isKindOfClass:[NSArray class]]) { + NSMutableArray* iceServers = [NSMutableArray new]; + for (id server in json[@"iceServers"]) { + RTCIceServer* convert = [self RTCIceServer:server]; + if 
(convert != nil) { + [iceServers addObject:convert]; + } + } + config.iceServers = iceServers; + } + + if (json[@"iceTransportPolicy"] != nil && + [json[@"iceTransportPolicy"] isKindOfClass:[NSString class]]) { + NSString* iceTransportPolicy = json[@"iceTransportPolicy"]; + if ([iceTransportPolicy isEqualToString:@"all"]) { + config.iceTransportPolicy = RTCIceTransportPolicyAll; + } else if ([iceTransportPolicy isEqualToString:@"none"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNone; + } else if ([iceTransportPolicy isEqualToString:@"nohost"]) { + config.iceTransportPolicy = RTCIceTransportPolicyNoHost; + } else if ([iceTransportPolicy isEqualToString:@"relay"]) { + config.iceTransportPolicy = RTCIceTransportPolicyRelay; + } + } + + if (json[@"rtcpMuxPolicy"] != nil && [json[@"rtcpMuxPolicy"] isKindOfClass:[NSString class]]) { + NSString* rtcpMuxPolicy = json[@"rtcpMuxPolicy"]; + if ([rtcpMuxPolicy isEqualToString:@"negotiate"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyNegotiate; + } else if ([rtcpMuxPolicy isEqualToString:@"require"]) { + config.rtcpMuxPolicy = RTCRtcpMuxPolicyRequire; + } + } + + if (json[@"sdpSemantics"] != nil && [json[@"sdpSemantics"] isKindOfClass:[NSString class]]) { + NSString* sdpSemantics = json[@"sdpSemantics"]; + if ([sdpSemantics isEqualToString:@"plan-b"]) { + config.sdpSemantics = RTCSdpSemanticsPlanB; + } else if ([sdpSemantics isEqualToString:@"unified-plan"]) { + config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; + } + } + + if (json[@"maxIPv6Networks"] != nil && + [json[@"maxIPv6Networks"] isKindOfClass:[NSNumber class]]) { + NSNumber* maxIPv6Networks = json[@"maxIPv6Networks"]; + config.maxIPv6Networks = [maxIPv6Networks intValue]; + } + + // === below is private api in webrtc === + if (json[@"tcpCandidatePolicy"] != nil && + [json[@"tcpCandidatePolicy"] isKindOfClass:[NSString class]]) { + NSString* tcpCandidatePolicy = json[@"tcpCandidatePolicy"]; + if ([tcpCandidatePolicy isEqualToString:@"enabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyEnabled; + } else if ([tcpCandidatePolicy isEqualToString:@"disabled"]) { + config.tcpCandidatePolicy = RTCTcpCandidatePolicyDisabled; + } + } + + // candidateNetworkPolicy (private api) + if (json[@"candidateNetworkPolicy"] != nil && + [json[@"candidateNetworkPolicy"] isKindOfClass:[NSString class]]) { + NSString* candidateNetworkPolicy = json[@"candidateNetworkPolicy"]; + if ([candidateNetworkPolicy isEqualToString:@"all"]) { + config.candidateNetworkPolicy = RTCCandidateNetworkPolicyAll; + } else if ([candidateNetworkPolicy isEqualToString:@"low_cost"]) { + config.candidateNetworkPolicy = RTCCandidateNetworkPolicyLowCost; + } + } + + // KeyType (private api) + if (json[@"keyType"] != nil && [json[@"keyType"] isKindOfClass:[NSString class]]) { + NSString* keyType = json[@"keyType"]; + if ([keyType isEqualToString:@"RSA"]) { + config.keyType = RTCEncryptionKeyTypeRSA; + } else if ([keyType isEqualToString:@"ECDSA"]) { + config.keyType = RTCEncryptionKeyTypeECDSA; + } + } + + // continualGatheringPolicy (private api) + if (json[@"continualGatheringPolicy"] != nil && + [json[@"continualGatheringPolicy"] isKindOfClass:[NSString class]]) { + NSString* continualGatheringPolicy = json[@"continualGatheringPolicy"]; + if ([continualGatheringPolicy isEqualToString:@"gather_once"]) { + config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherOnce; + } else if ([continualGatheringPolicy isEqualToString:@"gather_continually"]) { + config.continualGatheringPolicy = 
RTCContinualGatheringPolicyGatherContinually; + } + } + + // audioJitterBufferMaxPackets (private api) + if (json[@"audioJitterBufferMaxPackets"] != nil && + [json[@"audioJitterBufferMaxPackets"] isKindOfClass:[NSNumber class]]) { + NSNumber* audioJitterBufferMaxPackets = json[@"audioJitterBufferMaxPackets"]; + config.audioJitterBufferMaxPackets = [audioJitterBufferMaxPackets intValue]; + } + + // iceConnectionReceivingTimeout (private api) + if (json[@"iceConnectionReceivingTimeout"] != nil && + [json[@"iceConnectionReceivingTimeout"] isKindOfClass:[NSNumber class]]) { + NSNumber* iceConnectionReceivingTimeout = json[@"iceConnectionReceivingTimeout"]; + config.iceConnectionReceivingTimeout = [iceConnectionReceivingTimeout intValue]; + } + + // iceBackupCandidatePairPingInterval (private api) + if (json[@"iceBackupCandidatePairPingInterval"] != nil && + [json[@"iceBackupCandidatePairPingInterval"] isKindOfClass:[NSNumber class]]) { + NSNumber* iceBackupCandidatePairPingInterval = json[@"iceBackupCandidatePairPingInterval"]; + config.iceBackupCandidatePairPingInterval = [iceBackupCandidatePairPingInterval intValue]; + } + + // audioJitterBufferFastAccelerate (private api) + if (json[@"audioJitterBufferFastAccelerate"] != nil && + [json[@"audioJitterBufferFastAccelerate"] isKindOfClass:[NSNumber class]]) { + NSNumber* audioJitterBufferFastAccelerate = json[@"audioJitterBufferFastAccelerate"]; + config.audioJitterBufferFastAccelerate = [audioJitterBufferFastAccelerate boolValue]; + } + + // pruneTurnPorts (private api) + if (json[@"pruneTurnPorts"] != nil && [json[@"pruneTurnPorts"] isKindOfClass:[NSNumber class]]) { + NSNumber* pruneTurnPorts = json[@"pruneTurnPorts"]; + config.shouldPruneTurnPorts = [pruneTurnPorts boolValue]; + } + + // presumeWritableWhenFullyRelayed (private api) + if (json[@"presumeWritableWhenFullyRelayed"] != nil && + [json[@"presumeWritableWhenFullyRelayed"] isKindOfClass:[NSNumber class]]) { + NSNumber* presumeWritableWhenFullyRelayed = json[@"presumeWritableWhenFullyRelayed"]; + config.shouldPresumeWritableWhenFullyRelayed = [presumeWritableWhenFullyRelayed boolValue]; + } + + // cryptoOptions (private api) + if (json[@"cryptoOptions"] != nil && + [json[@"cryptoOptions"] isKindOfClass:[NSDictionary class]]) { + id options = json[@"cryptoOptions"]; + BOOL srtpEnableGcmCryptoSuites = NO; + BOOL sframeRequireFrameEncryption = NO; + BOOL srtpEnableEncryptedRtpHeaderExtensions = NO; + BOOL srtpEnableAes128Sha1_32CryptoCipher = NO; + + if (options[@"enableGcmCryptoSuites"] != nil && + [options[@"enableGcmCryptoSuites"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"enableGcmCryptoSuites"]; + srtpEnableGcmCryptoSuites = [value boolValue]; + } + + if (options[@"requireFrameEncryption"] != nil && + [options[@"requireFrameEncryption"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"requireFrameEncryption"]; + sframeRequireFrameEncryption = [value boolValue]; + } + + if (options[@"enableEncryptedRtpHeaderExtensions"] != nil && + [options[@"enableEncryptedRtpHeaderExtensions"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"enableEncryptedRtpHeaderExtensions"]; + srtpEnableEncryptedRtpHeaderExtensions = [value boolValue]; + } + + if (options[@"enableAes128Sha1_32CryptoCipher"] != nil && + [options[@"enableAes128Sha1_32CryptoCipher"] isKindOfClass:[NSNumber class]]) { + NSNumber* value = options[@"enableAes128Sha1_32CryptoCipher"]; + srtpEnableAes128Sha1_32CryptoCipher = [value boolValue]; + } + + config.cryptoOptions = 
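Taken together, -RTCConfiguration: consumes a plain dictionary sent over the method channel. A representative input using only keys the parser above handles (values are illustrative):

```objc
// Illustrative input for -RTCConfiguration:, matching the keys parsed above.
NSDictionary* configuration = @{
  @"iceServers" : @[ @{@"urls" : @"stun:stun.l.google.com:19302"} ],
  @"iceTransportPolicy" : @"all",
  @"bundlePolicy" : @"max-bundle",
  @"rtcpMuxPolicy" : @"require",
  @"sdpSemantics" : @"unified-plan",
  @"continualGatheringPolicy" : @"gather_continually",
  @"cryptoOptions" : @{
    @"enableGcmCryptoSuites" : @YES,
    @"requireFrameEncryption" : @NO,
    @"enableEncryptedRtpHeaderExtensions" : @NO,
    @"enableAes128Sha1_32CryptoCipher" : @NO
  }
};
```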
[[RTCCryptoOptions alloc] + initWithSrtpEnableGcmCryptoSuites:srtpEnableGcmCryptoSuites + srtpEnableAes128Sha1_32CryptoCipher:srtpEnableAes128Sha1_32CryptoCipher + srtpEnableEncryptedRtpHeaderExtensions:srtpEnableEncryptedRtpHeaderExtensions + sframeRequireFrameEncryption:(BOOL)sframeRequireFrameEncryption]; + } + + return config; +} + +- (RTCDataChannelConfiguration*)RTCDataChannelConfiguration:(id)json { + if (!json) { + return nil; + } + if ([json isKindOfClass:[NSDictionary class]]) { + RTCDataChannelConfiguration* init = [RTCDataChannelConfiguration new]; + + if (json[@"id"]) { + [init setChannelId:(int)[json[@"id"] integerValue]]; + } + if (json[@"ordered"]) { + init.isOrdered = [json[@"ordered"] boolValue]; + } + if (json[@"maxRetransmits"]) { + init.maxRetransmits = [json[@"maxRetransmits"] intValue]; + } + if (json[@"negotiated"]) { + init.isNegotiated = [json[@"negotiated"] boolValue]; + } + if (json[@"protocol"]) { + init.protocol = json[@"protocol"]; + } + return init; + } + return nil; +} + +- (CGRect)parseRect:(NSDictionary*)rect { + return CGRectMake( + [[rect valueForKey:@"left"] doubleValue], [[rect valueForKey:@"top"] doubleValue], + [[rect valueForKey:@"width"] doubleValue], [[rect valueForKey:@"height"] doubleValue]); +} + +- (NSDictionary*)dtmfSenderToMap:(id)dtmf Id:(NSString*)Id { + return @{ + @"dtmfSenderId" : Id, + @"interToneGap" : @(dtmf.interToneGap / 1000.0), + @"duration" : @(dtmf.duration / 1000.0), + }; +} + +- (NSDictionary*)rtpParametersToMap:(RTCRtpParameters*)parameters { + NSDictionary* rtcp = @{ + @"cname" : parameters.rtcp.cname, + @"reducedSize" : @(parameters.rtcp.isReducedSize), + }; + + NSMutableArray* headerExtensions = [NSMutableArray array]; + for (RTCRtpHeaderExtension* headerExtension in parameters.headerExtensions) { + [headerExtensions addObject:@{ + @"uri" : headerExtension.uri, + @"encrypted" : @(headerExtension.encrypted), + @"id" : @(headerExtension.id), + }]; + } + + NSMutableArray* encodings = [NSMutableArray array]; + for (RTCRtpEncodingParameters* encoding in parameters.encodings) { + // non-nil values + NSMutableDictionary* obj = [@{@"active" : @(encoding.isActive)} mutableCopy]; + // optional values + if (encoding.rid != nil) + [obj setObject:encoding.rid forKey:@"rid"]; + if (encoding.minBitrateBps != nil) + [obj setObject:encoding.minBitrateBps forKey:@"minBitrate"]; + if (encoding.maxBitrateBps != nil) + [obj setObject:encoding.maxBitrateBps forKey:@"maxBitrate"]; + if (encoding.maxFramerate != nil) + [obj setObject:encoding.maxFramerate forKey:@"maxFramerate"]; + if (encoding.numTemporalLayers != nil) + [obj setObject:encoding.numTemporalLayers forKey:@"numTemporalLayers"]; + if (encoding.scaleResolutionDownBy != nil) + [obj setObject:encoding.scaleResolutionDownBy forKey:@"scaleResolutionDownBy"]; + if (encoding.ssrc != nil) + [obj setObject:encoding.ssrc forKey:@"ssrc"]; + + [encodings addObject:obj]; + } + + NSMutableArray* codecs = [NSMutableArray array]; + for (RTCRtpCodecParameters* codec in parameters.codecs) { + [codecs addObject:@{ + @"name" : codec.name, + @"payloadType" : @(codec.payloadType), + @"clockRate" : codec.clockRate, + @"numChannels" : codec.numChannels ? 
codec.numChannels : @(1), + @"parameters" : codec.parameters, + @"kind" : codec.kind + }]; + } + + NSString* degradationPreference = @"balanced"; + if (parameters.degradationPreference != nil) { + if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceMaintainFramerate) { + degradationPreference = @"maintain-framerate"; + } else if ([parameters.degradationPreference intValue] == + RTCDegradationPreferenceMaintainResolution) { + degradationPreference = @"maintain-resolution"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceBalanced) { + degradationPreference = @"balanced"; + } else if ([parameters.degradationPreference intValue] == RTCDegradationPreferenceDisabled) { + degradationPreference = @"disabled"; + } + } + + return @{ + @"transactionId" : parameters.transactionId, + @"rtcp" : rtcp, + @"headerExtensions" : headerExtensions, + @"encodings" : encodings, + @"codecs" : codecs, + @"degradationPreference" : degradationPreference, + }; +} + +- (NSString*)streamTrackStateToString:(RTCMediaStreamTrackState)state { + switch (state) { + case RTCMediaStreamTrackStateLive: + return @"live"; + case RTCMediaStreamTrackStateEnded: + return @"ended"; + default: + break; + } + return @""; +} + +- (NSDictionary*)mediaStreamToMap:(RTCMediaStream*)stream ownerTag:(NSString*)ownerTag { + NSMutableArray* audioTracks = [NSMutableArray array]; + NSMutableArray* videoTracks = [NSMutableArray array]; + + for (RTCMediaStreamTrack* track in stream.audioTracks) { + [audioTracks addObject:[self mediaTrackToMap:track]]; + } + + for (RTCMediaStreamTrack* track in stream.videoTracks) { + [videoTracks addObject:[self mediaTrackToMap:track]]; + } + + return @{ + @"streamId" : stream.streamId, + @"ownerTag" : ownerTag, + @"audioTracks" : audioTracks, + @"videoTracks" : videoTracks, + + }; +} + +- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track { + if (track == nil) + return @{}; + NSDictionary* params = @{ + @"enabled" : @(track.isEnabled), + @"id" : track.trackId, + @"kind" : track.kind, + @"label" : track.trackId, + @"readyState" : [self streamTrackStateToString:track.readyState], + @"remote" : @(YES) + }; + return params; +} + +- (NSDictionary*)rtpSenderToMap:(RTCRtpSender*)sender { + NSDictionary* params = @{ + @"senderId" : sender.senderId, + @"ownsTrack" : @(YES), + @"rtpParameters" : [self rtpParametersToMap:sender.parameters], + @"track" : [self mediaTrackToMap:sender.track], + @"dtmfSender" : [self dtmfSenderToMap:sender.dtmfSender Id:sender.senderId] + }; + return params; +} + +- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver { + NSDictionary* params = @{ + @"receiverId" : receiver.receiverId, + @"rtpParameters" : [self rtpParametersToMap:receiver.parameters], + @"track" : [self mediaTrackToMap:receiver.track], + }; + return params; +} + +- (RTCRtpTransceiver*)getRtpTransceiverById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpTransceiver* transceiver in peerConnection.transceivers) { + NSString* mid = transceiver.mid ? 
transceiver.mid : @""; + if ([mid isEqualToString:Id]) { + return transceiver; + } + } + return nil; +} + +- (RTCRtpSender*)getRtpSenderById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpSender* sender in peerConnection.senders) { + if ([sender.senderId isEqualToString:Id]) { + return sender; + } + } + return nil; +} + +- (RTCRtpReceiver*)getRtpReceiverById:(RTCPeerConnection*)peerConnection Id:(NSString*)Id { + for (RTCRtpReceiver* receiver in peerConnection.receivers) { + if ([receiver.receiverId isEqualToString:Id]) { + return receiver; + } + } + return nil; +} + +- (RTCRtpEncodingParameters*)mapToEncoding:(NSDictionary*)map { + RTCRtpEncodingParameters* encoding = [[RTCRtpEncodingParameters alloc] init]; + encoding.isActive = YES; + encoding.scaleResolutionDownBy = [NSNumber numberWithDouble:1.0]; + encoding.numTemporalLayers = [NSNumber numberWithInt:1]; +#if TARGET_OS_IPHONE + encoding.networkPriority = RTCPriorityLow; + encoding.bitratePriority = 1.0; +#endif + [encoding setRid:map[@"rid"]]; + + if (map[@"active"] != nil) { + [encoding setIsActive:((NSNumber*)map[@"active"]).boolValue]; + } + + if (map[@"minBitrate"] != nil) { + [encoding setMinBitrateBps:(NSNumber*)map[@"minBitrate"]]; + } + + if (map[@"maxBitrate"] != nil) { + [encoding setMaxBitrateBps:(NSNumber*)map[@"maxBitrate"]]; + } + + if (map[@"maxFramerate"] != nil) { + [encoding setMaxFramerate:(NSNumber*)map[@"maxFramerate"]]; + } + + if (map[@"numTemporalLayers"] != nil) { + [encoding setNumTemporalLayers:(NSNumber*)map[@"numTemporalLayers"]]; + } + + if (map[@"scaleResolutionDownBy"] != nil) { + [encoding setScaleResolutionDownBy:(NSNumber*)map[@"scaleResolutionDownBy"]]; + } + + if (map[@"scalabilityMode"] != nil) { + [encoding setScalabilityMode:(NSString*)map[@"scalabilityMode"]]; + } + + return encoding; +} + +- (RTCRtpTransceiverInit*)mapToTransceiverInit:(NSDictionary*)map { + NSArray* streamIds = map[@"streamIds"]; + NSArray* encodingsParams = map[@"sendEncodings"]; + NSString* direction = map[@"direction"]; + + RTCRtpTransceiverInit* init = [RTCRtpTransceiverInit alloc]; + + if (direction != nil) { + init.direction = [self stringToTransceiverDirection:direction]; + } + + if (streamIds != nil) { + init.streamIds = streamIds; + } + + if (encodingsParams != nil) { + NSMutableArray* sendEncodings = [[NSMutableArray alloc] init]; + for (NSDictionary* map in encodingsParams) { + [sendEncodings addObject:[self mapToEncoding:map]]; + } + [init setSendEncodings:sendEncodings]; + } + return init; +} + +- (RTCRtpMediaType)stringToRtpMediaType:(NSString*)type { + if ([type isEqualToString:@"audio"]) { + return RTCRtpMediaTypeAudio; + } else if ([type isEqualToString:@"video"]) { + return RTCRtpMediaTypeVideo; + } else if ([type isEqualToString:@"data"]) { + return RTCRtpMediaTypeData; + } + return RTCRtpMediaTypeAudio; +} + +- (RTCRtpTransceiverDirection)stringToTransceiverDirection:(NSString*)type { + if ([type isEqualToString:@"sendrecv"]) { + return RTCRtpTransceiverDirectionSendRecv; + } else if ([type isEqualToString:@"sendonly"]) { + return RTCRtpTransceiverDirectionSendOnly; + } else if ([type isEqualToString:@"recvonly"]) { + return RTCRtpTransceiverDirectionRecvOnly; + } else if ([type isEqualToString:@"inactive"]) { + return RTCRtpTransceiverDirectionInactive; + } + return RTCRtpTransceiverDirectionInactive; +} + +- (RTCRtpParameters*)updateRtpParameters:(RTCRtpParameters*)parameters + with:(NSDictionary*)newParameters { + // current encodings + NSArray* currentEncodings = 
parameters.encodings; + // new encodings + NSArray* newEncodings = [newParameters objectForKey:@"encodings"]; + + NSString* degradationPreference = [newParameters objectForKey:@"degradationPreference"]; + + if (degradationPreference != nil) { + if ([degradationPreference isEqualToString:@"maintain-framerate"]) { + parameters.degradationPreference = + [NSNumber numberWithInt:RTCDegradationPreferenceMaintainFramerate]; + } else if ([degradationPreference isEqualToString:@"maintain-resolution"]) { + parameters.degradationPreference = + [NSNumber numberWithInt:RTCDegradationPreferenceMaintainResolution]; + } else if ([degradationPreference isEqualToString:@"balanced"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceBalanced]; + } else if ([degradationPreference isEqualToString:@"disabled"]) { + parameters.degradationPreference = [NSNumber numberWithInt:RTCDegradationPreferenceDisabled]; + } + } + + for (int i = 0; i < [newEncodings count]; i++) { + RTCRtpEncodingParameters* currentParams = nil; + NSDictionary* newParams = [newEncodings objectAtIndex:i]; + NSString* rid = [newParams objectForKey:@"rid"]; + + // update by matching RID + if ([rid isKindOfClass:[NSString class]] && [rid length] != 0) { + // try to find current encoding with same rid + NSUInteger result = + [currentEncodings indexOfObjectPassingTest:^BOOL(RTCRtpEncodingParameters* _Nonnull obj, + NSUInteger idx, BOOL* _Nonnull stop) { + // stop if found object with matching rid + return (*stop = ([rid isEqualToString:obj.rid])); + }]; + + if (result != NSNotFound) { + currentParams = [currentEncodings objectAtIndex:result]; + } + } + + // fall back to update by index + if (currentParams == nil && i < [currentEncodings count]) { + currentParams = [currentEncodings objectAtIndex:i]; + } + + if (currentParams != nil) { + // update values + NSNumber* active = [newParams objectForKey:@"active"]; + if (active != nil) + currentParams.isActive = [active boolValue]; + NSNumber* maxBitrate = [newParams objectForKey:@"maxBitrate"]; + if (maxBitrate != nil) + currentParams.maxBitrateBps = maxBitrate; + NSNumber* minBitrate = [newParams objectForKey:@"minBitrate"]; + if (minBitrate != nil) + currentParams.minBitrateBps = minBitrate; + NSNumber* maxFramerate = [newParams objectForKey:@"maxFramerate"]; + if (maxFramerate != nil) + currentParams.maxFramerate = maxFramerate; + NSNumber* numTemporalLayers = [newParams objectForKey:@"numTemporalLayers"]; + if (numTemporalLayers != nil) + currentParams.numTemporalLayers = numTemporalLayers; + NSNumber* scaleResolutionDownBy = [newParams objectForKey:@"scaleResolutionDownBy"]; + if (scaleResolutionDownBy != nil) + currentParams.scaleResolutionDownBy = scaleResolutionDownBy; + } + } + + return parameters; +} + +- (NSString*)transceiverDirectionString:(RTCRtpTransceiverDirection)direction { + switch (direction) { + case RTCRtpTransceiverDirectionSendRecv: + return @"sendrecv"; + case RTCRtpTransceiverDirectionSendOnly: + return @"sendonly"; + case RTCRtpTransceiverDirectionRecvOnly: + return @"recvonly"; + case RTCRtpTransceiverDirectionInactive: + return @"inactive"; + case RTCRtpTransceiverDirectionStopped: + return @"stopped"; + break; + } + return nil; +} + +- (NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver { + NSString* mid = transceiver.mid ? 
transceiver.mid : @""; + NSDictionary* params = @{ + @"transceiverId" : mid, + @"mid" : mid, + @"direction" : [self transceiverDirectionString:transceiver.direction], + @"sender" : [self rtpSenderToMap:transceiver.sender], + @"receiver" : [self receiverToMap:transceiver.receiver] + }; + return params; +} + +- (FlutterRTCVideoRenderer*)findRendererByTrackId:(NSString*)trackId { + for (FlutterRTCVideoRenderer* renderer in self.renders.allValues) { + if (renderer.videoTrack != nil && [renderer.videoTrack.trackId isEqualToString:trackId]) { + return renderer; + } + } + return nil; +} +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/LocalAudioTrack.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/LocalAudioTrack.m new file mode 100644 index 0000000000..51d84467d7 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/LocalAudioTrack.m @@ -0,0 +1,38 @@ +#import "./include/stream_webrtc_flutter/AudioManager.h" +#import "./include/stream_webrtc_flutter/LocalAudioTrack.h" + +@implementation LocalAudioTrack { + RTCAudioTrack* _track; +} + +@synthesize audioTrack = _track; + +- (instancetype)initWithTrack:(RTCAudioTrack*)track { + self = [super init]; + if (self) { + _track = track; + } + return self; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +- (void)addRenderer:(id)renderer { + [AudioManager.sharedInstance addLocalAudioRenderer:renderer]; +} + +- (void)removeRenderer:(id)renderer { + [AudioManager.sharedInstance removeLocalAudioRenderer:renderer]; +} + +- (void)addProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter addProcessing:processor]; +} + +- (void)removeProcessing:(_Nonnull id)processor { + [AudioManager.sharedInstance.capturePostProcessingAdapter removeProcessing:processor]; +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/LocalVideoTrack.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/LocalVideoTrack.m new file mode 100644 index 0000000000..0e257b3906 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/LocalVideoTrack.m @@ -0,0 +1,47 @@ +#import "./include/stream_webrtc_flutter/LocalVideoTrack.h" + +@implementation LocalVideoTrack { + RTCVideoTrack* _track; + VideoProcessingAdapter* _processing; +} + +@synthesize videoTrack = _track; +@synthesize processing = _processing; + +- (instancetype)initWithTrack:(RTCVideoTrack*)track + videoProcessing:(VideoProcessingAdapter*)processing { + self = [super init]; + if (self) { + _track = track; + _processing = processing; + } + return self; +} + +- (instancetype)initWithTrack:(RTCVideoTrack*)track { + return [self initWithTrack:track videoProcessing:nil]; +} + +- (RTCMediaStreamTrack*)track { + return _track; +} + +/** Register a renderer that will render all frames received on this track. */ +- (void)addRenderer:(id)renderer { + [_track addRenderer:renderer]; +} + +/** Deregister a renderer. 
*/ +- (void)removeRenderer:(id)renderer { + [_track removeRenderer:renderer]; +} + +- (void)addProcessing:(id)processor { + [_processing addProcessing:processor]; +} + +- (void)removeProcessing:(id)processor { + [_processing removeProcessing:processor]; +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/ProcessorProvider.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/ProcessorProvider.m new file mode 100644 index 0000000000..c59393a801 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/ProcessorProvider.m @@ -0,0 +1,23 @@ +#import "./include/stream_webrtc_flutter/ProcessorProvider.h" + +@implementation ProcessorProvider + +static NSMutableDictionary<NSString*, NSObject*>* processorMap; + ++ (void)initialize { + processorMap = [[NSMutableDictionary alloc] init]; +} + ++ (NSObject*)getProcessor:(NSString*)name { + return [processorMap objectForKey:name]; +} + ++ (void)addProcessor:(NSObject*)processor forName:(NSString*)name { + [processorMap setObject:processor forKey:name]; +} + ++ (void)removeProcessor:(NSString*)name { + [processorMap removeObjectForKey:name]; +} + +@end \ No newline at end of file diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/VideoEffectProcessor.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/VideoEffectProcessor.m new file mode 100644 index 0000000000..b345f398d5 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/VideoEffectProcessor.m @@ -0,0 +1,21 @@ +#import <Foundation/Foundation.h> +#import "./include/stream_webrtc_flutter/VideoEffectProcessor.h" + +@implementation VideoEffectProcessor +- (instancetype)initWithProcessors: + (NSArray*)videoFrameProcessors + videoSource:(RTCVideoSource*)videoSource { + self = [super init]; + _videoFrameProcessors = videoFrameProcessors; + _videoSource = videoSource; + return self; +} +- (void)capturer:(nonnull RTCVideoCapturer*)capturer + didCaptureVideoFrame:(nonnull RTCVideoFrame*)frame { + RTCVideoFrame* processedFrame = frame; + for (id processor in _videoFrameProcessors) { + processedFrame = [processor capturer:capturer didCaptureVideoFrame:processedFrame]; + } + [self.videoSource capturer:capturer didCaptureVideoFrame:processedFrame]; +} +@end \ No newline at end of file diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/VideoProcessingAdapter.m b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/VideoProcessingAdapter.m new file mode 100644 index 0000000000..a127ab5850 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/VideoProcessingAdapter.m @@ -0,0 +1,55 @@ +#import <os/lock.h> +#import "./include/stream_webrtc_flutter/VideoProcessingAdapter.h" + +@implementation VideoProcessingAdapter { + RTCVideoSource* _videoSource; + CGSize _frameSize; + NSArray* _processors; + os_unfair_lock _lock; +} + +- (instancetype)initWithRTCVideoSource:(RTCVideoSource*)source { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + _videoSource = source; + _processors = [NSArray new]; + } + return self; +} + +- (RTCVideoSource* _Nonnull)source { + return _videoSource; +} + +- (void)addProcessing:(id)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors arrayByAddingObject:processor]; + os_unfair_lock_unlock(&_lock); +} + +- (void)removeProcessing:(id)processor { + os_unfair_lock_lock(&_lock); + _processors = [_processors + filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(id evaluatedObject, + NSDictionary* bindings) { + return evaluatedObject != processor; + }]]; + 
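ProcessorProvider is the name-to-processor registry that mediaStreamTrackSetVideoEffects:names: consults earlier in the plugin; a registered processor must respond to capturer:didCaptureVideoFrame: as VideoEffectProcessor expects. A usage sketch (myEffectProcessor and trackId are placeholders):

```objc
// Sketch: publish an effect under a name, enable it on a track, then tear it down.
[ProcessorProvider addProcessor:myEffectProcessor forName:@"background-blur"];
[self mediaStreamTrackSetVideoEffects:trackId names:@[ @"background-blur" ]];
[ProcessorProvider removeProcessor:@"background-blur"];
```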
os_unfair_lock_unlock(&_lock); +} + +- (void)setSize:(CGSize)size { + _frameSize = size; +} + +- (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer + didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + os_unfair_lock_lock(&_lock); + for (id processor in _processors) { + frame = [processor onFrame:frame]; + } + [_videoSource capturer:capturer didCaptureVideoFrame:frame]; + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/audio_sink_bridge.cpp b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/audio_sink_bridge.cpp new file mode 100644 index 0000000000..737b8f9ff7 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/audio_sink_bridge.cpp @@ -0,0 +1,27 @@ +#include "./include/stream_webrtc_flutter/media_stream_interface.h" +#include "./include/stream_webrtc_flutter/FlutterRTCAudioSink-Interface.h" + +class AudioSinkBridge : public webrtc::AudioTrackSinkInterface { +private: + void* sink; + +public: + AudioSinkBridge(void* sink1) { + sink = sink1; + } + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) override + { + RTCAudioSinkCallback(sink, + audio_data, + bits_per_sample, + sample_rate, + number_of_channels, + number_of_frames + ); + }; + int NumPreferredChannels() const override { return 1; } +}; diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioManager.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioManager.h new file mode 100644 index 0000000000..f6d9af4a38 --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioManager.h @@ -0,0 +1,19 @@ +#import <Foundation/Foundation.h> +#import <WebRTC/WebRTC.h> +#import "AudioProcessingAdapter.h" + +@interface AudioManager : NSObject + +@property(nonatomic, strong) RTCDefaultAudioProcessingModule* _Nonnull audioProcessingModule; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull capturePostProcessingAdapter; + +@property(nonatomic, strong) AudioProcessingAdapter* _Nonnull renderPreProcessingAdapter; + ++ (_Nonnull instancetype)sharedInstance; + +- (void)addLocalAudioRenderer:(nonnull id)renderer; + +- (void)removeLocalAudioRenderer:(nonnull id)renderer; + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioProcessingAdapter.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioProcessingAdapter.h new file mode 100644 index 0000000000..1e93b7dd7b --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioProcessingAdapter.h @@ -0,0 +1,26 @@ +#import <Foundation/Foundation.h> +#import <WebRTC/WebRTC.h> + +@protocol ExternalAudioProcessingDelegate + +- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz channels:(size_t)channels; + +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) * _Nonnull)audioBuffer; + +- (void)audioProcessingRelease; + +@end + +@interface AudioProcessingAdapter : NSObject + +- (nonnull instancetype)init; + +- (void)addProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor; + +- (void)removeProcessing:(id<ExternalAudioProcessingDelegate> _Nonnull)processor; + +- (void)addAudioRenderer:(nonnull id)renderer; + +- (void)removeAudioRenderer:(nonnull id)renderer; + +@end diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioUtils.h 
b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioUtils.h new file mode 100644 index 0000000000..7364ce69cc --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/AudioUtils.h @@ -0,0 +1,15 @@ +#if TARGET_OS_IPHONE + +#import + +@interface AudioUtils : NSObject ++ (void)ensureAudioSessionWithRecording:(BOOL)recording; +// needed for wired headphones to use headphone mic ++ (BOOL)selectAudioInput:(AVAudioSessionPort)type; ++ (void)setSpeakerphoneOn:(BOOL)enable; ++ (void)setSpeakerphoneOnButPreferBluetooth; ++ (void)deactiveRtcAudioSession; ++ (void)setAppleAudioConfiguration:(NSDictionary*)configuration; +@end + +#endif diff --git a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.h similarity index 93% rename from ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.h index cefdbfcb40..f1d1d7f336 100644 --- a/ios/Classes/Broadcast/FlutterBroadcastScreenCapturer.h +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterBroadcastScreenCapturer.h @@ -6,7 +6,7 @@ // #import -#import +#import NS_ASSUME_NONNULL_BEGIN extern NSString* const kRTCScreensharingSocketFD; diff --git a/ios/Classes/Broadcast/FlutterSocketConnection.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.h similarity index 100% rename from ios/Classes/Broadcast/FlutterSocketConnection.h rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterSocketConnection.h diff --git a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.h similarity index 92% rename from ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.h index 230616e207..5279be2e6a 100644 --- a/ios/Classes/Broadcast/FlutterSocketConnectionFrameReader.h +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/Broadcast/FlutterSocketConnectionFrameReader.h @@ -6,7 +6,7 @@ // #import -#import +#import NS_ASSUME_NONNULL_BEGIN diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/CameraUtils.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/CameraUtils.h new file mode 100644 index 0000000000..d6859d88ee --- /dev/null +++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/CameraUtils.h @@ -0,0 +1,43 @@ +#import +#import "FlutterWebRTCPlugin.h" + +@interface FlutterWebRTCPlugin (CameraUtils) + +- (void)mediaStreamTrackHasTorch:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetTorch:(nonnull RTCMediaStreamTrack*)track + torch:(BOOL)torch + result:(nonnull FlutterResult)result; + +- (void)mediaStreamTrackSetZoom:(nonnull RTCMediaStreamTrack*)track + zoomLevel:(double)zoomLevel + result:(nonnull FlutterResult)result; + +- 
(void)mediaStreamTrackSetFocusMode:(nonnull RTCMediaStreamTrack*)track
+                          focusMode:(nonnull NSString*)focusMode
+                             result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetFocusPoint:(nonnull RTCMediaStreamTrack*)track
+                           focusPoint:(nonnull NSDictionary*)focusPoint
+                               result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposureMode:(nonnull RTCMediaStreamTrack*)track
+                            exposureMode:(nonnull NSString*)exposureMode
+                                  result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSetExposurePoint:(nonnull RTCMediaStreamTrack*)track
+                            exposurePoint:(nonnull NSDictionary*)exposurePoint
+                                   result:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackSwitchCamera:(nonnull RTCMediaStreamTrack*)track result:(nonnull FlutterResult)result;
+
+- (NSInteger)selectFpsForFormat:(nonnull AVCaptureDeviceFormat*)format targetFps:(NSInteger)targetFps;
+
+- (nullable AVCaptureDeviceFormat*)selectFormatForDevice:(nonnull AVCaptureDevice*)device
+                                              targetWidth:(NSInteger)targetWidth
+                                             targetHeight:(NSInteger)targetHeight;
+
+- (nullable AVCaptureDevice*)findDeviceForPosition:(AVCaptureDevicePosition)position;
+
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRPScreenRecorder.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRPScreenRecorder.h
new file mode 100644
index 0000000000..8659c88ac8
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRPScreenRecorder.h
@@ -0,0 +1,13 @@
+#if TARGET_OS_IPHONE
+#import
+@interface FlutterRPScreenRecorder : RTCVideoCapturer
+
+- (void)startCapture;
+
+// Stops the capture session asynchronously and notifies callback on completion.
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
+
+- (void)stopCapture;
+
+@end
+#endif
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCAudioSink-Interface.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCAudioSink-Interface.h
new file mode 100644
index 0000000000..8a0352333d
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCAudioSink-Interface.h
@@ -0,0 +1,6 @@
+void RTCAudioSinkCallback (void *object,
+                           const void *audio_data,
+                           int bits_per_sample,
+                           int sample_rate,
+                           size_t number_of_channels,
+                           size_t number_of_frames);
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCAudioSink.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCAudioSink.h
new file mode 100644
index 0000000000..5173b27076
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCAudioSink.h
@@ -0,0 +1,14 @@
+#import
+#import
+#import
+
+@interface FlutterRTCAudioSink : NSObject
+
+@property(nonatomic, copy) void (^bufferCallback)(CMSampleBufferRef);
+@property(nonatomic) CMAudioFormatDescriptionRef format;
+
+- (instancetype)initWithAudioTrack:(RTCAudioTrack*)audio;
+
+- (void)close;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCDataChannel.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCDataChannel.h
new file mode 100644
index 0000000000..2b1d685274
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCDataChannel.h
@@ -0,0 +1,30 @@
+#import "FlutterWebRTCPlugin.h"
+
+@interface RTCDataChannel (Flutter)
+@property(nonatomic, strong, nonnull) NSString* peerConnectionId;
+@property(nonatomic, strong, nonnull) NSString* flutterChannelId;
+@property(nonatomic, strong, nullable) FlutterEventSink eventSink;
+@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel;
+@property(nonatomic, strong, nullable) NSArray* eventQueue;
+@end
+
+@interface FlutterWebRTCPlugin (RTCDataChannel)
+
+- (void)createDataChannel:(nonnull NSString*)peerConnectionId
+                    label:(nonnull NSString*)label
+                   config:(nonnull RTCDataChannelConfiguration*)config
+                messenger:(nonnull NSObject*)messenger
+                   result:(nonnull FlutterResult)result;
+
+- (void)dataChannelClose:(nonnull NSString*)peerConnectionId
+           dataChannelId:(nonnull NSString*)dataChannelId;
+
+- (void)dataChannelSend:(nonnull NSString*)peerConnectionId
+          dataChannelId:(nonnull NSString*)dataChannelId
+                   data:(nonnull NSString*)data
+                   type:(nonnull NSString*)type;
+
+- (void)dataChannelGetBufferedAmount:(nonnull NSString*)peerConnectionId
+                       dataChannelId:(nonnull NSString*)dataChannelId
+                              result:(nonnull FlutterResult)result;
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCDesktopCapturer.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCDesktopCapturer.h
new file mode 100644
index 0000000000..985ef4095d
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCDesktopCapturer.h
@@ -0,0 +1,22 @@
+#if TARGET_OS_IPHONE
+#import
+#elif TARGET_OS_OSX
+#import
+#endif
+#import
+#import
+
+#import "FlutterWebRTCPlugin.h"
+
+@interface FlutterWebRTCPlugin (DesktopCapturer)
+
+- (void)getDisplayMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
+
+- (void)getDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result;
+
+- (void)updateDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result;
+
+- (void)getDesktopSourceThumbnail:(nonnull NSDictionary*)argsMap
+                           result:(nonnull FlutterResult)result;
+
+@end
\ No newline at end of file
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCFrameCapturer.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCFrameCapturer.h
new file mode 100644
index 0000000000..83e502f8dc
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCFrameCapturer.h
@@ -0,0 +1,17 @@
+#import
+
+#if TARGET_OS_IPHONE
+#import
+#elif TARGET_OS_OSX
+#import
+#endif
+
+@interface FlutterRTCFrameCapturer : NSObject
+
+- (instancetype)initWithTrack:(RTCVideoTrack*)track
+                       toPath:(NSString*)path
+                       result:(FlutterResult)result;
+
++ (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCFrameCryptor.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCFrameCryptor.h
new file mode 100644
index 0000000000..abff7367d9
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCFrameCryptor.h
@@ -0,0 +1,48 @@
+#if TARGET_OS_IPHONE
+#import
+#elif TARGET_OS_OSX
+#import
+#endif
+
+#import
+
+#import "FlutterWebRTCPlugin.h"
+
+@interface RTCFrameCryptor (Flutter)
+@property(nonatomic, strong, nullable) FlutterEventSink eventSink;
+@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel;
+@end
+
+@interface FlutterWebRTCPlugin (FrameCryptor)
+
+- (void)handleFrameCryptorMethodCall:(nonnull FlutterMethodCall*)call
+                              result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorFactoryCreateFrameCryptor:(nonnull NSDictionary*)constraints
+                                       result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorSetKeyIndex:(nonnull NSDictionary*)constraints
+                         result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorGetKeyIndex:(nonnull NSDictionary*)constraints
+                         result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorSetEnabled:(nonnull NSDictionary*)constraints
+                        result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorGetEnabled:(nonnull NSDictionary*)constraints
+                        result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorDispose:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
+
+- (void)frameCryptorFactoryCreateKeyProvider:(nonnull NSDictionary*)constraints
+                                      result:(nonnull FlutterResult)result;
+
+- (void)keyProviderSetKey:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
+
+- (void)keyProviderRatchetKey:(nonnull NSDictionary*)constraints
+                       result:(nonnull FlutterResult)result;
+
+- (void)keyProviderDispose:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCMediaRecorder.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCMediaRecorder.h
new file mode 100644
index 0000000000..5180b632fb
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCMediaRecorder.h
@@ -0,0 +1,24 @@
+#if TARGET_OS_IPHONE
+#import
+#elif TARGET_OS_OSX
+#import
+#endif
+#import
+
+@import Foundation;
+@import AVFoundation;
+
+@interface FlutterRTCMediaRecorder : NSObject
+
+@property(nonatomic, strong) RTCVideoTrack* _Nullable videoTrack;
+@property(nonatomic, strong) NSURL* _Nonnull output;
+@property(nonatomic, strong) AVAssetWriter* _Nullable assetWriter;
+@property(nonatomic, strong) AVAssetWriterInput* _Nullable writerInput;
+
+- (instancetype _Nonnull)initWithVideoTrack:(RTCVideoTrack* _Nullable)video
+                                 audioTrack:(RTCAudioTrack* _Nullable)audio
+                                 outputFile:(NSURL* _Nonnull)out;
+
+- (void)stop:(_Nonnull FlutterResult)result;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCMediaStream.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCMediaStream.h
new file mode 100644
index 0000000000..91f19be617
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCMediaStream.h
@@ -0,0 +1,27 @@
+#import
+#import "FlutterWebRTCPlugin.h"
+
+@interface RTCMediaStreamTrack (Flutter)
+@property(nonatomic, strong, nonnull) id settings;
+@end
+
+@interface FlutterWebRTCPlugin (RTCMediaStream)
+
+- (RTCVideoTrack* _Nullable)cloneTrack:(nonnull NSString*)trackId;
+
+- (void)getUserMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
+
+- (void)createLocalMediaStream:(nonnull FlutterResult)result;
+
+- (void)getSources:(nonnull FlutterResult)result;
+
+- (void)mediaStreamTrackCaptureFrame:(nonnull RTCMediaStreamTrack*)track
+                              toPath:(nonnull NSString*)path
+                              result:(nonnull FlutterResult)result;
+
+- (void)selectAudioInput:(nonnull NSString*)deviceId result:(nullable FlutterResult)result;
+
+- (void)selectAudioOutput:(nonnull NSString*)deviceId result:(nullable FlutterResult)result;
+
+- (void)triggeriOSAudioRouteSelectionUI:(nonnull FlutterResult)result;
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCPeerConnection.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCPeerConnection.h
new file mode 100644
index 0000000000..bd86076209
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCPeerConnection.h
@@ -0,0 +1,65 @@
+#import "FlutterWebRTCPlugin.h"
+
+@interface RTCPeerConnection (Flutter)
+@property(nonatomic, strong, nonnull) NSMutableDictionary* dataChannels;
+@property(nonatomic, strong, nonnull)
+    NSMutableDictionary* remoteStreams;
+@property(nonatomic, strong, nonnull)
+    NSMutableDictionary* remoteTracks;
+@property(nonatomic, strong, nonnull) NSString* flutterId;
+@property(nonatomic, strong, nullable) FlutterEventSink eventSink;
+@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel;
+@end
+
+@interface FlutterWebRTCPlugin (RTCPeerConnection)
+
+- (void)peerConnectionCreateOffer:(nonnull NSDictionary*)constraints
+                   peerConnection:(nonnull RTCPeerConnection*)peerConnection
+                           result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionCreateAnswer:(nonnull NSDictionary*)constraints
+                    peerConnection:(nonnull RTCPeerConnection*)peerConnection
+                            result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionSetLocalDescription:(nonnull RTCSessionDescription*)sdp
+                           peerConnection:(nonnull RTCPeerConnection*)peerConnection
+                                   result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionSetRemoteDescription:(nonnull RTCSessionDescription*)sdp
+                            peerConnection:(nonnull RTCPeerConnection*)peerConnection
+                                    result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionAddICECandidate:(nonnull RTCIceCandidate*)candidate
+                       peerConnection:(nonnull RTCPeerConnection*)peerConnection
+                               result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection
+                        result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID
+                          peerConnection:(nonnull RTCPeerConnection*)peerConnection
+                                  result:(nonnull FlutterResult)result;
+
+- (nonnull RTCMediaConstraints*)parseMediaConstraints:(nonnull NSDictionary*)constraints;
+
+- (void)peerConnectionSetConfiguration:(nonnull RTCConfiguration*)configuration
+                        peerConnection:(nonnull RTCPeerConnection*)peerConnection;
+
+- (void)peerConnectionGetRtpReceiverCapabilities:(nonnull NSDictionary*)argsMap
+                                          result:(nonnull FlutterResult)result;
+
+- (void)peerConnectionGetRtpSenderCapabilities:(nonnull NSDictionary*)argsMap
+                                        result:(nonnull FlutterResult)result;
+
+- (void)transceiverSetCodecPreferences:(nonnull NSDictionary*)argsMap
+                                result:(nonnull FlutterResult)result;
+
+- (nullable NSString*)stringForSignalingState:(RTCSignalingState)state;
+
+- (nullable NSString*)stringForICEGatheringState:(RTCIceGatheringState)state;
+
+- (nullable NSString*)stringForICEConnectionState:(RTCIceConnectionState)state;
+
+- (nullable NSString*)stringForPeerConnectionState:(RTCPeerConnectionState)state;
+
+@end
diff --git a/ios/Classes/FlutterRTCVideoPlatformView.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformView.h
similarity index 89%
rename from ios/Classes/FlutterRTCVideoPlatformView.h
rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformView.h
index 01e1215ea8..49f9b705e8 100644
--- a/ios/Classes/FlutterRTCVideoPlatformView.h
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformView.h
@@ -4,7 +4,7 @@
 #import
 #endif
 
-#import
+#import
 
 @interface FlutterRTCVideoPlatformView : UIView
 
diff --git a/ios/Classes/FlutterRTCVideoPlatformViewController.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.h
similarity index 95%
rename from ios/Classes/FlutterRTCVideoPlatformViewController.h
rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.h
index cb15ed7b35..f5f27f53d3 100644
--- a/ios/Classes/FlutterRTCVideoPlatformViewController.h
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewController.h
@@ -4,7 +4,7 @@
 #import
 #endif
 
-#import
+#import
 
 @interface FlutterRTCVideoPlatformViewController : NSObject
 
diff --git a/ios/Classes/FlutterRTCVideoPlatformViewFactory.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.h
similarity index 100%
rename from ios/Classes/FlutterRTCVideoPlatformViewFactory.h
rename to ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoPlatformViewFactory.h
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoRenderer.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoRenderer.h
new file mode 100644
index 0000000000..6aa40a70b8
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterRTCVideoRenderer.h
@@ -0,0 +1,33 @@
+#import "FlutterWebRTCPlugin.h"
+
+#import
+#import
+#import
+#import
+
+@interface FlutterRTCVideoRenderer
+    : NSObject
+
+/**
+ * The {@link RTCVideoTrack}, if any, which this instance renders.
+ */
+@property(nonatomic, strong) RTCVideoTrack* videoTrack;
+@property(nonatomic) int64_t textureId;
+@property(nonatomic, weak) id registry;
+@property(nonatomic, strong) FlutterEventSink eventSink;
+
+- (instancetype)initWithTextureRegistry:(id)registry
+                              messenger:(NSObject*)messenger;
+
+- (void)dispose;
+
+@end
+
+@interface FlutterWebRTCPlugin (FlutterVideoRendererManager)
+
+- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id)registry
+                                            messenger:(NSObject*)messenger;
+
+- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterWebRTCPlugin.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterWebRTCPlugin.h
new file mode 100644
index 0000000000..7ba9fb8a56
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/FlutterWebRTCPlugin.h
@@ -0,0 +1,102 @@
+#if TARGET_OS_IPHONE
+#import
+#elif TARGET_OS_OSX
+#import
+#endif
+
+#import
+#import
+#import "LocalTrack.h"
+
+@class VideoEffectProcessor;
+@class FlutterRTCVideoRenderer;
+@class FlutterRTCFrameCapturer;
+@class FlutterRTCMediaRecorder;
+@class AudioManager;
+
+void postEvent(FlutterEventSink _Nonnull sink, id _Nullable event);
+
+typedef void (^CompletionHandler)(void);
+
+typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
+
+@interface FlutterWebRTCPlugin : NSObject
+
+@property(nonatomic, strong) RTCPeerConnectionFactory* _Nullable peerConnectionFactory;
+@property(nonatomic, strong)
+    NSMutableDictionary* _Nullable peerConnections;
+@property(nonatomic, strong)
+    NSMutableDictionary* _Nullable localStreams;
+@property(nonatomic, strong) NSMutableDictionary>* _Nullable localTracks;
+@property(nonatomic, strong)
+    NSMutableDictionary* _Nullable renders;
+@property(nonatomic, strong) NSMutableDictionary* recorders;
+@property(nonatomic, strong)
+    NSMutableDictionary* _Nullable videoCapturerStopHandlers;
+
+@property(nonatomic, strong)
+    NSMutableDictionary* _Nullable frameCryptors;
+@property(nonatomic, strong)
+    NSMutableDictionary* _Nullable keyProviders;
+
+#if TARGET_OS_IPHONE
+@property(nonatomic, retain)
+    UIViewController* _Nullable viewController; /*for broadcast or ReplayKit */
+#endif
+
+@property(nonatomic, strong) FlutterEventSink _Nullable eventSink;
+@property(nonatomic, strong) NSObject* _Nonnull messenger;
+@property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer;
+@property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer;
+#if TARGET_OS_IPHONE
+@property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput;
+#endif
+@property(nonatomic, strong) VideoEffectProcessor* _Nullable videoEffectProcessor;
+
+@property(nonatomic, strong) NSString* _Nonnull focusMode;
+@property(nonatomic, strong) NSString* _Nonnull exposureMode;
+
+@property(nonatomic) BOOL _usingFrontCamera;
+@property(nonatomic) NSInteger _lastTargetWidth;
+@property(nonatomic) NSInteger _lastTargetHeight;
+@property(nonatomic) NSInteger _lastTargetFps;
+
+@property(nonatomic, strong) AudioManager* _Nullable audioManager;
+
+- (void)mediaStreamTrackSetVideoEffects:(nonnull NSString*)trackId
+                                  names:(nonnull NSArray*)names;
+- (RTCMediaStream* _Nullable)streamForId:(NSString* _Nonnull)streamId
+                        peerConnectionId:(NSString* _Nullable)peerConnectionId;
+- (RTCMediaStreamTrack* _Nullable)trackForId:(NSString* _Nonnull)trackId
+                            peerConnectionId:(NSString* _Nullable)peerConnectionId;
+- (NSString*)audioTrackIdForVideoTrackId:(NSString*)videoTrackId;
+- (RTCRtpTransceiver* _Nullable)getRtpTransceiverById:(RTCPeerConnection* _Nonnull)peerConnection
+                                                   Id:(NSString* _Nullable)Id;
+- (NSDictionary* _Nullable)mediaStreamToMap:(RTCMediaStream* _Nonnull)stream
+                                   ownerTag:(NSString* _Nullable)ownerTag;
+- (NSDictionary* _Nullable)mediaTrackToMap:(RTCMediaStreamTrack* _Nonnull)track;
+- (NSDictionary* _Nullable)receiverToMap:(RTCRtpReceiver* _Nonnull)receiver;
+- (NSDictionary* _Nullable)transceiverToMap:(RTCRtpTransceiver* _Nonnull)transceiver;
+
+- (RTCMediaStreamTrack* _Nullable)remoteTrackForId:(NSString* _Nonnull)trackId;
+
+- (BOOL)hasLocalAudioTrack;
+- (void)ensureAudioSession;
+- (void)deactiveRtcAudioSession;
+
+- (RTCRtpReceiver* _Nullable)getRtpReceiverById:(RTCPeerConnection* _Nonnull)peerConnection
+                                             Id:(NSString* _Nonnull)Id;
+- (RTCRtpSender* _Nullable)getRtpSenderById:(RTCPeerConnection* _Nonnull)peerConnection
+                                         Id:(NSString* _Nonnull)Id;
+
++ (FlutterWebRTCPlugin* _Nullable)sharedSingleton;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalAudioTrack.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalAudioTrack.h
new file mode 100644
index 0000000000..f7e110e4e7
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalAudioTrack.h
@@ -0,0 +1,19 @@
+#import
+#import "AudioProcessingAdapter.h"
+#import "LocalTrack.h"
+
+@interface LocalAudioTrack : NSObject
+
+- (_Nonnull instancetype)initWithTrack:(RTCAudioTrack* _Nonnull)track;
+
+@property(nonatomic, strong) RTCAudioTrack* _Nonnull audioTrack;
+
+- (void)addRenderer:(_Nonnull id)renderer;
+
+- (void)removeRenderer:(_Nonnull id)renderer;
+
+- (void)addProcessing:(_Nonnull id)processor;
+
+- (void)removeProcessing:(_Nonnull id)processor;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalTrack.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalTrack.h
new file mode 100644
index 0000000000..34f2e1e29e
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalTrack.h
@@ -0,0 +1,7 @@
+#import
+
+@protocol LocalTrack
+
+- (RTCMediaStreamTrack*)track;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalVideoTrack.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalVideoTrack.h
new file mode 100644
index 0000000000..e0acba6b45
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/LocalVideoTrack.h
@@ -0,0 +1,24 @@
+#import
+#import "LocalTrack.h"
+#import "VideoProcessingAdapter.h"
+
+@interface LocalVideoTrack : NSObject
+
+- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track;
+
+- (_Nonnull instancetype)initWithTrack:(RTCVideoTrack* _Nonnull)track
+                       videoProcessing:(VideoProcessingAdapter* _Nullable)processing;
+
+@property(nonatomic, strong) RTCVideoTrack* _Nonnull videoTrack;
+
+@property(nonatomic, strong) VideoProcessingAdapter* _Nonnull processing;
+
+- (void)addRenderer:(_Nonnull id)renderer;
+
+- (void)removeRenderer:(_Nonnull id)renderer;
+
+- (void)addProcessing:(_Nonnull id)processor;
+
+- (void)removeProcessing:(_Nonnull id)processor;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/ProcessorProvider.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/ProcessorProvider.h
new file mode 100644
index 0000000000..702ccc5e87
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/ProcessorProvider.h
@@ -0,0 +1,10 @@
+#import "VideoFrameProcessor.h"
+
+@interface ProcessorProvider : NSObject
+
++ (NSObject *)getProcessor:(NSString *)name;
++ (void)addProcessor:(NSObject *)processor
+             forName:(NSString *)name;
++ (void)removeProcessor:(NSString *)name;
+
+@end
\ No newline at end of file
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/RTCAudioSource+Private.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/RTCAudioSource+Private.h
new file mode 100644
index 0000000000..755d9ca0c6
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/RTCAudioSource+Private.h
@@ -0,0 +1,14 @@
+#ifdef __cplusplus
+#import "StreamWebRTC/RTCAudioSource.h"
+#include "media_stream_interface.h"
+
+@interface RTCAudioSource ()
+
+/**
+ * The AudioSourceInterface object passed to this RTCAudioSource during
+ * construction.
+ */
+@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioSource;
+
+@end
+#endif
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoEffectProcessor.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoEffectProcessor.h
new file mode 100644
index 0000000000..5efbb300b0
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoEffectProcessor.h
@@ -0,0 +1,13 @@
+#import
+
+#import "VideoFrameProcessor.h"
+
+@interface VideoEffectProcessor : NSObject
+
+@property (nonatomic, strong) NSArray *> *videoFrameProcessors;
+@property (nonatomic, strong) RTCVideoSource *videoSource;
+
+- (instancetype)initWithProcessors:(NSArray *> *)videoFrameProcessors
+                       videoSource:(RTCVideoSource *)videoSource;
+
+@end
\ No newline at end of file
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoFrameProcessor.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoFrameProcessor.h
new file mode 100644
index 0000000000..cf06e9fe1a
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoFrameProcessor.h
@@ -0,0 +1,7 @@
+#import
+#import
+
+@protocol VideoFrameProcessorDelegate
+- (RTCVideoFrame *)capturer:(RTCVideoCapturer *)capturer
+       didCaptureVideoFrame:(RTCVideoFrame *)frame;
+@end
\ No newline at end of file
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoProcessingAdapter.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoProcessingAdapter.h
new file mode 100644
index 0000000000..9fba86edec
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/VideoProcessingAdapter.h
@@ -0,0 +1,18 @@
+#import
+#import
+
+@protocol ExternalVideoProcessingDelegate
+- (RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)onFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * _Nonnull)frame;
+@end
+
+@interface VideoProcessingAdapter : NSObject
+
+- (_Nonnull instancetype)initWithRTCVideoSource:(RTCVideoSource* _Nonnull)source;
+
+- (void)addProcessing:(_Nonnull id)processor;
+
+- (void)removeProcessing:(_Nonnull id)processor;
+
+- (RTCVideoSource* _Nonnull)source;
+
+@end
diff --git a/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/media_stream_interface.h b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/media_stream_interface.h
new file mode 100644
index 0000000000..e25553f9fa
--- /dev/null
+++ b/ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/include/stream_webrtc_flutter/media_stream_interface.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/// Source https://webrtc.googlesource.com/src/+/master/api/media_stream_interface.h
+
+#ifdef __cplusplus
+#ifndef API_MEDIA_STREAM_INTERFACE_H_
+#define API_MEDIA_STREAM_INTERFACE_H_
+
+#include
+#include
+#include
+#include
+#include
+
+namespace webrtc {
+
+  // Generic observer interface.
+  class ObserverInterface {
+   public:
+    virtual void OnChanged() = 0;
+   protected:
+    virtual ~ObserverInterface() {}
+  };
+  class NotifierInterface {
+   public:
+    virtual void RegisterObserver(ObserverInterface* observer) = 0;
+    virtual void UnregisterObserver(ObserverInterface* observer) = 0;
+    virtual ~NotifierInterface() {}
+  };
+
+  enum class RefCountReleaseStatus { kDroppedLastRef, kOtherRefsRemained };
+  // Interfaces where refcounting is part of the public api should
+  // inherit this abstract interface. The implementation of these
+  // methods is usually provided by the RefCountedObject template class,
+  // applied as a leaf in the inheritance tree.
+  class RefCountInterface {
+   public:
+    virtual void AddRef() const = 0;
+    virtual RefCountReleaseStatus Release() const = 0;
+    // Non-public destructor, because Release() has exclusive responsibility for
+    // destroying the object.
+   protected:
+    virtual ~RefCountInterface() {}
+  };
+
+  // Base class for sources. A MediaStreamTrack has an underlying source that
+  // provides media. A source can be shared by multiple tracks.
+  class MediaSourceInterface : public RefCountInterface,
+                               public NotifierInterface {
+   public:
+    enum SourceState { kInitializing, kLive, kEnded, kMuted };
+    virtual SourceState state() const = 0;
+    virtual bool remote() const = 0;
+   protected:
+    ~MediaSourceInterface() override = default;
+  };
+
+  // Interface for receiving audio data from a AudioTrack.
+  class AudioTrackSinkInterface {
+   public:
+    virtual void OnData(const void* audio_data,
+                        int bits_per_sample,
+                        int sample_rate,
+                        size_t number_of_channels,
+                        size_t number_of_frames) {
+
+    };
+    virtual void OnData(const void* audio_data,
+                        int bits_per_sample,
+                        int sample_rate,
+                        size_t number_of_channels,
+                        size_t number_of_frames,
+                        void* absolute_capture_timestamp_ms) {
+      // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one
+      // pure virtual.
+      return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
+                    number_of_frames);
+    }
+    virtual int NumPreferredChannels() const { return -1; }
+   protected:
+    virtual ~AudioTrackSinkInterface() {}
+  };
+  // AudioSourceInterface is a reference counted source used for AudioTracks.
+  // The same source can be used by multiple AudioTracks.
+  class AudioSourceInterface : public MediaSourceInterface {
+   public:
+    class AudioObserver {
+     public:
+      virtual void OnSetVolume(double volume) = 0;
+     protected:
+      virtual ~AudioObserver() {}
+    };
+    // TODO(deadbeef): Makes all the interfaces pure virtual after they're
+    // implemented in chromium.
+    // Sets the volume of the source. |volume| is in the range of [0, 10].
+    // TODO(tommi): This method should be on the track and ideally volume should
+    // be applied in the track in a way that does not affect clones of the track.
+    virtual void SetVolume(double volume) {}
+    // Registers/unregisters observers to the audio source.
+    virtual void RegisterAudioObserver(AudioObserver* observer) {}
+    virtual void UnregisterAudioObserver(AudioObserver* observer) {}
+    // TODO(tommi): Make pure virtual.
+    virtual void AddSink(AudioTrackSinkInterface* sink) {}
+    virtual void RemoveSink(AudioTrackSinkInterface* sink) {}
+    // Returns options for the AudioSource.
+    // (for some of the settings this approach is broken, e.g. setting
+    // audio network adaptation on the source is the wrong layer of abstraction).
+//    virtual const AudioOptions options() const;
+  };
+}
+namespace rtc {
+
+  template
+  class scoped_refptr {
+   public:
+    typedef T element_type;
+    scoped_refptr() : ptr_(nullptr) {}
+    scoped_refptr(std::nullptr_t) : ptr_(nullptr) {}  // NOLINT(runtime/explicit)
+    explicit scoped_refptr(T* p) : ptr_(p) {
+      if (ptr_)
+        ptr_->AddRef();
+    }
+    scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) {
+      if (ptr_)
+        ptr_->AddRef();
+    }
+    template
+    scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) {
+      if (ptr_)
+        ptr_->AddRef();
+    }
+    // Move constructors.
+    scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {}
+    template
+    scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {}
+    ~scoped_refptr() {
+      if (ptr_)
+        ptr_->Release();
+    }
+    T* get() const { return ptr_; }
+    explicit operator bool() const { return ptr_ != nullptr; }
+    T& operator*() const { return *ptr_; }
+    T* operator->() const { return ptr_; }
+    // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a
+    // null pointer, all without touching the reference count of the underlying
+    // pointed-to object. The object is still reference counted, and the caller of
+    // release() is now the proud owner of one reference, so it is responsible for
+    // calling Release() once on the object when no longer using it.
+    T* release() {
+      T* retVal = ptr_;
+      ptr_ = nullptr;
+      return retVal;
+    }
+    scoped_refptr& operator=(T* p) {
+      // AddRef first so that self assignment should work
+      if (p)
+        p->AddRef();
+      if (ptr_)
+        ptr_->Release();
+      ptr_ = p;
+      return *this;
+    }
+    scoped_refptr& operator=(const scoped_refptr& r) {
+      return *this = r.ptr_;
+    }
+    template
+    scoped_refptr& operator=(const scoped_refptr& r) {
+      return *this = r.get();
+    }
+    scoped_refptr& operator=(scoped_refptr&& r) noexcept {
+      scoped_refptr(std::move(r)).swap(*this);
+      return *this;
+    }
+    template
+    scoped_refptr& operator=(scoped_refptr&& r) noexcept {
+      scoped_refptr(std::move(r)).swap(*this);
+      return *this;
+    }
+    void swap(T** pp) noexcept {
+      T* p = ptr_;
+      ptr_ = *pp;
+      *pp = p;
+    }
+    void swap(scoped_refptr& r) noexcept { swap(&r.ptr_); }
+   protected:
+    T* ptr_;
+  };
+};
+
+#endif  // API_MEDIA_STREAM_INTERFACE_H_
+#endif  // __cplusplus
diff --git a/scripts/sync_darwin_sources.sh b/scripts/sync_darwin_sources.sh
new file mode 100755
index 0000000000..e5a194fb23
--- /dev/null
+++ b/scripts/sync_darwin_sources.sh
@@ -0,0 +1,47 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Path to your shared Darwin implementation
+COMMON_DIR="common/darwin/Classes"
+
+# Flutter plugin target names (adjust if your plugin names differ)
+IOS_TARGET="stream_webrtc_flutter"
+MACOS_TARGET="stream_webrtc_flutter"
+
+# Destination roots
+IOS_DIR="ios/$IOS_TARGET/Sources/$IOS_TARGET"
+MACOS_DIR="macos/$MACOS_TARGET/Sources/$MACOS_TARGET"
+
+# Ensure destination dirs exist
+mkdir -p "$IOS_DIR/include/$IOS_TARGET"
+mkdir -p "$MACOS_DIR/include/$MACOS_TARGET"
+
+echo "Syncing Darwin sources into iOS and macOS targets..."
+
+# Copy implementation files (.m, .mm, .cpp, .c, .swift)
+rsync -av --include='*/' \
+  --include='*.m' --include='*.mm' \
+  --include='*.cpp' --include='*.c' \
+  --include='*.swift' \
+  --exclude='*' \
+  "$COMMON_DIR/" "$IOS_DIR/"
+
+rsync -av --include='*/' \
+  --include='*.m' --include='*.mm' \
+  --include='*.cpp' --include='*.c' \
+  --include='*.swift' \
+  --exclude='*' \
+  "$COMMON_DIR/" "$MACOS_DIR/"
+
+# Copy public headers (.h, .hpp)
+rsync -av --include='*/' \
+  --include='*.h' --include='*.hpp' \
+  --exclude='*' \
+  "$COMMON_DIR/" "$IOS_DIR/include/$IOS_TARGET/"
+
+rsync -av --include='*/' \
+  --include='*.h' --include='*.hpp' \
+  --exclude='*' \
+  "$COMMON_DIR/" "$MACOS_DIR/include/$MACOS_TARGET/"
+
+echo "✅ Sync complete!"
\ No newline at end of file
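
Reviewer note (not part of the patch): scripts/sync_darwin_sources.sh is intended to be run from the repository root (./scripts/sync_darwin_sources.sh) whenever the shared sources under common/darwin/Classes change. It rsyncs implementation files (.m/.mm/.c/.cpp/.swift) into the iOS and macOS SPM targets and headers (.h/.hpp) into each target's include/ directory, which is what keeps the two platform packages from drifting apart from the shared Darwin code.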
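
For reviewers, a minimal Objective-C sketch of how the ProcessorProvider / VideoFrameProcessorDelegate pair declared in this diff is meant to be used together; the class name MyPassThroughProcessor and the registration name "my_effect" are made up for illustration and are not part of the patch:

#import "ProcessorProvider.h"
#import "VideoFrameProcessor.h"

// Hypothetical processor: conforms to VideoFrameProcessorDelegate and
// returns each captured frame unchanged.
@interface MyPassThroughProcessor : NSObject <VideoFrameProcessorDelegate>
@end

@implementation MyPassThroughProcessor
- (RTCVideoFrame *)capturer:(RTCVideoCapturer *)capturer
       didCaptureVideoFrame:(RTCVideoFrame *)frame {
  // A real processor would transform the frame here before returning it.
  return frame;
}
@end

// Registration under an arbitrary name, e.g. during plugin setup:
//   [ProcessorProvider addProcessor:[MyPassThroughProcessor new] forName:@"my_effect"];
// and removal once the effect is no longer needed:
//   [ProcessorProvider removeProcessor:@"my_effect"];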
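
Similarly, a hedged sketch of consuming decoded audio through the block-based API of FlutterRTCAudioSink declared above; attachSinkToTrack is a hypothetical helper, not part of this diff:

#import <CoreMedia/CoreMedia.h>
#import "FlutterRTCAudioSink.h"

// Hypothetical helper: attaches a sink to an audio track and logs the
// number of samples in each CMSampleBuffer delivered by the callback.
static FlutterRTCAudioSink *attachSinkToTrack(RTCAudioTrack *track) {
  FlutterRTCAudioSink *sink = [[FlutterRTCAudioSink alloc] initWithAudioTrack:track];
  sink.bufferCallback = ^(CMSampleBufferRef buffer) {
    CMItemCount samples = CMSampleBufferGetNumSamples(buffer);
    NSLog(@"audio buffer with %ld samples", (long)samples);
  };
  // sink.format (a CMAudioFormatDescriptionRef) describes the PCM layout
  // of the delivered buffers.
  return sink;  // callers must invoke -close to detach from the track
}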