diff --git a/README.md b/README.md index 89c2d95d..e0713110 100755 --- a/README.md +++ b/README.md @@ -152,7 +152,7 @@ There are a few different ways to approach filtering an image. The easiest are t ```swift let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() -let filteredImage = testImage.filterWithOperation(toonFilter) +let filteredImage = try! testImage.filterWithOperation(toonFilter) ``` for a more complex pipeline: @@ -161,7 +161,7 @@ for a more complex pipeline: let testImage = UIImage(named:"WID-small.jpg")! let toonFilter = SmoothToonFilter() let luminanceFilter = Luminance() -let filteredImage = testImage.filterWithPipeline{input, output in +let filteredImage = try! testImage.filterWithPipeline{input, output in input --> toonFilter --> luminanceFilter --> output } ``` @@ -173,7 +173,7 @@ Both of these convenience methods wrap several operations. To feed a picture int ```swift let toonFilter = SmoothToonFilter() let testImage = UIImage(named:"WID-small.jpg")! -let pictureInput = PictureInput(image:testImage) +let pictureInput = try! PictureInput(image:testImage) let pictureOutput = PictureOutput() pictureOutput.imageAvailableCallback = {image in // Do something with image @@ -186,24 +186,116 @@ In the above, the imageAvailableCallback will be triggered right at the processI ### Filtering and re-encoding a movie ### -To filter an existing movie file, you can write code like the following: +To filter and playback an existing movie file, you can write code like the following: ```swift do { - let bundleURL = Bundle.main.resourceURL! - let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + let bundleURL = Bundle.main.resourceURL! + let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + + let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM] + + movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings) + speaker = SpeakerOutput() + movie.audioEncodingTarget = speaker + filter = SaturationAdjustment() movie --> filter --> renderView + movie.start() + speaker.start() } catch { - fatalError("Could not initialize rendering pipeline: \(error)") + print("Couldn't process movie with error: \(error)") } ``` where renderView is an instance of RenderView that you've placed somewhere in your view hierarchy. The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the screen. start() initiates the movie playback. +To filter an existing movie file and save the result to a new movie file you can write code like the following: + + +```swift +let bundleURL = Bundle.main.resourceURL! +// The movie you want to reencode +let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + +let documentsDir = FileManager().urls(for:.documentDirectory, in:.userDomainMask).first! +// The location you want to save the new video +let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! 
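+
+// Next, the source asset is loaded so its video and audio tracks can be inspected;
+// their frame rate, bit rate, and natural size are reused for the encoding settings below.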
+ +let asset = AVURLAsset(url:movieURL, options:[AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)]) + +guard let videoTrack = asset.tracks(withMediaType:AVMediaType.video).first else { return } +let audioTrack = asset.tracks(withMediaType:AVMediaType.audio).first + +// If you would like passthrough audio instead, set both audioDecodingSettings and audioEncodingSettings to nil +let audioDecodingSettings:[String:Any] = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples + +do { + movieInput = try MovieInput(asset:asset, videoComposition:nil, playAtActualSpeed:false, loop:false, audioSettings:audioDecodingSettings) +} +catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return +} + +try? FileManager().removeItem(at: exportedURL) + +let videoEncodingSettings:[String:Any] = [ + AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey:videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey:videoTrack.estimatedDataRate, + AVVideoProfileLevelKey:AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey:videoTrack.requiresFrameReordering], + AVVideoCodecKey:AVVideoCodecH264] + +var acl = AudioChannelLayout() +memset(&acl, 0, MemoryLayout.size) +acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo +let audioEncodingSettings:[String:Any] = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 +] + +do { + movieOutput = try MovieOutput(URL: exportedURL, size:Size(width:Float(videoTrack.naturalSize.width), height:Float(videoTrack.naturalSize.height)), fileType:.mp4, liveVideo:false, videoSettings:videoEncodingSettings, videoNaturalTimeScale:videoTrack.naturalTimeScale, audioSettings:audioEncodingSettings) +} +catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return +} + +filter = SaturationAdjustment() + +if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } +movieInput.synchronizedMovieOutput = movieOutput +movieInput --> filter --> movieOutput + +movieInput.completion = { + self.movieOutput.finishRecording { + self.movieInput.audioEncodingTarget = nil + self.movieInput.synchronizedMovieOutput = nil + print("Encoding finished") + } +} + +movieOutput.startRecording { started, error in + if(!started) { + print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") + return + } + self.movieInput.start() + print("Encoding started") +} +``` + + The above loads a movie named "sample_iPod.m4v" from the application's bundle, creates a saturation filter, and directs movie frames to be processed through the saturation filter on their way to the new file. In addition it writes the audio in AAC format to the new file. + ### Writing a custom image processing operation ### The framework uses a series of protocols to define types that can output images to be processed, take in an image for processing, or do both. These are the ImageSource, ImageConsumer, and ImageProcessingOperation protocols, respectively. Any type can comply to these, but typically classes are used. 
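
For example, a minimal custom operation might look like the following. This is a sketch, not code from this change set: it assumes that BasicOperation (which conforms to ImageProcessingOperation and handles the ImageSource/ImageConsumer plumbing) exposes an initializer taking a fragment shader string and an input count, and it follows the framework's GLES shader naming conventions (textureCoordinate, inputImageTexture). Check BasicOperation.swift for the exact parameter labels:

```swift
import GPUImage

// A GLES fragment shader that inverts the incoming color. The varying and uniform
// names follow the GPUImage conventions referenced elsewhere in this README.
let invertFragmentShader = """
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;

void main() {
    lowp vec4 color = texture2D(inputImageTexture, textureCoordinate);
    gl_FragColor = vec4(1.0 - color.rgb, color.a);
}
"""

// Assumed initializer labels; verify against BasicOperation.swift.
let customInvert = BasicOperation(fragmentShader:invertFragmentShader, numberOfInputs:1)

// Once constructed, the operation chains like any built-in filter:
// camera --> customInvert --> renderView
```

Because such an operation is both an ImageSource and an ImageConsumer, it can sit anywhere in a --> pipeline.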
diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj old mode 100644 new mode 100755 diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift old mode 100644 new mode 100755 diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/Base.lproj/LaunchScreen.storyboard b/examples/iOS/FilterShowcase/FilterShowcaseSwift/Base.lproj/LaunchScreen.storyboard old mode 100644 new mode 100755 diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/Base.lproj/Main.storyboard b/examples/iOS/FilterShowcase/FilterShowcaseSwift/Base.lproj/Main.storyboard old mode 100644 new mode 100755 diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift old mode 100644 new mode 100755 index 05fceeb8..d0b532e2 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -45,7 +45,7 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega currentFilterConfiguration.filter.addTarget(view) case .blend: videoCamera.addTarget(currentFilterConfiguration.filter) - self.blendImage = PictureInput(imageName:blendImageName) + self.blendImage = try? PictureInput(imageName:blendImageName) self.blendImage?.addTarget(currentFilterConfiguration.filter) self.blendImage?.processImage() currentFilterConfiguration.filter.addTarget(view) diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift old mode 100644 new mode 100755 diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/Info.plist b/examples/iOS/FilterShowcase/FilterShowcaseSwift/Info.plist old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/Base.lproj/LaunchScreen.storyboard b/examples/iOS/SimpleImageFilter/SimpleImageFilter/Base.lproj/LaunchScreen.storyboard old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/Base.lproj/Main.storyboard b/examples/iOS/SimpleImageFilter/SimpleImageFilter/Base.lproj/Main.storyboard old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/Info.plist b/examples/iOS/SimpleImageFilter/SimpleImageFilter/Info.plist old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift old mode 100644 new mode 100755 index 7980b1dc..a2c52890 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift @@ -14,7 +14,14 @@ class ViewController: UIViewController { // Filtering image for saving let testImage = UIImage(named:"WID-small.jpg")! 
let toonFilter = SmoothToonFilter() - let filteredImage = testImage.filterWithOperation(toonFilter) + + let filteredImage:UIImage + do { + filteredImage = try testImage.filterWithOperation(toonFilter) + } catch { + print("Couldn't filter image with error: \(error)") + return + } let pngImage = UIImagePNGRepresentation(filteredImage)! do { @@ -25,8 +32,14 @@ class ViewController: UIViewController { print("Couldn't write to file with error: \(error)") } + // Filtering image for display - picture = PictureInput(image:UIImage(named:"WID-small.jpg")!) + do { + picture = try PictureInput(image:UIImage(named:"WID-small.jpg")!) + } catch { + print("Couldn't create PictureInput with error: \(error)") + return + } filter = SaturationAdjustment() picture --> filter --> renderView picture.processImage() diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj new file mode 100755 index 00000000..9b8a8077 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding.xcodeproj/project.pbxproj @@ -0,0 +1,472 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 48; + objects = { + +/* Begin PBXBuildFile section */ + 1F2393442071C12C001886DD /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F2393432071C12C001886DD /* AppDelegate.swift */; }; + 1F2393462071C12C001886DD /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F2393452071C12C001886DD /* ViewController.swift */; }; + 1F2393492071C12C001886DD /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F2393472071C12C001886DD /* Main.storyboard */; }; + 1F23934B2071C12C001886DD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F23934A2071C12C001886DD /* Assets.xcassets */; }; + 1F23934E2071C12C001886DD /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */; }; + 1F2393662071C169001886DD /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F2393652071C169001886DD /* AVFoundation.framework */; }; + 1F2393682071C16D001886DD /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F2393672071C16D001886DD /* CoreAudio.framework */; }; + 1F23936D2071C2DB001886DD /* sample_iPod.m4v in Resources */ = {isa = PBXBuildFile; fileRef = 1F23936C2071C2DB001886DD /* sample_iPod.m4v */; }; + 1F2393772071F51C001886DD /* GPUImage.framework in CopyFiles */ = {isa = PBXBuildFile; fileRef = 1F2393612071C155001886DD /* GPUImage.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + 1F2393792071FCB1001886DD /* Assets-iOS.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */; }; + 1F23937B2071FCDB001886DD /* lookup_miss_etikate.png in Resources */ = {isa = PBXBuildFile; fileRef = 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + 1F23935C2071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC6E7CAB1C39A9D8006DF678; + remoteInfo = GPUImage_macOS; + }; + 1F23935E2071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + 
remoteGlobalIDString = BC6E7CB51C39A9D8006DF678; + remoteInfo = GPUImageTests_macOS; + }; + 1F2393602071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC9E34E91E524A2200B8604F; + remoteInfo = GPUImage_iOS; + }; + 1F2393622071C155001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 2; + remoteGlobalIDString = BC9E34F11E524A2200B8604F; + remoteInfo = GPUImageTests_iOS; + }; + 1F23936A2071C29D001886DD /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + proxyType = 1; + remoteGlobalIDString = BC9E34E81E524A2200B8604F; + remoteInfo = GPUImage_iOS; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 1F2393762071F506001886DD /* CopyFiles */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + 1F2393772071F51C001886DD /* GPUImage.framework in CopyFiles */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SimpleMovieEncoding.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 1F2393432071C12C001886DD /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 1F2393452071C12C001886DD /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + 1F2393482071C12C001886DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 1F23934A2071C12C001886DD /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 1F23934D2071C12C001886DD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 1F23934F2071C12C001886DD /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 1F2393552071C155001886DD /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../../../framework/GPUImage.xcodeproj; sourceTree = ""; }; + 1F2393652071C169001886DD /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; + 1F2393672071C16D001886DD /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; + 1F23936C2071C2DB001886DD /* sample_iPod.m4v */ = {isa = PBXFileReference; lastKnownFileType = file; name = sample_iPod.m4v; path = ../../../SharedAssets/sample_iPod.m4v; sourceTree = ""; }; + 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = "Assets-iOS.xcassets"; 
path = "../../../SharedAssets/Assets-iOS.xcassets"; sourceTree = ""; }; + 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = lookup_miss_etikate.png; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 1F23933D2071C12C001886DD /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F2393682071C16D001886DD /* CoreAudio.framework in Frameworks */, + 1F2393662071C169001886DD /* AVFoundation.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 1F2393372071C12C001886DD = { + isa = PBXGroup; + children = ( + 1F2393422071C12C001886DD /* SimpleMovieEncoding */, + 1F2393412071C12C001886DD /* Products */, + 1F2393642071C169001886DD /* Frameworks */, + ); + sourceTree = ""; + }; + 1F2393412071C12C001886DD /* Products */ = { + isa = PBXGroup; + children = ( + 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */, + ); + name = Products; + sourceTree = ""; + }; + 1F2393422071C12C001886DD /* SimpleMovieEncoding */ = { + isa = PBXGroup; + children = ( + 1F2393432071C12C001886DD /* AppDelegate.swift */, + 1F2393452071C12C001886DD /* ViewController.swift */, + 1F2393472071C12C001886DD /* Main.storyboard */, + 1F2393552071C155001886DD /* GPUImage.xcodeproj */, + 1F23934A2071C12C001886DD /* Assets.xcassets */, + 1F2393782071FCB1001886DD /* Assets-iOS.xcassets */, + 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */, + 1F23937A2071FCDA001886DD /* lookup_miss_etikate.png */, + 1F23936C2071C2DB001886DD /* sample_iPod.m4v */, + 1F23934F2071C12C001886DD /* Info.plist */, + ); + path = SimpleMovieEncoding; + sourceTree = ""; + }; + 1F2393562071C155001886DD /* Products */ = { + isa = PBXGroup; + children = ( + 1F23935D2071C155001886DD /* GPUImage.framework */, + 1F23935F2071C155001886DD /* GPUImageTests_macOS.xctest */, + 1F2393612071C155001886DD /* GPUImage.framework */, + 1F2393632071C155001886DD /* GPUImageTests_iOS.xctest */, + ); + name = Products; + sourceTree = ""; + }; + 1F2393642071C169001886DD /* Frameworks */ = { + isa = PBXGroup; + children = ( + 1F2393672071C16D001886DD /* CoreAudio.framework */, + 1F2393652071C169001886DD /* AVFoundation.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 1F23933F2071C12C001886DD /* SimpleMovieEncoding */ = { + isa = PBXNativeTarget; + buildConfigurationList = 1F2393522071C12C001886DD /* Build configuration list for PBXNativeTarget "SimpleMovieEncoding" */; + buildPhases = ( + 1F23933C2071C12C001886DD /* Sources */, + 1F23933D2071C12C001886DD /* Frameworks */, + 1F23933E2071C12C001886DD /* Resources */, + 1F2393762071F506001886DD /* CopyFiles */, + ); + buildRules = ( + ); + dependencies = ( + 1F23936B2071C29D001886DD /* PBXTargetDependency */, + ); + name = SimpleMovieEncoding; + productName = SimpleMovieEncoding; + productReference = 1F2393402071C12C001886DD /* SimpleMovieEncoding.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 1F2393382071C12C001886DD /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 0920; + LastUpgradeCheck = 1010; + ORGANIZATIONNAME = "Sunset Lake Software LLC"; + TargetAttributes = { + 1F23933F2071C12C001886DD = { + CreatedOnToolsVersion = 9.2; + LastSwiftMigration = 
1010; + ProvisioningStyle = Automatic; + }; + }; + }; + buildConfigurationList = 1F23933B2071C12C001886DD /* Build configuration list for PBXProject "SimpleMovieEncoding" */; + compatibilityVersion = "Xcode 8.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 1F2393372071C12C001886DD; + productRefGroup = 1F2393412071C12C001886DD /* Products */; + projectDirPath = ""; + projectReferences = ( + { + ProductGroup = 1F2393562071C155001886DD /* Products */; + ProjectRef = 1F2393552071C155001886DD /* GPUImage.xcodeproj */; + }, + ); + projectRoot = ""; + targets = ( + 1F23933F2071C12C001886DD /* SimpleMovieEncoding */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXReferenceProxy section */ + 1F23935D2071C155001886DD /* GPUImage.framework */ = { + isa = PBXReferenceProxy; + fileType = wrapper.framework; + path = GPUImage.framework; + remoteRef = 1F23935C2071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F23935F2071C155001886DD /* GPUImageTests_macOS.xctest */ = { + isa = PBXReferenceProxy; + fileType = wrapper.cfbundle; + path = GPUImageTests_macOS.xctest; + remoteRef = 1F23935E2071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F2393612071C155001886DD /* GPUImage.framework */ = { + isa = PBXReferenceProxy; + fileType = wrapper.framework; + path = GPUImage.framework; + remoteRef = 1F2393602071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; + 1F2393632071C155001886DD /* GPUImageTests_iOS.xctest */ = { + isa = PBXReferenceProxy; + fileType = wrapper.cfbundle; + path = GPUImageTests_iOS.xctest; + remoteRef = 1F2393622071C155001886DD /* PBXContainerItemProxy */; + sourceTree = BUILT_PRODUCTS_DIR; + }; +/* End PBXReferenceProxy section */ + +/* Begin PBXResourcesBuildPhase section */ + 1F23933E2071C12C001886DD /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F23934E2071C12C001886DD /* LaunchScreen.storyboard in Resources */, + 1F23936D2071C2DB001886DD /* sample_iPod.m4v in Resources */, + 1F23934B2071C12C001886DD /* Assets.xcassets in Resources */, + 1F2393492071C12C001886DD /* Main.storyboard in Resources */, + 1F2393792071FCB1001886DD /* Assets-iOS.xcassets in Resources */, + 1F23937B2071FCDB001886DD /* lookup_miss_etikate.png in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 1F23933C2071C12C001886DD /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 1F2393462071C12C001886DD /* ViewController.swift in Sources */, + 1F2393442071C12C001886DD /* AppDelegate.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + 1F23936B2071C29D001886DD /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + name = GPUImage_iOS; + targetProxy = 1F23936A2071C29D001886DD /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + 1F2393472071C12C001886DD /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 1F2393482071C12C001886DD /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 1F23934C2071C12C001886DD /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 1F23934D2071C12C001886DD /* Base */, + ); + name = LaunchScreen.storyboard; + 
sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 1F2393502071C12C001886DD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 9.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 1F2393512071C12C001886DD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + 
GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 9.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 1F2393532071C12C001886DD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; + INFOPLIST_FILE = SimpleMovieEncoding/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieEncoding; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 4.2; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 1F2393542071C12C001886DD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = ""; + INFOPLIST_FILE = SimpleMovieEncoding/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieEncoding; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 4.2; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 1F23933B2071C12C001886DD /* Build configuration list for PBXProject "SimpleMovieEncoding" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 1F2393502071C12C001886DD /* Debug */, + 1F2393512071C12C001886DD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 1F2393522071C12C001886DD /* Build configuration list for PBXNativeTarget "SimpleMovieEncoding" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 1F2393532071C12C001886DD /* Debug */, + 1F2393542071C12C001886DD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 1F2393382071C12C001886DD /* Project object */; +} diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift new file mode 100755 index 00000000..173f49b1 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/AppDelegate.swift @@ -0,0 +1,22 @@ +// +// AppDelegate.swift +// SimpleMovieEncoding +// +// Created by Josh Bernfeld on 4/1/18. +// Copyright © 2018 Sunset Lake Software LLC. All rights reserved. +// + +import UIKit + +@UIApplicationMain +class AppDelegate: UIResponder, UIApplicationDelegate { + + var window: UIWindow? + + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { + // Override point for customization after application launch. 
+ return true + } +} + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100755 index 00000000..1d060ed2 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,93 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "29x29", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "29x29", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "40x40", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "40x40", + "scale" : "3x" + }, + { + "idiom" : "iphone", + "size" : "60x60", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "60x60", + "scale" : "3x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "29x29", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "29x29", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "40x40", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "40x40", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "76x76", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "76x76", + "scale" : "2x" + }, + { + "idiom" : "ipad", + "size" : "83.5x83.5", + "scale" : "2x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard new file mode 100755 index 00000000..f83f6fd5 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard new file mode 100755 index 00000000..e411a78f --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Base.lproj/Main.storyboard @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist new file mode 100755 index 00000000..16be3b68 --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/Info.plist @@ -0,0 +1,45 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + LSRequiresIPhoneOS + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIRequiredDeviceCapabilities + + armv7 + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + 
UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift new file mode 100755 index 00000000..4563b62a --- /dev/null +++ b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/ViewController.swift @@ -0,0 +1,134 @@ +// +// ViewController.swift +// SimpleMovieEncoding +// +// Created by Josh Bernfeld on 4/1/18. +// Copyright © 2018 Sunset Lake Software LLC. All rights reserved. +// + +import UIKit +import GPUImage +import CoreAudio +import AVFoundation + +class ViewController: UIViewController { + + @IBOutlet var progressView:UIProgressView! + + var movieInput:MovieInput! + var movieOutput:MovieOutput! + var filter:MissEtikateFilter! + + override func viewDidLoad() { + super.viewDidLoad() + // Do any additional setup after loading the view, typically from a nib. + + let bundleURL = Bundle.main.resourceURL! + // The movie you want to reencode + let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! + + let documentsDir = FileManager().urls(for:.documentDirectory, in:.userDomainMask).first! + // The location you want to save the new video + let exportedURL = URL(string:"test.mp4", relativeTo:documentsDir)! + + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] + let asset = AVURLAsset(url:movieURL, options:inputOptions) + + guard let videoTrack = asset.tracks(withMediaType:AVMediaType.video).first else { return } + let audioTrack = asset.tracks(withMediaType:AVMediaType.audio).first + + let audioDecodingSettings:[String:Any]? + let audioEncodingSettings:[String:Any]? + var audioSourceFormatHint:CMFormatDescription? = nil + + let shouldPassthroughAudio = false + if(shouldPassthroughAudio) { + audioDecodingSettings = nil + audioEncodingSettings = nil + // A format hint is required when writing to certain file types with passthrough audio + // A conditional downcast would not work here for some reason + if let description = audioTrack?.formatDescriptions.first { audioSourceFormatHint = (description as! CMFormatDescription) } + } + else { + audioDecodingSettings = [AVFormatIDKey:kAudioFormatLinearPCM] // Noncompressed audio samples + var acl = AudioChannelLayout() + memset(&acl, 0, MemoryLayout.size) + acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo + audioEncodingSettings = [ + AVFormatIDKey:kAudioFormatMPEG4AAC, + AVNumberOfChannelsKey:2, + AVSampleRateKey:AVAudioSession.sharedInstance().sampleRate, + AVChannelLayoutKey:NSData(bytes:&acl, length:MemoryLayout.size), + AVEncoderBitRateKey:96000 + ] + audioSourceFormatHint = nil + } + + do { + movieInput = try MovieInput(asset:asset, videoComposition:nil, playAtActualSpeed:false, loop:false, audioSettings:audioDecodingSettings) + } + catch { + print("ERROR: Unable to setup MovieInput with error: \(error)") + return + } + + try? 
FileManager().removeItem(at: exportedURL) + + let videoEncodingSettings:[String:Any] = [ + AVVideoCompressionPropertiesKey: [ + AVVideoExpectedSourceFrameRateKey:videoTrack.nominalFrameRate, + AVVideoAverageBitRateKey:videoTrack.estimatedDataRate, + AVVideoProfileLevelKey:AVVideoProfileLevelH264HighAutoLevel, + AVVideoH264EntropyModeKey:AVVideoH264EntropyModeCABAC, + AVVideoAllowFrameReorderingKey:videoTrack.requiresFrameReordering], + AVVideoCodecKey:AVVideoCodecH264] + + do { + movieOutput = try MovieOutput(URL: exportedURL, size:Size(width:Float(videoTrack.naturalSize.width), height:Float(videoTrack.naturalSize.height)), fileType:.mp4, liveVideo:false, videoSettings:videoEncodingSettings, videoNaturalTimeScale:videoTrack.naturalTimeScale, audioSettings:audioEncodingSettings, audioSourceFormatHint:audioSourceFormatHint) + } + catch { + print("ERROR: Unable to setup MovieOutput with error: \(error)") + return + } + + filter = MissEtikateFilter() + + if(audioTrack != nil) { movieInput.audioEncodingTarget = movieOutput } + movieInput.synchronizedMovieOutput = movieOutput + //movieInput.synchronizedEncodingDebug = true + movieInput --> filter --> movieOutput + + movieInput.completion = { + self.movieOutput.finishRecording { + self.movieInput.audioEncodingTarget = nil + self.movieInput.synchronizedMovieOutput = nil + + DispatchQueue.main.async { + print("Encoding finished") + } + } + } + movieInput.progress = { progressVal in + DispatchQueue.main.async { + self.progressView.progress = Float(progressVal) + } + } + + movieOutput.startRecording { started, error in + if(!started) { + print("ERROR: MovieOutput unable to start writing with error: \(String(describing: error))") + return + } + self.movieInput.start() + print("Encoding started") + } + } + + override func didReceiveMemoryWarning() { + super.didReceiveMemoryWarning() + // Dispose of any resources that can be recreated. 
+ } + + +} + diff --git a/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png new file mode 100755 index 00000000..e1317d78 Binary files /dev/null and b/examples/iOS/SimpleMovieEncoding/SimpleMovieEncoding/lookup_miss_etikate.png differ diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj old mode 100644 new mode 100755 index dd76f5b7..3b9680af --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj @@ -7,6 +7,9 @@ objects = { /* Begin PBXBuildFile section */ + 1F2393812071FDE4001886DD /* Assets-iOS.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F23937D2071FDE4001886DD /* Assets-iOS.xcassets */; }; + 1FDF369F2071965100089948 /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1FDF369E2071965100089948 /* CoreAudio.framework */; }; + 1FDF36A12071966B00089948 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1FDF36A02071966B00089948 /* AVFoundation.framework */; }; BC9E367C1E525BCF00B8604F /* GPUImage.framework in CopyFiles */ = {isa = PBXBuildFile; fileRef = BC9E36771E525BC000B8604F /* GPUImage.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; BCC49F931CD6E1D800B63EEB /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */; }; BCC49F951CD6E1D800B63EEB /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC49F941CD6E1D800B63EEB /* ViewController.swift */; }; @@ -67,6 +70,9 @@ /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ + 1F23937D2071FDE4001886DD /* Assets-iOS.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = "Assets-iOS.xcassets"; path = "../../../SharedAssets/Assets-iOS.xcassets"; sourceTree = ""; }; + 1FDF369E2071965100089948 /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = System/Library/Frameworks/CoreAudio.framework; sourceTree = SDKROOT; }; + 1FDF36A02071966B00089948 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; BC9E366B1E525BC000B8604F /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../../../framework/GPUImage.xcodeproj; sourceTree = ""; }; BCC49F8F1CD6E1D800B63EEB /* SimpleMovieFilter.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SimpleMovieFilter.app; sourceTree = BUILT_PRODUCTS_DIR; }; BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; @@ -82,12 +88,23 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 1FDF36A12071966B00089948 /* AVFoundation.framework in Frameworks */, + 1FDF369F2071965100089948 /* CoreAudio.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 1FDF369D2071965000089948 /* Frameworks */ = { + isa = PBXGroup; + 
children = ( + 1FDF36A02071966B00089948 /* AVFoundation.framework */, + 1FDF369E2071965100089948 /* CoreAudio.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; BC9E366C1E525BC000B8604F /* Products */ = { isa = PBXGroup; children = ( @@ -105,6 +122,7 @@ BCC49F911CD6E1D800B63EEB /* Application */, BCC49FA41CD6E1E300B63EEB /* Resources */, BCC49F901CD6E1D800B63EEB /* Products */, + 1FDF369D2071965000089948 /* Frameworks */, ); sourceTree = ""; }; @@ -122,6 +140,7 @@ BCC49F921CD6E1D800B63EEB /* AppDelegate.swift */, BCC49F941CD6E1D800B63EEB /* ViewController.swift */, BCC49F961CD6E1D800B63EEB /* Main.storyboard */, + 1F23937D2071FDE4001886DD /* Assets-iOS.xcassets */, BC9E366B1E525BC000B8604F /* GPUImage.xcodeproj */, ); name = Application; @@ -239,6 +258,7 @@ files = ( BCC49FA61CD6E20000B63EEB /* sample_iPod.m4v in Resources */, BCC49F9D1CD6E1D800B63EEB /* LaunchScreen.storyboard in Resources */, + 1F2393812071FDE4001886DD /* Assets-iOS.xcassets in Resources */, BCC49F981CD6E1D800B63EEB /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/LaunchScreen.storyboard b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/LaunchScreen.storyboard old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard old mode 100644 new mode 100755 index 67c04ee8..769960fe --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Base.lproj/Main.storyboard @@ -1,8 +1,13 @@ - - + + + + + - + + + @@ -14,18 +19,61 @@ - + - - - + + + + + + + + + - + + + + + + + + @@ -34,6 +82,7 @@ + diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Info.plist b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/Info.plist old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift old mode 100644 new mode 100755 index 173b6b8a..ff9431f2 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift @@ -1,5 +1,7 @@ import UIKit import GPUImage +import CoreAudio +import AVFoundation class ViewController: UIViewController { @@ -7,19 +9,27 @@ class ViewController: UIViewController { var movie:MovieInput! var filter:Pixellate! + var speaker:SpeakerOutput! - override func viewDidLayoutSubviews() { - super.viewDidLayoutSubviews() + override func viewDidLoad() { + super.viewDidLoad() let bundleURL = Bundle.main.resourceURL! let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! 
do { - movie = try MovieInput(url:movieURL, playAtActualSpeed:true) + let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM] + + movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings) + speaker = SpeakerOutput() + movie.audioEncodingTarget = speaker + filter = Pixellate() movie --> filter --> renderView movie.runBenchmark = true + movie.start() + speaker.start() } catch { print("Couldn't process movie with error: \(error)") } @@ -28,5 +38,20 @@ class ViewController: UIViewController { // let fileURL = NSURL(string:"test.png", relativeToURL:documentsDir)! // try pngImage.writeToURL(fileURL, options:.DataWritingAtomic) } + + @IBAction func pause() { + movie.pause() + speaker.cancel() + } + + @IBAction func cancel() { + movie.cancel() + speaker.cancel() + } + + @IBAction func play() { + movie.start() + speaker.start() + } } diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/Base.lproj/LaunchScreen.storyboard b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/Base.lproj/LaunchScreen.storyboard old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/Info.plist b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/Info.plist old mode 100644 new mode 100755 diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift old mode 100644 new mode 100755 index 904ee14b..ecd82253 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -38,10 +38,30 @@ class ViewController: UIViewController { } catch { } - movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), liveVideo:true) + // Do this now so we can access the audioOutput recommendedAudioSettings before initializing the MovieOutput + do { + try self.camera.addAudioInputsAndOutputs() + } catch { + fatalError("ERROR: Could not connect audio target with error: \(error)") + } + + let audioSettings = self.camera!.audioOutput?.recommendedAudioSettingsForAssetWriter(writingTo:.mp4) as? [String : Any] + var videoSettings:[String : Any]? = nil + if #available(iOS 11.0, *) { + videoSettings = self.camera!.videoOutput.recommendedVideoSettings(forVideoCodecType:.h264, assetWriterOutputFileType:.mp4) as? [String : Any] + videoSettings![AVVideoWidthKey] = nil + videoSettings![AVVideoHeightKey] = nil + } + + movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), fileType:.mp4, liveVideo:true, videoSettings:videoSettings, audioSettings:audioSettings) camera.audioEncodingTarget = movieOutput filter --> movieOutput! - movieOutput!.startRecording() + movieOutput!.startRecording() { started, error in + if(!started) { + self.isRecording = false + fatalError("ERROR: Could not start writing with error: \(String(describing: error))") + } + } DispatchQueue.main.async { // Label not updating on the main thread, for some reason, so dispatching slightly after this (sender as! 
UIButton).titleLabel!.text = "Stop" diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj index 9ff93151..f2576e6b 100755 --- a/framework/GPUImage.xcodeproj/project.pbxproj +++ b/framework/GPUImage.xcodeproj/project.pbxproj @@ -7,6 +7,17 @@ objects = { /* Begin PBXBuildFile section */ + 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; + 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */; }; + 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; + 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */; }; + 1F6D1CB32048F81D00317B5F /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */; }; + 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */; }; + 1F6D1CB82048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; + 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */; }; + 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; + 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */; }; + 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -370,6 +381,14 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ + 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSObject+Exception.m"; path = "Source/NSObject+Exception.m"; sourceTree = ""; }; + 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSObject+Exception.h"; path = "Source/NSObject+Exception.h"; sourceTree = ""; }; + 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "GPUImage-Bridging-Header.h"; path = "Source/GPUImage-Bridging-Header.h"; sourceTree = ""; }; + 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AudioToolbox.framework; sourceTree = 
DEVELOPER_DIR; }; + 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS11.2.sdk/System/Library/Frameworks/AVFoundation.framework; sourceTree = DEVELOPER_DIR; }; + 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TPCircularBuffer.h; path = Source/TPCircularBuffer.h; sourceTree = ""; }; + 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = TPCircularBuffer.m; path = Source/TPCircularBuffer.m; sourceTree = ""; }; + 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SpeakerOutput.swift; path = Source/iOS/SpeakerOutput.swift; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -710,6 +729,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 1F6D1CB52048F8DD00317B5F /* AVFoundation.framework in Frameworks */, + 1F6D1CB32048F81D00317B5F /* AudioToolbox.framework in Frameworks */, BC9E35021E524BE200B8604F /* OpenGLES.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -961,8 +982,13 @@ BC6E7CAD1C39A9D8006DF678 /* Other */ = { isa = PBXGroup; children = ( + 1F6D1CB62048FB0300317B5F /* TPCircularBuffer.h */, + 1F6D1CB72048FB0300317B5F /* TPCircularBuffer.m */, + 1F499A721FDA0F9F0000E37E /* NSObject+Exception.h */, + 1F499A711FDA0F9E0000E37E /* NSObject+Exception.m */, BC4C85ED1C9F042900FD95D8 /* ConvertedShaders_GL.swift */, BC9E35531E52521F00B8604F /* ConvertedShaders_GLES.swift */, + 1F499A771FDA0FE20000E37E /* GPUImage-Bridging-Header.h */, ); name = Other; sourceTree = ""; @@ -1002,6 +1028,8 @@ BC6E7CCB1C39ADDD006DF678 /* Frameworks */ = { isa = PBXGroup; children = ( + 1F6D1CB42048F8DD00317B5F /* AVFoundation.framework */, + 1F6D1CB22048F81D00317B5F /* AudioToolbox.framework */, BC9E35011E524BE200B8604F /* OpenGLES.framework */, BC6E7CC91C39ADCC006DF678 /* OpenGL.framework */, ); @@ -1123,6 +1151,7 @@ BC9E35201E524D2A00B8604F /* iOS */ = { isa = PBXGroup; children = ( + 1F6D1CBF2048FFD900317B5F /* SpeakerOutput.swift */, BC9E35231E524D4D00B8604F /* RenderView.swift */, BC9E35221E524D4D00B8604F /* PictureOutput.swift */, BC9E35211E524D4D00B8604F /* MovieOutput.swift */, @@ -1198,6 +1227,8 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( + 1F499A751FDA0F9F0000E37E /* NSObject+Exception.h in Headers */, + 1F6D1CB82048FB0300317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1205,6 +1236,8 @@ isa = PBXHeadersBuildPhase; buildActionMask = 2147483647; files = ( + 1F499A761FDA0F9F0000E37E /* NSObject+Exception.h in Headers 
*/, + 1F6D1CB92048FB0300317B5F /* TPCircularBuffer.h in Headers */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1295,13 +1328,14 @@ TargetAttributes = { BC6E7CAA1C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; - LastSwiftMigration = 0800; + LastSwiftMigration = 0910; }; BC6E7CB41C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; }; BC9E34E81E524A2200B8604F = { CreatedOnToolsVersion = 8.2.1; + LastSwiftMigration = 0910; ProvisioningStyle = Automatic; }; BC9E34F01E524A2200B8604F = { @@ -1447,6 +1481,7 @@ BCFF46C01CB9556B00A0C521 /* WhiteBalance.swift in Sources */, BC7FD14E1CB0BD3900037949 /* ZoomBlur.swift in Sources */, BCFB07921CBF37A1009B2333 /* TextureInput.swift in Sources */, + 1F6D1CBA2048FB0300317B5F /* TPCircularBuffer.m in Sources */, BC6E7CC71C39AD9E006DF678 /* ShaderProgram.swift in Sources */, BCFF46CA1CB96BD700A0C521 /* HighPassFilter.swift in Sources */, BC7FD1321CB0A57F00037949 /* HighlightsAndShadows.swift in Sources */, @@ -1454,6 +1489,7 @@ BC7FD11C1CB0795A00037949 /* NormalBlend.swift in Sources */, BC4EE15E1CB3481F00AD8A65 /* ThresholdSobelEdgeDetection.swift in Sources */, BC7FD1911CB1D2A300037949 /* ImageGenerator.swift in Sources */, + 1F499A731FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC7FD1201CB079B200037949 /* SaturationBlend.swift in Sources */, BCA4E2491CC3EF26007B51BA /* ColourFASTFeatureDetection.swift in Sources */, BC7FD0FD1CB06E0000037949 /* Position.swift in Sources */, @@ -1630,6 +1666,7 @@ BC9E35511E52518F00B8604F /* Timestamp.swift in Sources */, BC9E35781E5256EB00B8604F /* ColorMatrixFilter.swift in Sources */, BC9E35D11E52580400B8604F /* ScreenBlend.swift in Sources */, + 1F6D1CBB2048FB0300317B5F /* TPCircularBuffer.m in Sources */, BC9E356A1E5256C200B8604F /* Haze.swift in Sources */, BC9E35D31E52580A00B8604F /* SourceOverBlend.swift in Sources */, BC9E357E1E5256FE00B8604F /* Vibrance.swift in Sources */, @@ -1637,6 +1674,8 @@ BC9E356E1E5256CE00B8604F /* FalseColor.swift in Sources */, BC9E35881E52572000B8604F /* ThresholdSobelEdgeDetection.swift in Sources */, BC9E356F1E5256D000B8604F /* HighlightsAndShadows.swift in Sources */, + 1F6D1CC02048FFD900317B5F /* SpeakerOutput.swift in Sources */, + 1F499A741FDA0F9F0000E37E /* NSObject+Exception.m in Sources */, BC9E35AA1E52578900B8604F /* Halftone.swift in Sources */, BC9E35961E52574A00B8604F /* ImageBuffer.swift in Sources */, BC9E35831E52571100B8604F /* LocalBinaryPattern.swift in Sources */, @@ -1798,6 +1837,7 @@ ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "Source/GPUImage-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 4.0; VERSIONING_SYSTEM = "apple-generic"; @@ -1851,6 +1891,7 @@ MACOSX_DEPLOYMENT_TARGET = 10.9; SDKROOT = macosx; SKIP_INSTALL = YES; + SWIFT_OBJC_BRIDGING_HEADER = "Source/GPUImage-Bridging-Header.h"; SWIFT_VERSION = 4.0; VERSIONING_SYSTEM = "apple-generic"; VERSION_INFO_PREFIX = ""; @@ -1932,6 +1973,7 @@ isa = XCBuildConfiguration; buildSettings = { CLANG_ANALYZER_NONNULL = YES; + CLANG_ENABLE_MODULES = YES; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; @@ -1961,6 +2003,7 @@ isa = XCBuildConfiguration; buildSettings = { CLANG_ANALYZER_NONNULL = YES; + CLANG_ENABLE_MODULES = YES; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; diff --git a/framework/Source/GPUImage-Bridging-Header.h b/framework/Source/GPUImage-Bridging-Header.h new file mode 100755 index 
00000000..379e1ac3 --- /dev/null +++ b/framework/Source/GPUImage-Bridging-Header.h @@ -0,0 +1,15 @@ +// +// GPUImage-Bridging-Header.h +// GPUImage +// +// Created by Josh Bernfeld on 12/7/17. +// Copyright © 2017 Sunset Lake Software LLC. All rights reserved. +// + +#ifndef GPUImage_Bridging_Header_h +#define GPUImage_Bridging_Header_h + +#import "NSObject+Exception.h" +#import "TPCircularBuffer.h" + +#endif /* GPUImage_Bridging_Header_h */ diff --git a/framework/Source/NSObject+Exception.h b/framework/Source/NSObject+Exception.h new file mode 100755 index 00000000..bb0bf010 --- /dev/null +++ b/framework/Source/NSObject+Exception.h @@ -0,0 +1,14 @@ +// +// NSObject+Exception.h +// GPUImage2 +// +// Created by Josh Bernfeld on 11/23/17. +// + +#import + +@interface NSObject (Exception) + ++ (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error; + +@end diff --git a/framework/Source/NSObject+Exception.m b/framework/Source/NSObject+Exception.m new file mode 100755 index 00000000..ed6d3711 --- /dev/null +++ b/framework/Source/NSObject+Exception.m @@ -0,0 +1,24 @@ +// +// NSObject+Exception.m +// GPUImage2 +// +// Created by Josh Bernfeld on 11/23/17. +// +// Source: https://stackoverflow.com/a/36454808/1275014 + +#import "NSObject+Exception.h" + +@implementation NSObject (Exception) + ++ (BOOL)catchException:(void(^)(void))tryBlock error:(__autoreleasing NSError **)error { + @try { + tryBlock(); + return YES; + } + @catch (NSException *exception) { + *error = [[NSError alloc] initWithDomain:exception.name code:0 userInfo:exception.userInfo]; + return NO; + } +} + +@end diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index 20473a30..6fcbf83d 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -23,7 +23,7 @@ extension OpenGLContext { if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { return shaderFromCache } else { - return try sharedImageProcessingContext.runOperationSynchronously{ + return try self.runOperationSynchronously{ let program = try ShaderProgram(vertexShader:vertexShader, fragmentShader:fragmentShader) self.shaderCache[lookupKeyForShaderProgram] = program return program diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index 020b0b12..ba55c790 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -63,7 +63,7 @@ public let standardImageVertices:[GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, public let verticallyInvertedImageVertices:[GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] // "position" and "inputTextureCoordinate", "inputTextureCoordinate2" attribute naming follows the convention of the old GPUImage -public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat]? = nil, vertexBufferObject:GLuint? = nil, inputTextures:[InputTextureProperties]) { +public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat]? = nil, vertexBufferObject:GLuint? 
= nil, inputTextures:[InputTextureProperties], context: OpenGLContext = sharedImageProcessingContext) { switch (vertices, vertexBufferObject) { case (.none, .some): break case (.some, .none): break @@ -71,7 +71,7 @@ public func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderU case (.none, .none): fatalError("Can't specify both vertices and a VBO in renderQuadWithShader()") } - sharedImageProcessingContext.makeCurrentContext() + context.makeCurrentContext() shader.use() uniformSettings?.restoreShaderSettings(shader) diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 9f48a4e9..0541b066 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -19,19 +19,11 @@ extension SerialDispatch { #else public var standardProcessingQueue:DispatchQueue { - if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.global(qos: .default) - } else { - return DispatchQueue.global(priority: .default) - } + return DispatchQueue.global(qos: .default) } public var lowProcessingQueue:DispatchQueue { - if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.global(qos: .background) - } else { - return DispatchQueue.global(priority: .low) - } + return DispatchQueue.global(qos: .background) } func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { @@ -64,6 +56,7 @@ func runOnMainQueue(_ mainThreadOperation:() -> T) -> T { public protocol SerialDispatch { var serialDispatchQueue:DispatchQueue { get } var dispatchQueueKey:DispatchSpecificKey { get } + var dispatchQueueKeyValue:Int { get } func makeCurrentContext() } @@ -77,7 +70,7 @@ public extension SerialDispatch { public func runOperationSynchronously(_ operation:() -> ()) { // TODO: Verify this works as intended - if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == 81) { + if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == self.dispatchQueueKeyValue) { operation() } else { self.serialDispatchQueue.sync { diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index abf50b63..7e2013bc 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -59,7 +59,7 @@ public class ShaderProgram { } deinit { - debugPrint("Shader deallocated") + //debugPrint("Shader deallocated") if (vertexShader != nil) { glDeleteShader(vertexShader) diff --git a/framework/Source/TPCircularBuffer.h b/framework/Source/TPCircularBuffer.h new file mode 100755 index 00000000..88129560 --- /dev/null +++ b/framework/Source/TPCircularBuffer.h @@ -0,0 +1,243 @@ +// +// TPCircularBuffer.h +// Circular/Ring buffer implementation +// +// https://github.com/michaeltyson/TPCircularBuffer +// +// Created by Michael Tyson on 10/12/2011. +// +// +// This implementation makes use of a virtual memory mapping technique that inserts a virtual copy +// of the buffer memory directly after the buffer's end, negating the need for any buffer wrap-around +// logic. Clients can simply use the returned memory address as if it were contiguous space. +// +// The implementation is thread-safe in the case of a single producer and single consumer. +// +// Virtual memory technique originally proposed by Philip Howard (http://vrb.slashusr.org/), and +// adapted to Darwin by Kurt Revis (http://www.snoize.com, +// http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz) +// +// +// Copyright (C) 2012-2013 A Tasty Pixel +// +// This software is provided 'as-is', without any express or implied +// warranty. 
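
As an aside on the NSObject+Exception category added earlier in this patch: it lets Swift code trap Objective-C exceptions (such as the ones AVAssetReader and AVAssetWriter can raise) as thrown Swift errors, and MovieInput and MovieOutput lean on it later in this diff. A minimal sketch of the calling pattern; `startWritingSafely` is an illustrative helper, not part of the framework:

```swift
import AVFoundation

// Illustrative helper, not part of GPUImage: wraps a call that may raise an NSException.
func startWritingSafely(_ writer: AVAssetWriter) -> Bool {
    var success = false
    do {
        // The category converts a raised NSException into an NSError and rethrows it,
        // so a bad writer configuration no longer crashes the process outright.
        try NSObject.catchException {
            success = writer.startWriting()
        }
    } catch {
        print("Caught Objective-C exception as a Swift error: \(error)")
        return false
    }
    return success
}
```
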
In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// +// 3. This notice may not be removed or altered from any source distribution. +// + +#ifndef TPCircularBuffer_h +#define TPCircularBuffer_h + +#include +#include +#include + +#ifdef __cplusplus + extern "C++" { + #include + typedef std::atomic_int atomicInt; + #define atomicFetchAdd(a,b) std::atomic_fetch_add(a,b) + } +#else + #include + typedef atomic_int atomicInt; + #define atomicFetchAdd(a,b) atomic_fetch_add(a,b) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct { + void *buffer; + uint32_t length; + uint32_t tail; + uint32_t head; + volatile atomicInt fillCount; + bool atomic; +} TPCircularBuffer; + +/*! + * Initialise buffer + * + * Note that the length is advisory only: Because of the way the + * memory mirroring technique works, the true buffer length will + * be multiples of the device page size (e.g. 4096 bytes) + * + * If you intend to use the AudioBufferList utilities, you should + * always allocate a bit more space than you need for pure audio + * data, so there's room for the metadata. How much extra is required + * depends on how many AudioBufferList structures are used, which is + * a function of how many audio frames each buffer holds. A good rule + * of thumb is to add 15%, or at least another 2048 bytes or so. + * + * @param buffer Circular buffer + * @param length Length of buffer + */ +#define TPCircularBufferInit(buffer, length) \ + _TPCircularBufferInit(buffer, length, sizeof(*buffer)) +bool _TPCircularBufferInit(TPCircularBuffer *buffer, uint32_t length, size_t structSize); + +/*! + * Cleanup buffer + * + * Releases buffer resources. + */ +void TPCircularBufferCleanup(TPCircularBuffer *buffer); + +/*! + * Clear buffer + * + * Resets buffer to original, empty state. + * + * This is safe for use by consumer while producer is accessing + * buffer. + */ +void TPCircularBufferClear(TPCircularBuffer *buffer); + +/*! + * Set the atomicity + * + * If you set the atomiticy to false using this method, the buffer will + * not use atomic operations. This can be used to give the compiler a little + * more optimisation opportunities when the buffer is only used on one thread. + * + * Important note: Only set this to false if you know what you're doing! + * + * The default value is true (the buffer will use atomic operations) + * + * @param buffer Circular buffer + * @param atomic Whether the buffer is atomic (default true) + */ +void TPCircularBufferSetAtomic(TPCircularBuffer *buffer, bool atomic); + +// Reading (consuming) + +/*! + * Access end of buffer + * + * This gives you a pointer to the end of the buffer, ready + * for reading, and the number of available bytes to read. 
+ * + * @param buffer Circular buffer + * @param availableBytes On output, the number of bytes ready for reading + * @return Pointer to the first bytes ready for reading, or NULL if buffer is empty + */ +static __inline__ __attribute__((always_inline)) void* TPCircularBufferTail(TPCircularBuffer *buffer, uint32_t* availableBytes) { + *availableBytes = buffer->fillCount; + if ( *availableBytes == 0 ) return NULL; + return (void*)((char*)buffer->buffer + buffer->tail); +} + +/*! + * Consume bytes in buffer + * + * This frees up the just-read bytes, ready for writing again. + * + * @param buffer Circular buffer + * @param amount Number of bytes to consume + */ +static __inline__ __attribute__((always_inline)) void TPCircularBufferConsume(TPCircularBuffer *buffer, uint32_t amount) { + buffer->tail = (buffer->tail + amount) % buffer->length; + if ( buffer->atomic ) { + atomicFetchAdd(&buffer->fillCount, -amount); + } else { + buffer->fillCount -= amount; + } + assert(buffer->fillCount >= 0); +} + +/*! + * Access front of buffer + * + * This gives you a pointer to the front of the buffer, ready + * for writing, and the number of available bytes to write. + * + * @param buffer Circular buffer + * @param availableBytes On output, the number of bytes ready for writing + * @return Pointer to the first bytes ready for writing, or NULL if buffer is full + */ +static __inline__ __attribute__((always_inline)) void* TPCircularBufferHead(TPCircularBuffer *buffer, uint32_t* availableBytes) { + *availableBytes = (buffer->length - buffer->fillCount); + if ( *availableBytes == 0 ) return NULL; + return (void*)((char*)buffer->buffer + buffer->head); +} + +// Writing (producing) + +/*! + * Produce bytes in buffer + * + * This marks the given section of the buffer ready for reading. + * + * @param buffer Circular buffer + * @param amount Number of bytes to produce + */ +static __inline__ __attribute__((always_inline)) void TPCircularBufferProduce(TPCircularBuffer *buffer, uint32_t amount) { + buffer->head = (buffer->head + amount) % buffer->length; + if ( buffer->atomic ) { + atomicFetchAdd(&buffer->fillCount, amount); + } else { + buffer->fillCount += amount; + } + assert(buffer->fillCount <= buffer->length); +} + +/*! + * Helper routine to copy bytes to buffer + * + * This copies the given bytes to the buffer, and marks them ready for reading. + * + * @param buffer Circular buffer + * @param src Source buffer + * @param len Number of bytes in source buffer + * @return true if bytes copied, false if there was insufficient space + */ +static __inline__ __attribute__((always_inline)) bool TPCircularBufferProduceBytes(TPCircularBuffer *buffer, const void* src, uint32_t len) { + uint32_t space; + void *ptr = TPCircularBufferHead(buffer, &space); + if ( space < len ) return false; + memcpy(ptr, src, len); + TPCircularBufferProduce(buffer, len); + return true; +} + +/*! + * Deprecated method + */ +static __inline__ __attribute__((always_inline)) __deprecated_msg("use TPCircularBufferSetAtomic(false) and TPCircularBufferConsume instead") +void TPCircularBufferConsumeNoBarrier(TPCircularBuffer *buffer, uint32_t amount) { + buffer->tail = (buffer->tail + amount) % buffer->length; + buffer->fillCount -= amount; + assert(buffer->fillCount >= 0); +} + +/*! 
+ * Deprecated method + */ +static __inline__ __attribute__((always_inline)) __deprecated_msg("use TPCircularBufferSetAtomic(false) and TPCircularBufferProduce instead") +void TPCircularBufferProduceNoBarrier(TPCircularBuffer *buffer, uint32_t amount) { + buffer->head = (buffer->head + amount) % buffer->length; + buffer->fillCount += amount; + assert(buffer->fillCount <= buffer->length); +} + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/framework/Source/TPCircularBuffer.m b/framework/Source/TPCircularBuffer.m new file mode 100755 index 00000000..a3e6b3c5 --- /dev/null +++ b/framework/Source/TPCircularBuffer.m @@ -0,0 +1,149 @@ +// +// TPCircularBuffer.c +// Circular/Ring buffer implementation +// +// https://github.com/michaeltyson/TPCircularBuffer +// +// Created by Michael Tyson on 10/12/2011. +// +// Copyright (C) 2012-2013 A Tasty Pixel +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// +// 3. This notice may not be removed or altered from any source distribution. +// + +#include "TPCircularBuffer.h" +#include +#include +#include + +#define reportResult(result,operation) (_reportResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__)) +static inline bool _reportResult(kern_return_t result, const char *operation, const char* file, int line) { + if ( result != ERR_SUCCESS ) { + printf("%s:%d: %s: %s\n", file, line, operation, mach_error_string(result)); + return false; + } + return true; +} + +bool _TPCircularBufferInit(TPCircularBuffer *buffer, uint32_t length, size_t structSize) { + + assert(length > 0); + + if ( structSize != sizeof(TPCircularBuffer) ) { + fprintf(stderr, "TPCircularBuffer: Header version mismatch. Check for old versions of TPCircularBuffer in your project\n"); + abort(); + } + + // Keep trying until we get our buffer, needed to handle race conditions + int retries = 3; + while ( true ) { + + buffer->length = (uint32_t)round_page(length); // We need whole page sizes + + // Temporarily allocate twice the length, so we have the contiguous address space to + // support a second instance of the buffer directly after + vm_address_t bufferAddress; + kern_return_t result = vm_allocate(mach_task_self(), + &bufferAddress, + buffer->length * 2, + VM_FLAGS_ANYWHERE); // allocate anywhere it'll fit + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Buffer allocation"); + return false; + } + // Try again if we fail + continue; + } + + // Now replace the second half of the allocation with a virtual copy of the first half. Deallocate the second half... 
+ result = vm_deallocate(mach_task_self(), + bufferAddress + buffer->length, + buffer->length); + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Buffer deallocation"); + return false; + } + // If this fails somehow, deallocate the whole region and try again + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + // Re-map the buffer to the address space immediately after the buffer + vm_address_t virtualAddress = bufferAddress + buffer->length; + vm_prot_t cur_prot, max_prot; + result = vm_remap(mach_task_self(), + &virtualAddress, // mirror target + buffer->length, // size of mirror + 0, // auto alignment + 0, // force remapping to virtualAddress + mach_task_self(), // same task + bufferAddress, // mirror source + 0, // MAP READ-WRITE, NOT COPY + &cur_prot, // unused protection struct + &max_prot, // unused protection struct + VM_INHERIT_DEFAULT); + if ( result != ERR_SUCCESS ) { + if ( retries-- == 0 ) { + reportResult(result, "Remap buffer memory"); + return false; + } + // If this remap failed, we hit a race condition, so deallocate and try again + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + if ( virtualAddress != bufferAddress+buffer->length ) { + // If the memory is not contiguous, clean up both allocated buffers and try again + if ( retries-- == 0 ) { + printf("Couldn't map buffer memory to end of buffer\n"); + return false; + } + + vm_deallocate(mach_task_self(), virtualAddress, buffer->length); + vm_deallocate(mach_task_self(), bufferAddress, buffer->length); + continue; + } + + buffer->buffer = (void*)bufferAddress; + buffer->fillCount = 0; + buffer->head = buffer->tail = 0; + buffer->atomic = true; + + return true; + } + return false; +} + +void TPCircularBufferCleanup(TPCircularBuffer *buffer) { + vm_deallocate(mach_task_self(), (vm_address_t)buffer->buffer, buffer->length * 2); + memset(buffer, 0, sizeof(TPCircularBuffer)); +} + +void TPCircularBufferClear(TPCircularBuffer *buffer) { + uint32_t fillCount; + if ( TPCircularBufferTail(buffer, &fillCount) ) { + TPCircularBufferConsume(buffer, fillCount); + } +} + +void TPCircularBufferSetAtomic(TPCircularBuffer *buffer, bool atomic) { + buffer->atomic = atomic; +} diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 0dfa6e61..59cf0e55 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -1,7 +1,7 @@ import Foundation import AVFoundation -public protocol CameraDelegate { +public protocol CameraDelegate: class { func didCaptureBuffer(_ sampleBuffer: CMSampleBuffer) } public enum PhysicalCameraLocation { @@ -51,32 +51,32 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer public var audioEncodingTarget:AudioEncodingTarget? { didSet { guard let audioEncodingTarget = audioEncodingTarget else { - self.removeAudioInputsAndOutputs() return } do { try self.addAudioInputsAndOutputs() audioEncodingTarget.activateAudioTrack() } catch { - fatalError("ERROR: Could not connect audio target with error: \(error)") + print("ERROR: Could not connect audio target with error: \(error)") } } } public let targets = TargetContainer() - public var delegate: CameraDelegate? + public weak var delegate: CameraDelegate? public let captureSession:AVCaptureSession public let inputCamera:AVCaptureDevice! - let videoInput:AVCaptureDeviceInput! - let videoOutput:AVCaptureVideoDataOutput! - var microphone:AVCaptureDevice? 
- var audioInput:AVCaptureDeviceInput? - var audioOutput:AVCaptureAudioDataOutput? + public let videoInput:AVCaptureDeviceInput! + public let videoOutput:AVCaptureVideoDataOutput! + public var microphone:AVCaptureDevice? + public var audioInput:AVCaptureDeviceInput? + public var audioOutput:AVCaptureAudioDataOutput? var supportsFullYUVRange:Bool = false let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? let frameRenderingSemaphore = DispatchSemaphore(value:1) + let cameraProcessingQueue = DispatchQueue.global() let audioProcessingQueue = DispatchQueue.global() @@ -85,6 +85,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var totalFrameTimeDuringCapture:Double = 0.0 var framesSinceLastCheck = 0 var lastCheckTime = CFAbsoluteTimeGetCurrent() + + var captureSessionRestartAttempts = 0 public init(sessionPreset:AVCaptureSession.Preset, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true) throws { self.location = location @@ -160,25 +162,51 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } } - + if captureConnection.isVideoOrientationSupported { captureConnection.videoOrientation = .portrait } + captureSession.commitConfiguration() super.init() videoOutput.setSampleBufferDelegate(self, queue:cameraProcessingQueue) + + NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionRuntimeError(note:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(Camera.captureSessionDidStartRunning(note:)), name: NSNotification.Name.AVCaptureSessionDidStartRunning, object: nil) } deinit { + let captureSession = self.captureSession + DispatchQueue.global().async { + if (captureSession.isRunning) { + // Don't call this on the sharedImageProcessingContext otherwise you may get a deadlock + // since this waits for the captureOutput() delegate call to finish. 
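
A note on the Camera changes above: setting `audioEncodingTarget` to nil no longer tears down the audio inputs and outputs automatically, and `addAudioInputsAndOutputs()` / `removeAudioInputsAndOutputs()` are now public. A short sketch of how a caller might manage that explicitly; the explicit teardown step is an assumption based on the new visibility, not something this patch documents, and `camera` / `movieOutput` are assumed to exist already:

```swift
// `camera` is an existing GPUImage Camera; `movieOutput` is a MovieOutput that
// should also receive the microphone audio.
camera.audioEncodingTarget = movieOutput

// ... record ...

// Clearing the target no longer removes the audio capture chain by itself,
// so release it explicitly once audio is no longer needed (assumed usage).
camera.audioEncodingTarget = nil
camera.removeAudioInputsAndOutputs()
```
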
+ captureSession.stopRunning() + } + } + sharedImageProcessingContext.runOperationSynchronously{ - self.stopCapture() self.videoOutput?.setSampleBufferDelegate(nil, queue:nil) self.audioOutput?.setSampleBufferDelegate(nil, queue:nil) } } + @objc func captureSessionRuntimeError(note: NSNotification) { + print("ERROR: Capture session runtime error: \(String(describing: note.userInfo))") + if(self.captureSessionRestartAttempts < 1) { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { + self.startCapture() + } + self.captureSessionRestartAttempts += 1 + } + } + + @objc func captureSessionDidStartRunning(note: NSNotification) { + self.captureSessionRestartAttempts = 0 + } + public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { guard (output != audioOutput) else { self.processAudioSampleBuffer(sampleBuffer) @@ -300,7 +328,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer // MARK: - // MARK: Audio processing - func addAudioInputsAndOutputs() throws { + public func addAudioInputsAndOutputs() throws { guard (self.audioOutput == nil) else { return } captureSession.beginConfiguration() @@ -324,7 +352,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer audioOutput.setSampleBufferDelegate(self, queue:audioProcessingQueue) } - func removeAudioInputsAndOutputs() { + public func removeAudioInputsAndOutputs() { guard (audioOutput != nil) else { return } captureSession.beginConfiguration() @@ -337,6 +365,6 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { - self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: false) } } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift old mode 100644 new mode 100755 index d4e98760..ada8ab22 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -1,140 +1,352 @@ import AVFoundation +public protocol MovieInputDelegate: class { + func didFinishMovie() +} + public class MovieInput: ImageSource { public let targets = TargetContainer() public var runBenchmark = false + public weak var delegate: MovieInputDelegate? + + public var audioEncodingTarget:AudioEncodingTarget? { + didSet { + guard let audioEncodingTarget = audioEncodingTarget else { + return + } + audioEncodingTarget.activateAudioTrack() + + // Call enableSynchronizedEncoding() again if they didn't set the audioEncodingTarget before setting synchronizedMovieOutput. + if(synchronizedMovieOutput != nil) { self.enableSynchronizedEncoding() } + } + } + let yuvConversionShader:ShaderProgram let asset:AVAsset - let assetReader:AVAssetReader - let playAtActualSpeed:Bool - let loop:Bool - var videoEncodingIsFinished = false - var previousFrameTime = kCMTimeZero - var previousActualFrameTime = CFAbsoluteTimeGetCurrent() - - var numberOfFramesCaptured = 0 + let videoComposition:AVVideoComposition? + var playAtActualSpeed:Bool + + // Time in the video where it should start. + var requestedStartTime:CMTime? + // Time in the video where it started. + var startTime:CMTime? + // Time according to device clock when the video started. + var actualStartTime:DispatchTime? + // Last sample time that played. + private(set) public var currentTime:CMTime? + + public var loop:Bool + + // Called after the video finishes. 
Not called when cancel() or pause() is called. + public var completion: (() -> Void)? + // Progress block of the video with a paramater value of 0-1. + // Can be used to check video encoding progress. Not called from main thread. + public var progress: ((Double) -> Void)? + + public var synchronizedMovieOutput:MovieOutput? { + didSet { + self.enableSynchronizedEncoding() + } + } + public var synchronizedEncodingDebug = false { + didSet { + self.synchronizedMovieOutput?.synchronizedEncodingDebug = self.synchronizedEncodingDebug + } + } + let conditionLock = NSCondition() + var readingShouldWait = false + var videoInputStatusObserver:NSKeyValueObservation? + var audioInputStatusObserver:NSKeyValueObservation? + + public var useRealtimeThreads = false + var timebaseInfo = mach_timebase_info_data_t() + var currentThread:Thread? + + var totalFramesSent = 0 var totalFrameTimeDuringCapture:Double = 0.0 - - // TODO: Add movie reader synchronization + + var audioSettings:[String:Any]? + + var movieFramebuffer:Framebuffer? + // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works - public init(asset:AVAsset, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public init(asset:AVAsset, videoComposition: AVVideoComposition?, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? = nil) throws { self.asset = asset + self.videoComposition = videoComposition self.playAtActualSpeed = playAtActualSpeed self.loop = loop self.yuvConversionShader = crashOnShaderCompileFailure("MovieInput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - - assetReader = try AVAssetReader(asset:self.asset) - - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaType.video)[0], outputSettings:outputSettings) - readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader.add(readerVideoTrackOutput) - // TODO: Audio here + self.audioSettings = audioSettings } - public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false, audioSettings:[String:Any]? = nil) throws { let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] let inputAsset = AVURLAsset(url:url, options:inputOptions) - try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop) + try self.init(asset:inputAsset, videoComposition: nil, playAtActualSpeed:playAtActualSpeed, loop:loop, audioSettings:audioSettings) + } + + deinit { + self.movieFramebuffer?.unlock() + self.cancel() + + self.videoInputStatusObserver?.invalidate() + self.audioInputStatusObserver?.invalidate() } // MARK: - // MARK: Playback control - - public func start() { - asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ - DispatchQueue.global().async(execute: { - guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } - - guard self.assetReader.startReading() else { - print("Couldn't start reading") - return - } - - var readerVideoTrackOutput:AVAssetReaderOutput? 
= nil; - - for output in self.assetReader.outputs { - if(output.mediaType == AVMediaType.video.rawValue) { - readerVideoTrackOutput = output; - } - } - - while (self.assetReader.status == .reading) { - self.readNextVideoFrame(from:readerVideoTrackOutput!) - } - - if (self.assetReader.status == .completed) { - self.assetReader.cancelReading() - - if (self.loop) { - // TODO: Restart movie processing - } else { - self.endProcessing() - } - } - }) - }) + + public func start(atTime: CMTime) { + self.requestedStartTime = atTime + self.start() + } + + @objc public func start() { + if let currentThread = self.currentThread, + currentThread.isExecuting, + !currentThread.isCancelled { + // If the current thread is running and has not been cancelled, bail. + return + } + // Cancel the thread just to be safe in the event we somehow get here with the thread still running. + self.currentThread?.cancel() + + self.currentThread = Thread(target: self, selector: #selector(beginReading), object: nil) + self.currentThread?.start() } public func cancel() { - assetReader.cancelReading() - self.endProcessing() + self.currentThread?.cancel() + self.currentThread = nil } - func endProcessing() { - + public func pause() { + self.cancel() + self.requestedStartTime = self.currentTime } // MARK: - // MARK: Internal processing functions - func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { - if ((assetReader.status == .reading) && !videoEncodingIsFinished) { - if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { - if (playAtActualSpeed) { - // Do this outside of the video processing queue to not slow that down while waiting - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - let differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime) - let currentActualTime = CFAbsoluteTimeGetCurrent() - - let frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame) - let actualTimeDifference = currentActualTime - previousActualFrameTime - - if (frameTimeDifference > actualTimeDifference) { - usleep(UInt32(round(1000000.0 * (frameTimeDifference - actualTimeDifference)))) - } - - previousFrameTime = currentSampleTime - previousActualFrameTime = CFAbsoluteTimeGetCurrent() + func createReader() -> AVAssetReader? 
+ { + do { + let outputSettings:[String:AnyObject] = + [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + + let assetReader = try AVAssetReader.init(asset: self.asset) + + if(self.videoComposition == nil) { + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: self.asset.tracks(withMediaType: .video).first!, outputSettings:outputSettings) + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerVideoTrackOutput) + } + else { + let readerVideoTrackOutput = AVAssetReaderVideoCompositionOutput(videoTracks: self.asset.tracks(withMediaType: .video), videoSettings: outputSettings) + readerVideoTrackOutput.videoComposition = self.videoComposition + readerVideoTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerVideoTrackOutput) + } + + if let audioTrack = self.asset.tracks(withMediaType: .audio).first, + let _ = self.audioEncodingTarget { + let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings) + readerAudioTrackOutput.alwaysCopiesSampleData = false + assetReader.add(readerAudioTrackOutput) + } + + self.startTime = self.requestedStartTime + if let requestedStartTime = self.requestedStartTime { + assetReader.timeRange = CMTimeRange(start: requestedStartTime, duration: kCMTimePositiveInfinity) + } + self.requestedStartTime = nil + self.currentTime = nil + self.actualStartTime = nil + + return assetReader + } catch { + print("ERROR: Unable to create asset reader: \(error)") + } + return nil + } + + @objc func beginReading() { + let thread = Thread.current + + mach_timebase_info(&timebaseInfo) + + if(useRealtimeThreads) { + self.configureThread() + } + else if(playAtActualSpeed) { + thread.qualityOfService = .userInitiated + } + else { + // This includes synchronized encoding since the above vars will be disabled for it. + thread.qualityOfService = .default + } + + guard let assetReader = self.createReader() else { + return // A return statement in this frame will end thread execution. + } + + do { + try NSObject.catchException { + guard assetReader.startReading() else { + print("ERROR: Unable to start reading: \(String(describing: assetReader.error))") + return } - - sharedImageProcessingContext.runOperationSynchronously{ - self.process(movieFrame:sampleBuffer) - CMSampleBufferInvalidate(sampleBuffer) + } + } + catch { + print("ERROR: Unable to start reading: \(error)") + return + } + + var readerVideoTrackOutput:AVAssetReaderOutput? = nil + var readerAudioTrackOutput:AVAssetReaderOutput? = nil + + for output in assetReader.outputs { + if(output.mediaType == AVMediaType.video.rawValue) { + readerVideoTrackOutput = output + } + if(output.mediaType == AVMediaType.audio.rawValue) { + readerAudioTrackOutput = output + } + } + + while(assetReader.status == .reading) { + if(thread.isCancelled) { break } + + if let movieOutput = self.synchronizedMovieOutput { + self.conditionLock.lock() + if(self.readingShouldWait) { + self.synchronizedEncodingDebugPrint("Disable reading") + self.conditionLock.wait() + self.synchronizedEncodingDebugPrint("Enable reading") } - } else { - if (!loop) { - videoEncodingIsFinished = true - if (videoEncodingIsFinished) { - self.endProcessing() + self.conditionLock.unlock() + + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData) { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + } + if(movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? 
false) { + if let readerAudioTrackOutput = readerAudioTrackOutput { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) } } } + else { + self.readNextVideoFrame(with: assetReader, from: readerVideoTrackOutput!) + if let readerAudioTrackOutput = readerAudioTrackOutput, + self.audioEncodingTarget?.readyForNextAudioBuffer() ?? true { + self.readNextAudioSample(with: assetReader, from: readerAudioTrackOutput) + } + } + } + + assetReader.cancelReading() + + // Since only the main thread will cancel and create threads jump onto it to prevent + // the current thread from being cancelled in between the below if statement and creating the new thread. + DispatchQueue.main.async { + // Start the video over so long as it wasn't cancelled. + if (self.loop && !thread.isCancelled) { + self.currentThread = Thread(target: self, selector: #selector(self.beginReading), object: nil) + self.currentThread?.start() + } + else { + self.delegate?.didFinishMovie() + self.completion?() + + self.synchronizedEncodingDebugPrint("MovieInput finished reading") + self.synchronizedEncodingDebugPrint("MovieInput total frames sent: \(self.totalFramesSent)") + } + } + } + + func readNextVideoFrame(with assetReader: AVAssetReader, from videoTrackOutput:AVAssetReaderOutput) { + guard let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() else { + if let movieOutput = self.synchronizedMovieOutput { + movieOutput.movieProcessingContext.runOperationAsynchronously { + // Documentation: "Clients that are monitoring each input's readyForMoreMediaData value must call markAsFinished on an input when they are done + // appending buffers to it. This is necessary to prevent other inputs from stalling, as they may otherwise wait forever + // for that input's media data, attempting to complete the ideal interleaving pattern." + movieOutput.videoEncodingIsFinished = true + movieOutput.assetWriterVideoInput.markAsFinished() + } + } + return + } + + + self.synchronizedEncodingDebugPrint("Process frame input") + + var currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + var duration = self.asset.duration // Only used for the progress block so its acuracy is not critical + + self.currentTime = currentSampleTime + + if let startTime = self.startTime { + // Make sure our samples start at kCMTimeZero if the video was started midway. + currentSampleTime = CMTimeSubtract(currentSampleTime, startTime) + duration = CMTimeSubtract(duration, startTime) + } + + if (self.playAtActualSpeed) { + let currentSampleTimeNanoseconds = Int64(currentSampleTime.seconds * 1_000_000_000) + let currentActualTime = DispatchTime.now() + + if(self.actualStartTime == nil) { self.actualStartTime = currentActualTime } + + // Determine how much time we need to wait in order to display the frame at the right currentActualTime such that it will match the currentSampleTime. + // The reason we subtract the actualStartTime from the currentActualTime is so the actual time starts at zero relative to the video start. 
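
The pacing logic above keeps `playAtActualSpeed` playback locked to the sample timeline rather than to inter-frame deltas. From the caller's side, the new playback controls introduced in this file (`start(atTime:)`, `pause()`, `transmitPreviousFrame()`, `progress`, `completion`) can be driven roughly as follows; a sketch that assumes `movieInput` is already connected to a filter chain and `saturationFilter` is a SaturationAdjustment somewhere in that chain:

```swift
import AVFoundation

// Report progress (0...1); note this is not called on the main thread.
movieInput.progress = { fractionComplete in
    print("Progress: \(Int(fractionComplete * 100))%")
}
movieInput.completion = {
    print("Playback finished")
}

// Start playback five seconds into the movie.
movieInput.start(atTime: CMTime(seconds: 5, preferredTimescale: 600))

// Later: pause, tweak a filter, and re-push the last frame so the change shows
// up while paused, then resume from the last played sample time.
movieInput.pause()
saturationFilter.saturation = 0.0
movieInput.transmitPreviousFrame()
movieInput.start()
```
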
+ let delay = currentSampleTimeNanoseconds - Int64(currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds) + + //print("currentSampleTime: \(currentSampleTimeNanoseconds) currentTime: \((currentActualTime.uptimeNanoseconds-self.actualStartTime!.uptimeNanoseconds)) delay: \(delay)") + + if(delay > 0) { + mach_wait_until(mach_absolute_time()+self.nanosToAbs(UInt64(delay))) + } + else { + // This only happens if we aren't given enough processing time for playback + // but is necessary otherwise the playback will never catch up to its timeline. + // If we weren't adhearing to the sample timline and used the old timing method + // the video would still lag during an event like this. + //print("Dropping frame in order to catch up") + return + } + } + + self.progress?(currentSampleTime.seconds/duration.seconds) + + sharedImageProcessingContext.runOperationSynchronously{ + self.process(movieFrame:sampleBuffer) + CMSampleBufferInvalidate(sampleBuffer) } -// else if (synchronizedMovieWriter != nil) { -// if (assetReader.status == .Completed) { -// self.endProcessing() -// } -// } - + } + + func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput:AVAssetReaderOutput) { + guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer() else { + if let movieOutput = self.synchronizedMovieOutput { + movieOutput.movieProcessingContext.runOperationAsynchronously { + movieOutput.audioEncodingIsFinished = true + movieOutput.assetWriterAudioInput?.markAsFinished() + } + } + return + } + + self.synchronizedEncodingDebugPrint("Process audio sample input") + + self.audioEncodingTarget?.processAudioBuffer(sampleBuffer, shouldInvalidateSampleWhenDone: true) } func process(movieFrame frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! - -// processingFrameTime = currentSampleTime + self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } @@ -142,28 +354,31 @@ public class MovieInput: ImageSource { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - + let conversionMatrix = colorConversionMatrix601FullRangeDefault // TODO: Get this color query working -// if let colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) { -// if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) { -// _preferredConversion = kColorConversion601FullRange -// } else { -// _preferredConversion = kColorConversion709 -// } -// } else { -// _preferredConversion = kColorConversion601FullRange -// } + // if let colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, nil) { + // if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == .EqualTo) { + // _preferredConversion = kColorConversion601FullRange + // } else { + // _preferredConversion = kColorConversion709 + // } + // } else { + // _preferredConversion = kColorConversion601FullRange + // } let startTime = CFAbsoluteTimeGetCurrent() - + var luminanceGLTexture: CVOpenGLESTexture? 
glActiveTexture(GLenum(GL_TEXTURE0)) let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture) - assert(luminanceGLTextureResult == kCVReturnSuccess && luminanceGLTexture != nil) + if(luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil) { + print("Could not create LuminanceGLTexture") + return + } let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!) @@ -175,20 +390,20 @@ public class MovieInput: ImageSource { do { luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture) } catch { - fatalError("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return } -// luminanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache - luminanceFramebuffer.lock() - - var chrominanceGLTexture: CVOpenGLESTexture? glActiveTexture(GLenum(GL_TEXTURE1)) let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture) - assert(chrominanceGLTextureResult == kCVReturnSuccess && chrominanceGLTexture != nil) + if(chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil) { + print("Could not create ChrominanceGLTexture") + return + } let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!) 
@@ -200,30 +415,143 @@ public class MovieInput: ImageSource { do { chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture) } catch { - fatalError("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + print("Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)") + return } -// chrominanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache - chrominanceFramebuffer.lock() - + self.movieFramebuffer?.unlock() let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + movieFramebuffer.lock() convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - + movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) + self.movieFramebuffer = movieFramebuffer + self.updateTargetsWithFramebuffer(movieFramebuffer) + if(self.runBenchmark || self.synchronizedEncodingDebug) { + self.totalFramesSent += 1 + } + if self.runBenchmark { let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime) - self.numberOfFramesCaptured += 1 self.totalFrameTimeDuringCapture += currentFrameTime - print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.numberOfFramesCaptured)) ms") + print("Average frame time : \(1000.0 * self.totalFrameTimeDuringCapture / Double(self.totalFramesSent)) ms") print("Current frame time : \(1000.0 * currentFrameTime) ms") } } - + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } + + public func transmitPreviousFrame() { + sharedImageProcessingContext.runOperationAsynchronously { + if let movieFramebuffer = self.movieFramebuffer { + self.updateTargetsWithFramebuffer(movieFramebuffer) + } + } + } + + // MARK: - + // MARK: Synchronized encoding + + func enableSynchronizedEncoding() { + self.synchronizedMovieOutput?.encodingLiveVideo = false + self.synchronizedMovieOutput?.synchronizedEncodingDebug = self.synchronizedEncodingDebug + self.playAtActualSpeed = false + self.loop = false + + // Subscribe to isReadyForMoreMediaData changes + self.setupObservers() + // Set the intial state of the lock + self.updateLock() + } + + func setupObservers() { + self.videoInputStatusObserver?.invalidate() + self.audioInputStatusObserver?.invalidate() + + guard let movieOutput = self.synchronizedMovieOutput else { return } + + self.videoInputStatusObserver = movieOutput.assetWriterVideoInput.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] (assetWriterVideoInput, change) in + guard let weakSelf = self else { return } + weakSelf.updateLock() + } + self.audioInputStatusObserver = movieOutput.assetWriterAudioInput?.observe(\.isReadyForMoreMediaData, options: [.new, .old]) { [weak self] (assetWriterAudioInput, change) in + guard let weakSelf = self else { return } + weakSelf.updateLock() + } + } + + func updateLock() { + guard let movieOutput = self.synchronizedMovieOutput else { return } + + self.conditionLock.lock() + 
// Allow reading if either input is able to accept data, prevent reading if both inputs are unable to accept data. + if(movieOutput.assetWriterVideoInput.isReadyForMoreMediaData || movieOutput.assetWriterAudioInput?.isReadyForMoreMediaData ?? false) { + self.readingShouldWait = false + self.conditionLock.signal() + } + else { + self.readingShouldWait = true + } + self.conditionLock.unlock() + } + + // MARK: - + // MARK: Thread configuration + + func configureThread() { + let clock2abs = Double(timebaseInfo.denom) / Double(timebaseInfo.numer) * Double(NSEC_PER_MSEC) + + // http://docs.huihoo.com/darwin/kernel-programming-guide/scheduler/chapter_8_section_4.html + // + // To see the impact of adjusting these values, uncomment the print statement above mach_wait_until() in self.readNextVideoFrame() + // + // Setup for 5 ms of work. + // The anticpated frame render duration is in the 1-3 ms range on an iPhone 6 for 1080p without filters and 1-7 ms range with filters + // If the render duration is allowed to exceed 16ms (the duration of a frame in 60fps video) + // the 60fps video will no longer be playing in real time. + let computation = UInt32(5 * clock2abs) + // Tell the scheduler the next 20 ms of work needs to be done as soon as possible. + let period = UInt32(0 * clock2abs) + // According to the above scheduling chapter this constraint only appears relevant + // if preemtible is set to true and the period is not 0. If this is wrong, please let me know. + let constraint = UInt32(5 * clock2abs) + + //print("period: \(period) computation: \(computation) constraint: \(constraint)") + + let THREAD_TIME_CONSTRAINT_POLICY_COUNT = mach_msg_type_number_t(MemoryLayout.size / MemoryLayout.size) + + var policy = thread_time_constraint_policy() + var ret: Int32 + let thread: thread_port_t = pthread_mach_thread_np(pthread_self()) + + policy.period = period + policy.computation = computation + policy.constraint = constraint + policy.preemptible = 0 + + ret = withUnsafeMutablePointer(to: &policy) { + $0.withMemoryRebound(to: integer_t.self, capacity: Int(THREAD_TIME_CONSTRAINT_POLICY_COUNT)) { + thread_policy_set(thread, UInt32(THREAD_TIME_CONSTRAINT_POLICY), $0, THREAD_TIME_CONSTRAINT_POLICY_COUNT) + } + } + + if ret != KERN_SUCCESS { + mach_error("thread_policy_set:", ret) + print("Unable to configure thread") + } + } + + func nanosToAbs(_ nanos: UInt64) -> UInt64 { + return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer) + } + + func synchronizedEncodingDebugPrint(_ string: String) { + if(synchronizedMovieOutput != nil && synchronizedEncodingDebug) { print(string) } + } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift old mode 100644 new mode 100755 index 5ef92250..9207f645 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -2,225 +2,368 @@ import AVFoundation public protocol AudioEncodingTarget { func activateAudioTrack() - func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) + func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) + // Note: This is not used for synchronized encoding. + func readyForNextAudioBuffer() -> Bool +} + +public enum MovieOutputError: Error, CustomStringConvertible { + case startWritingError(assetWriterError: Error?) 
+ case pixelBufferPoolNilError + + public var errorDescription: String { + switch self { + case .startWritingError(let assetWriterError): + return "Could not start asset writer: \(String(describing: assetWriterError))" + case .pixelBufferPoolNilError: + return "Asset writer pixel buffer pool was nil. Make sure that your output file doesn't already exist." + } + } + + public var description: String { + return "<\(type(of: self)): errorDescription = \(self.errorDescription)>" + } } public class MovieOutput: ImageConsumer, AudioEncodingTarget { + public let sources = SourceContainer() public let maximumInputs:UInt = 1 let assetWriter:AVAssetWriter let assetWriterVideoInput:AVAssetWriterInput var assetWriterAudioInput:AVAssetWriterInput? - + let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor let size:Size let colorSwizzlingShader:ShaderProgram private var isRecording = false - private var videoEncodingIsFinished = false - private var audioEncodingIsFinished = false - private var startTime:CMTime? - private var previousFrameTime = kCMTimeNegativeInfinity - private var previousAudioTime = kCMTimeNegativeInfinity - private var encodingLiveVideo:Bool + var videoEncodingIsFinished = false + var audioEncodingIsFinished = false + private var previousFrameTime: CMTime? + var encodingLiveVideo:Bool { + didSet { + assetWriterVideoInput.expectsMediaDataInRealTime = encodingLiveVideo + assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo + } + } var pixelBuffer:CVPixelBuffer? = nil var renderFramebuffer:Framebuffer! - var transform:CGAffineTransform { - get { - return assetWriterVideoInput.transform - } - set { - assetWriterVideoInput.transform = transform - } - } + var audioSettings:[String:Any]? = nil + var audioSourceFormatHint:CMFormatDescription? + + let movieProcessingContext:OpenGLContext - public init(URL:Foundation.URL, size:Size, fileType:AVFileType = AVFileType.mov, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { - if sharedImageProcessingContext.supportsTextureCaches() { - self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader + var synchronizedEncodingDebug = false + var totalFramesAppended:Int = 0 + + public init(URL:Foundation.URL, size:Size, fileType:AVFileType = .mov, liveVideo:Bool = false, videoSettings:[String:Any]? = nil, videoNaturalTimeScale:CMTimeScale? = nil, audioSettings:[String:Any]? = nil, audioSourceFormatHint:CMFormatDescription? = nil) throws { + imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup + let movieProcessingContext = OpenGLContext() + + if movieProcessingContext.supportsTextureCaches() { + self.colorSwizzlingShader = movieProcessingContext.passthroughShader } else { - self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} + self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try movieProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)} } self.size = size + assetWriter = try AVAssetWriter(url:URL, fileType:fileType) - // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. 
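
The AudioEncodingTarget protocol at the top of this file now carries a `shouldInvalidateSampleWhenDone` flag and a `readyForNextAudioBuffer()` query; MovieOutput (and the new SpeakerOutput) are the real conformers. A minimal, purely hypothetical conformer to illustrate the shape of the protocol:

```swift
import AVFoundation

// Hypothetical sink that just logs incoming audio; real targets are MovieOutput and SpeakerOutput.
class AudioSampleLogger: AudioEncodingTarget {
    func activateAudioTrack() {
        // Called once by the source (Camera or MovieInput) when audio delivery is requested.
    }

    func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) {
        print("Received \(CMSampleBufferGetNumSamples(sampleBuffer)) audio samples")
        // The flag appears to hand ownership of the sample to the target; invalidate when asked.
        if shouldInvalidateSampleWhenDone {
            CMSampleBufferInvalidate(sampleBuffer)
        }
    }

    func readyForNextAudioBuffer() -> Bool {
        // Not consulted during synchronized encoding; returning true keeps the source flowing otherwise.
        return true
    }
}
```
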
- assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) - var localSettings:[String:AnyObject] - if let settings = settings { - localSettings = settings + var localSettings:[String:Any] + if let videoSettings = videoSettings { + localSettings = videoSettings } else { - localSettings = [String:AnyObject]() + localSettings = [String:Any]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? size.width + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? size.height + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 - assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaType.video, outputSettings:localSettings) + assetWriterVideoInput = AVAssetWriterInput(mediaType:.video, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo + + // You should provide a naturalTimeScale if you have one for the current media. + // Otherwise the asset writer will choose one for you and it may result in misaligned frames. + if let naturalTimeScale = videoNaturalTimeScale { + assetWriter.movieTimeScale = naturalTimeScale + assetWriterVideoInput.mediaTimeScale = naturalTimeScale + // This is set to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. + assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, naturalTimeScale) + } + else { + assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1, 1000) + } + encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. - let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(value:size.width), - kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] + let sourcePixelBufferAttributesDictionary:[String:Any] = [kCVPixelBufferPixelFormatTypeKey as String:Int32(kCVPixelFormatType_32BGRA), + kCVPixelBufferWidthKey as String:self.size.width, + kCVPixelBufferHeightKey as String:self.size.height] assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) assetWriter.add(assetWriterVideoInput) + + self.audioSettings = audioSettings + self.audioSourceFormatHint = audioSourceFormatHint + + self.movieProcessingContext = movieProcessingContext } - public func startRecording(transform:CGAffineTransform? = nil) { - if let transform = transform { - assetWriterVideoInput.transform = transform + public func startRecording(transform:CGAffineTransform? = nil, _ completionCallback:((_ started: Bool, _ error: Error?) -> Void)? = nil) { + // Don't do this work on the movieProcessingContext queue so we don't block it. + // If it does get blocked framebuffers will pile up from live video and after it is no longer blocked (this work has finished) + // we will be able to accept framebuffers but the ones that piled up will come in too quickly resulting in most being dropped. 
+        DispatchQueue.global(qos: .utility).async {
+            do {
+                if let transform = transform {
+                    self.assetWriterVideoInput.transform = transform
+                }
+
+                var success = false
+                try NSObject.catchException {
+                    success = self.assetWriter.startWriting()
+                }
+
+                if(!success) {
+                    throw MovieOutputError.startWritingError(assetWriterError: self.assetWriter.error)
+                }
+
+                guard self.assetWriterPixelBufferInput.pixelBufferPool != nil else {
+                    /*
+                     When the pixelBufferPool returns nil, check the following:
+                     1. the output file of the AVAssetWriter doesn't already exist.
+                     2. use the pixel buffer after calling startSessionAtTime: on the AVAssetWriter.
+                     3. the settings of AVAssetWriterInput and AVAssetWriterInputPixelBufferAdaptor are correct.
+                     4. the presentation times passed to appendPixelBuffer are not the same.
+                     https://stackoverflow.com/a/20110179/1275014
+                     */
+                    throw MovieOutputError.pixelBufferPoolNilError
+                }
+
+                self.isRecording = true
+
+                self.synchronizedEncodingDebugPrint("MovieOutput started writing")
+
+                completionCallback?(true, nil)
+            } catch {
+                self.assetWriter.cancelWriting()
+
+                completionCallback?(false, error)
+            }
         }
-        startTime = nil
-        sharedImageProcessingContext.runOperationSynchronously{
-            self.isRecording = self.assetWriter.startWriting()
+    }
+
+    public func finishRecording(_ completionCallback:(() -> Void)? = nil) {
+        movieProcessingContext.runOperationAsynchronously{
+            guard self.isRecording,
+                self.assetWriter.status == .writing else {
+                    completionCallback?()
+                    return
+            }
 
-            CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer)
+            self.audioEncodingIsFinished = true
+            self.videoEncodingIsFinished = true
 
-            /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion
-             * regardless of the kCVImageBufferYCbCrMatrixKey value.
-             * Tagging the resulting video file as BT.601, is the best option right now.
-             * Creating a proper BT.709 video is not possible at the moment.
-             */
-            CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate)
-            CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate)
-            CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate)
+            self.isRecording = false
 
-            let bufferSize = GLSize(self.size)
-            var cachedTextureRef:CVOpenGLESTexture? = nil
-            let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, self.pixelBuffer!, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef)
-            let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!)
+            if let lastFrame = self.previousFrameTime {
+                // Resolve black frames at the end. Without this the end timestamp of the session's samples could be either video or audio.
+                // Documentation: "You do not need to call this method; if you call finishWriting without
+                // calling this method, the session's effective end time will be the latest end timestamp of
+                // the session's samples (that is, no samples will be edited out at the end)."
+                self.assetWriter.endSession(atSourceTime: lastFrame)
+            }
 
-            self.renderFramebuffer = try!
Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + self.assetWriter.finishWriting { + completionCallback?() + } + self.synchronizedEncodingDebugPrint("MovieOutput finished writing") + self.synchronizedEncodingDebugPrint("MovieOutput total frames appended: \(self.totalFramesAppended)") } } - public func finishRecording(_ completionCallback:(() -> Void)? = nil) { - sharedImageProcessingContext.runOperationSynchronously{ - self.isRecording = false + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + glFinish(); + + let work = { + guard self.isRecording, + self.assetWriter.status == .writing, + !self.videoEncodingIsFinished else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping frame") + return + } - if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { - sharedImageProcessingContext.runOperationAsynchronously{ - completionCallback?() - } + // Ignore still images and other non-video updates (do I still need this?) + guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } + + // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case. + guard (frameTime != self.previousFrameTime) else { return } + + if (self.previousFrameTime == nil) { + // This resolves black frames at the beginning. Any samples recieved before this time will be edited out. + self.assetWriter.startSession(atSourceTime: frameTime) + } + + self.previousFrameTime = frameTime + + guard (self.assetWriterVideoInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { + print("Had to drop a frame at time \(frameTime)") return } - if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { - self.videoEncodingIsFinished = true - self.assetWriterVideoInput.markAsFinished() + + while(!self.assetWriterVideoInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.videoEncodingIsFinished) { + self.synchronizedEncodingDebugPrint("Video waiting...") + // Better to poll isReadyForMoreMediaData often since when it does become true + // we don't want to risk letting framebuffers pile up in between poll intervals. + usleep(100000) // 0.1 seconds } - if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { - self.audioEncodingIsFinished = true - self.assetWriterAudioInput?.markAsFinished() + + let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer) + guard ((self.pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { + print("WARNING: Unable to create pixel buffer, dropping frame") + return } - // Why can't I use ?? here for the callback? 
- if let callback = completionCallback { - self.assetWriter.finishWriting(completionHandler: callback) - } else { - self.assetWriter.finishWriting{} + do { + try self.renderIntoPixelBuffer(self.pixelBuffer!, framebuffer:framebuffer) + + self.synchronizedEncodingDebugPrint("Process frame output") + try NSObject.catchException { + if (!self.assetWriterPixelBufferInput.append(self.pixelBuffer!, withPresentationTime:frameTime)) { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(String(describing: self.assetWriter.error))") + } + } } - } - } - - public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { - defer { - framebuffer.unlock() - } - guard isRecording else { return } - // Ignore still images and other non-video updates (do I still need this?) - guard let frameTime = framebuffer.timingStyle.timestamp?.asCMTime else { return } - // If two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case - guard (frameTime != previousFrameTime) else { return } - - if (startTime == nil) { - if (assetWriter.status != .writing) { - assetWriter.startWriting() + catch { + print("WARNING: Trouble appending pixel buffer at time: \(frameTime) \(error)") } - assetWriter.startSession(atSourceTime: frameTime) - startTime = frameTime - } - - // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { - debugPrint("Had to drop a frame at time \(frameTime)") - return - } - - if !sharedImageProcessingContext.supportsTextureCaches() { - let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer) - guard ((pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return } + if(self.synchronizedEncodingDebug) { + self.totalFramesAppended += 1 + } + + CVPixelBufferUnlockBaseAddress(self.pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) + self.pixelBuffer = nil + + sharedImageProcessingContext.runOperationAsynchronously { + framebuffer.unlock() + } } - - renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) - - if (!assetWriterPixelBufferInput.append(pixelBuffer!, withPresentationTime:frameTime)) { - debugPrint("Problem appending pixel buffer at time: \(frameTime)") + + if(self.encodingLiveVideo) { + // This is done asynchronously to reduce the amount of work done on the sharedImageProcessingContext que + // so we can decrease the risk of frames being dropped by the camera. I believe it is unlikely a backlog of framebuffers will occur + // since the framebuffers come in much slower than during synchronized encoding. + movieProcessingContext.runOperationAsynchronously(work) } - - CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - if !sharedImageProcessingContext.supportsTextureCaches() { - pixelBuffer = nil + else { + // This is done synchronously to prevent framebuffers from piling up during synchronized encoding. + // If we don't force the sharedImageProcessingContext queue to wait for this frame to finish processing it will + // keep sending frames whenever isReadyForMoreMediaData = true but the movieProcessingContext queue would run when the system wants it to. 
+ movieProcessingContext.runOperationSynchronously(work) } } - func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { - if !sharedImageProcessingContext.supportsTextureCaches() { - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) - renderFramebuffer.lock() + func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) throws { + // Is this the first pixel buffer we have recieved? + if(renderFramebuffer == nil) { + CVBufferSetAttachment(pixelBuffer, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(pixelBuffer, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) } + let bufferSize = GLSize(self.size) + var cachedTextureRef:CVOpenGLESTexture? = nil + let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) + let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) + + renderFramebuffer = try Framebuffer(context:self.movieProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.black) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:movieProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)], context: movieProcessingContext) - if sharedImageProcessingContext.supportsTextureCaches() { + if movieProcessingContext.supportsTextureCaches() { glFinish() } else { glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) - renderFramebuffer.unlock() } } // MARK: - // MARK: Audio support - + public func activateAudioTrack() { - // TODO: Add ability to set custom output settings - assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaType.audio, outputSettings:nil) + assetWriterAudioInput = AVAssetWriterInput(mediaType:.audio, outputSettings:self.audioSettings, sourceFormatHint:self.audioSourceFormatHint) + assetWriter.add(assetWriterAudioInput!) 
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { - guard let assetWriterAudioInput = assetWriterAudioInput else { return } - - sharedImageProcessingContext.runOperationSynchronously{ - let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) - if (self.startTime == nil) { - if (self.assetWriter.status != .writing) { - self.assetWriter.startWriting() + public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer, shouldInvalidateSampleWhenDone:Bool) { + let work = { + defer { + if(shouldInvalidateSampleWhenDone) { + CMSampleBufferInvalidate(sampleBuffer) } - - self.assetWriter.startSession(atSourceTime: currentSampleTime) - self.startTime = currentSampleTime } - guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard self.isRecording, + self.assetWriter.status == .writing, + !self.audioEncodingIsFinished, + let assetWriterAudioInput = self.assetWriterAudioInput else { + self.synchronizedEncodingDebugPrint("Guard fell through, dropping audio sample") + return + } + + let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) + + guard (assetWriterAudioInput.isReadyForMoreMediaData || !self.encodingLiveVideo) else { + print("Had to drop a audio sample at time \(currentSampleTime)") return } - if (!assetWriterAudioInput.append(sampleBuffer)) { - print("Trouble appending audio sample buffer") + while(!assetWriterAudioInput.isReadyForMoreMediaData && !self.encodingLiveVideo && !self.audioEncodingIsFinished) { + self.synchronizedEncodingDebugPrint("Audio waiting...") + usleep(100000) + } + + self.synchronizedEncodingDebugPrint("Process audio sample output") + + do { + try NSObject.catchException { + if (!assetWriterAudioInput.append(sampleBuffer)) { + print("WARNING: Trouble appending audio sample buffer: \(String(describing: self.assetWriter.error))") + } + } + } + catch { + print("WARNING: Trouble appending audio sample buffer: \(error)") } } + + if(self.encodingLiveVideo) { + movieProcessingContext.runOperationAsynchronously(work) + } + else { + work() + } + } + + // Note: This is not used for synchronized encoding, only live video. + public func readyForNextAudioBuffer() -> Bool { + return true + } + + func synchronizedEncodingDebugPrint(_ string: String) { + if(synchronizedEncodingDebug && !encodingLiveVideo) { print(string) } } } diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index 9d45f95a..330b4ca4 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -4,6 +4,8 @@ import UIKit // TODO: Find a way to warn people if they set this after the context has been created var imageProcessingShareGroup:EAGLSharegroup? 
= nil +var dispatchQueKeyValueCounter = 81 + public class OpenGLContext: SerialDispatch { lazy var framebufferCache:FramebufferCache = { return FramebufferCache(context:self) @@ -25,14 +27,17 @@ public class OpenGLContext: SerialDispatch { }() - public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", qos: .userInitiated) public let dispatchQueueKey = DispatchSpecificKey() + public let dispatchQueueKeyValue: Int // MARK: - // MARK: Initialization and teardown init() { - serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) + dispatchQueueKeyValue = dispatchQueKeyValueCounter + serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:dispatchQueueKeyValue) + dispatchQueKeyValueCounter += 1 let generatedContext:EAGLContext? if let shareGroup = imageProcessingShareGroup { diff --git a/framework/Source/iOS/SpeakerOutput.swift b/framework/Source/iOS/SpeakerOutput.swift new file mode 100644 index 00000000..499d4763 --- /dev/null +++ b/framework/Source/iOS/SpeakerOutput.swift @@ -0,0 +1,333 @@ +// +// SpeakerOutput.swift +// GPUImage +// +// Rewritten by Josh Bernfeld on 3/1/18 +// and originally created by Uzi Refaeli on 3/9/13. +// Copyright (c) 2018 Brad Larson. All rights reserved. +// + +import Foundation +import AudioToolbox +import AVFoundation + +public class SpeakerOutput: AudioEncodingTarget { + + public var changesAudioSession = true + + public private(set) var isPlaying = false + + public var isMuted = false + + var hasBuffer = false + var isReadyForMoreMediaData = true { + willSet { + guard newValue else { return } + + // When we are ready to begin accepting new data check if we had something + // in the rescue buffer. If we did then move it to the main buffer. + self.copyRescueBufferContentsToCircularBuffer() + } + } + + var processingGraph:AUGraph? + var mixerUnit:AudioUnit? + + var firstBufferReached = false + + let outputBus:AudioUnitElement = 0 + let inputBus:AudioUnitElement = 1 + + let unitSize = UInt32(MemoryLayout.size) + let bufferUnit:UInt32 = 655360 + + var circularBuffer = TPCircularBuffer() + let circularBufferSize:UInt32 + + var rescueBuffer:UnsafeMutableRawPointer? + let rescueBufferSize:Int + var rescueBufferContentsSize:UInt32 = 0 + + + public init() { + circularBufferSize = bufferUnit * unitSize + rescueBufferSize = Int(bufferUnit / 2) + } + + deinit { + if let processingGraph = processingGraph { + DisposeAUGraph(processingGraph) + } + if let rescueBuffer = rescueBuffer { + free(rescueBuffer) + } + TPCircularBufferCleanup(&circularBuffer) + + self.cancel() + } + + // MARK: - + // MARK: Playback control + + public func start() { + if(isPlaying || processingGraph == nil) { return } + + AUGraphStart(processingGraph!) + + isPlaying = true + } + + public func cancel() { + if(!isPlaying || processingGraph == nil) { return } + + AUGraphStop(processingGraph!) 
+ + isPlaying = false + + rescueBufferContentsSize = 0 + TPCircularBufferClear(&circularBuffer) + hasBuffer = false + isReadyForMoreMediaData = true + } + + // MARK: - + // MARK: AudioEncodingTarget protocol + + public func activateAudioTrack() { + if(changesAudioSession) { + do { + try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryAmbient) + try AVAudioSession.sharedInstance().setActive(true) + } + catch { + print("ERROR: Unable to set audio session: \(error)") + } + } + + // Create a new AUGraph + NewAUGraph(&processingGraph) + + // AUNodes represent AudioUnits on the AUGraph and provide an + // easy means for connecting audioUnits together. + var outputNode = AUNode() + var mixerNode = AUNode() + + // Create AudioComponentDescriptions for the AUs we want in the graph mixer component + var mixerDesc = AudioComponentDescription() + mixerDesc.componentType = kAudioUnitType_Mixer + mixerDesc.componentSubType = kAudioUnitSubType_SpatialMixer + mixerDesc.componentFlags = 0 + mixerDesc.componentFlagsMask = 0 + mixerDesc.componentManufacturer = kAudioUnitManufacturer_Apple + + // Output component + var outputDesc = AudioComponentDescription() + outputDesc.componentType = kAudioUnitType_Output + outputDesc.componentSubType = kAudioUnitSubType_RemoteIO + outputDesc.componentFlags = 0 + outputDesc.componentFlagsMask = 0 + outputDesc.componentManufacturer = kAudioUnitManufacturer_Apple + + // Add nodes to the graph to hold our AudioUnits, + // You pass in a reference to the AudioComponentDescription + // and get back an AudioUnit + AUGraphAddNode(processingGraph!, &mixerDesc, &mixerNode) + AUGraphAddNode(processingGraph!, &outputDesc, &outputNode) + + // Now we can manage connections using nodes in the graph. + // Connect the mixer node's output to the output node's input + AUGraphConnectNodeInput(processingGraph!, mixerNode, 0, outputNode, 0) + + // Upon return from this function call, the audio units belonging to the graph are open but not initialized. Specifically, no resource allocation occurs. + AUGraphOpen(processingGraph!) + + // Get a link to the mixer AU so we can talk to it later + AUGraphNodeInfo(processingGraph!, mixerNode, nil, &mixerUnit) + + var elementCount:UInt32 = 1 + AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_ElementCount, kAudioUnitScope_Input, 0, &elementCount, UInt32(MemoryLayout.size)) + + // Set output callback, this is how audio sample data will be retrieved + var callbackStruct = AURenderCallbackStruct() + callbackStruct.inputProc = playbackCallback + callbackStruct.inputProcRefCon = bridgeObject(self) + AUGraphSetNodeInputCallback(processingGraph!, mixerNode, 0, &callbackStruct) + + // Describe the format, this will get adjusted when the first sample comes in. + var audioFormat = AudioStreamBasicDescription() + audioFormat.mFormatID = kAudioFormatLinearPCM + audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked + audioFormat.mSampleRate = 44100.0 + audioFormat.mReserved = 0 + + audioFormat.mBytesPerPacket = 2 + audioFormat.mFramesPerPacket = 1 + audioFormat.mBytesPerFrame = 2 + audioFormat.mChannelsPerFrame = 1 + audioFormat.mBitsPerChannel = 16 + + // Apply the format + AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, UInt32(MemoryLayout.size)) + + // Initialize the processing graph + AUGraphInitialize(processingGraph!) 
+
+        circularBuffer = TPCircularBuffer()
+
+        // Initialize the circular buffer
+        _TPCircularBufferInit(&circularBuffer, circularBufferSize, MemoryLayout<TPCircularBuffer>.size)
+
+        hasBuffer = false
+    }
+
+    public func processAudioBuffer(_ sampleBuffer: CMSampleBuffer, shouldInvalidateSampleWhenDone: Bool) {
+        defer {
+            if(shouldInvalidateSampleWhenDone) {
+                CMSampleBufferInvalidate(sampleBuffer)
+            }
+        }
+
+        if(!isReadyForMoreMediaData || !isPlaying) { return }
+
+        if(!firstBufferReached) {
+            firstBufferReached = true
+            // Get the format information of the sample
+            let desc = CMSampleBufferGetFormatDescription(sampleBuffer)!
+            let basicDesc = CMAudioFormatDescriptionGetStreamBasicDescription(desc)!
+
+            var oSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
+            // Retrieve the currently set audio format
+            var audioFormat = AudioStreamBasicDescription()
+            AudioUnitGetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, &oSize)
+
+            // Update the audio format with the information we have from the sample
+            audioFormat.mSampleRate = basicDesc.pointee.mSampleRate
+
+            audioFormat.mBytesPerPacket = basicDesc.pointee.mBytesPerPacket
+            audioFormat.mFramesPerPacket = basicDesc.pointee.mFramesPerPacket
+            audioFormat.mBytesPerFrame = basicDesc.pointee.mBytesPerFrame
+            audioFormat.mChannelsPerFrame = basicDesc.pointee.mChannelsPerFrame
+            audioFormat.mBitsPerChannel = basicDesc.pointee.mBitsPerChannel
+
+            // Apply the format
+            AudioUnitSetProperty(mixerUnit!, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, outputBus, &audioFormat, UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+            AUGraphUpdate(processingGraph!, nil)
+        }
+
+        // Populate an AudioBufferList with the sample
+        var audioBufferList = AudioBufferList()
+        var blockBuffer:CMBlockBuffer?
+        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, nil, &audioBufferList, MemoryLayout<AudioBufferList>.size, nil, nil, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer)
+
+        // This is actually doing audioBufferList.mBuffers[0]
+        // Since the struct has an array of length 1, the compiler is interpreting
+        // it as a single item array and not letting us use the above line.
+        // Since the array pointer points to the first item of the c array
+        // and all we want is the first item this is equally fine.
+        let audioBuffer = audioBufferList.mBuffers
+
+        // Place the AudioBufferList in the circular buffer
+        let sampleSize = UInt32(CMSampleBufferGetTotalSampleSize(sampleBuffer))
+        let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, audioBuffer.mData, sampleSize)
+
+        // The circular buffer has not been processed quickly enough and has filled up.
+        // Disable reading any further samples and save this last buffer so we don't lose it.
+        if(!didCopyBytes) {
+            //print("TPCircularBuffer limit reached: \(sampleSize) Bytes")
+
+            isReadyForMoreMediaData = false
+
+            self.writeToRescueBuffer(audioBuffer.mData, sampleSize)
+        }
+        else {
+            hasBuffer = true
+        }
+    }
+
+    public func readyForNextAudioBuffer() -> Bool {
+        return isReadyForMoreMediaData
+    }
+
+    // MARK: -
+    // MARK: Rescue buffer
+
+    func writeToRescueBuffer(_ src: UnsafeRawPointer!, _ size: UInt32) {
+        if(rescueBufferContentsSize > 0) {
+            print("WARNING: Writing to rescue buffer with contents already inside")
+        }
+
+        if(size > rescueBufferSize) {
+            print("WARNING: Unable to allocate enough space for rescue buffer, dropping audio sample")
+        }
+        else {
+            if(rescueBuffer == nil) {
+                rescueBuffer = malloc(rescueBufferSize)
+            }
+
+            rescueBufferContentsSize = size
+            memcpy(rescueBuffer!, src, Int(size))
+        }
+    }
+
+    func copyRescueBufferContentsToCircularBuffer() {
+        if(rescueBufferContentsSize > 0) {
+            let didCopyBytes = TPCircularBufferProduceBytes(&circularBuffer, rescueBuffer, rescueBufferContentsSize)
+            if(!didCopyBytes) {
+                print("WARNING: Unable to copy rescue buffer into main buffer, dropping audio sample")
+            }
+            rescueBufferContentsSize = 0
+        }
+    }
+}
+
+func playbackCallback(
+    inRefCon:UnsafeMutableRawPointer,
+    ioActionFlags:UnsafeMutablePointer<AudioUnitRenderActionFlags>,
+    inTimeStamp:UnsafePointer<AudioTimeStamp>,
+    inBusNumber:UInt32,
+    inNumberFrames:UInt32,
+    ioData:UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
+
+    let audioBuffer = ioData!.pointee.mBuffers
+    let numberOfChannels = audioBuffer.mNumberChannels
+    let outSamples = audioBuffer.mData
+
+    // Zero-out all of the output samples first
+    memset(outSamples, 0, Int(audioBuffer.mDataByteSize))
+
+    let p = bridgeRawPointer(inRefCon) as! SpeakerOutput
+
+    if(p.hasBuffer && p.isPlaying) {
+        var availableBytes:UInt32 = 0
+        let bufferTail = TPCircularBufferTail(&p.circularBuffer, &availableBytes)
+
+        let requestedBytesSize = inNumberFrames * p.unitSize * numberOfChannels
+
+        let bytesToRead = min(availableBytes, requestedBytesSize)
+        if(!p.isMuted) {
+            // Copy the bytes from the circular buffer into outSamples
+            memcpy(outSamples, bufferTail, Int(bytesToRead))
+        }
+        // Clear what we just read out of the circular buffer
+        TPCircularBufferConsume(&p.circularBuffer, bytesToRead)
+
+        if(availableBytes <= requestedBytesSize*2) {
+            p.isReadyForMoreMediaData = true
+        }
+
+        if(availableBytes <= requestedBytesSize) {
+            p.hasBuffer = false
+        }
+    }
+
+    return noErr
+}
+
+func bridgeObject(_ obj : AnyObject) -> UnsafeMutableRawPointer {
+    return UnsafeMutableRawPointer(Unmanaged.passUnretained(obj).toOpaque())
+}
+
+func bridgeRawPointer(_ ptr : UnsafeMutableRawPointer) -> AnyObject {
+    return Unmanaged.fromOpaque(ptr).takeUnretainedValue()
+}
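
A note on the pointer bridging used by `playbackCallback` above: the AudioUnit render callback only receives a raw `inRefCon` pointer, so `SpeakerOutput` passes itself through the `bridgeObject`/`bridgeRawPointer` helpers unretained. The sketch below is a minimal, standalone illustration of that round trip; it is not part of the patch, and `CallbackTarget`, `bridge`, `unbridge`, and `cStyleCallback` are hypothetical names used only for this example. The one obligation the pattern imposes is that the caller must keep the object alive for as long as the callback can fire, since `passUnretained` does not retain it.

```swift
import Foundation

// Standalone sketch of the unretained Unmanaged round trip used by the
// bridgeObject/bridgeRawPointer helpers above. All names here are illustrative.
final class CallbackTarget {
    var counter = 0
}

// Object -> raw pointer (no retain; the caller must keep the object alive).
func bridge(_ obj: AnyObject) -> UnsafeMutableRawPointer {
    return Unmanaged.passUnretained(obj).toOpaque()
}

// Raw pointer -> object (no release; ownership is unchanged).
func unbridge(_ ptr: UnsafeMutableRawPointer) -> AnyObject {
    return Unmanaged<AnyObject>.fromOpaque(ptr).takeUnretainedValue()
}

// A C-style callback sees only a raw context pointer (like inputProcRefCon),
// so the object is recovered inside the callback body.
let cStyleCallback: (UnsafeMutableRawPointer) -> Void = { refCon in
    let target = unbridge(refCon) as! CallbackTarget
    target.counter += 1
}

let target = CallbackTarget()     // kept alive by this strong reference
cStyleCallback(bridge(target))
print(target.counter)             // 1
```

Passing the object unretained avoids a retain cycle between the audio graph and SpeakerOutput, which is presumably why the helpers above use `passUnretained` rather than `passRetained`.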