5 changes: 5 additions & 0 deletions packages/camera/camera_avfoundation/CHANGELOG.md
@@ -1,3 +1,8 @@
## 0.9.21+5

* Migrates `FLTCamConfiguration`, `FLTCamMediaSettingsAVWrapper` classes to Swift.
* Migrates `FLTCaptureOutput`, `FLTCapturePhotoOutput`, `FLTCaptureVideoDataOutput` protocols to Swift.

## 0.9.21+4

* Migrates `updateOrientation` and `setCaptureSessionPreset` methods to Swift.
@@ -106,7 +106,7 @@ private final class TestMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper {
}

override func recommendedVideoSettingsForAssetWriter(
withFileType fileType: AVFileType, for output: FLTCaptureVideoDataOutput
withFileType fileType: AVFileType, for output: CaptureVideoDataOutput
) -> [String: Any]? {
return [:]
}
@@ -88,7 +88,7 @@ final class FLTCamSetFlashModeTests: XCTestCase {
let (camera, mockDevice, mockCapturePhotoOutput) = createCamera()

mockCapturePhotoOutput.supportedFlashModes = [
NSNumber(value: AVCaptureDevice.FlashMode.auto.rawValue)
AVCaptureDevice.FlashMode.auto
Contributor: nit: just .auto

]

mockDevice.hasFlash = true
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import camera_avfoundation
@testable import camera_avfoundation

// Import Objective-C part of the implementation when SwiftPM is used.
#if canImport(camera_avfoundation_objc)
@@ -11,11 +11,11 @@ import camera_avfoundation

/// Mock implementation of `FLTCapturePhotoOutput` protocol which allows injecting a custom
/// implementation.
final class MockCapturePhotoOutput: NSObject, FLTCapturePhotoOutput {
final class MockCapturePhotoOutput: NSObject, CapturePhotoOutput {
var avOutput = AVCapturePhotoOutput()
var availablePhotoCodecTypes: [AVVideoCodecType] = []
var highResolutionCaptureEnabled = false
var supportedFlashModes: [NSNumber] = []
var isHighResolutionCaptureEnabled = false
var supportedFlashModes: [AVCaptureDevice.FlashMode] = []

// Stub that is called when the corresponding public method is called.
var capturePhotoWithSettingsStub:
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import camera_avfoundation
@testable import camera_avfoundation

// Import Objective-C part of the implementation when SwiftPM is used.
#if canImport(camera_avfoundation_objc)
@@ -11,11 +11,10 @@ import camera_avfoundation

/// Mock implementation of `FLTCaptureVideoDataOutput` protocol which allows injecting a custom
/// implementation.
class MockCaptureVideoDataOutput: NSObject, FLTCaptureVideoDataOutput {

class MockCaptureVideoDataOutput: NSObject, CaptureVideoDataOutput {
var avOutput = AVCaptureVideoDataOutput()
var alwaysDiscardsLateVideoFrames = false
var videoSettings: [String: Any] = [:]
var videoSettings: [String: Any]! = [:]
Contributor: why change here?


var connectionWithMediaTypeStub: ((AVMediaType) -> FLTCaptureConnection?)?

@@ -60,7 +60,7 @@ private class FakeMediaSettingsAVWrapper: FLTCamMediaSettingsAVWrapper {
}

override func recommendedVideoSettingsForAssetWriter(
withFileType fileType: AVFileType, for output: FLTCaptureVideoDataOutput
withFileType fileType: AVFileType, for output: CaptureVideoDataOutput
) -> [String: Any]? {
return [:]
}
@@ -0,0 +1,87 @@
// Copyright 2013 The Flutter Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import AVFoundation
import CoreMedia
import Foundation
Contributor: Foundation should already be auto-imported (@_exported) from the UIKit import.

import UIKit

// Import Objective-C part of the implementation when SwiftPM is used.
Contributor: you probably mentioned it before, but could you document both SwiftPM and non-SwiftPM (CocoaPods?) behavior in the comment?

#if canImport(camera_avfoundation_objc)
import camera_avfoundation_objc
#endif
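
One possible wording for such a comment, as a sketch only; it assumes the usual packaging split, where SwiftPM builds the Objective-C sources as a separate camera_avfoundation_objc module while CocoaPods compiles both parts into the single camera_avfoundation module:

// The Objective-C part of the implementation lives in a separate module
// (camera_avfoundation_objc) only when the plugin is built with SwiftPM.
// With CocoaPods, both parts are compiled into the camera_avfoundation
// module itself, so canImport fails and no extra import is needed.
#if canImport(camera_avfoundation_objc)
  import camera_avfoundation_objc
#endif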

/// Factory block returning an FLTCaptureDevice.
/// Used in tests to inject a device into DefaultCamera.
typealias CaptureDeviceFactory = (String) -> FLTCaptureDevice
Contributor: should this be "VideoCaptureDeviceFactory"? (see below)


typealias AudioCaptureDeviceFactory = () -> FLTCaptureDevice

typealias CaptureSessionFactory = () -> FLTCaptureSession

typealias AssetWriterFactory = (URL, AVFileType, inout NSError?) -> FLTAssetWriter?
Contributor: can you add a label so it's more readable? You may have to put an underscore there: _ assetURL: URL

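A minimal sketch of what the suggested labels could look like; Swift only allows parameter labels in function types when they are written with a leading underscore, so the names are purely documentary:

typealias AssetWriterFactory = (_ assetURL: URL, _ fileType: AVFileType, _ error: inout NSError?) ->
  FLTAssetWriter?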

typealias InputPixelBufferAdaptorFactory = (FLTAssetWriterInput, [String: Any]?) ->
FLTAssetWriterInputPixelBufferAdaptor

/// Determines the video dimensions (width and height) for a given capture device format.
/// Used in tests to mock CMVideoFormatDescriptionGetDimensions.
typealias VideoDimensionsForFormat = (FLTCaptureDeviceFormat) -> CMVideoDimensions
Contributor: Nit: from this naming it's hard to tell it's a typealias for a closure. It sounds like a method name, but its first letter is uppercase. Maybe a clearer name: VideoDimensionsConverter?


/// A configuration object that centralizes dependencies for `DefaultCamera`.
class FLTCamConfiguration {
Contributor: should we call it CameraConfiguration (or not yet?)

var mediaSettings: FCPPlatformMediaSettings
var mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper
var captureSessionQueue: DispatchQueue
var videoCaptureSession: FLTCaptureSession
var audioCaptureSession: FLTCaptureSession
var captureDeviceFactory: CaptureDeviceFactory
let audioCaptureDeviceFactory: AudioCaptureDeviceFactory
let captureDeviceInputFactory: FLTCaptureDeviceInputFactory
var assetWriterFactory: AssetWriterFactory
var inputPixelBufferAdaptorFactory: InputPixelBufferAdaptorFactory
var videoDimensionsForFormat: VideoDimensionsForFormat
var deviceOrientationProvider: FLTDeviceOrientationProviding
let initialCameraName: String
var orientation: UIDeviceOrientation

init(
mediaSettings: FCPPlatformMediaSettings,
mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper,
captureDeviceFactory: @escaping CaptureDeviceFactory,
audioCaptureDeviceFactory: @escaping AudioCaptureDeviceFactory,
captureSessionFactory: @escaping CaptureSessionFactory,
captureSessionQueue: DispatchQueue,
captureDeviceInputFactory: FLTCaptureDeviceInputFactory,
initialCameraName: String
) {
self.mediaSettings = mediaSettings
self.mediaSettingsWrapper = mediaSettingsWrapper
self.captureDeviceFactory = captureDeviceFactory
self.audioCaptureDeviceFactory = audioCaptureDeviceFactory
self.captureSessionQueue = captureSessionQueue
self.videoCaptureSession = captureSessionFactory()
self.audioCaptureSession = captureSessionFactory()
self.captureDeviceInputFactory = captureDeviceInputFactory
self.initialCameraName = initialCameraName
self.orientation = UIDevice.current.orientation
self.deviceOrientationProvider = FLTDefaultDeviceOrientationProvider()

self.videoDimensionsForFormat = { format in
return CMVideoFormatDescriptionGetDimensions(format.formatDescription)
}

self.assetWriterFactory = { url, fileType, error in
return FLTDefaultAssetWriter(url: url, fileType: fileType, error: &error)
}

self.inputPixelBufferAdaptorFactory = { assetWriterInput, sourcePixelBufferAttributes in
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
assetWriterInput: assetWriterInput.input,
sourcePixelBufferAttributes: sourcePixelBufferAttributes
)
return FLTDefaultAssetWriterInputPixelBufferAdaptor(adaptor: adaptor)
}
}
}
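
A rough usage sketch for test code follows; the Mock* types and the mediaSettings value are assumed to be test doubles provided elsewhere in the test target, and are not part of this diff:

// Build a configuration whose factories return test doubles instead of real
// AVFoundation objects (all Mock* types here are hypothetical test helpers).
let configuration = FLTCamConfiguration(
  mediaSettings: mediaSettings,  // a pigeon-generated FCPPlatformMediaSettings value
  mediaSettingsWrapper: FLTCamMediaSettingsAVWrapper(),
  captureDeviceFactory: { _ in MockCaptureDevice() },
  audioCaptureDeviceFactory: { MockCaptureDevice() },
  captureSessionFactory: { MockCaptureSession() },
  captureSessionQueue: DispatchQueue(label: "capture-session-queue"),
  captureDeviceInputFactory: MockCaptureDeviceInputFactory(),
  initialCameraName: "camera0")
// Individual dependencies can still be swapped out before the camera is created.
configuration.videoDimensionsForFormat = { _ in CMVideoDimensions(width: 1280, height: 720) }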
@@ -0,0 +1,79 @@
// Copyright 2013 The Flutter Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import AVFoundation
import Foundation

// Import Objective-C part of the implementation when SwiftPM is used.
#if canImport(camera_avfoundation_objc)
import camera_avfoundation_objc
#endif

/// A protocol which is a direct passthrough to `AVCaptureOutput`. It exists to allow mocking
/// `AVCaptureOutput` in tests.
protocol CaptureOutput {
/// Returns a connection with the specified media type, or nil if no such connection exists.
func connection(withMediaType mediaType: AVMediaType) -> FLTCaptureConnection?
Contributor: connection(with mediaType...)?
Contributor: oh, is it an Apple API? If so, maybe createConnection(with mediaType)?

}

/// A protocol which is a direct passthrough to `AVCaptureVideoDataOutput`. It exists to allow
/// mocking `AVCaptureVideoDataOutput` in tests.
protocol CaptureVideoDataOutput: CaptureOutput {
/// The underlying instance of `AVCaptureVideoDataOutput`.
var avOutput: AVCaptureVideoDataOutput { get }

/// Corresponds to the `alwaysDiscardsLateVideoFrames` property of `AVCaptureVideoDataOutput`
var alwaysDiscardsLateVideoFrames: Bool { get set }

/// Corresponds to the `videoSettings` property of `AVCaptureVideoDataOutput`
var videoSettings: [String: Any]! { get set }

/// Corresponds to the `setSampleBufferDelegate` method of `AVCaptureVideoDataOutput`
func setSampleBufferDelegate(
_ sampleBufferDelegate: AVCaptureVideoDataOutputSampleBufferDelegate?,
queue sampleBufferCallbackQueue: DispatchQueue?
)
}

extension AVCaptureVideoDataOutput: CaptureVideoDataOutput {
var avOutput: AVCaptureVideoDataOutput {
return self
}

func connection(withMediaType mediaType: AVMediaType) -> FLTCaptureConnection? {
guard let connection = connection(with: mediaType) else { return nil }
return FLTDefaultCaptureConnection(connection: connection)
}
}

/// A protocol which is a direct passthrough to `AVCapturePhotoOutput`. It exists to allow mocking
/// `AVCapturePhotoOutput` in tests.
protocol CapturePhotoOutput: CaptureOutput {
/// The underlying instance of `AVCapturePhotoOutput`.
var avOutput: AVCapturePhotoOutput { get }

/// Corresponds to the `availablePhotoCodecTypes` property of `AVCapturePhotoOutput`
var availablePhotoCodecTypes: [AVVideoCodecType] { get }

/// Corresponds to the `isHighResolutionCaptureEnabled` property of `AVCapturePhotoOutput`
var isHighResolutionCaptureEnabled: Bool { get set }

/// Corresponds to the `supportedFlashModes` property of `AVCapturePhotoOutput`
var supportedFlashModes: [AVCaptureDevice.FlashMode] { get }

/// Corresponds to the `capturePhotoWithSettings` method of `AVCapturePhotoOutput`
func capturePhoto(with settings: AVCapturePhotoSettings, delegate: AVCapturePhotoCaptureDelegate)
}

/// Make `AVCapturePhotoOutput` conform to the `CapturePhotoOutput` protocol directly.
extension AVCapturePhotoOutput: CapturePhotoOutput {
var avOutput: AVCapturePhotoOutput {
return self
}

func connection(withMediaType mediaType: AVMediaType) -> FLTCaptureConnection? {
guard let connection = connection(with: mediaType) else { return nil }
return FLTDefaultCaptureConnection(connection: connection)
}
}
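
Because the AVFoundation classes conform to these protocols directly, callers can hold the protocol type and swap between the real output and a test double without a wrapper; a small sketch using the MockCapturePhotoOutput test double shown earlier in this diff:

var photoOutput: CapturePhotoOutput = AVCapturePhotoOutput()  // production object
photoOutput.isHighResolutionCaptureEnabled = true
photoOutput = MockCapturePhotoOutput()  // a test can inject its mock instead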
@@ -62,9 +62,9 @@ final class DefaultCamera: NSObject, Camera {

private(set) var captureDevice: FLTCaptureDevice
// Setter exposed for tests.
var captureVideoOutput: FLTCaptureVideoDataOutput
var captureVideoOutput: CaptureVideoDataOutput
// Setter exposed for tests.
var capturePhotoOutput: FLTCapturePhotoOutput
var capturePhotoOutput: CapturePhotoOutput
private var captureVideoInput: FLTCaptureInput

private var videoWriter: FLTAssetWriter?
@@ -135,13 +135,12 @@ final class DefaultCamera: NSObject, Camera {
captureDevice: FLTCaptureDevice,
videoFormat: FourCharCode,
captureDeviceInputFactory: FLTCaptureDeviceInputFactory
) throws -> (FLTCaptureInput, FLTCaptureVideoDataOutput, AVCaptureConnection) {
) throws -> (FLTCaptureInput, CaptureVideoDataOutput, AVCaptureConnection) {
// Setup video capture input.
let captureVideoInput = try captureDeviceInputFactory.deviceInput(with: captureDevice)

// Setup video capture output.
let captureVideoOutput = FLTDefaultCaptureVideoDataOutput(
captureVideoOutput: AVCaptureVideoDataOutput())
let captureVideoOutput = AVCaptureVideoDataOutput()
captureVideoOutput.videoSettings = [
kCVPixelBufferPixelFormatTypeKey as String: videoFormat
]
@@ -176,8 +175,8 @@
captureDevice = captureDeviceFactory(configuration.initialCameraName)
flashMode = captureDevice.hasFlash ? .auto : .off

capturePhotoOutput = FLTDefaultCapturePhotoOutput(photoOutput: AVCapturePhotoOutput())
capturePhotoOutput.highResolutionCaptureEnabled = true
capturePhotoOutput = AVCapturePhotoOutput()
capturePhotoOutput.isHighResolutionCaptureEnabled = true

videoCaptureSession.automaticallyConfiguresApplicationAudioSession = false
audioCaptureSession.automaticallyConfiguresApplicationAudioSession = false
@@ -748,7 +747,7 @@
}

private func updateOrientation(
_ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: FLTCaptureOutput
_ orientation: UIDeviceOrientation, forCaptureOutput captureOutput: CaptureOutput
) {
if let connection = captureOutput.connection(withMediaType: .video),
connection.isVideoOrientationSupported
@@ -989,7 +988,7 @@
return
}
let avFlashMode = FCPGetAVCaptureFlashModeForPigeonFlashMode(mode)
guard capturePhotoOutput.supportedFlashModes.contains(NSNumber(value: avFlashMode.rawValue))
guard capturePhotoOutput.supportedFlashModes.contains(avFlashMode)
else {
completion(
FlutterError(