Commit 22a4ed0

fix(ios): working takePicture
1 parent 2224a03 commit 22a4ed0

File tree

7 files changed: 285 additions & 23 deletions

packages/ui-cameraview/platforms/ios/src/NSCameraView.swift

Lines changed: 118 additions & 6 deletions
@@ -19,10 +19,13 @@ struct RuntimeError: LocalizedError {
 
 @objcMembers
 @objc(NSCameraView)
-public class NSCameraView: UIView, NextLevelVideoDelegate {
+public class NSCameraView: UIView, NextLevelVideoDelegate, NextLevelPhotoDelegate, NextLevelDelegate {
+
     public weak var processingDelegate: ProcessRawVideoSampleBufferDelegate?
+    public weak var videoDelegate: NSCameraViewVideoDelegate?
+    public weak var photoDelegate: NSCameraViewPhotoDelegate?
     var nextLevel: NextLevel?
-
+    var captureModeCompletionHandler: (() -> Void)?
     override init(frame: CGRect) {
         super.init(frame: frame)
         commonInit()
@@ -34,6 +37,8 @@ public class NSCameraView: UIView, NextLevelVideoDelegate {
     }
     deinit {
         self.processingDelegate = nil
+        self.videoDelegate = nil
+        self.photoDelegate = nil
     }
 
     public override var frame: CGRect {
@@ -75,12 +80,18 @@ public class NSCameraView: UIView, NextLevelVideoDelegate {
         // nextLevel.delegate = self
         // nextLevel.deviceDelegate = self
         // nextLevel.flashDelegate = self
+        nextLevel.delegate = self
         nextLevel.videoDelegate = self
-        // nextLevel.photoDelegate = self
+        nextLevel.photoDelegate = self
         // nextLevel.metadataObjectsDelegate = self
 
         nextLevel.automaticallyUpdatesDeviceOrientation = true
 
+        // photo configuration
+        nextLevel.photoConfiguration.preset = AVCaptureSession.Preset.photo
+        let supportsHEVC = AVAssetExportSession.allExportPresets().contains(AVAssetExportPresetHEVCHighestQuality)
+        nextLevel.photoConfiguration.codec = supportsHEVC ? .hevc : .jpeg
+
         // video configuration
         nextLevel.videoConfiguration.preset = AVCaptureSession.Preset.hd1280x720
         nextLevel.videoConfiguration.bitRate = 5500000
@@ -97,7 +108,7 @@ public class NSCameraView: UIView, NextLevelVideoDelegate {
         }
     }
 
-    func startPreview() throws {
+    public func startPreview() throws {
         do {
             try self.nextLevel?.start()
         } catch {
@@ -108,13 +119,93 @@ public class NSCameraView: UIView, NextLevelVideoDelegate {
 
     }
 
-    func stopPreview() {
+    public func stopPreview() {
         self.nextLevel?.stop()
     }
-    func toggleCamera() {
+    public func toggleCamera() {
         self.nextLevel?.flipCaptureDevicePosition()
     }
+    public func focusAtAdjustedPointOfInterest(_ adjustedPoint: CGPoint) {
+        self.nextLevel?.focusAtAdjustedPointOfInterest(adjustedPoint: adjustedPoint)
+    }
+
+    public var canCapturePhoto: Bool {
+        return self.nextLevel?.canCapturePhoto ?? false
+    }
+    public var canCaptureVideo: Bool {
+        return self.nextLevel?.canCaptureVideo ?? false
+    }
+    public func capturePhoto() {
+        if let nextLevel = self.nextLevel, self.canCapturePhoto {
+            if nextLevel.captureMode == NextLevelCaptureMode.photo {
+                nextLevel.capturePhoto()
+            } else {
+                // Defer the capture until the mode switch completes (see nextLevelCaptureModeDidChange).
+                captureModeCompletionHandler = {
+                    nextLevel.capturePhoto()
+                }
+                nextLevel.captureMode = NextLevelCaptureMode.photo
+            }
+        }
+    }
+    public func capturePhotoFromVideo() {
+        if let nextLevel = self.nextLevel, self.canCaptureVideo {
+            if nextLevel.captureMode == NextLevelCaptureMode.videoWithoutAudio || nextLevel.captureMode == NextLevelCaptureMode.video {
+                nextLevel.capturePhotoFromVideo()
+            } else {
+                captureModeCompletionHandler = {
+                    nextLevel.capturePhotoFromVideo()
+                }
+                nextLevel.captureMode = NextLevelCaptureMode.videoWithoutAudio
+            }
+        }
+    }
+
+    // MARK: NextLevelDelegate
+
+    public func nextLevel(_ nextLevel: NextLevel, didUpdateVideoConfiguration videoConfiguration: NextLevelVideoConfiguration) {
+    }
+
+    public func nextLevel(_ nextLevel: NextLevel, didUpdateAudioConfiguration audioConfiguration: NextLevelAudioConfiguration) {
+    }
+
+    public func nextLevelSessionWillStart(_ nextLevel: NextLevel) {
+    }
+
+    public func nextLevelSessionDidStart(_ nextLevel: NextLevel) {
+    }
+
+    public func nextLevelSessionDidStop(_ nextLevel: NextLevel) {
+    }
+
+    public func nextLevelSessionWasInterrupted(_ nextLevel: NextLevel) {
+    }
+
+    public func nextLevelSessionInterruptionEnded(_ nextLevel: NextLevel) {
+    }
+
+    public func nextLevelCaptureModeWillChange(_ nextLevel: NextLevel) {
+    }
+
+    public func nextLevelCaptureModeDidChange(_ nextLevel: NextLevel) {
+        // Run a capture that was requested before the capture mode had finished switching.
+        captureModeCompletionHandler?()
+    }
 
+    // MARK: NextLevelVideoDelegate
+
     public func nextLevel(_ nextLevel: NextLevel, willProcessRawVideoSampleBuffer sampleBuffer: CMSampleBuffer, onQueue queue: DispatchQueue) {
         self.processingDelegate?.cameraView(self, willProcessRawVideoSampleBuffer: sampleBuffer, onQueue: queue)
     }
@@ -176,6 +267,27 @@ public class NSCameraView: UIView, NextLevelVideoDelegate {
     }
 
     public func nextLevel(_ nextLevel: NextLevel, didCompletePhotoCaptureFromVideoFrame photoDict: [String : Any]?) {
+        self.videoDelegate?.cameraView(self, didCompletePhotoCaptureFromVideoFrame: photoDict)
+    }
+
+    // MARK: NextLevelPhotoDelegate
+
+    public func nextLevel(_ nextLevel: NextLevel, output: AVCapturePhotoOutput, willBeginCaptureFor resolvedSettings: AVCaptureResolvedPhotoSettings, photoConfiguration: NextLevelPhotoConfiguration) {
 
     }
+
+    public func nextLevel(_ nextLevel: NextLevel, output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings, photoConfiguration: NextLevelPhotoConfiguration) {
+    }
+
+    public func nextLevel(_ nextLevel: NextLevel, output: AVCapturePhotoOutput, didCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings, photoConfiguration: NextLevelPhotoConfiguration) {
+    }
+
+    public func nextLevel(_ nextLevel: NextLevel, didFinishProcessingPhoto photo: AVCapturePhoto, photoDict: [String : Any], photoConfiguration: NextLevelPhotoConfiguration) {
+        self.photoDelegate?.cameraView(self, didFinishProcessingPhoto: photo, photoDict: photoDict, photoConfiguration: NSCameraViewPhotoConfiguration(configuration: photoConfiguration))
+    }
+
+    public func nextLevelDidCompletePhotoCapture(_ nextLevel: NextLevel) {
+    }
+
 }
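For orientation, a minimal sketch of how this new public surface might be driven from plain Swift; the NativeScript plugin does the equivalent wiring from TypeScript in index.ios.ts further down. The host controller and the shutterTapped selector are hypothetical and assume NSCameraView is visible from the same module and that camera permission is already granted. The key point is the deferral: capturePhoto() switches NextLevel's captureMode to .photo when needed, parks the real capture in captureModeCompletionHandler, and nextLevelCaptureModeDidChange fires it once the switch completes.

import AVFoundation
import UIKit

// Hypothetical host controller, not part of this commit.
class CameraHostViewController: UIViewController, NSCameraViewPhotoDelegate {
    let cameraView = NSCameraView(frame: .zero)

    override func viewDidLoad() {
        super.viewDidLoad()
        cameraView.frame = view.bounds
        view.addSubview(cameraView)
        cameraView.photoDelegate = self
        try? cameraView.startPreview()
    }

    @objc func shutterTapped() {
        guard cameraView.canCapturePhoto else { return }
        // If the session is not in .photo mode yet, NSCameraView stores the capture in
        // captureModeCompletionHandler and runs it from nextLevelCaptureModeDidChange.
        cameraView.capturePhoto()
    }

    // MARK: NSCameraViewPhotoDelegate

    func cameraView(_ cameraView: NSCameraView, didCapturePhotoWithConfiguration photoConfiguration: NSCameraViewPhotoConfiguration) {}

    func cameraView(_ cameraView: NSCameraView, didFinishProcessingPhoto photo: AVCapturePhoto, photoDict: [String: Any]?, photoConfiguration: NSCameraViewPhotoConfiguration) {
        // Image extraction is shown in the conformer sketch after the NSCameraViewPhotoDelegate diff.
    }
}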
Lines changed: 13 additions & 0 deletions (new file defining NSCameraViewPhotoConfiguration)
@@ -0,0 +1,13 @@
+
+import Foundation
+import NextLevel
+
+@objcMembers
+@objc(NSCameraViewPhotoConfiguration)
+public class NSCameraViewPhotoConfiguration: NSObject {
+    var configuration: NextLevelPhotoConfiguration
+    required init(configuration: NextLevelPhotoConfiguration) {
+        self.configuration = configuration
+        super.init()
+    }
+}
Lines changed: 8 additions & 0 deletions (new file defining the NSCameraViewPhotoDelegate protocol)
@@ -0,0 +1,8 @@
+import Foundation
+import AVFoundation
+
+@objc(NSCameraViewPhotoDelegate)
+public protocol NSCameraViewPhotoDelegate: AnyObject {
+    func cameraView(_ cameraView: NSCameraView, didCapturePhotoWithConfiguration photoConfiguration: NSCameraViewPhotoConfiguration)
+    func cameraView(_ cameraView: NSCameraView, didFinishProcessingPhoto photo: AVCapturePhoto, photoDict: [String : Any]?, photoConfiguration: NSCameraViewPhotoConfiguration)
+}
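A minimal, hypothetical Swift conformer for this protocol, kept separate from the commit: it builds the UIImage from fileDataRepresentation(), which already carries the EXIF orientation, whereas the TypeScript implementation below goes through CGImageRepresentation() plus the kCGImagePropertyOrientation metadata.

import AVFoundation
import UIKit

// Hypothetical conformer, for illustration only.
final class PhotoCaptureHandler: NSObject, NSCameraViewPhotoDelegate {
    var onImage: ((UIImage) -> Void)?

    func cameraView(_ cameraView: NSCameraView, didCapturePhotoWithConfiguration photoConfiguration: NSCameraViewPhotoConfiguration) {
        // The capture was committed; the processed photo arrives below.
    }

    func cameraView(_ cameraView: NSCameraView, didFinishProcessingPhoto photo: AVCapturePhoto, photoDict: [String: Any]?, photoConfiguration: NSCameraViewPhotoConfiguration) {
        // fileDataRepresentation() embeds the orientation, so UIImage(data:) comes out upright.
        if let data = photo.fileDataRepresentation(), let image = UIImage(data: data) {
            onImage?(image)
        }
    }
}

Because NSCameraView only holds photoDelegate weakly, whoever creates a handler like this has to keep a strong reference to it.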
Lines changed: 6 additions & 0 deletions (new file defining the NSCameraViewVideoDelegate protocol)
@@ -0,0 +1,6 @@
+import Foundation
+
+@objc(NSCameraViewVideoDelegate)
+public protocol NSCameraViewVideoDelegate: AnyObject {
+    func cameraView(_ cameraView: NSCameraView, didCompletePhotoCaptureFromVideoFrame photoDict: [String : Any]?)
+}
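A matching hypothetical sketch for the video path: capturePhotoFromVideo() hands back a dictionary instead of an AVCapturePhoto. Reading the JPEG bytes under NextLevel's NextLevelPhotoJPEGKey mirrors the commented-out TypeScript route further down; that key and the Data type of its value are assumptions about NextLevel's dictionary layout rather than something this commit pins down.

import NextLevel
import UIKit

// Hypothetical conformer, for illustration only; retain it strongly, videoDelegate is weak.
final class VideoFrameCaptureHandler: NSObject, NSCameraViewVideoDelegate {
    func cameraView(_ cameraView: NSCameraView, didCompletePhotoCaptureFromVideoFrame photoDict: [String: Any]?) {
        // Assumes NextLevel stores the encoded frame as Data under NextLevelPhotoJPEGKey.
        guard let data = photoDict?[NextLevelPhotoJPEGKey] as? Data,
              let image = UIImage(data: data) else { return }
        print("captured a \(Int(image.size.width))x\(Int(image.size.height)) still from the video stream")
    }
}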

src/ui-cameraview/index.ios.ts

Lines changed: 94 additions & 16 deletions
@@ -5,12 +5,8 @@ import { CameraViewBase, autoFocusProperty, flashModeProperty } from './index.common';
 class ProcessRawVideoSampleBufferDelegateImpl extends NSObject implements ProcessRawVideoSampleBufferDelegate {
     cameraViewRenderToCustomContextWithImageBufferOnQueue(cameraView: NSCameraView, imageBuffer: any, queue: NSObject): void {}
     cameraViewWillProcessRawVideoSampleBufferOnQueue(cameraView: NSCameraView, sampleBuffer: any, queue: NSObject): void {
-        try {
-            const owner = this._owner?.get();
-            owner?.notify({ eventName: 'frame', object: owner, cameraView, sampleBuffer, queue });
-        } catch (err) {
-            console.log('process error', err, err.stack);
-        }
+        const owner = this._owner?.get();
+        owner?.notify({ eventName: 'frame', object: owner, cameraView, sampleBuffer, queue });
     }
     _owner: WeakRef<CameraView>;
     public static ObjCProtocols = [ProcessRawVideoSampleBufferDelegate];
@@ -21,15 +17,79 @@ class ProcessRawVideoSampleBufferDelegateImpl extends NSObject implements ProcessRawVideoSampleBufferDelegate {
         return delegate;
     }
 }
+@NativeClass
+class NSCameraViewVideoDelegateImpl extends NSObject implements NSCameraViewVideoDelegate {
+    cameraViewDidCompletePhotoCaptureFromVideoFrame(cameraView: NSCameraView, photoDict: any): void {
+        this._owner?.get()?.cameraViewDidCompletePhotoCaptureFromVideoFrame(photoDict);
+    }
+
+    _owner: WeakRef<CameraView>;
+    public static ObjCProtocols = [NSCameraViewVideoDelegate];
+
+    static initWithOwner(owner: CameraView) {
+        const delegate = NSCameraViewVideoDelegateImpl.new() as NSCameraViewVideoDelegateImpl;
+        delegate._owner = new WeakRef(owner);
+        return delegate;
+    }
+}
+@NativeClass
+class NSCameraViewPhotoDelegateImpl extends NSObject implements NSCameraViewPhotoDelegate {
+    cameraViewDidCapturePhotoWithConfiguration(cameraView: NSCameraView, photoConfiguration: any): void {}
+    cameraViewDidFinishProcessingPhotoPhotoDictPhotoConfiguration(
+        cameraView: NSCameraView,
+        photo: AVCapturePhoto,
+        photoDict: NSDictionary<string, any>,
+        photoConfiguration: NSCameraViewPhotoConfiguration
+    ) {
+        this._owner?.get()?.cameraViewDidFinishProcessingPhotoPhotoDictPhotoConfiguration(photo, photoDict);
+    }
+
+    _owner: WeakRef<CameraView>;
+    public static ObjCProtocols = [NSCameraViewPhotoDelegate];
+
+    static initWithOwner(owner: CameraView) {
+        const delegate = NSCameraViewPhotoDelegateImpl.new() as NSCameraViewPhotoDelegateImpl;
+        delegate._owner = new WeakRef(owner);
+        return delegate;
+    }
+}
 
 export class CameraView extends CameraViewBase {
+    cameraViewDidFinishProcessingPhotoPhotoDictPhotoConfiguration(photo: AVCapturePhoto, photoDict: any) {
+        const cgImage = photo.CGImageRepresentation();
+        const orientation = photo.metadata.objectForKey(kCGImagePropertyOrientation);
+        const image = UIImage.imageWithCGImageScaleOrientation(cgImage, 1, orientation);
+        this.photoCaptureListener.forEach((c) => c(image, photoDict));
+    }
+    cameraViewDidProcessPhotoCaptureWithPhotoConfiguration(photoDict: any) {}
+    videoCaptureListener = new Set<Function>();
+    photoCaptureListener = new Set<Function>();
+    cameraViewDidCompletePhotoCaptureFromVideoFrame(photoDict: any) {
+        this.videoCaptureListener.forEach((c) => c(photoDict));
+    }
     nativeViewProtected: NSCameraView;
     _processor: ProcessRawVideoSampleBufferDelegate;
+    videoDelegate: NSCameraViewVideoDelegate;
+    photoDelegate: NSCameraViewPhotoDelegate;
     createNativeView() {
         return NSCameraView.alloc().initWithFrame(CGRectZero);
     }
     private _frameChangeCount = 0;
 
+    initNativeView(): void {
+        super.initNativeView();
+        const nativeView = this.nativeViewProtected;
+        nativeView.photoDelegate = this.photoDelegate = NSCameraViewPhotoDelegateImpl.initWithOwner(this);
+        nativeView.videoDelegate = this.videoDelegate = NSCameraViewVideoDelegateImpl.initWithOwner(this);
+    }
+    disposeNativeView() {
+        this.stopPreview();
+        this.detachProcessor();
+        const nativeView = this.nativeViewProtected;
+        nativeView.videoDelegate = this.videoDelegate = null;
+        nativeView.photoDelegate = this.photoDelegate = null;
+        super.disposeNativeView();
+    }
     get processor() {
         return this._processor;
     }
@@ -88,12 +148,6 @@ export class CameraView extends CameraViewBase {
         this.nativeViewProtected.processingDelegate = null;
         super.onUnloaded();
     }
-    disposeNativeView() {
-        this.stopPreview();
-        this.detachProcessor();
-
-        super.disposeNativeView();
-    }
     previewStarted = false;
     startPreview() {
         if (this.previewStarted) {
@@ -110,15 +164,39 @@
         this.nativeViewProtected?.stopPreview();
     }
     focusAtPoint(x, y) {
-        this.nativeViewProtected?.nextLevel?.focusAtAdjustedPointOfInterest(CGPointMake(x, y));
+        this.nativeViewProtected?.focusAtAdjustedPointOfInterest(CGPointMake(x, y));
     }
     async takePicture(options: TakePictureOptions = {}) {
-        throw new Error('Method not implemented.');
-        // this.nativeViewProtected?.nextLevel?.capturePhoto();
+        return new Promise((resolve, reject) => {
+            try {
+                // Alternative path kept for reference: capture a still from the video stream.
+                // if (!this.nativeViewProtected.canCaptureVideo) {
+                //     return reject(new Error('this device cant capture photo: ' + this.nativeViewProtected.canCaptureVideo));
+                // }
+                // const onPhoto = (photoDict: NSDictionary<any, any>) => {
+                //     this.videoCaptureListener.delete(onPhoto);
+                //     const photoData = photoDict.objectForKey('NextLevelPhotoJPEGKey');
+                //     console.log('photoData', photoData, photoDict);
+                //     resolve({ image: new UIImage({ data: photoData }) });
+                // };
+                // this.videoCaptureListener.add(onPhoto);
+                // console.log('capturePhotoFromVideo');
+                // this.nativeViewProtected?.capturePhotoFromVideo();
+                if (!this.nativeViewProtected.canCapturePhoto) {
+                    return reject(new Error('this device cant capture photo: ' + this.nativeViewProtected.canCapturePhoto));
+                }
+                const onPhoto = (image, photoDict: NSDictionary<any, any>) => {
+                    // one-shot listener: remove it before resolving
+                    this.photoCaptureListener.delete(onPhoto);
+                    resolve({ image });
+                };
+                this.photoCaptureListener.add(onPhoto);
+                this.nativeViewProtected?.capturePhoto();
+            } catch (error) {
+                reject(error);
+            }
+        });
     }
 
     toggleCamera() {
-        throw new Error('Method not implemented.');
+        this.nativeViewProtected.toggleCamera();
     }
 
     [flashModeProperty.setNative](value: string | number) {

src/ui-cameraview/references.d.ts

Lines changed: 1 addition & 0 deletions
@@ -1 +1,2 @@
 /// <reference path="../../node_modules/@nativescript/types-ios/lib/ios/objc-x86_64/objc!AVFoundation.d.ts" />
+/// <reference path="../../node_modules/@nativescript/types-ios/lib/ios/objc-x86_64/objc!ImageIO.d.ts" />
