 
 @interface CameraTextureDelegate : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
 {
-    @public AVCaptureVideoOrientation videoOrientation;
+    @public AVCaptureVideoOrientation VideoOrientation;
 
     CVMetalTextureCacheRef textureCache;
-    bool orientationUpdated;
 }
 
 - (id)init:(CVMetalTextureCacheRef)textureCache;
@@ -52,12 +51,31 @@ static bool isPixelFormatSupported(uint32_t pixelFormat)
     vector_float2 uv;
 };
 
-constexpr Vertex vertices[] = {
-    // 2D positions, UV
+// The shader will use different UV coordinates to rotate the video from its natural sensor orientation to the
+// UI orientation. The format is 2D positions to UV coordinates.
+constexpr Vertex vertices_portrait[] = {
     {{-1, -1}, {0, 1}},
+    {{-1, 1}, {1, 1}},
+    {{1, -1}, {0, 0}},
+    {{1, 1}, {1, 0}},
+};
+constexpr Vertex vertices_landscape_right[] = {
+    {{-1, -1}, {0, 0}},
+    {{-1, 1}, {0, 1}},
+    {{1, -1}, {1, 0}},
+    {{1, 1}, {1, 1}},
+};
+constexpr Vertex vertices_landscape_left[] = {
+    {{-1, -1}, {1, 1}},
+    {{-1, 1}, {1, 0}},
+    {{1, -1}, {0, 1}},
+    {{1, 1}, {0, 0}},
+};
+constexpr Vertex vertices_portrait_upsideddown[] = {
+    {{-1, -1}, {1, 0}},
     {{-1, 1}, {0, 0}},
     {{1, -1}, {1, 1}},
-    {{1, 1}, {1, 0}},
+    {{1, 1}, {0, 1}},
 };
 
 constexpr char shaderSource[] = R"(
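The shader body itself is truncated in this hunk. For context, here is a minimal sketch, not the file's actual shader, of how a vertex stage is assumed to consume the position/UV tables above (the VertexIn layout mirrors the Vertex struct, and the struct/function names are illustrative). Selecting a different table changes which texture corner each screen corner samples, which is what rotates or mirrors the camera image without touching the fragment stage.

// Illustrative Metal source only; names and structure are assumptions, not the shader shipped in this file.
constexpr char exampleVertexStageSource[] = R"(
    #include <metal_stdlib>
    using namespace metal;

    struct VertexIn
    {
        float2 position;
        float2 uv;
    };

    struct RasterizerData
    {
        float4 position [[position]];
        float2 uv;
    };

    vertex RasterizerData exampleVertexShader(uint vertexID [[vertex_id]],
                                              constant VertexIn* vertices [[buffer(0)]])
    {
        RasterizerData out;
        out.position = float4(vertices[vertexID].position, 0.0, 1.0);
        // The per-orientation table decides which UV (texture corner) lands at each screen corner.
        out.uv = vertices[vertexID].uv;
        return out;
    }
)";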
@@ -173,6 +191,7 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
     bool refreshBgfxHandle{true};
 
     arcana::background_dispatcher<32> cameraSessionDispatcher{};
+    std::shared_ptr<arcana::cancellation_source> cancellationSource{std::make_shared<arcana::cancellation_source>()};
 };
 
 std::vector<CameraDevice> CameraDevice::GetCameraDevices(Napi::Env env)
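The new cancellationSource member exists so that work queued on the render schedulers can be cancelled when the camera device is torn down (see the Close changes further down). A minimal sketch of the pattern, assuming a simplified ImplLike stand-in for the real implementation struct and hypothetical helper names; the arcana and AfterRenderScheduler calls mirror this file's usage, other includes are omitted.

#include <arcana/threading/task.h> // header path assumed; the Babylon/Graphics includes used by this file are omitted here
#include <memory>

struct ImplLike
{
    Babylon::Graphics::DeviceContext* deviceContext{};
    std::shared_ptr<arcana::cancellation_source> cancellationSource{std::make_shared<arcana::cancellation_source>()};
};

void ScheduleWork(ImplLike& impl)
{
    // Capture the shared_ptr by value so the token outlives the object that scheduled the work;
    // arcana checks the token and skips the continuation once it has been cancelled.
    arcana::make_task(impl.deviceContext->AfterRenderScheduler(), *impl.cancellationSource,
        [cancellationSource = impl.cancellationSource]
        {
            // per-frame work goes here
        });
}

void Teardown(ImplLike& impl)
{
    // Pending tasks scheduled with this token will not run after this point.
    impl.cancellationSource->cancel();
}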
@@ -374,8 +393,8 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
     CameraDevice::CameraDimensions cameraDimensions{static_cast<uint32_t>(dimensions.width), static_cast<uint32_t>(dimensions.height)};
 
     // For portrait orientations swap the height and width of the video format dimensions.
-    if (m_impl->cameraTextureDelegate->videoOrientation == AVCaptureVideoOrientationPortrait
-        || m_impl->cameraTextureDelegate->videoOrientation == AVCaptureVideoOrientationPortraitUpsideDown)
+    if (m_impl->cameraTextureDelegate->VideoOrientation == AVCaptureVideoOrientationPortrait
+        || m_impl->cameraTextureDelegate->VideoOrientation == AVCaptureVideoOrientationPortraitUpsideDown)
     {
         std::swap(cameraDimensions.width, cameraDimensions.height);
     }
@@ -423,7 +442,9 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
 
 void CameraDevice::UpdateCameraTexture(bgfx::TextureHandle textureHandle)
 {
-    arcana::make_task(m_impl->deviceContext->BeforeRenderScheduler(), arcana::cancellation::none(), [this, textureHandle] {
+    // Hook into AfterRender to copy over the texture, ensuring that the textureHandle has already been initialized by bgfx.
+    // Capture the cancellation token so that the shared pointer is kept alive when arcana checks internally for cancellation.
+    arcana::make_task(m_impl->deviceContext->AfterRenderScheduler(), *m_impl->cancellationSource, [this, textureHandle, cancellationSource{m_impl->cancellationSource}] {
         id<MTLTexture> textureY{};
         id<MTLTexture> textureCbCr{};
         int64_t width{0};
@@ -432,8 +453,22 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
         @synchronized(m_impl->cameraTextureDelegate) {
             textureY = [m_impl->cameraTextureDelegate getCameraTextureY];
             textureCbCr = [m_impl->cameraTextureDelegate getCameraTextureCbCr];
-            width = [textureY width];
-            height = [textureY height];
+
+            switch (m_impl->cameraTextureDelegate->VideoOrientation)
+            {
+                case AVCaptureVideoOrientationLandscapeRight:
+                case AVCaptureVideoOrientationLandscapeLeft:
+                    width = [textureY width];
+                    height = [textureY height];
+                    break;
+                case AVCaptureVideoOrientationPortrait:
+                case AVCaptureVideoOrientationPortraitUpsideDown:
+                    // In portrait orientation the camera sensor is rotated 90 degrees, so the width and height should be swapped.
+                    width = [textureY height];
+                    height = [textureY width];
+                    break;
+            }
+
         }
 
         // Skip processing this frame if width and height are invalid.
@@ -447,17 +482,18 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
             MTLTextureDescriptor* textureDescriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatRGBA8Unorm width:width height:height mipmapped:NO];
             textureDescriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
             m_impl->textureRGBA = [m_impl->metalDevice newTextureWithDescriptor:textureDescriptor];
-            bgfx::overrideInternal(textureHandle, reinterpret_cast<uintptr_t>(m_impl->textureRGBA));
             m_impl->cameraDimensions.width = static_cast<uint32_t>(width);
             m_impl->cameraDimensions.height = static_cast<uint32_t>(height);
-            m_impl->refreshBgfxHandle = false;
-        } else if (m_impl->refreshBgfxHandle) {
-            // On texture re-use across sessions set the bgfx texture handle.
-            bgfx::overrideInternal(textureHandle, reinterpret_cast<uintptr_t>(m_impl->textureRGBA));
-            m_impl->refreshBgfxHandle = false;
+            // Setting up the bgfx texture may fail if the textureHandle hasn't been initialized in a bgfx::frame call yet; if so, try again on
+            // the next frame to override it.
+            m_impl->refreshBgfxHandle = bgfx::overrideInternal(textureHandle, reinterpret_cast<uintptr_t>(m_impl->textureRGBA)) == 0;
+        }
+        else if (m_impl->refreshBgfxHandle)
+        {
+            m_impl->refreshBgfxHandle = bgfx::overrideInternal(textureHandle, reinterpret_cast<uintptr_t>(m_impl->textureRGBA)) == 0;
         }
 
-        if (textureY != nil && textureCbCr != nil && m_impl->textureRGBA != nil)
+        if (textureY != nil && textureCbCr != nil && m_impl->textureRGBA != nil && !m_impl->refreshBgfxHandle)
         {
             m_impl->currentCommandBuffer = [m_impl->commandQueue commandBuffer];
             m_impl->currentCommandBuffer.label = @"NativeCameraCommandBuffer";
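The retry above relies on bgfx::overrideInternal returning 0 when it cannot yet hand back a native pointer, for example when the handle has not been created by a bgfx::frame call, as the added comment notes. The same check in isolation, with a hypothetical helper name:

#include <bgfx/bgfx.h>
#import <Metal/Metal.h>

// Hypothetical helper, not part of this file: returns true once the Metal texture is attached to
// the bgfx handle, false if the caller should retry on a later frame (the refreshBgfxHandle logic above).
static bool TryAttachNativeTexture(bgfx::TextureHandle handle, id<MTLTexture> texture)
{
    return bgfx::overrideInternal(handle, reinterpret_cast<uintptr_t>(texture)) != 0;
}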
@@ -476,8 +512,38 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
             // Set the shader pipeline.
             [renderEncoder setRenderPipelineState:m_impl->cameraPipelineState];
 
-            // Set the vertex data.
-            [renderEncoder setVertexBytes:vertices length:sizeof(vertices) atIndex:0];
+            // Set the vertex & UV data based on the current orientation.
+            switch (m_impl->cameraTextureDelegate->VideoOrientation)
+            {
+                case AVCaptureVideoOrientationLandscapeLeft:
+                    if (m_impl->avDevice.position == AVCaptureDevicePositionFront)
+                    {
+                        // The front camera sensor is oriented 180 degrees out of sync with the rear sensor on iOS devices. Swap landscape orientations.
+                        [renderEncoder setVertexBytes:vertices_landscape_right length:sizeof(vertices_landscape_right) atIndex:0];
+                    }
+                    else
+                    {
+                        [renderEncoder setVertexBytes:vertices_landscape_left length:sizeof(vertices_landscape_left) atIndex:0];
+                    }
+                    break;
+                case AVCaptureVideoOrientationPortrait:
+                    [renderEncoder setVertexBytes:vertices_portrait length:sizeof(vertices_portrait) atIndex:0];
+                    break;
+                case AVCaptureVideoOrientationPortraitUpsideDown:
+                    [renderEncoder setVertexBytes:vertices_portrait_upsideddown length:sizeof(vertices_portrait_upsideddown) atIndex:0];
+                    break;
+                case AVCaptureVideoOrientationLandscapeRight:
+                    if (m_impl->avDevice.position == AVCaptureDevicePositionFront)
+                    {
+                        // The front camera sensor is oriented 180 degrees out of sync with the rear sensor on iOS devices. Swap landscape orientations.
+                        [renderEncoder setVertexBytes:vertices_landscape_left length:sizeof(vertices_landscape_left) atIndex:0];
+                    }
+                    else
+                    {
+                        [renderEncoder setVertexBytes:vertices_landscape_right length:sizeof(vertices_landscape_right) atIndex:0];
+                    }
+                    break;
+            }
 
             // Set the textures.
             [renderEncoder setFragmentTexture:textureY atIndex:1];
@@ -513,29 +579,37 @@ fragment float4 fragmentShader(RasterizerData in [[stage_in]],
         // No action is required.
         return;
     }
+
+    // Cancel any pending async operations.
+    m_impl->cancellationSource->cancel();
 
-    // Stop collecting frames, release camera texture delegate.
-    [m_impl->cameraTextureDelegate reset];
-    m_impl->cameraTextureDelegate = nil;
 
     // Complete any running command buffers before destroying the cache.
     if (m_impl->currentCommandBuffer != nil) {
         [m_impl->currentCommandBuffer waitUntilCompleted];
     }
 
-    // Free the texture cache.
-    if (m_impl->textureCache)
-    {
-        CVMetalTextureCacheFlush(m_impl->textureCache, 0);
-        CFRelease(m_impl->textureCache);
-        m_impl->textureCache = nil;
-    }
-
     if (m_impl->avCaptureSession != nil) {
-        // Stopping the capture session is a synchronous (and long running call). Complete the request on the dispatcher thread
-        // instead of the main thread.
-        arcana::make_task(arcana::threadpool_scheduler, arcana::cancellation::none(), [avCaptureSession = m_impl->avCaptureSession](){
+        // Stopping the capture session is a synchronous call that requires marshalling to the main thread. Calling it from a background thread can lead
+        // to a deadlock where Babylon is waiting for the frame to finish rendering on the main thread and AVCaptureSession::stopRunning is waiting
+        // for the main thread to free up while blocking the current frame from rendering.
+        //
+        // textureRGBA, textureDelegate, and textureCache are captured here because textureRGBA is used in bgfx::overrideInternal and, with ARC enabled in this project, the lifetime of the texture
+        // needs to be maintained until after the render pass. Otherwise bgfx will try to access a destroyed texture handle during the render pass.
+        arcana::make_task(m_impl->deviceContext->AfterRenderScheduler(), arcana::cancellation::none(),
+            [avCaptureSession = m_impl->avCaptureSession, textureRGBA = m_impl->textureRGBA, textureDelegate = m_impl->cameraTextureDelegate, textureCache = m_impl->textureCache]
+            {
             [avCaptureSession stopRunning];
+
+            // Stop collecting frames, release camera texture delegate.
+            [textureDelegate reset];
+
+            // Free the texture cache.
+            if (textureCache)
+            {
+                CVMetalTextureCacheFlush(textureCache, 0);
+                CFRelease(textureCache);
+            }
         });
     }
 }
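The capture list in that task works because of how ARC interacts with C++ lambda captures. A standalone sketch of the behavior being relied on, with a hypothetical MakeCleanupTask helper; only the CoreVideo, CoreFoundation, and Metal calls are real APIs.

#import <Metal/Metal.h>
#import <CoreVideo/CoreVideo.h>
#include <functional>

// Copying an Objective-C object pointer into a C++ lambda capture retains it under ARC, so the
// texture stays alive until the lambda is destroyed, even after the owning member is set to nil.
// CVMetalTextureCacheRef is a CoreFoundation type that ARC does not manage, so it must stay valid
// until the task runs and is only released inside the task, as in the code above.
std::function<void()> MakeCleanupTask(id<MTLTexture> texture, CVMetalTextureCacheRef textureCache)
{
    return [texture, textureCache]()
    {
        (void)texture; // keeps the Metal texture alive through the deferred work
        if (textureCache)
        {
            CVMetalTextureCacheFlush(textureCache, 0);
            CFRelease(textureCache);
        }
    };
}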
@@ -553,11 +627,9 @@ - (id)init:(CVMetalTextureCacheRef)textureCache
 #if (TARGET_OS_IPHONE)
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OrientationDidChange:) name:UIDeviceOrientationDidChangeNotification object:nil];
     [self updateOrientation];
-    self->orientationUpdated = true;
 #else
-    // Orientation not supported on non-iOS devices.
-    self->videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
-    self->orientationUpdated = false;
+    // Orientation not supported on non-iOS devices. LandscapeLeft assumes the video is already in the correct orientation.
+    self->VideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
 #endif
 
     return self;
@@ -613,31 +685,25 @@ - (void)updateOrientation {
     }
 #endif
 
-    // Determine device orienation, and adjust output to match.
-    AVCaptureVideoOrientation newVideoOrientation{AVCaptureVideoOrientationPortraitUpsideDown};
+    // Convert from UIInterfaceOrientation to AVCaptureVideoOrientation. The conversion is only used because
+    // macOS doesn't have access to UIInterfaceOrientation, but it does have AVCaptureVideoOrientation, which
+    // lets us share more code without ifdefs.
     switch (orientation)
     {
         case UIInterfaceOrientationUnknown:
-            return;
-        case UIInterfaceOrientationPortrait:
-            newVideoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
-            break;
-        case UIInterfaceOrientationPortraitUpsideDown:
-            newVideoOrientation = AVCaptureVideoOrientationPortrait;
-            break;
         case UIInterfaceOrientationLandscapeLeft:
-            newVideoOrientation = AVCaptureVideoOrientationLandscapeRight;
+            self->VideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
             break;
         case UIInterfaceOrientationLandscapeRight:
-            newVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
+            self->VideoOrientation = AVCaptureVideoOrientationLandscapeRight;
+            break;
+        case UIInterfaceOrientationPortrait:
+            self->VideoOrientation = AVCaptureVideoOrientationPortrait;
+            break;
+        case UIInterfaceOrientationPortraitUpsideDown:
+            self->VideoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
             break;
     }
-
-    if (newVideoOrientation != self->videoOrientation)
-    {
-        self->videoOrientation = newVideoOrientation;
-        self->orientationUpdated = true;
-    }
 }
 
 -(void)OrientationDidChange:(NSNotification*)notification
@@ -648,13 +714,6 @@ -(void)OrientationDidChange:(NSNotification*)notification
 
 - (void)captureOutput:(AVCaptureOutput*)__unused captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
 {
-    if (self->orientationUpdated)
-    {
-        connection.videoMirrored = true;
-        connection.videoOrientation = self->videoOrientation;
-        self->orientationUpdated = false;
-    }
-
     CVPixelBufferRef pixelBuffer{CMSampleBufferGetImageBuffer(sampleBuffer)};
 
     // Update both metal textures used by the renderer to display the camera image.