
Commit fba4511

mrousavy and hannojg authored
feat: Use AVAssetWriterInputPixelBufferAdaptor directly, remove unnecessary extra CVPixelBufferPoolRef (#203)
Co-authored-by: Hanno J. Gödecke <[email protected]>
1 parent ad097f6 commit fba4511

File tree

5 files changed: +64 -56 lines changed


package/android/libs/filament/include/gltfio/Animator.h

Lines changed: 4 additions & 6 deletions
@@ -59,12 +59,10 @@ class UTILS_PUBLIC Animator {
     void updateBoneMatrices();
 
     /**
-     * Updates the bone matrices of the specified instance using the state of this animatior.
-     * This is useful if you have another instance that has the same skeleton as the asset of this animator,
-     * and you wish to apply the same animation to those instances (e.g. clothing).
-     *
-     * NOTE: In most cases, you only need to use the updateBoneMatrices() method. This method is necessary
-     * only when you need to synchronize animations across multiple instances with the same skeleton.
+     * Updates the bone matrices of the specified instance using the state of this animation.
+     * This is useful if you have other instances that have the same skeleton as the animator
+     * from this asset, and you want those instances to be animated by the same animation (e.g. clothing).
+     * Usually you don't need this and using updateBoneMatrices() is enough.
      *
      * @param instance The instance to update.
      */
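
For context, the use case this comment describes is a single animator driving several instances that share one skeleton. A minimal C++ sketch of that flow, assuming the documented method is a per-instance overload named updateBoneMatricesForInstance(FilamentInstance*) as implied by the @param instance tag; the asset, instance and function names below are illustrative, not code from this repository:

#include <gltfio/Animator.h>
#include <gltfio/FilamentAsset.h>
#include <gltfio/FilamentInstance.h>

using namespace filament::gltfio;

// characterAsset owns the animation data; clothingInstance is assumed to share
// exactly the same skeleton (joint hierarchy) as characterAsset's own instance.
void applySharedAnimation(FilamentAsset* characterAsset, FilamentInstance* clothingInstance, float timeSeconds) {
  Animator* animator = characterAsset->getInstance()->getAnimator();
  animator->applyAnimation(0, timeSeconds); // evaluate animation #0 at the given time
  animator->updateBoneMatrices();           // the usual case: update this asset's own instance
  // Only needed when a second instance with the same skeleton should follow the same animation:
  animator->updateBoneMatricesForInstance(clothingInstance);
}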

package/example/Shared/src/AnimationTransitionsRecording.tsx

Lines changed: 2 additions & 2 deletions
@@ -28,7 +28,7 @@ const near = 0.1
 const far = 1000
 
 const FPS = 60
-const DURATION = 3 // seconds
+const DURATION = 10 // seconds
 
 function Renderer() {
   const { camera } = useFilamentContext()
@@ -154,7 +154,7 @@ function Renderer() {
         repeat={true}
         controls={true}
         source={{ uri: videoUri }}
-        onError={(e) => console.error(e)}
+        onError={(e) => console.error('Video error', e)}
         onLoad={() => console.log('On load')}
         onEnd={() => console.log('On end')}
       />

package/ios/libs/filament/include/gltfio/Animator.h

Lines changed: 4 additions & 6 deletions
@@ -59,12 +59,10 @@ class UTILS_PUBLIC Animator {
     void updateBoneMatrices();
 
     /**
-     * Updates the bone matrices of the specified instance using the state of this animatior.
-     * This is useful if you have another instance that has the same skeleton as the asset of this animator,
-     * and you wish to apply the same animation to those instances (e.g. clothing).
-     *
-     * NOTE: In most cases, you only need to use the updateBoneMatrices() method. This method is necessary
-     * only when you need to synchronize animations across multiple instances with the same skeleton.
+     * Updates the bone matrices of the specified instance using the state of this animation.
+     * This is useful if you have other instances that have the same skeleton as the animator
+     * from this asset, and you want those instances to be animated by the same animation (e.g. clothing).
+     * Usually you don't need this and using updateBoneMatrices() is enough.
      *
      * @param instance The instance to update.
      */

package/ios/src/RNFAppleFilamentRecorder.h

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@ class AppleFilamentRecorder : public FilamentRecorder {
 private:
   // Render Target is a single PixelBuffer that acts as a 32BGRA Metal Texture
   CVPixelBufferRef _pixelBuffer;
-  CVPixelBufferPoolRef _pixelBufferPool;
   // Actual recorder instance
   AVAssetWriter* _assetWriter;
   AVAssetWriterInput* _assetWriterInput;

package/ios/src/RNFAppleFilamentRecorder.mm

Lines changed: 54 additions & 41 deletions
@@ -6,39 +6,33 @@
 //
 
 #include "RNFAppleFilamentRecorder.h"
+#include <CoreFoundation/CoreFoundation.h>
+#include <CoreVideo/CoreVideo.h>
 #include <VideoToolbox/VTCompressionProperties.h>
 #include <memory>
 #include <mutex>
 
 namespace margelo {
 
+static int kCVPixelBufferLock_Write = 0;
+
 AppleFilamentRecorder::AppleFilamentRecorder(std::shared_ptr<Dispatcher> renderThreadDispatcher, int width, int height, int fps,
                                              double bitRate)
     : FilamentRecorder(renderThreadDispatcher, width, height, fps, bitRate) {
   dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INITIATED, -1);
   _queue = dispatch_queue_create("filament.recorder.queue", qos);
 
-  Logger::log(TAG, "Creating CVPixelBufferPool...");
-  int maxBufferCount = 30;
-  NSDictionary* poolAttributes = @{(NSString*)kCVPixelBufferPoolMinimumBufferCountKey : @(maxBufferCount)};
+  Logger::log(TAG, "Creating CVPixelBuffer target texture...");
   NSDictionary* pixelBufferAttributes = @{
     (NSString*)kCVPixelBufferWidthKey : @(width),
     (NSString*)kCVPixelBufferHeightKey : @(height),
     (NSString*)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
     (NSString*)kCVPixelBufferMetalCompatibilityKey : @(YES)
   };
-  CVReturn result = CVPixelBufferPoolCreate(kCFAllocatorDefault, (__bridge CFDictionaryRef)poolAttributes,
-                                            (__bridge CFDictionaryRef)pixelBufferAttributes, &_pixelBufferPool);
-  if (result != kCVReturnSuccess) {
-    throw std::runtime_error("Failed to create " + std::to_string(width) + "x" + std::to_string(height) +
-                             " CVPixelBufferPool! Status: " + std::to_string(result));
-  }
-
-  Logger::log(TAG, "Creating CVPixelBuffer target texture...");
-  result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, _pixelBufferPool, &_pixelBuffer);
+  CVReturn result =
+      CVPixelBufferCreate(nil, width, height, kCVPixelFormatType_32BGRA, (__bridge CFDictionaryRef)pixelBufferAttributes, &_pixelBuffer);
   if (result != kCVReturnSuccess) {
-    throw std::runtime_error("Failed to create " + std::to_string(width) + "x" + std::to_string(height) +
-                             " CVPixelBuffer texture! Status: " + std::to_string(result));
+    throw std::runtime_error("Failed to create input texture CVPixelBuffer!");
  }
 
   Logger::log(TAG, "Creating temporary file...");
@@ -74,19 +68,19 @@
     AVVideoHeightKey : @(height)
   };
   _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
+  _assetWriterInput.expectsMediaDataInRealTime = NO;
+  _assetWriterInput.performsMultiPassEncodingIfSupported = YES;
   if (![_assetWriter canAddInput:_assetWriterInput]) {
     std::string settingsJson = outputSettings.description.UTF8String;
     throw std::runtime_error("Failed to add AVAssetWriterInput to AVAssetWriter! Settings used: " + settingsJson);
   }
 
-  _assetWriterInput.expectsMediaDataInRealTime = NO;
-  _assetWriterInput.performsMultiPassEncodingIfSupported = YES;
-
-  _pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterInput
-                                                                                         sourcePixelBufferAttributes:nil];
-
   Logger::log(TAG, "Adding AVAssetWriterInput...");
   [_assetWriter addInput:_assetWriterInput];
+
+  Logger::log(TAG, "Creating AVAssetWriterInputPixelBufferAdaptor...");
+  _pixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterInput
+                                                                                         sourcePixelBufferAttributes:pixelBufferAttributes];
 }
 
 bool AppleFilamentRecorder::getSupportsHEVC() {
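
Outside the diff, the new setup order reads roughly as follows. A minimal Objective-C++ sketch, not the recorder's actual code: the writer, input and adaptor names are illustrative, and it assumes an H.264 MP4 target. Passing the pixel buffer attributes as sourcePixelBufferAttributes is what lets the adaptor own the pool that renderFrame later draws from.

#import <AVFoundation/AVFoundation.h>

// outputURL, width and height are illustrative parameters.
static AVAssetWriterInputPixelBufferAdaptor* makeAdaptor(NSURL* outputURL, int width, int height, NSError** error) {
  AVAssetWriter* writer = [AVAssetWriter assetWriterWithURL:outputURL fileType:AVFileTypeMPEG4 error:error];
  if (writer == nil) {
    return nil;
  }

  NSDictionary* outputSettings = @{
    AVVideoCodecKey : AVVideoCodecTypeH264,
    AVVideoWidthKey : @(width),
    AVVideoHeightKey : @(height)
  };
  AVAssetWriterInput* input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
  // Offline rendering: frames are pulled as fast as the encoder accepts them.
  input.expectsMediaDataInRealTime = NO;
  if (![writer canAddInput:input]) {
    return nil;
  }
  [writer addInput:input];

  // Handing the adaptor the source attributes lets it create its own pixelBufferPool,
  // so the caller no longer has to manage a separate CVPixelBufferPoolRef.
  NSDictionary* sourceAttributes = @{
    (NSString*)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (NSString*)kCVPixelBufferWidthKey : @(width),
    (NSString*)kCVPixelBufferHeightKey : @(height)
  };
  return [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:input
                                                                          sourcePixelBufferAttributes:sourceAttributes];
}

Note that the adaptor's pixelBufferPool only becomes non-nil after startWriting and startSessionAtSourceTime: have been called, which is why the renderFrame change below still guards against a nil pool.
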
@@ -106,38 +100,46 @@
 
   Logger::log(TAG, "Rendering Frame with timestamp %f...", timestamp);
   if (!_assetWriterInput.isReadyForMoreMediaData) {
-    // TODO: Dropping this frame is probably not a good idea, as we are rendering from an offscreen context anyways
-    // and could just wait until the input is ready for more data again. Maybe we can implement a mechanism
-    // that only renders when isReadyForMoreMediaData turns true?
+    // This should never happen because we only poll Frames from the AVAssetWriter.
+    // Once it's ready, renderFrame will be called. But better safe than sorry.
     throw std::runtime_error("AVAssetWriterInput was not ready for more data!");
   }
 
-  CVPixelBufferRef targetBuffer;
-  CVReturn result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, _pixelBufferPool, &targetBuffer);
-  if (result != kCVReturnSuccess) {
-    throw std::runtime_error("Failed to create CVPixelBuffer for writing! Status: " + std::to_string(result));
+  CVPixelBufferPoolRef pool = _pixelBufferAdaptor.pixelBufferPool;
+  if (pool == nil) {
+    // The pool should always be created once startSession has been called. So in theory that also shouldn't happen.
+    throw std::runtime_error("AVAssetWriterInputPixelBufferAdaptor's pixel buffer pool was nil! Cannot write Frame.");
   }
 
-  result = CVPixelBufferLockBaseAddress(targetBuffer, /* write flag */ 0);
-  if (result != kCVReturnSuccess) {
-    throw std::runtime_error("Failed to lock target buffer for write access!");
+  // 1. Get (or create) a pixel buffer from the cache pool
+  CVPixelBufferRef targetBuffer;
+  CVReturn result = CVPixelBufferPoolCreatePixelBuffer(nil, pool, &targetBuffer);
+  if (result != kCVReturnSuccess || targetBuffer == nil) {
+    throw std::runtime_error("Failed to get a new CVPixelBuffer from the CVPixelBufferPool!");
   }
+
+  // 2. Lock both pixel buffers for CPU access
   result = CVPixelBufferLockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
   if (result != kCVReturnSuccess) {
     throw std::runtime_error("Failed to lock input buffer for read access!");
   }
+  result = CVPixelBufferLockBaseAddress(targetBuffer, /* write flag */ 0);
+  if (result != kCVReturnSuccess) {
+    throw std::runtime_error("Failed to lock target buffer for write access!");
+  }
 
+  // 3. Copy over Frame data
   size_t bytesPerRow = CVPixelBufferGetBytesPerRow(_pixelBuffer);
   size_t height = CVPixelBufferGetHeight(_pixelBuffer);
-
   void* destination = CVPixelBufferGetBaseAddress(targetBuffer);
   void* source = CVPixelBufferGetBaseAddress(_pixelBuffer);
-
   memcpy(destination, source, bytesPerRow * height);
 
-  CVPixelBufferUnlockBaseAddress(targetBuffer, /* write flag */ 0);
+  // 4. Unlock pixel buffers again
+  CVPixelBufferUnlockBaseAddress(targetBuffer, kCVPixelBufferLock_Write);
   CVPixelBufferUnlockBaseAddress(_pixelBuffer, kCVPixelBufferLock_ReadOnly);
 
+  // 5. Append the new copy of the buffer to the pool
   CMTime time = CMTimeMake(_frameCount++, getFps());
   BOOL success = [_pixelBufferAdaptor appendPixelBuffer:targetBuffer withPresentationTime:time];
   if (!success || _assetWriter.status != AVAssetWriterStatusWriting) {
@@ -148,6 +150,9 @@
     }
     throw std::runtime_error("Failed to append buffer to AVAssetWriter! " + errorMessage);
   }
+
+  // 6. Release the pixel buffer
+  CFRelease(targetBuffer);
 }
 
 void* AppleFilamentRecorder::getNativeWindow() {
@@ -187,13 +192,22 @@
       Logger::log(TAG, "Recorder is ready for more data.");
       auto self = weakSelf.lock();
       if (self != nullptr) {
-        self->_renderThreadDispatcher->runAsync([self]() {
-          bool shouldContinueNext = self->onReadyForMoreData();
-          if (!shouldContinueNext) {
-            // stop the render queue
-            [self->_assetWriterInput markAsFinished];
-          }
-        });
+        auto futurePromise =
+            self->_renderThreadDispatcher->runAsyncAwaitable<void>([self]() {
+              while ([self->_assetWriterInput isReadyForMoreMediaData]) {
+                // This will cause our JS render callbacks to be called, which will call
+                // renderFrame renderFrame will call appendPixelBuffer, and we should call
+                // appendPixelBuffer as long as isReadyForMoreMediaData is true.
+                bool shouldContinueNext = self->onReadyForMoreData();
+                if (!shouldContinueNext) {
+                  // stop the render queue
+                  [self->_assetWriterInput markAsFinished];
+                }
+              }
+            });
+        // The block in requestMediaDataWhenReadyOnQueue needs to call appendPixelBuffer
+        // synchronously
+        futurePromise.get();
       }
     }];
   });
@@ -235,7 +249,6 @@
     }];
 
     self->_isRecording = false;
-    CVPixelBufferPoolFlush(self->_pixelBufferPool, 0);
   });
 
   return promise->get_future();
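
Taken together, the renderFrame and requestMediaDataWhenReadyOnQueue: changes implement Apple's standard pull model for offline encoding. A minimal Objective-C++ sketch of that loop under the same assumptions as the setup sketch above; renderIntoPixelBuffer stands in for the recorder's render callback and is not part of the actual API:

#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>

// writer, input and adaptor come from a setup like the sketch above.
static void writeFrames(AVAssetWriter* writer, AVAssetWriterInput* input, AVAssetWriterInputPixelBufferAdaptor* adaptor,
                        int fps, int totalFrames, void (^renderIntoPixelBuffer)(CVPixelBufferRef)) {
  [writer startWriting];
  [writer startSessionAtSourceTime:kCMTimeZero];

  dispatch_queue_t queue = dispatch_queue_create("filament.recorder.example", DISPATCH_QUEUE_SERIAL);
  __block int frameCount = 0;
  [input requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
    // Keep appending for as long as the input can take data, mirroring the new while-loop.
    while (input.isReadyForMoreMediaData) {
      if (frameCount >= totalFrames) {
        [input markAsFinished];
        [writer finishWritingWithCompletionHandler:^{ /* recording finished */ }];
        return;
      }
      // 1. Get a buffer from the adaptor's own pool (non-nil once the session has started).
      CVPixelBufferRef buffer = NULL;
      CVReturn result = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
      if (result != kCVReturnSuccess || buffer == NULL) {
        return; // the real recorder throws here instead
      }
      // 2. Fill the buffer with the rendered frame (the recorder memcpy's from its render-target buffer).
      renderIntoPixelBuffer(buffer);
      // 3. Append with a presentation time derived from the frame index, then release the buffer.
      CMTime time = CMTimeMake(frameCount++, fps);
      [adaptor appendPixelBuffer:buffer withPresentationTime:time];
      CFRelease(buffer);
    }
  }];
}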

0 commit comments
