@@ -1,15 +1,15 @@
 """
 Post-process data in Spectacular AI format and convert it to input
-for NeRF or Gaussian Splatting methods.
+for NeRF or Gaussian Splatting methods, or export optimized point clouds in .ply and .pcd formats.
 """

 # --- The following mechanism allows using this both as a stand-alone
 # script and as a subcommand in sai-cli.

 def define_args(parser):
     parser.add_argument("input", help="Path to folder with session to process")
-    parser.add_argument("output", help="Output folder")
-    parser.add_argument('--format', choices=['taichi', 'nerfstudio'], default='nerfstudio', help='Output format')
+    parser.add_argument("output", help="Output folder, or a filename with .ply or .pcd extension when exporting a point cloud")
+    parser.add_argument('--format', choices=['taichi', 'nerfstudio'], default='nerfstudio', help='Output format.')
     parser.add_argument("--cell_size", help="Dense point cloud decimation cell size (meters)", type=float, default=0.1)
     parser.add_argument("--distance_quantile", help="Max point distance filter quantile (0 = disabled)", type=float, default=0.99)
     parser.add_argument("--key_frame_distance", help="Minimum distance between keyframes (meters)", type=float, default=0.05)
@@ -33,10 +33,17 @@ def process(args):
     import json
     import os
     import shutil
+    import tempfile
     import numpy as np
     import pandas as pd
     from collections import OrderedDict

+    # Overwrite format if output is set to pointcloud
+    if args.output.endswith(".ply"):
+        args.format = "ply"
+    elif args.output.endswith(".pcd"):
+        args.format = "pcd"
+
     useMono = None

     def interpolate_missing_properties(df_source, df_query, k_nearest=3):
@@ -262,6 +269,10 @@ def onMappingOutput(output):
         if visualizer is not None:
             visualizer.onMappingOutput(output)

+        if args.format in ['ply', 'pcd']:
+            if output.finalMap: finalMapWritten = True
+            return
+
         if not output.finalMap:
             # New frames, let's save the images to disk
             for frameId in output.updatedKeyFrames:
@@ -288,7 +299,7 @@ def onMappingOutput(output):
                 img = undistortedFrame.image.toArray()

                 bgrImage = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
-                fileName = f"{args.output}/tmp/frame_{frameId:05}.{args.image_format}"
+                fileName = f"{tmp_dir}/frame_{frameId:05}.{args.image_format}"
                 cv2.imwrite(fileName, bgrImage)

                 # Find colors for sparse features
@@ -307,7 +318,7 @@ def onMappingOutput(output):
                 if frameSet.depthFrame is not None and frameSet.depthFrame.image is not None and not useMono:
                     alignedDepth = frameSet.getAlignedDepthFrame(undistortedFrame)
                     depthData = alignedDepth.image.toArray()
-                    depthFrameName = f"{args.output}/tmp/depth_{frameId:05}.png"
+                    depthFrameName = f"{tmp_dir}/depth_{frameId:05}.png"
                     cv2.imwrite(depthFrameName, depthData)

                     DEPTH_PREVIEW = False
@@ -330,7 +341,7 @@ def onMappingOutput(output):
         sparsePointCloud = OrderedDict()
         imageSharpness = []
         for frameId in output.map.keyFrames:
-            imageSharpness.append((frameId, blurScore(f"{args.output}/tmp/frame_{frameId:05}.{args.image_format}")))
+            imageSharpness.append((frameId, blurScore(f"{tmp_dir}/frame_{frameId:05}.{args.image_format}")))

         # Look two images forward and two backwards, if current frame is blurriest, don't use it
         for i in range(len(imageSharpness)):
@@ -377,11 +388,11 @@ def onMappingOutput(output):
             "camera_id": index # camera id, not used
         }

-        oldImgName = f"{args.output}/tmp/frame_{frameId:05}.{args.image_format}"
+        oldImgName = f"{tmp_dir}/frame_{frameId:05}.{args.image_format}"
         newImgName = f"{args.output}/images/frame_{index:05}.{args.image_format}"
         os.rename(oldImgName, newImgName)

-        oldDepth = f"{args.output}/tmp/depth_{frameId:05}.png"
+        oldDepth = f"{tmp_dir}/depth_{frameId:05}.png"
         newDepth = f"{args.output}/images/depth_{index:05}.png"
         if os.path.exists(oldDepth):
             os.rename(oldDepth, newDepth)
@@ -484,13 +495,6 @@ def detect_device_preset(input_dir):
             if device: break
         return (device, cameras)

-    # Clear output dir
-    shutil.rmtree(f"{args.output}/images", ignore_errors=True)
-    os.makedirs(f"{args.output}/images", exist_ok=True)
-    tmp_dir = f"{args.output}/tmp"
-    tmp_input = f"{tmp_dir}/input"
-    copy_input_to_tmp_safe(args.input, tmp_input)
-
     config = {
         "maxMapSize": 0,
         "useSlam": True,
@@ -499,6 +503,17 @@ def detect_device_preset(input_dir):
         "icpVoxelSize": min(args.key_frame_distance, 0.1)
     }

+    if args.format in ['ply', 'pcd']:
+        config["mapSavePath"] = args.output
+    else:
+        # Clear output dir
+        shutil.rmtree(f"{args.output}/images", ignore_errors=True)
+        os.makedirs(f"{args.output}/images", exist_ok=True)
+
+    tmp_dir = tempfile.mkdtemp()
+    tmp_input = tempfile.mkdtemp()
+    copy_input_to_tmp_safe(args.input, tmp_input)
+
     device_preset, cameras = detect_device_preset(args.input)

     useMono = args.mono or (cameras != None and cameras == 1)
@@ -575,6 +590,11 @@ def detect_device_preset(input_dir):
     except:
         print(f"Failed to clean temporary directory, you can delete these files manually, they are no longer required: {tmp_dir}", flush=True)

+    try:
+        shutil.rmtree(tmp_input)
+    except:
+        print(f"Failed to clean temporary directory, you can delete these files manually, they are no longer required: {tmp_input}", flush=True)
+
     if not finalMapWritten:
         print('Mapping failed: no output generated')
         exit(1)
@@ -589,7 +609,7 @@ def detect_device_preset(input_dir):
         print(f"output-model-dir: data/{name}/output", flush=True)
         print(f"train-dataset-json-path: 'data/{name}/train.json'", flush=True)
         print(f"val-dataset-json-path: 'data/{name}/val.json'", flush=True)
-    elif args.format == 'nerfstudio':
+    else:
         print(f'output written to {args.output}', flush=True)

 if __name__ == '__main__':
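
With the new point cloud export path, the written file can be sanity-checked outside this script; a short sketch assuming Open3D is installed and a hypothetical output file name (neither is part of this change):

# Sketch: inspect an exported point cloud. Assumes `pip install open3d` and a
# hypothetical export file "cloud.ply" (a .pcd file is read the same way).
import numpy as np
import open3d as o3d

pcd = o3d.io.read_point_cloud("cloud.ply")
points = np.asarray(pcd.points)
print(f"{len(points)} points, bounds {points.min(axis=0)} to {points.max(axis=0)}")
o3d.visualization.draw_geometries([pcd])  # optional interactive viewer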