
Commit 8cb3467

Author: David Josef Emmerichs
Message: load pointwise laser origins
Parent: 664e4c3

File tree: 3 files changed (+50 -20 lines)

  pcdet/datasets/dataset.py
  pcdet/datasets/waymo/waymo_dataset.py
  pcdet/utils/common_utils.py

pcdet/datasets/dataset.py

Lines changed: 1 addition & 1 deletion

@@ -234,7 +234,7 @@ def collate_batch(batch_list, _unused=False):
                         batch_size_ratio = len(val[0])
                         val = [i for item in val for i in item]
                     ret[key] = np.concatenate(val, axis=0)
-                elif key in ['points', 'voxel_coords']:
+                elif key in ['points', 'origins', 'voxel_coords']:
                     coors = []
                     if isinstance(val[0], list):
                         val = [i for item in val for i in item]
pcdet/datasets/waymo/waymo_dataset.py

Lines changed: 48 additions & 18 deletions

@@ -127,7 +127,7 @@ def load_data_to_shared_memory(self):
             if os.path.exists(f"/dev/shm/{sa_key}"):
                 continue

-            points = self.get_lidar(sequence_name, sample_idx)
+            points, _origins = self.get_lidar(sequence_name, sample_idx)
             common_utils.sa_create(f"shm://{sa_key}", points)

         dist.barrier()
@@ -193,15 +193,29 @@ def get_infos(self, raw_data_path, save_path, num_workers=multiprocessing.cpu_co
         all_sequences_infos = [item for infos in sequence_infos for item in infos]
         return all_sequences_infos

-    def get_lidar(self, sequence_name, sample_idx):
+    def get_lidar(self, sequence_name, sample_idx, load_origins=False):
         lidar_file = self.data_path / sequence_name / ('%04d.npy' % sample_idx)
         point_features = np.load(lidar_file)  # (N, 7): [x, y, z, intensity, elongation, NLZ_flag]

         points_all, NLZ_flag = point_features[:, 0:5], point_features[:, 5]
         if not self.dataset_cfg.get('DISABLE_NLZ_FLAG_ON_POINTS', False):
             points_all = points_all[NLZ_flag == -1]
         points_all[:, 3] = np.tanh(points_all[:, 3])
-        return points_all
+
+        if load_origins:
+            sample_info = self.seq_name_to_infos[sequence_name][sample_idx]
+            if 'extrinsics' not in sample_info:
+                raise ValueError('extrinsics not saved to database, use db version >= v0_6_0')
+            origins = [extr[:3, 3] for extr in sample_info['extrinsics']]
+            laser_counts = sample_info['num_points_of_each_lidar']
+            assert sum(laser_counts) == points_all.shape[0], (laser_counts, points_all.shape)
+            assert len(origins) == len(laser_counts), (origins, laser_counts)
+            origins = np.concatenate([np.tile(extr[None, :], (c, 1)) for c, extr in zip(laser_counts, origins)], axis=0)
+            assert origins.shape == points_all[:, :3].shape, (origins.shape, points_all.shape)
+        else:
+            origins = None
+
+        return points_all, origins

     @staticmethod
     def transform_prebox_to_current(pred_boxes3d, pose_pre, pose_cur):
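The load_origins branch above can be read in isolation as follows: each lidar contributes the translation column of its 4x4 extrinsic (the sensor position in the vehicle frame), tiled once per point that lidar returned, so the result lines up row-for-row with points_all. A standalone sketch with toy inputs (the function name and values are hypothetical):

import numpy as np

def build_pointwise_origins(extrinsics, num_points_of_each_lidar):
    # extrinsics: list of 4x4 lidar-to-vehicle transforms; the translation column
    # is the sensor position in the vehicle frame
    origins = [extr[:3, 3] for extr in extrinsics]
    assert len(origins) == len(num_points_of_each_lidar)
    # repeat each sensor position once per point produced by that sensor
    return np.concatenate(
        [np.tile(o[None, :], (count, 1)) for o, count in zip(origins, num_points_of_each_lidar)],
        axis=0,
    )

extrinsics = [np.eye(4) for _ in range(5)]              # five lidars, toy extrinsics
counts = [150000, 4000, 4000, 3000, 3000]
origins = build_pointwise_origins(extrinsics, counts)   # shape (164000, 3)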
@@ -243,7 +257,7 @@ def reorder_rois_for_refining(pred_bboxes):
             ordered_bboxes[bs_idx, :len(pred_bboxes[bs_idx])] = pred_bboxes[bs_idx]
         return ordered_bboxes

-    def get_sequence_data(self, info, points, sequence_name, sample_idx, sequence_cfg, load_pred_boxes=False):
+    def get_sequence_data(self, info, points, origins, sequence_name, sample_idx, sequence_cfg, load_pred_boxes=False):
         """
         Args:
             info:
@@ -256,7 +270,7 @@ def get_sequence_data(self, info, points, sequence_name, sample_idx, sequence_cf

         def remove_ego_points(points, center_radius=1.0):
             mask = ~((np.abs(points[:, 0]) < center_radius) & (np.abs(points[:, 1]) < center_radius))
-            return points[mask]
+            return points[mask], mask

         def load_pred_boxes_from_dict(sequence_name, sample_idx):
             """
@@ -268,6 +282,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
             load_boxes[:, 7:9] = -0.1 * load_boxes[:, 7:9]  # transfer speed to negtive motion from t to t-1
             return load_boxes

+        load_origins = origins is not None
         pose_cur = info['pose'].reshape((4, 4))
         num_pts_cur = points.shape[0]
         sample_idx_pre_list = np.clip(sample_idx + np.arange(sequence_cfg.SAMPLE_OFFSET[0], sequence_cfg.SAMPLE_OFFSET[1]), 0, 0x7FFFFFFF)
@@ -281,6 +296,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
             points = np.hstack([points, np.zeros((points.shape[0], 1)).astype(points.dtype)])
         points_pre_all = []
         num_points_pre = []
+        origins_pre_all = []

         pose_all = [pose_cur]
         pred_boxes_all = []
@@ -292,7 +308,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):

         for idx, sample_idx_pre in enumerate(sample_idx_pre_list):

-            points_pre = self.get_lidar(sequence_name, sample_idx_pre)
+            points_pre, origins_pre = self.get_lidar(sequence_name, sample_idx_pre, load_origins=load_origins)
             pose_pre = sequence_info[sample_idx_pre]['pose'].reshape((4, 4))
             expand_points_pre = np.concatenate([points_pre[:, :3], np.ones((points_pre.shape[0], 1))], axis=-1)
             points_pre_global = np.dot(expand_points_pre, pose_pre.T)[:, :3]
@@ -306,11 +322,19 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
             else:
                 # add timestamp
                 points_pre = np.hstack([points_pre, 0.1 * (sample_idx - sample_idx_pre) * np.ones((points_pre.shape[0], 1)).astype(points_pre.dtype)])  # one frame 0.1s
-            points_pre = remove_ego_points(points_pre, 1.0)
+            points_pre, ego_mask = remove_ego_points(points_pre, 1.0)
             points_pre_all.append(points_pre)
             num_points_pre.append(points_pre.shape[0])
             pose_all.append(pose_pre)

+            if load_origins:
+                expand_origins_pre = np.concatenate([origins_pre[:, :3], np.ones((origins_pre.shape[0], 1))], axis=-1)
+                origins_pre_global = np.dot(expand_origins_pre, pose_pre.T)[:, :3]
+                expand_origins_pre_global = np.concatenate([origins_pre_global, np.ones((origins_pre_global.shape[0], 1))], axis=-1)
+                origins_pre = np.dot(expand_origins_pre_global, np.linalg.inv(pose_cur.T))[:, :3]
+                origins_pre = origins_pre[ego_mask]
+                origins_pre_all.append(origins_pre)
+
             if load_pred_boxes:
                 pose_pre = sequence_info[sample_idx_pre]['pose'].reshape((4, 4))
                 pred_boxes = load_pred_boxes_from_dict(sequence_name, sample_idx_pre)
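The chained concatenate/dot calls in the origins block above implement a standard rigid-frame change: sensor origins recorded in the previous frame's vehicle coordinates are lifted to world coordinates with pose_pre, then brought into the current vehicle frame with the inverse of pose_cur (note that inv(pose_cur.T) equals inv(pose_cur).T). An equivalent compact sketch, with a hypothetical helper name:

import numpy as np

def to_current_frame(xyz_pre, pose_pre, pose_cur):
    homo = np.concatenate([xyz_pre, np.ones((xyz_pre.shape[0], 1))], axis=-1)  # (N, 4) homogeneous
    world = homo @ pose_pre.T                          # previous vehicle frame -> world
    return (world @ np.linalg.inv(pose_cur).T)[:, :3]  # world -> current vehicle frame

# sanity check: identical poses leave the coordinates untouched
pose = np.eye(4)
xyz = np.random.randn(10, 3)
assert np.allclose(to_current_frame(xyz, pose, pose), xyz)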
@@ -321,6 +345,11 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
         num_points_all = np.array([num_pts_cur] + num_points_pre).astype(np.int32)
         poses = np.concatenate(pose_all, axis=0).astype(np.float32)

+        if load_origins:
+            origins = np.concatenate([origins] + origins_pre_all, axis=0).astype(np.float32)
+        else:
+            origins = None
+
         if load_pred_boxes:
             temp_pred_boxes = self.reorder_rois_for_refining(pred_boxes_all)
             pred_boxes = temp_pred_boxes[:, :, 0:9]
@@ -329,7 +358,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
         else:
             pred_boxes = pred_scores = pred_labels = None

-        return points, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels
+        return points, origins, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels

     def __len__(self):
         if self._merge_all_iters_to_one_epoch:
@@ -348,15 +377,15 @@ def __getitem__(self, index):
         input_dict = {
             'sample_idx': sample_idx
         }
-        if self.use_shared_memory and index < self.shared_memory_file_limit:
+        if self.use_shared_memory and index < self.shared_memory_file_limit and not self.dataset_cfg.get('USE_ORIGINS', False):
             sa_key = f'{sequence_name}___{sample_idx}'
             points = SharedArray.attach(f"shm://{sa_key}").copy()
         else:
-            points = self.get_lidar(sequence_name, sample_idx)
+            points, origins = self.get_lidar(sequence_name, sample_idx, load_origins=self.dataset_cfg.get('USE_ORIGINS', False))

         if self.dataset_cfg.get('SEQUENCE_CONFIG', None) is not None and self.dataset_cfg.SEQUENCE_CONFIG.ENABLED:
-            points, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels = self.get_sequence_data(
-                info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG,
+            points, origins, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels = self.get_sequence_data(
+                info, points, origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG,
                 load_pred_boxes=self.dataset_cfg.get('USE_PREDBOX', False)
             )
             input_dict['poses'] = poses
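As the __getitem__ changes show, origin loading is gated by a dataset_cfg flag named USE_ORIGINS, and the shared-memory fast path is skipped when it is set because only the points array is cached there. A hypothetical config fragment, written here as a Python EasyDict rather than the repository's YAML (key values are assumptions, not the shipped config):

from easydict import EasyDict

# hypothetical dataset_cfg fragment; USE_ORIGINS is the key read via
# self.dataset_cfg.get('USE_ORIGINS', False) in __getitem__ above
dataset_cfg = EasyDict({
    'USE_ORIGINS': True,     # load per-point sensor origins alongside the points
    'USE_PREDBOX': False,
    'SEQUENCE_CONFIG': EasyDict({'ENABLED': True, 'SAMPLE_OFFSET': [-3, 0]}),
})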
@@ -369,6 +398,7 @@ def __getitem__(self, index):

         input_dict.update({
             'points': points,
+            'origins': origins,
             'frame_id': info['frame_id'],
         })

@@ -487,11 +517,11 @@ def create_groundtruth_database(self, info_path, save_path, used_classes=None, s
             pc_info = info['point_cloud']
             sequence_name = pc_info['lidar_sequence']
             sample_idx = pc_info['sample_idx']
-            points = self.get_lidar(sequence_name, sample_idx)
+            points, _origins = self.get_lidar(sequence_name, sample_idx)

             if use_sequence_data:
-                points, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
-                    info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
+                points, _origins, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
+                    info, points, _origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
                 )

             annos = info['annos']
@@ -565,11 +595,11 @@ def create_gt_database_of_single_scene(self, info_with_idx, database_save_path=N
         pc_info = info['point_cloud']
         sequence_name = pc_info['lidar_sequence']
         sample_idx = pc_info['sample_idx']
-        points = self.get_lidar(sequence_name, sample_idx)
+        points, _origins = self.get_lidar(sequence_name, sample_idx)

         if use_sequence_data:
-            points, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
-                info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
+            points, _origins, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
+                info, points, _origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
             )

         annos = info['annos']
pcdet/utils/common_utils.py

Lines changed: 1 addition & 1 deletion

@@ -35,7 +35,7 @@ def drop_info_with_name(info, name):
 def apply_data_transform(data_dict, transforms):
     assert set(transforms.keys()).issubset({'point', 'box'})
     data_keys = {
-        'point': ['points'],
+        'point': ['points', 'origins'],
         'box': ['gt_boxes', 'roi_boxes']
     }
     for tf_type, tf in transforms.items():