
Commit 90bc193

David Josef Emmerichs (demmerichs) authored and committed
load pointwise laser origins
1 parent dac8161 commit 90bc193

File tree: 3 files changed (+51, -20 lines)


pcdet/datasets/dataset.py

Lines changed: 1 addition & 1 deletion
@@ -234,7 +234,7 @@ def collate_batch(batch_list, _unused=False):
                         batch_size_ratio = len(val[0])
                         val = [i for item in val for i in item]
                     ret[key] = np.concatenate(val, axis=0)
-                elif key in ['points', 'voxel_coords']:
+                elif key in ['points', 'origins', 'voxel_coords']:
                     coors = []
                     if isinstance(val[0], list):
                         val = [i for item in val for i in item]
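With 'origins' added to this branch, per-sample origin arrays are batched the same way as 'points': each sample's array gets its batch index prepended as a leading column before everything is concatenated. A minimal standalone sketch of that padding step (collate_point_like is an illustrative helper, not part of the codebase):

import numpy as np

def collate_point_like(arrays):
    """Stack per-sample (N_i, C) arrays into one (sum N_i, 1 + C) array,
    prepending each sample's batch index as the first column."""
    padded = [
        np.pad(arr, ((0, 0), (1, 0)), mode='constant', constant_values=i)
        for i, arr in enumerate(arrays)
    ]
    return np.concatenate(padded, axis=0)

# e.g. two samples with (N_i, 3) laser origins become one (180, 4) batched array
batched_origins = collate_point_like([np.zeros((100, 3)), np.ones((80, 3))])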

pcdet/datasets/waymo/waymo_dataset.py

Lines changed: 49 additions & 18 deletions
@@ -127,7 +127,7 @@ def load_data_to_shared_memory(self):
             if os.path.exists(f"/dev/shm/{sa_key}"):
                 continue
 
-            points = self.get_lidar(sequence_name, sample_idx)
+            points, _origins = self.get_lidar(sequence_name, sample_idx)
             common_utils.sa_create(f"shm://{sa_key}", points)
 
         dist.barrier()
@@ -193,19 +193,34 @@ def get_infos(self, raw_data_path, save_path, num_workers=multiprocessing.cpu_co
         all_sequences_infos = [item for infos in sequence_infos for item in infos]
         return all_sequences_infos
 
-    def get_lidar(self, sequence_name, sample_idx):
+    def get_lidar(self, sequence_name, sample_idx, load_origins=False):
         lidar_file = self.data_path / sequence_name / ('%04d.npy' % sample_idx)
         point_features = np.load(lidar_file)  # (N, 7): [x, y, z, intensity, elongation, NLZ_flag]
 
         points_all, NLZ_flag = point_features[:, 0:5], point_features[:, 5]
         if not self.dataset_cfg.get('DISABLE_NLZ_FLAG_ON_POINTS', False):
             points_all = points_all[NLZ_flag == -1]
+
         if self.dataset_cfg.get('POINTS_TANH_DIM', None) is None:
             points_all[:, 3] = np.tanh(points_all[:, 3])
         else:
             for dim_idx in self.dataset_cfg.POINTS_TANH_DIM:
                 points_all[:, dim_idx] = np.tanh(points_all[:, dim_idx])
-        return points_all
+
+        if load_origins:
+            sample_info = self.seq_name_to_infos[sequence_name][sample_idx]
+            if 'extrinsics' not in sample_info:
+                raise ValueError('extrinsics not saved to database, use db version >= v0_6_0')
+            origins = [extr[:3, 3] for extr in sample_info['extrinsics']]
+            laser_counts = sample_info['num_points_of_each_lidar']
+            assert sum(laser_counts) == points_all.shape[0], (laser_counts, points_all.shape)
+            assert len(origins) == len(laser_counts), (origins, laser_counts)
+            origins = np.concatenate([np.tile(extr[None, :], (c, 1)) for c, extr in zip(laser_counts, origins)], axis=0)
+            assert origins.shape == points_all[:, :3].shape, (origins.shape, points_all.shape)
+        else:
+            origins = None
+
+        return points_all, origins
 
     @staticmethod
     def transform_prebox_to_current(pred_boxes3d, pose_pre, pose_cur):
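The new load_origins branch above rebuilds a per-point origin array from the per-laser calibration stored in the sequence infos: each laser's extrinsic translation is repeated once for every point that laser returned, so origins[i] is the position of the sensor that emitted points[i]. A standalone sketch of that expansion with made-up laser counts and translations:

import numpy as np

# Illustrative values only: two lasers, their extrinsic translations (sensor
# positions in the vehicle frame), and how many returns each laser produced.
laser_origins = [np.array([1.43, 0.0, 2.18]), np.array([4.07, 0.0, 0.69])]
laser_counts = [150000, 3000]

# Repeat each origin once per point of its laser, preserving the point order.
origins = np.concatenate(
    [np.tile(o[None, :], (c, 1)) for c, o in zip(laser_counts, laser_origins)],
    axis=0,
)
assert origins.shape == (sum(laser_counts), 3)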
@@ -247,7 +262,7 @@ def reorder_rois_for_refining(pred_bboxes):
             ordered_bboxes[bs_idx, :len(pred_bboxes[bs_idx])] = pred_bboxes[bs_idx]
         return ordered_bboxes
 
-    def get_sequence_data(self, info, points, sequence_name, sample_idx, sequence_cfg, load_pred_boxes=False):
+    def get_sequence_data(self, info, points, origins, sequence_name, sample_idx, sequence_cfg, load_pred_boxes=False):
         """
         Args:
             info:
@@ -260,7 +275,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
 
         def remove_ego_points(points, center_radius=1.0):
             mask = ~((np.abs(points[:, 0]) < center_radius) & (np.abs(points[:, 1]) < center_radius))
-            return points[mask]
+            return points[mask], mask
 
         def load_pred_boxes_from_dict(sequence_name, sample_idx):
             """
@@ -272,6 +287,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
             load_boxes[:, 7:9] = -0.1 * load_boxes[:, 7:9]  # transfer speed to negtive motion from t to t-1
             return load_boxes
 
+        load_origins = origins is not None
         pose_cur = info['pose'].reshape((4, 4))
         num_pts_cur = points.shape[0]
         sample_idx_pre_list = np.clip(sample_idx + np.arange(sequence_cfg.SAMPLE_OFFSET[0], sequence_cfg.SAMPLE_OFFSET[1]), 0, 0x7FFFFFFF)
@@ -285,6 +301,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
             points = np.hstack([points, np.zeros((points.shape[0], 1)).astype(points.dtype)])
         points_pre_all = []
         num_points_pre = []
+        origins_pre_all = []
 
         pose_all = [pose_cur]
         pred_boxes_all = []
@@ -296,7 +313,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
 
         for idx, sample_idx_pre in enumerate(sample_idx_pre_list):
 
-            points_pre = self.get_lidar(sequence_name, sample_idx_pre)
+            points_pre, origins_pre = self.get_lidar(sequence_name, sample_idx_pre, load_origins=load_origins)
             pose_pre = sequence_info[sample_idx_pre]['pose'].reshape((4, 4))
             expand_points_pre = np.concatenate([points_pre[:, :3], np.ones((points_pre.shape[0], 1))], axis=-1)
             points_pre_global = np.dot(expand_points_pre, pose_pre.T)[:, :3]
@@ -310,11 +327,19 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
             else:
                 # add timestamp
                 points_pre = np.hstack([points_pre, 0.1 * (sample_idx - sample_idx_pre) * np.ones((points_pre.shape[0], 1)).astype(points_pre.dtype)])  # one frame 0.1s
-            points_pre = remove_ego_points(points_pre, 1.0)
+            points_pre, ego_mask = remove_ego_points(points_pre, 1.0)
             points_pre_all.append(points_pre)
             num_points_pre.append(points_pre.shape[0])
             pose_all.append(pose_pre)
 
+            if load_origins:
+                expand_origins_pre = np.concatenate([origins_pre[:, :3], np.ones((origins_pre.shape[0], 1))], axis=-1)
+                origins_pre_global = np.dot(expand_origins_pre, pose_pre.T)[:, :3]
+                expand_origins_pre_global = np.concatenate([origins_pre_global, np.ones((origins_pre_global.shape[0], 1))], axis=-1)
+                origins_pre = np.dot(expand_origins_pre_global, np.linalg.inv(pose_cur.T))[:, :3]
+                origins_pre = origins_pre[ego_mask]
+                origins_pre_all.append(origins_pre)
+
             if load_pred_boxes:
                 pose_pre = sequence_info[sample_idx_pre]['pose'].reshape((4, 4))
                 pred_boxes = load_pred_boxes_from_dict(sequence_name, sample_idx_pre)
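When frames are accumulated, the origins of a previous sweep are carried into the current ego frame with the same two homogeneous transforms already used for the points: previous ego to global via pose_pre, then global to current ego via the inverse of pose_cur, before the ego-radius mask is applied. A condensed numpy sketch of that chain (the helper name and example poses are illustrative; keeping the coordinates homogeneous throughout is equivalent to the slice-and-reappend form in the diff):

import numpy as np

def to_current_frame(xyz, pose_pre, pose_cur):
    """Map (N, 3) coordinates from a previous ego frame into the current one."""
    xyz_h = np.concatenate([xyz, np.ones((xyz.shape[0], 1))], axis=-1)  # homogeneous (N, 4)
    xyz_global = xyz_h @ pose_pre.T                    # previous ego -> global
    xyz_cur = xyz_global @ np.linalg.inv(pose_cur).T   # global -> current ego
    return xyz_cur[:, :3]

# Made-up vehicle poses two metres apart along x.
pose_pre = np.eye(4)
pose_pre[:3, 3] = [10.0, 0.0, 0.0]
pose_cur = np.eye(4)
pose_cur[:3, 3] = [12.0, 0.0, 0.0]
origins_in_current = to_current_frame(np.zeros((5, 3)), pose_pre, pose_cur)  # every row: [-2, 0, 0]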
@@ -325,6 +350,11 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
         num_points_all = np.array([num_pts_cur] + num_points_pre).astype(np.int32)
         poses = np.concatenate(pose_all, axis=0).astype(np.float32)
 
+        if load_origins:
+            origins = np.concatenate([origins] + origins_pre_all, axis=0).astype(np.float32)
+        else:
+            origins = None
+
         if load_pred_boxes:
             temp_pred_boxes = self.reorder_rois_for_refining(pred_boxes_all)
             pred_boxes = temp_pred_boxes[:, :, 0:9]
@@ -333,7 +363,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
         else:
             pred_boxes = pred_scores = pred_labels = None
 
-        return points, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels
+        return points, origins, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels
 
     def __len__(self):
         if self._merge_all_iters_to_one_epoch:
@@ -352,15 +382,15 @@ def __getitem__(self, index):
         input_dict = {
            'sample_idx': sample_idx
         }
-        if self.use_shared_memory and index < self.shared_memory_file_limit:
+        if self.use_shared_memory and index < self.shared_memory_file_limit and not self.dataset_cfg.get('USE_ORIGINS', False):
            sa_key = f'{sequence_name}___{sample_idx}'
            points = SharedArray.attach(f"shm://{sa_key}").copy()
         else:
-            points = self.get_lidar(sequence_name, sample_idx)
+            points, origins = self.get_lidar(sequence_name, sample_idx, load_origins=self.dataset_cfg.get('USE_ORIGINS', False))
 
         if self.dataset_cfg.get('SEQUENCE_CONFIG', None) is not None and self.dataset_cfg.SEQUENCE_CONFIG.ENABLED:
-            points, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels = self.get_sequence_data(
-                info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG,
+            points, origins, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels = self.get_sequence_data(
+                info, points, origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG,
                 load_pred_boxes=self.dataset_cfg.get('USE_PREDBOX', False)
             )
             input_dict['poses'] = poses
@@ -373,6 +403,7 @@ def __getitem__(self, index):
 
         input_dict.update({
             'points': points,
+            'origins': origins,
             'frame_id': info['frame_id'],
         })
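Origin loading in __getitem__ is gated by a USE_ORIGINS flag read through dataset_cfg.get('USE_ORIGINS', False); when the flag is set, the shared-memory fast path is skipped because only raw points are cached there. A hypothetical configuration sketch (the dataset construction itself is elided):

from easydict import EasyDict

# USE_ORIGINS would normally be set in the Waymo dataset YAML config;
# EasyDict mirrors OpenPCDet's attribute-style config access.
dataset_cfg = EasyDict(USE_ORIGINS=True)

load_origins = dataset_cfg.get('USE_ORIGINS', False)  # the flag checked in __getitem__
assert load_origins is True
# dataset = WaymoDataset(dataset_cfg=dataset_cfg, ...)  # remaining arguments omitted
# dataset[0]['origins'] would then be row-aligned with dataset[0]['points']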

@@ -491,11 +522,11 @@ def create_groundtruth_database(self, info_path, save_path, used_classes=None, s
             pc_info = info['point_cloud']
             sequence_name = pc_info['lidar_sequence']
             sample_idx = pc_info['sample_idx']
-            points = self.get_lidar(sequence_name, sample_idx)
+            points, _origins = self.get_lidar(sequence_name, sample_idx)
 
             if use_sequence_data:
-                points, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
-                    info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
+                points, _origins, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
+                    info, points, _origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
                 )
 
             annos = info['annos']
@@ -569,11 +600,11 @@ def create_gt_database_of_single_scene(self, info_with_idx, database_save_path=N
         pc_info = info['point_cloud']
         sequence_name = pc_info['lidar_sequence']
         sample_idx = pc_info['sample_idx']
-        points = self.get_lidar(sequence_name, sample_idx)
+        points, _origins = self.get_lidar(sequence_name, sample_idx)
 
         if use_sequence_data:
-            points, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
-                info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
+            points, _origins, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
+                info, points, _origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
             )
 
         annos = info['annos']

pcdet/utils/common_utils.py

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ def drop_info_with_name(info, name):
 def apply_data_transform(data_dict, transforms):
     assert set(transforms.keys()).issubset({'point', 'box'})
     data_keys = {
-        'point': ['points'],
+        'point': ['points', 'origins'],
         'box': ['gt_boxes', 'roi_boxes']
     }
     for tf_type, tf in transforms.items():
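Registering 'origins' under the 'point' keys means every geometric transform applied to the point cloud is also applied to the laser origins, so each point and its emitting sensor position stay in the same coordinate frame. A small sketch of that idea with an illustrative transform (flip_y is a made-up helper, not part of common_utils):

import numpy as np

def flip_y(arr):
    """Mirror coordinates across the x-z plane; other columns are untouched."""
    out = arr.copy()
    out[:, 1] = -out[:, 1]
    return out

data_dict = {
    'points': np.random.rand(10, 5),   # x, y, z plus extra per-point features
    'origins': np.random.rand(10, 3),  # laser origin per point
}
for key in ['points', 'origins']:      # the keys listed under 'point' above
    data_dict[key] = flip_y(data_dict[key])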
