Skip to content

Commit dcb8abd

Browse files
David Josef Emmerichs (demmerichs)
authored and committed
load pointwise laser origins
1 parent dac8161 commit dcb8abd

File tree

3 files changed

+50
-20
lines changed

3 files changed

+50
-20
lines changed

pcdet/datasets/dataset.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -234,7 +234,7 @@ def collate_batch(batch_list, _unused=False):
234234
batch_size_ratio = len(val[0])
235235
val = [i for item in val for i in item]
236236
ret[key] = np.concatenate(val, axis=0)
237-
elif key in ['points', 'voxel_coords']:
237+
elif key in ['points', 'origins', 'voxel_coords']:
238238
coors = []
239239
if isinstance(val[0], list):
240240
val = [i for item in val for i in item]

pcdet/datasets/waymo/waymo_dataset.py

Lines changed: 48 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ def load_data_to_shared_memory(self):
127127
if os.path.exists(f"/dev/shm/{sa_key}"):
128128
continue
129129

130-
points = self.get_lidar(sequence_name, sample_idx)
130+
points, _origins = self.get_lidar(sequence_name, sample_idx)
131131
common_utils.sa_create(f"shm://{sa_key}", points)
132132

133133
dist.barrier()
@@ -193,19 +193,33 @@ def get_infos(self, raw_data_path, save_path, num_workers=multiprocessing.cpu_co
193193
all_sequences_infos = [item for infos in sequence_infos for item in infos]
194194
return all_sequences_infos
195195

196-
def get_lidar(self, sequence_name, sample_idx):
196+
def get_lidar(self, sequence_name, sample_idx, load_origins=False):
197197
lidar_file = self.data_path / sequence_name / ('%04d.npy' % sample_idx)
198198
point_features = np.load(lidar_file) # (N, 7): [x, y, z, intensity, elongation, NLZ_flag]
199199

200200
points_all, NLZ_flag = point_features[:, 0:5], point_features[:, 5]
201201
if not self.dataset_cfg.get('DISABLE_NLZ_FLAG_ON_POINTS', False):
202202
points_all = points_all[NLZ_flag == -1]
203+
203204
if self.dataset_cfg.get('POINTS_TANH_DIM', None) is None:
204205
points_all[:, 3] = np.tanh(points_all[:, 3])
205206
else:
206207
for dim_idx in self.dataset_cfg.POINTS_TANH_DIM:
207208
points_all[:, dim_idx] = np.tanh(points_all[:, dim_idx])
208-
return points_all
209+
210+
if load_origins:
211+
sample_info = self.seq_name_to_infos[sequence_name][sample_idx]
212+
raise ValueError('extrinsics not saved to database, use db version >= v0_6_0')
213+
origins = [extr[:3, 3] for extr in sample_info['extrinsics']]
214+
laser_counts = sample_info['num_points_of_each_lidar']
215+
assert sum(laser_counts) == points_all.shape[0], (laser_counts, points_all.shape)
216+
assert len(origins) == len(laser_counts), (origins, laser_counts)
217+
origins = np.concatenate([np.tile(extr[None, :], (c, 1)) for c, extr in zip(laser_counts, origins)], axis=0)
218+
assert origins.shape == points_all[:, :3].shape, (origins.shape, points_all.shape)
219+
else:
220+
origins = None
221+
222+
return points_all, origins
209223

210224
@staticmethod
211225
def transform_prebox_to_current(pred_boxes3d, pose_pre, pose_cur):
@@ -247,7 +261,7 @@ def reorder_rois_for_refining(pred_bboxes):
247261
ordered_bboxes[bs_idx, :len(pred_bboxes[bs_idx])] = pred_bboxes[bs_idx]
248262
return ordered_bboxes
249263

250-
def get_sequence_data(self, info, points, sequence_name, sample_idx, sequence_cfg, load_pred_boxes=False):
264+
def get_sequence_data(self, info, points, origins, sequence_name, sample_idx, sequence_cfg, load_pred_boxes=False):
251265
"""
252266
Args:
253267
info:
@@ -260,7 +274,7 @@ def get_sequence_data(self, info, points, sequence_name, sample_idx, sequence_cf
260274

261275
def remove_ego_points(points, center_radius=1.0):
262276
mask = ~((np.abs(points[:, 0]) < center_radius) & (np.abs(points[:, 1]) < center_radius))
263-
return points[mask]
277+
return points[mask], mask
264278

265279
def load_pred_boxes_from_dict(sequence_name, sample_idx):
266280
"""
@@ -272,6 +286,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
272286
load_boxes[:, 7:9] = -0.1 * load_boxes[:, 7:9] # transfer speed to negtive motion from t to t-1
273287
return load_boxes
274288

289+
load_origins = origins is not None
275290
pose_cur = info['pose'].reshape((4, 4))
276291
num_pts_cur = points.shape[0]
277292
sample_idx_pre_list = np.clip(sample_idx + np.arange(sequence_cfg.SAMPLE_OFFSET[0], sequence_cfg.SAMPLE_OFFSET[1]), 0, 0x7FFFFFFF)
@@ -285,6 +300,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
285300
points = np.hstack([points, np.zeros((points.shape[0], 1)).astype(points.dtype)])
286301
points_pre_all = []
287302
num_points_pre = []
303+
origins_pre_all = []
288304

289305
pose_all = [pose_cur]
290306
pred_boxes_all = []
@@ -296,7 +312,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
296312

297313
for idx, sample_idx_pre in enumerate(sample_idx_pre_list):
298314

299-
points_pre = self.get_lidar(sequence_name, sample_idx_pre)
315+
points_pre, origins_pre = self.get_lidar(sequence_name, sample_idx_pre, load_origins=load_origins)
300316
pose_pre = sequence_info[sample_idx_pre]['pose'].reshape((4, 4))
301317
expand_points_pre = np.concatenate([points_pre[:, :3], np.ones((points_pre.shape[0], 1))], axis=-1)
302318
points_pre_global = np.dot(expand_points_pre, pose_pre.T)[:, :3]
@@ -310,11 +326,19 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
310326
else:
311327
# add timestamp
312328
points_pre = np.hstack([points_pre, 0.1 * (sample_idx - sample_idx_pre) * np.ones((points_pre.shape[0], 1)).astype(points_pre.dtype)]) # one frame 0.1s
313-
points_pre = remove_ego_points(points_pre, 1.0)
329+
points_pre, ego_mask = remove_ego_points(points_pre, 1.0)
314330
points_pre_all.append(points_pre)
315331
num_points_pre.append(points_pre.shape[0])
316332
pose_all.append(pose_pre)
317333

334+
if load_origins:
335+
expand_origins_pre = np.concatenate([origins_pre[:, :3], np.ones((origins_pre.shape[0], 1))], axis=-1)
336+
origins_pre_global = np.dot(expand_origins_pre, pose_pre.T)[:, :3]
337+
expand_origins_pre_global = np.concatenate([origins_pre_global, np.ones((origins_pre_global.shape[0], 1))], axis=-1)
338+
origins_pre = np.dot(expand_origins_pre_global, np.linalg.inv(pose_cur.T))[:, :3]
339+
origins_pre = origins_pre[ego_mask]
340+
origins_pre_all.append(origins_pre)
341+
318342
if load_pred_boxes:
319343
pose_pre = sequence_info[sample_idx_pre]['pose'].reshape((4, 4))
320344
pred_boxes = load_pred_boxes_from_dict(sequence_name, sample_idx_pre)
@@ -325,6 +349,11 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
325349
num_points_all = np.array([num_pts_cur] + num_points_pre).astype(np.int32)
326350
poses = np.concatenate(pose_all, axis=0).astype(np.float32)
327351

352+
if load_origins:
353+
origins = np.concatenate([origins] + origins_pre_all, axis=0).astype(np.float32)
354+
else:
355+
origins = None
356+
328357
if load_pred_boxes:
329358
temp_pred_boxes = self.reorder_rois_for_refining(pred_boxes_all)
330359
pred_boxes = temp_pred_boxes[:, :, 0:9]
@@ -333,7 +362,7 @@ def load_pred_boxes_from_dict(sequence_name, sample_idx):
333362
else:
334363
pred_boxes = pred_scores = pred_labels = None
335364

336-
return points, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels
365+
return points, origins, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels
337366

338367
def __len__(self):
339368
if self._merge_all_iters_to_one_epoch:
@@ -352,15 +381,15 @@ def __getitem__(self, index):
352381
input_dict = {
353382
'sample_idx': sample_idx
354383
}
355-
if self.use_shared_memory and index < self.shared_memory_file_limit:
384+
if self.use_shared_memory and index < self.shared_memory_file_limit and not self.dataset_cfg.get('USE_ORIGINS', False):
356385
sa_key = f'{sequence_name}___{sample_idx}'
357386
points = SharedArray.attach(f"shm://{sa_key}").copy()
358387
else:
359-
points = self.get_lidar(sequence_name, sample_idx)
388+
points, origins = self.get_lidar(sequence_name, sample_idx, load_origins=self.dataset_cfg.get('USE_ORIGINS', False))
360389

361390
if self.dataset_cfg.get('SEQUENCE_CONFIG', None) is not None and self.dataset_cfg.SEQUENCE_CONFIG.ENABLED:
362-
points, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels = self.get_sequence_data(
363-
info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG,
391+
points, origins, num_points_all, sample_idx_pre_list, poses, pred_boxes, pred_scores, pred_labels = self.get_sequence_data(
392+
info, points, origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG,
364393
load_pred_boxes=self.dataset_cfg.get('USE_PREDBOX', False)
365394
)
366395
input_dict['poses'] = poses
@@ -373,6 +402,7 @@ def __getitem__(self, index):
373402

374403
input_dict.update({
375404
'points': points,
405+
'origins': origins,
376406
'frame_id': info['frame_id'],
377407
})
378408

@@ -491,11 +521,11 @@ def create_groundtruth_database(self, info_path, save_path, used_classes=None, s
491521
pc_info = info['point_cloud']
492522
sequence_name = pc_info['lidar_sequence']
493523
sample_idx = pc_info['sample_idx']
494-
points = self.get_lidar(sequence_name, sample_idx)
524+
points, _origins = self.get_lidar(sequence_name, sample_idx)
495525

496526
if use_sequence_data:
497-
points, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
498-
info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
527+
points, _origins, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
528+
info, points, _origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
499529
)
500530

501531
annos = info['annos']
@@ -569,11 +599,11 @@ def create_gt_database_of_single_scene(self, info_with_idx, database_save_path=N
569599
pc_info = info['point_cloud']
570600
sequence_name = pc_info['lidar_sequence']
571601
sample_idx = pc_info['sample_idx']
572-
points = self.get_lidar(sequence_name, sample_idx)
602+
points, _origins = self.get_lidar(sequence_name, sample_idx)
573603

574604
if use_sequence_data:
575-
points, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
576-
info, points, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
605+
points, _origins, num_points_all, sample_idx_pre_list, _, _, _, _ = self.get_sequence_data(
606+
info, points, _origins, sequence_name, sample_idx, self.dataset_cfg.SEQUENCE_CONFIG
577607
)
578608

579609
annos = info['annos']

pcdet/utils/common_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def drop_info_with_name(info, name):
3535
def apply_data_transform(data_dict, transforms):
3636
assert set(transforms.keys()).issubset({'point', 'box'})
3737
data_keys = {
38-
'point': ['points'],
38+
'point': ['points', 'origins'],
3939
'box': ['gt_boxes', 'roi_boxes']
4040
}
4141
for tf_type, tf in transforms.items():

0 commit comments

Comments (0)