Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions ppdet/data/source/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,14 +217,14 @@ def _load_images(self, do_eval=False):
images = self._parse()
ct = 0
records = []
anno_file = self.get_anno()
coco = COCO(anno_file)
for image in images:
assert image != '' and os.path.isfile(image), \
"Image {} not found".format(image)
if self.sample_num > 0 and ct >= self.sample_num:
break
if do_eval:
anno_file = self.get_anno()
coco = COCO(anno_file)
image_id = self.get_image_id(image, coco)
ct = image_id
rec = {'im_id': np.array([ct]), 'im_file': image}
Expand Down
4 changes: 2 additions & 2 deletions ppdet/data/transform/operators.py
Original file line number Diff line number Diff line change
Expand Up @@ -3165,15 +3165,15 @@ def crop(self, sample, region):
[0, 4], dtype=np.float32)
sample['gt_class'] = sample['gt_class'][keep_index] if len(
keep_index) > 0 else np.zeros(
[0, 1], dtype=np.float32)
[0, 1], dtype=np.int32)
if 'gt_score' in sample:
sample['gt_score'] = sample['gt_score'][keep_index] if len(
keep_index) > 0 else np.zeros(
[0, 1], dtype=np.float32)
if 'is_crowd' in sample:
sample['is_crowd'] = sample['is_crowd'][keep_index] if len(
keep_index) > 0 else np.zeros(
[0, 1], dtype=np.float32)
[0, 1], dtype=np.int32)
if 'gt_areas' in sample:
sample['gt_areas'] = np.take(
sample['gt_areas'], keep_index, axis=0)
Expand Down
14 changes: 10 additions & 4 deletions ppdet/modeling/losses/detr_loss.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,10 +281,16 @@ def _get_index_updates(self, num_query_objects, target, match_indices):
])
src_idx = paddle.concat([src for (src, _) in match_indices])
src_idx += (batch_idx * num_query_objects)
target_assign = paddle.concat([
paddle.gather(
t, dst, axis=0) for t, (_, dst) in zip(target, match_indices)
])
if 'npu' in paddle.device.get_device():
target_assign = paddle.concat([
paddle.gather(
t.to(paddle.int32), dst.to(paddle.int32), axis=0) for t, (_, dst) in zip(target, match_indices)
])
else:
target_assign = paddle.concat([
paddle.gather(
t, dst, axis=0) for t, (_, dst) in zip(target, match_indices)
])
return src_idx, target_assign

def _get_src_target_assign(self, src, target, match_indices):
Expand Down
3 changes: 3 additions & 0 deletions ppdet/modeling/transformers/matchers.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,9 @@ def forward(self,
out_bbox = boxes.detach().flatten(0, 1)

# Also concat the target labels and boxes
if 'npu' in paddle.device.get_device():
gt_class = [tensor.to(paddle.int32) for tensor in gt_class]

tgt_ids = paddle.concat(gt_class).flatten()
tgt_bbox = paddle.concat(gt_bbox)

Expand Down
8 changes: 4 additions & 4 deletions ppdet/modeling/transformers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,8 +248,8 @@ def get_denoising_training_group(targets,
num_denoising = int(max_gt_num * num_group)

if label_noise_ratio > 0:
input_query_class = input_query_class.flatten()
pad_gt_mask = pad_gt_mask.flatten()
input_query_class = paddle.assign(input_query_class.flatten())
pad_gt_mask = paddle.assign(pad_gt_mask.flatten())
# half of bbox prob, cast mask from bool to float because dtype promotion
# between bool and float is not supported in static mode.
mask = paddle.cast(
Expand Down Expand Up @@ -356,8 +356,8 @@ def get_contrastive_denoising_training_group(targets,
num_denoising = int(max_gt_num * 2 * num_group)

if label_noise_ratio > 0:
input_query_class = input_query_class.flatten()
pad_gt_mask = pad_gt_mask.flatten()
input_query_class = paddle.assign(input_query_class.flatten())
pad_gt_mask = paddle.assign(pad_gt_mask.flatten())
# half of bbox prob
mask = paddle.rand(input_query_class.shape) < (label_noise_ratio * 0.5)
chosen_idx = paddle.nonzero(mask.cast(pad_gt_mask.dtype) *
Expand Down