
Commit 5e63af3

Merge branch 'main' into pin_memory_bbox_fix

2 parents: 7cae837 + e239710

19 files changed: +634, −120 lines

setup.py

Lines changed: 2 additions & 2 deletions

@@ -79,7 +79,7 @@ def get_version():
 
 def write_version_file(version, sha):
     # Exists for BC, probably completely useless.
-    with open(ROOT_DIR / "torchvision/version.py", "w") as f:
+    with open(ROOT_DIR / "torchvision" / "version.py", "w") as f:
        f.write(f"__version__ = '{version}'\n")
        f.write(f"git_version = {repr(sha)}\n")
        f.write("from torchvision.extension import _check_cuda_version\n")
@@ -194,7 +194,7 @@ def make_C_extension():
 
 def find_libpng():
     # Returns (found, include dir, library dir, library name)
-    if sys.platform in ("linux", "darwin"):
+    if sys.platform in ("linux", "darwin", "aix"):
        libpng_config = shutil.which("libpng-config")
        if libpng_config is None:
            warnings.warn("libpng-config not found")
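For context on the version-file change: pathlib's `/` operator joins each argument as its own path segment, so spelling the components out avoids hard-coding a separator inside the string. A minimal standalone sketch, not part of the commit; the ROOT_DIR value here is made up for illustration only:

```python
from pathlib import Path

ROOT_DIR = Path("/tmp/vision")  # hypothetical checkout root, for illustration only

old_style = ROOT_DIR / "torchvision/version.py"      # separator embedded in the string
new_style = ROOT_DIR / "torchvision" / "version.py"  # one path segment per component

# Both point at the same file; the explicit form is the more idiomatic,
# platform-neutral spelling.
print(old_style, new_style, old_style == new_style)
```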

test/common_utils.py

Lines changed: 18 additions & 0 deletions

@@ -423,6 +423,7 @@ def sample_position(values, max_value):
     h, w = [torch.randint(1, s, (num_boxes,)) for s in canvas_size]
     y = sample_position(h, canvas_size[0])
     x = sample_position(w, canvas_size[1])
+    r = -360 * torch.rand((num_boxes,)) + 180
 
     if format is tv_tensors.BoundingBoxFormat.XYWH:
         parts = (x, y, w, h)
@@ -435,6 +436,23 @@ def sample_position(values, max_value):
         cx = x + w / 2
         cy = y + h / 2
         parts = (cx, cy, w, h)
+    elif format is tv_tensors.BoundingBoxFormat.XYWHR:
+        parts = (x, y, w, h, r)
+    elif format is tv_tensors.BoundingBoxFormat.CXCYWHR:
+        cx = x + w / 2
+        cy = y + h / 2
+        parts = (cx, cy, w, h, r)
+    elif format is tv_tensors.BoundingBoxFormat.XYXYXYXY:
+        r_rad = r * torch.pi / 180.0
+        cos, sin = torch.cos(r_rad), torch.sin(r_rad)
+        x1, y1 = x, y
+        x3 = x1 + w * cos
+        y3 = y1 - w * sin
+        x2 = x3 + h * sin
+        y2 = y3 + h * cos
+        x4 = x1 + h * sin
+        y4 = y1 + h * cos
+        parts = (x1, y1, x3, y3, x2, y2, x4, y4)
     else:
         raise ValueError(f"Format {format} is not supported")

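The new XYXYXYXY branch turns an (x, y, w, h, angle-in-degrees) box into its four corner points. Below is a self-contained sketch of the same corner math (the helper name is ours, not torchvision API), checked against the expectation used later in test/test_ops.py, where [4, 5, 4, 2, 90] maps to [4, 5, 4, 1, 6, 1, 6, 5]:

```python
import torch

def xywhr_to_corners(x, y, w, h, r_deg):
    """Corner math mirroring the XYXYXYXY branch above (illustrative helper only)."""
    x, y, w, h = (torch.as_tensor(v, dtype=torch.float32) for v in (x, y, w, h))
    r = torch.deg2rad(torch.as_tensor(r_deg, dtype=torch.float32))
    cos, sin = torch.cos(r), torch.sin(r)
    x1, y1 = x, y                        # anchor corner
    x3, y3 = x1 + w * cos, y1 - w * sin  # anchor shifted along the rotated width edge
    x2, y2 = x3 + h * sin, y3 + h * cos  # opposite corner: width edge plus height edge
    x4, y4 = x1 + h * sin, y1 + h * cos  # anchor shifted along the rotated height edge
    return torch.stack([x1, y1, x3, y3, x2, y2, x4, y4])

# Reproduces the test_ops.py expectation for a 4x2 box anchored at (4, 5), rotated 90 degrees:
# approximately tensor([4., 5., 4., 1., 6., 1., 6., 5.]) up to float rounding.
print(xywhr_to_corners(4, 5, 4, 2, 90))
```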
test/test_datasets.py

Lines changed: 24 additions & 1 deletion

@@ -532,7 +532,8 @@ def inject_fake_data(self, tmpdir, config):
         self._create_bbox_txt(base_folder, num_images)
         self._create_landmarks_txt(base_folder, num_images)
 
-        return dict(num_examples=num_images_per_split[config["split"]], attr_names=attr_names)
+        num_samples = num_images_per_split.get(config["split"], 0) if isinstance(config["split"], str) else 0
+        return dict(num_examples=num_samples, attr_names=attr_names)
 
     def _create_split_txt(self, root):
         num_images_per_split = dict(train=4, valid=3, test=2)
@@ -635,6 +636,28 @@ def test_transforms_v2_wrapper_spawn(self):
         with self.create_dataset(target_type=target_type, transform=v2.Resize(size=expected_size)) as (dataset, _):
             datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size)
 
+    def test_invalid_split_list(self):
+        with pytest.raises(ValueError, match="Expected type str for argument split, but got type <class 'list'>."):
+            with self.create_dataset(split=[1]):
+                pass
+
+    def test_invalid_split_int(self):
+        with pytest.raises(ValueError, match="Expected type str for argument split, but got type <class 'int'>."):
+            with self.create_dataset(split=1):
+                pass
+
+    def test_invalid_split_value(self):
+        with pytest.raises(
+            ValueError,
+            match="Unknown value '{value}' for argument {arg}. Valid values are {{{valid_values}}}.".format(
+                value="invalid",
+                arg="split",
+                valid_values=("train", "valid", "test", "all"),
+            ),
+        ):
+            with self.create_dataset(split="invalid"):
+                pass
+
 
 class VOCSegmentationTestCase(datasets_utils.ImageDatasetTestCase):
     DATASET_CLASS = datasets.VOCSegmentation
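The error messages these new tests match mirror the ones raised by torchvision.datasets.utils.verify_str_arg, which is presumably what the dataset under test uses to validate split (the dataset-side code is not part of this diff). A minimal sketch of that validation pattern, with a wrapper name of our own:

```python
from torchvision.datasets.utils import verify_str_arg

def check_split(split):
    # Rejects non-str inputs and unknown split names with ValueError messages
    # along the lines asserted in the tests above.
    return verify_str_arg(split, arg="split", valid_values=("train", "valid", "test", "all"))

check_split("train")      # returns "train"
# check_split(1)          # ValueError: expected type str for argument split
# check_split("invalid")  # ValueError: unknown value, listing the valid split names
```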

test/test_ops.py

Lines changed: 55 additions & 2 deletions

@@ -1339,8 +1339,61 @@ def test_bbox_xywh_cxcywh(self):
         box_xywh = ops.box_convert(box_cxcywh, in_fmt="cxcywh", out_fmt="xywh")
         assert_equal(box_xywh, box_tensor)
 
-    @pytest.mark.parametrize("inv_infmt", ["xwyh", "cxwyh"])
-    @pytest.mark.parametrize("inv_outfmt", ["xwcx", "xhwcy"])
+    def test_bbox_xywhr_cxcywhr(self):
+        box_tensor = torch.tensor(
+            [
+                [0, 0, 100, 100, 0],
+                [0, 0, 0, 0, 0],
+                [10, 15, 20, 20, 0],
+                [23, 35, 70, 60, 0],
+                [4, 2, 4, 2, 0],
+                [5, 5, 4, 2, 90],
+                [8, 4, 4, 2, 180],
+                [7, 1, 4, 2, -90],
+            ],
+            dtype=torch.float,
+        )
+
+        exp_cxcywhr = torch.tensor(
+            [
+                [50, 50, 100, 100, 0],
+                [0, 0, 0, 0, 0],
+                [20, 25, 20, 20, 0],
+                [58, 65, 70, 60, 0],
+                [6, 3, 4, 2, 0],
+                [6, 3, 4, 2, 90],
+                [6, 3, 4, 2, 180],
+                [6, 3, 4, 2, -90],
+            ],
+            dtype=torch.float,
+        )
+
+        assert exp_cxcywhr.size() == torch.Size([8, 5])
+        box_cxcywhr = ops.box_convert(box_tensor, in_fmt="xywhr", out_fmt="cxcywhr")
+        torch.testing.assert_close(box_cxcywhr, exp_cxcywhr)
+
+        # Reverse conversion
+        box_xywhr = ops.box_convert(box_cxcywhr, in_fmt="cxcywhr", out_fmt="xywhr")
+        torch.testing.assert_close(box_xywhr, box_tensor)
+
+    def test_bbox_cxcywhr_to_xyxyxyxy(self):
+        box_tensor = torch.tensor([[5, 3, 4, 2, 90]], dtype=torch.float)
+        exp_xyxyxyxy = torch.tensor([[4, 5, 4, 1, 6, 1, 6, 5]], dtype=torch.float)
+
+        assert exp_xyxyxyxy.size() == torch.Size([1, 8])
+        box_xyxyxyxy = ops.box_convert(box_tensor, in_fmt="cxcywhr", out_fmt="xyxyxyxy")
+        torch.testing.assert_close(box_xyxyxyxy, exp_xyxyxyxy)
+
+    def test_bbox_xywhr_to_xyxyxyxy(self):
+        box_tensor = torch.tensor([[4, 5, 4, 2, 90]], dtype=torch.float)
+        exp_xyxyxyxy = torch.tensor([[4, 5, 4, 1, 6, 1, 6, 5]], dtype=torch.float)
+
+        assert exp_xyxyxyxy.size() == torch.Size([1, 8])
+        box_xyxyxyxy = ops.box_convert(box_tensor, in_fmt="xywhr", out_fmt="xyxyxyxy")
+        torch.testing.assert_close(box_xyxyxyxy, exp_xyxyxyxy)
+
+    @pytest.mark.parametrize("inv_infmt", ["xwyh", "cxwyh", "xwyhr", "cxwyhr", "xxxxyyyy"])
+    @pytest.mark.parametrize("inv_outfmt", ["xwcx", "xhwcy", "xwcxr", "xhwcyr", "xyxyxxyy"])
     def test_bbox_invalid(self, inv_infmt, inv_outfmt):
         box_tensor = torch.tensor(
             [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 20, 20], [23, 35, 70, 60]], dtype=torch.float
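As a quick usage sketch of what these tests exercise (values borrowed from the expectations above; this assumes a torchvision build that already includes the rotated-format support added in this commit):

```python
import torch
from torchvision import ops

# A 4x2 box anchored at (4, 5), rotated by 90 degrees, in xywhr format.
xywhr = torch.tensor([[4.0, 5.0, 4.0, 2.0, 90.0]])

cxcywhr = ops.box_convert(xywhr, in_fmt="xywhr", out_fmt="cxcywhr")
corners = ops.box_convert(xywhr, in_fmt="xywhr", out_fmt="xyxyxyxy")

print(cxcywhr)  # a single (cx, cy, w, h, r) row
print(corners)  # per test_bbox_xywhr_to_xyxyxyxy: [[4., 5., 4., 1., 6., 1., 6., 5.]] (up to rounding)
```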
