|
| 1 | +import numpy as np |
1 | 2 | import os |
2 | 3 | import sys |
3 | 4 | import torch |
4 | 5 | import unittest |
5 | 6 |
|
6 | 7 | from torch.autograd import gradcheck |
7 | 8 |
|
| 9 | +from . import run_if_cuda |
| 10 | + |
| 11 | + |
# Make torch_points_kernels importable when the tests are run straight from
# the source tree: prepend the repository root (the parent of this tests
# directory) to the module search path before the package import below.
ROOT = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
sys.path.insert(0, ROOT)
10 | 14 |
|
11 | | -from torch_points_kernels import ChamferFunction |
| 15 | +from torch_points_kernels import ChamferFunction, chamfer_dist |
12 | 16 |
|
13 | 17 |
|
class TestChamferDistance(unittest.TestCase):
    """Tests for the CUDA chamfer-distance operators of torch_points_kernels."""

    @staticmethod
    def _clouds():
        """Build the small fixed point-cloud pair shared by the value tests.

        Returns:
            (xyz1, xyz2): float32 CPU tensors of shape (1, 3, 3) and (1, 2, 3).
        """
        xyz1 = torch.from_numpy(np.array([[[0, 0, 0], [1, 1, 1], [2, 0, 1]]])).float()
        xyz2 = torch.from_numpy(np.array([[[1, 0, 0], [1, 2, 1]]])).float()
        return xyz1, xyz2

    @run_if_cuda
    def test_chamfer_dist_grad(self):
        """Numerically verify ChamferFunction's gradients in double precision."""
        x = torch.rand(4, 64, 3).double()
        y = torch.rand(4, 128, 3).double()
        x.requires_grad = True
        y.requires_grad = True
        # Assert the result instead of discarding it: gradcheck returns a bool,
        # and the original test silently ignored it (would not fail if gradcheck
        # were ever configured with raise_exception=False).
        self.assertTrue(gradcheck(ChamferFunction.apply, [x.cuda(), y.cuda()]))

    @run_if_cuda
    def test_chamfer_dist(self):
        """Chamfer distance on a known cloud pair matches the hand-computed value."""
        xyz1, xyz2 = self._clouds()
        dist = chamfer_dist(xyz1.cuda(), xyz2.cuda())
        self.assertAlmostEqual(dist.item(), 2.333333, places=5)

    @run_if_cuda
    def test_chamfer_dist_ignore_zeros(self):
        """With ignore_zeros=True the all-zero point is excluded from the distance."""
        xyz1, xyz2 = self._clouds()
        dist = chamfer_dist(xyz1.cuda(), xyz2.cuda(), True)
        self.assertAlmostEqual(dist.item(), 3.0, places=5)
22 | 49 |
|
# Allow running this test module directly (e.g. `python test_chamfer_dist.py`)
# in addition to discovery via a test runner.
if __name__ == "__main__":
    unittest.main()
0 commit comments