|
1 | 1 | from time import perf_counter |
2 | | -import torch |
| 2 | + |
3 | 3 | import pypose as pp |
| 4 | +import torch |
| 5 | +import torch.nn as nn |
4 | 6 |
|
5 | | -from ba_helpers import Reproj, least_square_error |
6 | | -from datapipes.bal_loader import get_problem, read_bal_data |
7 | | -from bae.sparse.py_ops import * |
| 7 | +from datapipes.bal_loader import get_problem |
| 8 | +from bae.autograd.function import TrackingTensor, map_transform |
8 | 9 | from bae.optim import LM |
9 | | -from bae.utils.pysolvers import PCG, CuDSS |
| 10 | +from bae.utils.ba import rotate_quat |
| 11 | +from bae.utils.pysolvers import PCG |
10 | 12 |
|
11 | | -# TARGET_DATASET = "ladybug" |
12 | | -# TARGET_PROBLEM = "problem-1723-156502-pre" |
13 | | -# TARGET_PROBLEM = "problem-49-7776-pre" |
14 | | -# TARGET_PROBLEM = "problem-1695-155710-pre" |
15 | | -# TARGET_PROBLEM = "problem-969-105826-pre" |
# BAL benchmark problem to solve: dataset family + specific problem file name.
TARGET_DATASET = "trafalgar"
TARGET_PROBLEM = "problem-257-65132-pre"
# other options:
# TARGET_DATASET = "ladybug"
# TARGET_PROBLEM = "problem-1723-156502-pre"
# TARGET_DATASET = "dubrovnik"
# TARGET_PROBLEM = "problem-356-226730-pre"
20 | 20 |
|
21 | | - |
22 | | - |
23 | | -DEVICE = 'cuda' |
# Device for the model and all tensors. CUDA is assumed: main() calls
# torch.cuda.synchronize() unconditionally before timing.
DEVICE = "cuda"
# When True, the last three camera parameters (f, k1, k2 — see project())
# are included in the optimized state as well.
OPTIMIZE_INTRINSICS = True
25 | | - |
26 | | -USE_QUATERNIONS = True |
27 | | - |
28 | | -file_name = f'{TARGET_DATASET}.{TARGET_PROBLEM}' |
29 | | -dataset = get_problem(TARGET_PROBLEM, TARGET_DATASET, use_quat=USE_QUATERNIONS) |
30 | | - |
31 | | -if OPTIMIZE_INTRINSICS: |
32 | | - NUM_CAMERA_PARAMS = 10 if USE_QUATERNIONS else 9 |
33 | | -else: |
34 | | - NUM_CAMERA_PARAMS = 7 if USE_QUATERNIONS else 6 |
35 | | - |
36 | | -print(f'Fetched {TARGET_PROBLEM} from {TARGET_DATASET}') |
37 | | - |
38 | | -trimmed_dataset = dataset |
39 | | -trimmed_dataset = {k: v.to(DEVICE) for k, v in trimmed_dataset.items() if type(v) == torch.Tensor} |
40 | | - |
41 | | -input = { |
42 | | - "points_2d": trimmed_dataset['points_2d'], |
43 | | - "camera_indices": trimmed_dataset['camera_index_of_observations'], |
44 | | - "point_indices": trimmed_dataset['point_index_of_observations'] |
45 | | -} |
46 | | - |
47 | | -model = Reproj( |
48 | | - trimmed_dataset['camera_params'][:, :NUM_CAMERA_PARAMS].clone(), |
49 | | - trimmed_dataset['points_3d'].clone() |
50 | | -).to(DEVICE) |
51 | | -strategy = pp.optim.strategy.TrustRegion(up=2.0, down=0.5**4) |
52 | | -solver = PCG(tol=1e-4, maxiter=250) # or CuDSS() |
53 | | -optimizer = LM(model, strategy=strategy, solver=solver, reject=30) |
54 | | - |
55 | | -print('Loss:', least_square_error( |
56 | | - model.pose, |
57 | | - model.points_3d, |
58 | | - trimmed_dataset['camera_index_of_observations'], |
59 | | - trimmed_dataset['point_index_of_observations'], |
60 | | - trimmed_dataset['points_2d'], |
61 | | -).item()) |
62 | | - |
63 | | -print("Initial loss", optimizer.model.loss(input, None).item()) |
64 | | - |
65 | | -start = perf_counter() |
66 | | -for idx in range(20): |
67 | | - loss = optimizer.step(input) |
68 | | - print('Iteration', idx, 'loss', loss.item(), 'time', perf_counter() - start) |
69 | | - |
70 | | -torch.cuda.synchronize() |
71 | | -end = perf_counter() |
72 | | -print('Time', end - start) |
73 | | - |
74 | | -print('Ending loss:', least_square_error( |
75 | | - model.pose, |
76 | | - model.points_3d, |
77 | | - trimmed_dataset['camera_index_of_observations'], |
78 | | - trimmed_dataset['point_index_of_observations'], |
79 | | - trimmed_dataset['points_2d'], |
80 | | -).item()) |
# 7 pose parameters (consumed by rotate_quat via camera_params[..., :7];
# presumably quaternion + translation — confirm against bae.utils.ba), plus
# 3 intrinsics (f, k1, k2) when intrinsics are optimized.
NUM_CAMERA_PARAMS = 10 if OPTIMIZE_INTRINSICS else 7
| 24 | + |
| 25 | + |
@map_transform
def project(points, camera_params):
    """Project 3D points to distorted 2D image coordinates (BAL camera model).

    Args:
        points: ``(..., 3)`` 3D points.
        camera_params: per-observation camera parameters; the first 7 entries
            are the camera pose consumed by ``rotate_quat`` and the last 3
            are the intrinsics ``f``, ``k1``, ``k2``.

    Returns:
        ``(..., 2)`` projected 2D points after radial distortion and focal
        scaling.
    """
    # Transform into the camera frame, then perspective-divide. The negation
    # matches the BAL convention (camera looks down -z) — verify against the
    # dataset docs if the model changes.
    projection = rotate_quat(points, camera_params[..., :7])
    projection = -projection[..., :2] / projection[..., [2]]

    # Intrinsics: focal length and the two radial-distortion coefficients.
    # List-indexing ([-3] etc.) keeps the trailing dimension for broadcasting.
    f = camera_params[..., [-3]]
    k1 = camera_params[..., [-2]]
    k2 = camera_params[..., [-1]]

    # Radial distortion factor: r = 1 + k1*||p||^2 + k2*||p||^4.
    # Use the idiomatic `dim=` keyword (`axis=` is only a NumPy-compat alias).
    n = torch.sum(projection**2, dim=-1, keepdim=True)
    r = 1 + k1 * n + k2 * n**2
    return projection * r * f
| 38 | + |
| 39 | + |
class Residual(nn.Module):
    """Reprojection residual for bundle adjustment.

    Holds the camera parameters and 3D points as optimizable parameters and
    computes, per observation, the difference between the projected point and
    the measured 2D observation.
    """

    def __init__(self, camera_params, points):
        super().__init__()
        pose_param = nn.Parameter(TrackingTensor(camera_params))
        # Flag read by the optimizer for pose-gradient handling — presumably
        # restricts gradients to the SE(3) tangent space; confirm in bae.optim.
        pose_param.trim_SE3_grad = True
        # Assignment order matters for nn.Module parameter registration:
        # pose first, points second, matching the optimizer's expectations.
        self.pose = pose_param
        self.points = nn.Parameter(TrackingTensor(points))

    def forward(self, observes, cidx, pidx):
        """Return per-observation residuals ``project(point, camera) - observes``."""
        cams = self.pose[cidx]
        pts = self.points[pidx]
        return project(pts, cams) - observes
| 50 | + |
| 51 | + |
def least_square_error(camera_params, points, cidx, pidx, observes):
    """Mean (over observations) of the squared L2 reprojection error."""
    residuals = Residual(camera_params, points)(observes, cidx, pidx)
    return residuals.pow(2).sum(dim=-1).mean()
| 56 | + |
| 57 | + |
def _report_error(label, model, dataset):
    """Print *label* followed by the model's current mean reprojection error."""
    error = least_square_error(
        model.pose,
        model.points,
        dataset["camera_index_of_observations"],
        dataset["point_index_of_observations"],
        dataset["points_2d"],
    )
    print(label, error.item())


def main():
    """Run LM bundle adjustment on one BAL problem and report loss and timing."""
    dataset = get_problem(TARGET_PROBLEM, TARGET_DATASET)
    print(f"Fetched {TARGET_PROBLEM} from {TARGET_DATASET}")

    # Keep only the tensor entries and move them to the target device.
    dataset = {
        key: value.to(DEVICE)
        for key, value in dataset.items()
        if isinstance(value, torch.Tensor)
    }
    # Keyword arguments for Residual.forward. Named `inputs` (plural) to
    # avoid shadowing the `input` builtin.
    inputs = {
        "observes": dataset["points_2d"],
        "cidx": dataset["camera_index_of_observations"],
        "pidx": dataset["point_index_of_observations"],
    }

    model = Residual(
        dataset["camera_params"][:, :NUM_CAMERA_PARAMS].clone(),
        dataset["points_3d"].clone(),
    ).to(DEVICE)
    strategy = pp.optim.strategy.TrustRegion(up=2.0, down=0.5**4)
    solver = PCG(tol=1e-4, maxiter=250)
    optimizer = LM(model, strategy=strategy, solver=solver, reject=30)

    _report_error("Loss:", model, dataset)
    print("Initial loss", optimizer.model.loss(inputs, None).item())

    start = perf_counter()
    for idx in range(20):
        loss = optimizer.step(inputs)
        print("Iteration", idx, "loss", loss.item(), "time", perf_counter() - start)

    # CUDA kernels launch asynchronously; synchronize before reading the clock
    # so the reported total covers all queued GPU work.
    torch.cuda.synchronize()
    end = perf_counter()
    print("Time", end - start)

    _report_error("Ending loss:", model, dataset)


if __name__ == "__main__":
    main()
0 commit comments