Skip to content

Commit 90a17c6

Browse files
authored
Merge pull request #6 from ori-drs/et-tro
Silvr 2.0
2 parents faa40dd + 71ca772 commit 90a17c6

39 files changed

+3468
-204
lines changed

.docker/.env

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,9 @@
11
UID=1000
2-
GID=1000
2+
GID=1000
3+
USERNAME=docker_dev # Set your username for the container
4+
5+
HOST_DATA_DIR=~/data # folder on the host machine which you can attach to the container
6+
7+
DOCKER_HOME_DIR=/home/${USERNAME} # Home directory inside the container
8+
DOCKER_WORK_DIR=${DOCKER_HOME_DIR}/silvr # Path to the main codebase inside the docker container
9+
DOCKER_DATA_DIR=${DOCKER_HOME_DIR}/data # folder inside the docker container of the attached folder HOST_DATA_DIR

.docker/Dockerfile

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -33,24 +33,23 @@ RUN python -m pip install --no-cache-dir git+https://github.com/NVlabs/tiny-cuda
3333

3434
ARG GID
3535
ARG UID
36-
ENV UNAME=docker_dev
37-
RUN addgroup --gid $GID $UNAME
38-
RUN adduser --disabled-password --gecos '' --uid $UID --gid $GID $UNAME
36+
ARG USERNAME
37+
RUN addgroup --gid $GID $USERNAME
38+
RUN adduser --disabled-password --gecos '' --uid $UID --gid $GID $USERNAME
3939

40-
ARG SILVR_DIR=/home/docker_dev/silvr
41-
WORKDIR ${SILVR_DIR}
40+
ARG DOCKER_HOME_DIR
41+
ARG DOCKER_WORK_DIR
42+
WORKDIR ${DOCKER_WORK_DIR}
4243

43-
COPY ./requirements.txt ${SILVR_DIR}/requirements.txt
44+
COPY ./requirements.txt ${DOCKER_WORK_DIR}/requirements.txt
4445
RUN pip install -r requirements.txt
45-
COPY ./silvr/ ${SILVR_DIR}/silvr
46-
COPY ./pyproject.toml ${SILVR_DIR}/pyproject.toml
47-
COPY ./scripts/ ${SILVR_DIR}/scripts
48-
COPY ./config/ ${SILVR_DIR}/config
46+
COPY ./silvr/ ${DOCKER_WORK_DIR}/silvr
47+
COPY ./pyproject.toml ${DOCKER_WORK_DIR}/pyproject.toml
4948
RUN pip install -e .
5049
# Make the outputs of the container match the host
5150

52-
RUN chown -R ${UID}:${GID} ${SILVR_DIR}/*
53-
USER ${UNAME}
51+
RUN chown -R ${UID}:${GID} ${DOCKER_HOME_DIR}/*
52+
USER ${USERNAME}
5453
RUN echo "PS1='${debian_chroot:+($debian_chroot)}\[\033[01;33m\]\u@docker-\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc
5554

5655
CMD ["/bin/bash"]

.docker/docker_compose.yaml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,15 +7,15 @@ services:
77
args:
88
- UID=${UID}
99
- GID=${GID}
10+
- USERNAME=${USERNAME}
11+
- DOCKER_HOME_DIR=${DOCKER_HOME_DIR}
12+
- DOCKER_WORK_DIR=${DOCKER_WORK_DIR}
1013
environment:
1114
- NVIDIA_VISIBLE_DEVICES=all
1215
- NVIDIA_DRIVER_CAPABILITIES=all
1316
runtime: nvidia
1417
network_mode: "host"
1518
tty: true
16-
# volumes:
17-
# - /home/yifu/data:/home/docker_dev/data # mount the data folder
18-
# - /home/yifu/workspace/silvr_public:/home/docker_dev/silvr # mount silvr folder to access the outputs and update the code
19-
# - /home/yifu/.netrc:/home/docker_dev/.netrc # for wandb authentication
20-
19+
volumes:
20+
- ${HOST_DATA_DIR}:${DOCKER_DATA_DIR}
2121

README.md

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ Build a docker image to install dependencies and run SiLVR.
1616
```
1717
docker compose -f .docker/docker_compose.yaml run --build silvr
1818
```
19+
Note: You can use your own `CUDA_ARCHITECTURES` in the Dockerfile to make the `tinycudann` build quicker.
1920
### Manual Installation
2021
You can also install SiLVR to your system manually.
2122
```
@@ -33,6 +34,15 @@ pip install -e .
3334

3435

3536
## Running
37+
### T-RO 25 Results
38+
```
39+
python scripts/data_downloader.py
40+
41+
python scripts/main.py --config config/2024-03-13-roq-01-unc.yaml
42+
43+
python scripts/main.py --config config/2024-bodleian-01+02-unc.yaml
44+
```
45+
### ICRA 24 Results
3646
Download sample data from [Hugging face](https://huggingface.co/datasets/ori-drs/silvr_data/tree/main), setup the [config file](./scripts/config_train.yaml), and then run the training script.
3747
```bash
3848
python scripts/data_downloader.py
@@ -54,10 +64,10 @@ pre-commit install
5464
## Citation
5565
If you found this software package useful, please consider citing our paper as
5666
```bibtex
57-
@inproceedings{tao2024silvr,
58-
title = {SiLVR: Scalable Lidar-Visual Reconstruction with Neural Radiance Fields for Robotic Inspection},
59-
author = {Tao, Yifu and Bhalgat, Yash and Fu, Lanke Frank Tarimo and Mattamala, Matias and Chebrolu, Nived and Fallon, Maurice},
60-
booktitle = {IEEE International Conference on Robotics and Automation (ICRA)},
61-
year = {2024},
67+
@article{tao2025silvr,
68+
title={SiLVR: Scalable Lidar-Visual Radiance Field Reconstruction with Uncertainty Quantification},
69+
author={Tao, Yifu and Fallon, Maurice},
70+
journal={IEEE Transactions on Robotics},
71+
year={2025},
6272
}
6373
```

config/2024-03-13-roq-01-unc.yaml

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
base:
2+
method: "bayes-lidar-normal-nerfacto"
3+
data: "/home/docker_dev/silvr/data/2024-03-13-roq-01-tro/transforms_colmap_scaled_lidar.json" # This will be skipped if run_submap is true
4+
vis: "wandb"
5+
max_train_images: 500
6+
max_eval_images: 30
7+
cam_optimiser_mode: "off"
8+
cam_opt_lf: 0.0006
9+
max_num_iterations: 30001
10+
steps_per_eval_image: 2000
11+
steps_per_eval_all_images: 70000
12+
output_dir: "/home/docker_dev/silvr/outputs"
13+
14+
lidar_nerf:
15+
lidar_depth_loss_type: "DS_NERF_NEW"
16+
is_euclidean_depth: false
17+
depth_loss_mult: 10
18+
normal_loss_mult: 0.001
19+
depth_sigma: 0.001
20+
should_decay_sigma: true
21+
starting_depth_sigma: 0.01
22+
sigma_decay_rate: 0.99985
23+
save_pcd: false
24+
25+
post_process:
26+
save_folder_name: "post_process"
27+
compute_nvs_metrics: false
28+
compute_uncertainty: true
29+
unc_iterations: 1000
30+
unc_grid_lod: 8
31+
render_uncertainty: true
32+
render_max_uncertainty_point_legacy: 1
33+
render_downscale: 1
34+
render_cam_path_with_submap: false
35+
camera_path_file: ""
36+
camera_path_model_folder_path: ""
37+
export_cloud: true
38+
export_cloud_suffix: ""
39+
cloud_max_uncertainty_ray: 1
40+
export_num_points: 1000000
41+
evaluate_cloud: false
42+
ground_truth_3d_map_path: "/home/docker_dev/silvr/data/2024-03-13-roq-01/gt_5cm_lidar_frame.pcd"
43+
T_gt_nerf_path: ""
44+
45+
only_post_process:
46+
turn_on: false # if turned on, training will be skipped
47+
output_folders:
48+
# - "/home/yifu/workspace/silvr/outputs/hbac_maths_19/bayes-lidar-normal-nerfacto/2024-07-14_132335"
49+
50+
submap:
51+
run_submap: true
52+
data_main_folder: "/home/docker_dev/silvr/data/2024-03-13-roq-01-tro"
53+
submap_folder: "/home/docker_dev/silvr/data/2024-03-13-roq-01-tro/submaps_visibility"
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
base:
2+
method: "bayes-lidar-normal-nerfacto"
3+
data: "/home/docker_dev/silvr/data/2024-bodleian-01+02-tro/transforms_colmap_scaled_lidar_no_cam2_bod_1.json" # This will be skipped if run_submap is true
4+
vis: "wandb"
5+
max_train_images: 500
6+
max_eval_images: 30
7+
cam_optimiser_mode: "off"
8+
cam_opt_lf: 0.0006
9+
max_num_iterations: 30001
10+
steps_per_eval_image: 2000
11+
steps_per_eval_all_images: 70000
12+
output_dir: "/home/docker_dev/silvr/outputs"
13+
14+
lidar_nerf:
15+
lidar_depth_loss_type: "DS_NERF_NEW"
16+
is_euclidean_depth: false
17+
depth_loss_mult: 10
18+
normal_loss_mult: 0.001
19+
depth_sigma: 0.001
20+
should_decay_sigma: true
21+
starting_depth_sigma: 0.01
22+
sigma_decay_rate: 0.99985
23+
save_pcd: false
24+
25+
post_process:
26+
save_folder_name: "post_process"
27+
compute_nvs_metrics: false
28+
compute_uncertainty: true
29+
unc_iterations: 2000
30+
unc_grid_lod: 8
31+
render_uncertainty: true
32+
render_max_uncertainty_point_legacy: 1
33+
render_downscale: 1
34+
render_cam_path_with_submap: false
35+
camera_path_file: ""
36+
camera_path_model_folder_path: ""
37+
export_cloud: true
38+
export_cloud_suffix: ""
39+
cloud_max_uncertainty_ray: 1
40+
export_num_points: 1000000
41+
evaluate_cloud: false
42+
ground_truth_3d_map_path: ""
43+
T_gt_nerf_path: ""
44+
45+
only_post_process:
46+
turn_on: false # if turned on, training will be skipped
47+
output_folders:
48+
# - "/home/yifu/workspace/silvr/outputs/hbac_maths_19/bayes-lidar-normal-nerfacto/2024-07-14_132335"
49+
50+
submap:
51+
run_submap: true
52+
data_main_folder: "/home/docker_dev/silvr/data/2024-bodleian-01+02-tro"
53+
submap_folder: "/home/docker_dev/silvr/data/2024-bodleian-01+02-tro/submaps_visibility"

pyproject.toml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,11 @@ dependencies = {file = ["requirements.txt"]}
1818
# ndp = "silvr.test:main"
1919

2020
[project.entry-points.'nerfstudio.method_configs']
21-
lidar_nerfacto = 'silvr.silvr_config:Lidar_nerfacto'
2221
lidar_depth_nerfacto = 'silvr.silvr_config:Lidar_depth_nerfacto'
22+
lidar_normal_nerfacto = 'silvr.silvr_config:Lidar_normal_nerfacto'
23+
bayes_nerfacto = 'silvr.silvr_config:Bayes_nerfacto'
24+
bayes_lidar_normal_nerfacto = 'silvr.silvr_config:Bayes_Lidar_normal_nerfacto'
25+
bayes_lidar_normal_nerfacto_big = 'silvr.silvr_config:Bayes_Lidar_normal_nerfacto_big'
2326

2427
[project.optional-dependencies]
2528
style = [

requirements.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,5 +4,7 @@ gsplat==0.1.12
44
pre-commit==3.0.0
55
ruff==0.11.4
66
tyro==0.6.6
7+
pytest==8.3.3
78
wandb==0.13.3
89
huggingface_hub==0.30.2
10+
hf_xet==1.2.0

scripts/cloud_cropping.py

Lines changed: 126 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,126 @@
1+
from pathlib import Path
2+
from typing import Optional
3+
4+
import numpy as np
5+
import open3d as o3d
6+
7+
from silvr.utils.eval import compute_p2p_distance, get_recon_metrics, save_error_cloud
8+
9+
10+
class EvalCloudCropper:
11+
def __init__(
12+
self,
13+
input_cloud: np.ndarray,
14+
gt_cloud: np.ndarray,
15+
output_folder: str,
16+
input_crop_voxel_grid: Optional[o3d.geometry.VoxelGrid] = None,
17+
gt_crop_voxel_grid: Optional[o3d.geometry.VoxelGrid] = None,
18+
voxel_size: float = 0.5,
19+
):
20+
assert input_cloud.shape[1] == 3 and gt_cloud.shape[1] == 3 # N x 3
21+
self.input_cloud = input_cloud
22+
self.gt_cloud = gt_cloud
23+
self.output_folder = Path(output_folder)
24+
self.output_folder.mkdir(parents=True, exist_ok=True)
25+
self.voxel_size = voxel_size
26+
27+
def compute_crop_cloud_proposal(self, max_error: float = 1.0):
28+
lidar_error_cloud = self.compute_high_error_cloud(self.input_cloud, self.gt_cloud, max_error=max_error)
29+
lidar_error_cloud.paint_uniform_color([1, 0, 0])
30+
o3d.io.write_point_cloud(str(self.output_folder / "lidar_error_cloud.pcd"), lidar_error_cloud)
31+
gt_error_cloud = self.compute_high_error_cloud(self.gt_cloud, self.input_cloud, max_error=max_error)
32+
gt_error_cloud.paint_uniform_color([1, 0, 0])
33+
o3d.io.write_point_cloud(str(self.output_folder / "gt_error_cloud.pcd"), gt_error_cloud)
34+
return lidar_error_cloud, gt_error_cloud
35+
36+
def compute_high_error_cloud(self, input_cloud: np.ndarray, gt_cloud: np.ndarray, max_error: float = 1.0):
37+
input_error_map = compute_p2p_distance(input_cloud, gt_cloud)
38+
input_error_mask = input_error_map > max_error
39+
input_error_cloud = input_cloud[input_error_mask]
40+
input_error_cloud_o3d = o3d.geometry.PointCloud()
41+
input_error_cloud_o3d.points = o3d.utility.Vector3dVector(input_error_cloud)
42+
# lidar_error_1m_o3d.remove_statistical_outlier(nb_neighbors=20, std_ratio=2.0)
43+
return input_error_cloud_o3d
44+
45+
def compute_crop_voxel_grid(self, cloud: np.ndarray, voxel_size: float = 0.5):
46+
if isinstance(cloud, np.ndarray):
47+
cloud = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(cloud))
48+
assert isinstance(cloud, o3d.geometry.PointCloud)
49+
voxel_grid = o3d.geometry.VoxelGrid.create_from_point_cloud(cloud, voxel_size)
50+
return voxel_grid
51+
52+
def crop_cloud_by_voxel_grid(self, cloud: np.ndarray, voxel_grid: o3d.geometry.VoxelGrid):
53+
def points_in_voxels(points, voxel_grid):
54+
queries = o3d.utility.Vector3dVector(points)
55+
return voxel_grid.check_if_included(queries)
56+
57+
if isinstance(cloud, np.ndarray):
58+
cloud = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(cloud))
59+
assert isinstance(cloud, o3d.geometry.PointCloud)
60+
points_to_crop = points_in_voxels(cloud.points, voxel_grid)
61+
points_to_keep = [not x for x in points_to_crop]
62+
remaining_cloud = cloud.select_by_index(np.where(points_to_keep)[0])
63+
return remaining_cloud
64+
65+
66+
def convert_voxel_grid_to_point_cloud(voxel_grid):
67+
points = []
68+
for voxel in voxel_grid.get_voxels():
69+
voxel_centre = voxel_grid.get_voxel_center_coordinate(voxel.grid_index)
70+
points.append(voxel_centre)
71+
point_cloud = o3d.geometry.PointCloud()
72+
point_cloud.points = o3d.utility.Vector3dVector(points)
73+
return point_cloud
74+
75+
76+
if __name__ == "__main__":
77+
folder_path = Path("/home/yifu/workspace/T-RO_2025/Observatory_quater")
78+
lidar_map_path = folder_path / "ROQ_lidar_map_cleaned.pcd"
79+
leica_map_path = folder_path / "roq_5cm.pcd"
80+
81+
T_leica_lidar = np.loadtxt(str(folder_path / "T_RTC_vilens.txt"))
82+
83+
lidar_map = o3d.io.read_point_cloud(str(lidar_map_path))
84+
# lidar_map.remove_statistical_outlier(nb_neighbors=20, std_ratio=2.0)
85+
lidar_map.transform(T_leica_lidar)
86+
lidar_map = np.array(lidar_map.points)
87+
leica_map = o3d.io.read_point_cloud(str(leica_map_path))
88+
leica_map = np.array(leica_map.points)
89+
90+
output_folder_path = folder_path / "test"
91+
eval_cloud_cropper = EvalCloudCropper(lidar_map, leica_map, output_folder_path)
92+
create_proposed_error_cloud = True
93+
crop_cloud = True
94+
evaluation = True
95+
96+
lidar_error_cloud_path = output_folder_path / "lidar_error_cloud.pcd"
97+
gt_error_cloud_path = output_folder_path / "gt_error_cloud.pcd"
98+
cropped_lidar_cloud_save_path = output_folder_path / "lidar_cropped.pcd"
99+
cropped_gt_cloud_save_path = output_folder_path / "gt_cropped.pcd"
100+
101+
if create_proposed_error_cloud:
102+
lidar_error_cloud, gt_error_cloud = eval_cloud_cropper.compute_crop_cloud_proposal(max_error=0.2)
103+
o3d.io.write_point_cloud(str(lidar_error_cloud_path), lidar_error_cloud)
104+
o3d.io.write_point_cloud(str(gt_error_cloud_path), gt_error_cloud)
105+
106+
# load error cloud and convert to voxel grid, then crop the map
107+
if crop_cloud:
108+
lidar_error_cloud = o3d.io.read_point_cloud(str(lidar_error_cloud_path))
109+
lidar_crop_voxel_grid = eval_cloud_cropper.compute_crop_voxel_grid(lidar_error_cloud)
110+
lidar_cropped_cloud = eval_cloud_cropper.crop_cloud_by_voxel_grid(lidar_map, lidar_crop_voxel_grid)
111+
o3d.io.write_point_cloud(str(cropped_lidar_cloud_save_path), lidar_cropped_cloud)
112+
113+
gt_error_cloud = o3d.io.read_point_cloud(str(gt_error_cloud_path))
114+
gt_crop_voxel_grid = eval_cloud_cropper.compute_crop_voxel_grid(gt_error_cloud)
115+
gt_cropped_cloud = eval_cloud_cropper.crop_cloud_by_voxel_grid(leica_map, gt_crop_voxel_grid)
116+
o3d.io.write_point_cloud(str(cropped_gt_cloud_save_path), gt_cropped_cloud)
117+
118+
# apply cropping
119+
if evaluation:
120+
lidar_cropped_cloud_np = np.array(o3d.io.read_point_cloud(str(cropped_lidar_cloud_save_path)).points)
121+
gt_cropped_cloud_np = np.array(o3d.io.read_point_cloud(str(cropped_gt_cloud_save_path)).points)
122+
print(get_recon_metrics(lidar_cropped_cloud_np, gt_cropped_cloud_np))
123+
lidar_error_cmap_cloud_save_path = output_folder_path / "lidar_error_cmap_cloud.ply"
124+
save_error_cloud(lidar_cropped_cloud_np, gt_cropped_cloud_np, lidar_error_cmap_cloud_save_path)
125+
gt_error_cmap_cloud_save_path = output_folder_path / "gt_error_cmap_cloud.ply"
126+
save_error_cloud(gt_cropped_cloud_np, lidar_cropped_cloud_np, gt_error_cmap_cloud_save_path)

0 commit comments

Comments (0)