
Commit 2362099

misc: update scripts for benchmark
1 parent 63db6f6 commit 2362099

8 files changed: 366 additions, 1 deletion

.gitignore

Lines changed: 1 addition & 1 deletion
@@ -147,6 +147,6 @@ examples/benchmarks/viewer
 examples/readme_on_STG.md
 examples/benchmarks/stg/old

-examples/benchmarks/mpeg
+# examples/benchmarks/mpeg

 *.zip
Lines changed: 82 additions & 0 deletions
@@ -0,0 +1,82 @@
# ----------------- Training Setting -------------- #
SCENE_DIR="data/GSC"
SCENE_LIST="Bartender"

# 0.16M GSs
RESULT_DIR="results/mpeg_basic"
CAP_MAX=160000

RD_LAMBDA=0.01

# ----------------- Training Setting -------------- #

# ----------------- Args ------------------------- #

if [ ! -z "$1" ]; then
    RD_LAMBDA="$1"
    RESULT_DIR="results/Ours_TT_rd_lambda_${RD_LAMBDA}"
fi

# ----------------- Args ------------------------- #

# ----------------- Main Job --------------------- #
run_single_scene() {
    local GPU_ID=$1
    local SCENE=$2

    echo "Running $SCENE on GPU: $GPU_ID"

    # train without eval
    CUDA_VISIBLE_DEVICES=$GPU_ID python simple_trainer.py default --eval_steps -1 --disable_viewer --data_factor 1 \
        --scene_type GSC \
        --test_view_id 8 10 12 \
        --data_dir $SCENE_DIR/$SCENE/colmap/colmap_50 \
        --result_dir $RESULT_DIR/$SCENE/ \
        --lpips_net vgg \
        --compression png

    # eval: use vgg for lpips to align with other benchmarks
    CUDA_VISIBLE_DEVICES=$GPU_ID python simple_trainer.py default --disable_viewer --data_factor 1 \
        --scene_type GSC \
        --test_view_id 8 10 12 \
        --data_dir $SCENE_DIR/$SCENE/colmap/colmap_50 \
        --result_dir $RESULT_DIR/$SCENE/ \
        --lpips_net vgg \
        --ckpt $RESULT_DIR/$SCENE/ckpts/ckpt_29999_rank0.pt \
        --compression png
}
# ----------------- Main Job --------------------- #

# ----------------- Experiment Loop -------------- #
GPU_LIST=(1)
GPU_COUNT=${#GPU_LIST[@]}

SCENE_IDX=-1

for SCENE in $SCENE_LIST;
do
    SCENE_IDX=$((SCENE_IDX + 1))
    {
        run_single_scene ${GPU_LIST[$SCENE_IDX]} $SCENE
    } #&
done
# ----------------- Experiment Loop -------------- #

# Wait for all scene jobs to finish
wait
echo "All scenes finished."

# Zip the compressed files and summarize the stats
if command -v zip &> /dev/null
then
    echo "Zipping results"
    python benchmarks/compression/summarize_stats.py --results_dir $RESULT_DIR --scenes $SCENE_LIST
else
    echo "zip command not found, skipping zipping"
fi
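
Usage sketch for the script above (its file name is not shown in this diff, so benchmarks/mpeg/mpeg_basic.sh below is only an assumed name): with no arguments it uses RD_LAMBDA=0.01 and writes to results/mpeg_basic; a single positional argument overrides RD_LAMBDA and redirects RESULT_DIR.

    # Assumed script path; the diff does not name the new file.
    bash benchmarks/mpeg/mpeg_basic.sh          # defaults: RD_LAMBDA=0.01, results/mpeg_basic
    bash benchmarks/mpeg/mpeg_basic.sh 0.005    # RD_LAMBDA=0.005, results/Ours_TT_rd_lambda_0.005
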
Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
# ----------------- Training Setting -------------- #
SCENE_DIR="data/GSC"
SCENE_LIST="Bartender"

# 0.16M GSs
RESULT_DIR="results/mpeg_gscodec"
CAP_MAX=160000

RD_LAMBDA=0.01

# ----------------- Training Setting -------------- #

# ----------------- Args ------------------------- #

if [ ! -z "$1" ]; then
    RD_LAMBDA="$1"
    RESULT_DIR="results/Ours_TT_rd_lambda_${RD_LAMBDA}"
fi

# ----------------- Args ------------------------- #

# ----------------- Main Job --------------------- #
run_single_scene() {
    local GPU_ID=$1
    local SCENE=$2

    echo "Running $SCENE on GPU: $GPU_ID"

    # train without eval
    CUDA_VISIBLE_DEVICES=$GPU_ID python simple_trainer.py mcmc --eval_steps -1 --disable_viewer --data_factor 1 \
        --strategy.cap-max $CAP_MAX \
        --scene_type GSC \
        --test_view_id 8 10 12 \
        --data_dir $SCENE_DIR/$SCENE/colmap/colmap_50 \
        --result_dir $RESULT_DIR/$SCENE/ \
        --lpips_net vgg \
        --compression_sim \
        --entropy_model_opt \
        --rd_lambda $RD_LAMBDA \
        --shN_ada_mask_opt \
        --compression png

    # eval: use vgg for lpips to align with other benchmarks
    CUDA_VISIBLE_DEVICES=$GPU_ID python simple_trainer.py mcmc --disable_viewer --data_factor 1 \
        --strategy.cap-max $CAP_MAX \
        --scene_type GSC \
        --test_view_id 8 10 12 \
        --data_dir $SCENE_DIR/$SCENE/colmap/colmap_50 \
        --result_dir $RESULT_DIR/$SCENE/ \
        --lpips_net vgg \
        --ckpt $RESULT_DIR/$SCENE/ckpts/ckpt_29999_rank0.pt \
        --compression png
}
# ----------------- Main Job --------------------- #

# ----------------- Experiment Loop -------------- #
GPU_LIST=(1)
GPU_COUNT=${#GPU_LIST[@]}

SCENE_IDX=-1

for SCENE in $SCENE_LIST;
do
    SCENE_IDX=$((SCENE_IDX + 1))
    {
        run_single_scene ${GPU_LIST[$SCENE_IDX]} $SCENE
    } #&
done
# ----------------- Experiment Loop -------------- #

# Wait for all scene jobs to finish
wait
echo "All scenes finished."

# Zip the compressed files and summarize the stats
if command -v zip &> /dev/null
then
    echo "Zipping results"
    python benchmarks/compression/summarize_stats.py --results_dir $RESULT_DIR --scenes $SCENE_LIST
else
    echo "zip command not found, skipping zipping"
fi
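
The same positional-argument mechanism makes it easy to sweep rate-distortion trade-offs; a hedged sketch, again assuming a script path (benchmarks/mpeg/mpeg_gscodec.sh) that the diff does not show. Each lambda lands in its own results/Ours_TT_rd_lambda_<value> directory.

    # Assumed script path; sweeps several RD lambdas via the optional first argument.
    for RD in 0.005 0.01 0.02; do
        bash benchmarks/mpeg/mpeg_gscodec.sh $RD
    done
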
Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
SCENE_DIR=data/GSC
SCENE_LIST="Bartender"

BASE_DIR="data/GSC/Splats/zju_internal/gsc_benchmark_data"  # /bartender_colmap/colmap_50

PLY_TYPE_LIST="basic_gsplat gscodec_studio"

RESULT_DIR=results/gsc_compression_benchmark

for PLY_TYPE in $PLY_TYPE_LIST;
do
    # echo "Running evaluation and compression on $PLY_TYPE"
    CUDA_VISIBLE_DEVICES=0 python ply_loader_renderer.py \
        png_compression \
        --disable_viewer --data_factor 1 \
        --scene_type GSC \
        --test_view_id 8 10 12 \
        --data_dir $BASE_DIR/bartender_colmap/colmap_50 \
        --result_dir $RESULT_DIR/$PLY_TYPE \
        --lpips_net vgg \
        --ply_path $BASE_DIR/$PLY_TYPE/splats.ply

    echo "================"
    echo "R-D Results"
    zip -q -r $RESULT_DIR/$PLY_TYPE/compression.zip $RESULT_DIR/$PLY_TYPE/compression/
    du -b $RESULT_DIR/$PLY_TYPE/compression.zip | awk '{printf "%.2f MB\n", $1/1024/1024}'
    echo
    cat $RESULT_DIR/$PLY_TYPE/stats/val_step-001.json
    echo
    cat $RESULT_DIR/$PLY_TYPE/stats/compress_step-001.json
done

# Zip the compressed files and summarize the stats
# if command -v zip &> /dev/null
# then
#     echo "Zipping results"
#     python benchmarks/compression/summarize_stats.py --results_dir $RESULT_DIR --scenes $SCENE_LIST
# else
#     echo "zip command not found, skipping zipping"
# fi
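
The data layout this script expects can be inferred from the paths it constructs (inferred from the script itself, not stated elsewhere in the diff):

    # Inferred layout under $BASE_DIR (data/GSC/Splats/zju_internal/gsc_benchmark_data):
    #   bartender_colmap/colmap_50/    shared COLMAP cameras/images for the Bartender scene
    #   basic_gsplat/splats.ply        splats to evaluate, presumably from the basic gsplat run
    #   gscodec_studio/splats.ply      splats to evaluate, presumably from the GSCodec Studio run
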
Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
SCENE_DIR=data/GSC
SCENE_LIST="Bartender"

BASE_DIR="data/GSC/Splats/zju_internal/gsc_benchmark_data"  # /bartender_colmap/colmap_50

PLY_TYPE_LIST="basic_gsplat gscodec_studio"

RP_LIST="0 1 2 3"

RESULT_DIR=results/gsc_compression_benchmark

for PLY_TYPE in $PLY_TYPE_LIST;
do
    for RP in $RP_LIST;
    do
        SAVE_DIR="$RESULT_DIR/$PLY_TYPE/rp$RP"
        echo "Running evaluation and compression on $PLY_TYPE and RP:$RP"

        CUDA_VISIBLE_DEVICES=0 python ply_loader_renderer.py \
            x265_compression_rp$RP \
            --disable_viewer --data_factor 1 \
            --scene_type GSC \
            --test_view_id 8 10 12 \
            --data_dir $BASE_DIR/bartender_colmap/colmap_50 \
            --result_dir $SAVE_DIR \
            --lpips_net vgg \
            --ply_path $BASE_DIR/$PLY_TYPE/splats.ply
    done

    python benchmarks/mpeg/zip_and_summarize_stats.py --results_dir $RESULT_DIR/$PLY_TYPE --rps rp0 rp1 rp2 rp3
done
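
After each inner loop, benchmarks/mpeg/zip_and_summarize_stats.py (added in this commit, see below) aggregates the four rate points into one rp_summary.json per PLY type, so the per-variant results can be inspected with, for example:

    cat results/gsc_compression_benchmark/basic_gsplat/rp_summary.json
    cat results/gsc_compression_benchmark/gscodec_studio/rp_summary.json
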
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
SCENE_LIST="Bartender"
SCENE_PATH=data/GSC/Splats/Bartender_250303/frame000

RESULT_DIR=results/mpeg_splats

PLY_FILE=/work/Users/lisicheng/Dataset/GSC/Splats/Bartender_250303/0000.ply

for SCENE in $SCENE_LIST;
do
    CUDA_VISIBLE_DEVICES=0 python ply_loader_renderer.py mcmc --disable_viewer --data_factor 1 \
        --data_dir $SCENE_PATH \
        --result_dir $RESULT_DIR/$SCENE/ \
        --lpips_net vgg \
        --compression png \
        --ply_path $PLY_FILE
done

# Zip the compressed files and summarize the stats
if command -v zip &> /dev/null
then
    echo "Zipping results"
    python benchmarks/compression/summarize_stats.py --results_dir $RESULT_DIR --scenes $SCENE_LIST
else
    echo "zip command not found, skipping zipping"
fi
Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
SCENE_DIR=data/tandt
SCENE_LIST="truck"
SCENE=truck

PLY_FILE=./results/Ours_TT/truck/ply/splats.ply

RESULT_DIR=results/ply_rendering

for SCENE in $SCENE_LIST;
do
    CUDA_VISIBLE_DEVICES=0 python ply_loader_renderer.py mcmc --disable_viewer --data_factor 1 \
        --data_dir $SCENE_DIR/$SCENE/ \
        --result_dir $RESULT_DIR/$SCENE/ \
        --lpips_net vgg \
        --compression png \
        --ply_path $PLY_FILE
done

# Zip the compressed files and summarize the stats
if command -v zip &> /dev/null
then
    echo "Zipping results"
    python benchmarks/compression/summarize_stats.py --results_dir $RESULT_DIR --scenes $SCENE_LIST
else
    echo "zip command not found, skipping zipping"
fi
Lines changed: 71 additions & 0 deletions
@@ -0,0 +1,71 @@
import json
import os
import subprocess
from collections import defaultdict
from typing import List

import numpy as np
import tyro


def main(results_dir: str, rps: List[str]):
    print("rps:", rps)
    stage = "compress"

    summary = defaultdict(dict)
    for rp in rps:
        rp_dir = os.path.join(results_dir, rp)

        if stage == "compress":
            # (Re)build the zip of the compressed artifacts and record its size in bytes.
            zip_path = f"{rp_dir}/compression.zip"
            if os.path.exists(zip_path):
                subprocess.run(f"rm {zip_path}", shell=True)
            subprocess.run(f"zip -r {zip_path} {rp_dir}/compression/", shell=True)
            out = subprocess.run(
                f"stat -c%s {zip_path}", shell=True, capture_output=True
            )
            size = int(out.stdout)  # bytes
            # summary["size"].append(size)
            summary[rp]["size"] = size

        try:
            with open(os.path.join(rp_dir, f"stats/{stage}_step29999.json"), "r") as f:
                stats = json.load(f)
                for k, v in stats.items():
                    summary[rp][k] = v
        except:
            with open(os.path.join(rp_dir, f"stats/{stage}_step0000.json"), "r") as f:
                stats = json.load(f)
                for k, v in stats.items():
                    summary[rp][k] = v

    try:
        stage = "val"
        for rp in rps:
            rp_dir = os.path.join(results_dir, rp)
            try:
                with open(os.path.join(rp_dir, f"stats/{stage}_step29999.json"), "r") as f:
                    stats = json.load(f)
                    for k, v in stats.items():
                        if k in ["psnr", "ssim", "lpips"]:
                            summary[rp]["val_" + k] = v
            except:
                with open(os.path.join(rp_dir, f"stats/{stage}_step-001.json"), "r") as f:
                    stats = json.load(f)
                    for k, v in stats.items():
                        if k in ["psnr", "ssim", "lpips"]:
                            summary[rp]["val_" + k] = v
    except:
        print("Could not find val stats; val metrics are not included in the summary JSON.")

    # for k, v in summary.items():
    #     print(k, np.mean(v))

    # mean_summary = {k: np.mean(v) for k, v in summary.items()}

    with open(f"{results_dir}/rp_summary.json", "w") as fp:
        json.dump(summary, fp, indent=2)

    print(f"Summary result is saved to: {results_dir}/rp_summary.json")


if __name__ == "__main__":
    tyro.cli(main)
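
Because the entry point is wrapped with tyro.cli, the summarizer can also be re-run standalone on an existing results directory; a minimal sketch mirroring the call in the x265 benchmark script above:

    python benchmarks/mpeg/zip_and_summarize_stats.py \
        --results_dir results/gsc_compression_benchmark/gscodec_studio \
        --rps rp0 rp1 rp2 rp3
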
