
Commit a33e5f4

Shake and bake sampling (GeomScale#361)
* Final reorganization Shake and Bake
  Signed-off-by: Iva Janković <ivaxjankovic@gmail.com>
* Update shakeandbake.cpp with improved diagnostics
* Updates for high dimensional polytopes
* Uniformity tests updates
* For debugging: Sampling testing
* Corrected testing
* Updated shake and bake walk with some corrections
* Apply func fixes
* Apply func fixes in hpp
* Compute boundary point function
* Revised shake and bake
* Just running SB
* Everything for revision
* Quick fix of SB
* 2 helper functions
* Fixed SB
* Line_positive_intersect_skip func
* For revision
* Latest version SB
* SB without v*=1
* Latest hpolytope.h
* Get_direction romeijn1991
* SB fixed + scaling_ratio.hpp
* SB + scaling test fixed
* Fixed scaling_ratio.hpp
* Latest SB (hpp and cpp)
* Ready to merge sb.hpp?
* For revision 2
* For Revision 2.1
* For revision 3
* Scaling ratio test fixed
* Cleaning 1
* Cleaned 2
* Ready to go!
* Shake and bake: unit tests
* Unit test: polished!
* Update README.md

---------

Signed-off-by: Iva Janković <ivaxjankovic@gmail.com>
1 parent 32ef561 commit a33e5f4

File tree

12 files changed: +899 -26 lines changed

Lines changed: 103 additions & 0 deletions
@@ -0,0 +1,103 @@
# VolEsti (volume computation and sampling library)

# Copyright (c) 2012-2025 Vissarion Fisikopoulos
# Copyright (c) 2018-2025 Apostolos Chalkis
# Copyright (c) 2025-2025 Iva Janković

# Contributed and/or modified by Iva Janković, as part of Google Summer of Code 2025 program.

# Licensed under GNU LGPL.3, see LICENCE file

project( VolEsti )

CMAKE_MINIMUM_REQUIRED(VERSION 3.11)

set(CMAKE_ALLOW_LOOSE_LOOP_CONSTRUCTS true)

if(COMMAND cmake_policy)
  cmake_policy(SET CMP0003 NEW)
endif(COMMAND cmake_policy)

option(DISABLE_NLP_ORACLES "Disable non-linear oracles (used in collocation)" ON)
option(BUILTIN_EIGEN "Use eigen from ../../external" OFF)

if(DISABLE_NLP_ORACLES)
  add_definitions(-DDISABLE_NLP_ORACLES)
else()
  find_library(IFOPT NAMES libifopt_core.so PATHS /usr/local/lib)
  find_library(IFOPT_IPOPT NAMES libifopt_ipopt.so PATHS /usr/local/lib)
  find_library(GMP NAMES libgmp.so PATHS /usr/lib/x86_64-linux-gnu /usr/lib/i386-linux-gnu)
  find_library(MPSOLVE NAMES libmps.so PATHS /usr/local/lib)
  find_library(PTHREAD NAMES libpthread.so PATHS /usr/lib/x86_64-linux-gnu /usr/lib/i386-linux-gnu)
  find_library(FFTW3 NAMES libfftw3.so.3 PATHS /usr/lib/x86_64-linux-gnu /usr/lib/i386-linux-gnu)

  if (NOT IFOPT)
    message(FATAL_ERROR "This program requires the ifopt library, and will not be compiled.")
  elseif (NOT GMP)
    message(FATAL_ERROR "This program requires the gmp library, and will not be compiled.")
  elseif (NOT MPSOLVE)
    message(FATAL_ERROR "This program requires the mpsolve library, and will not be compiled.")
  elseif (NOT FFTW3)
    message(FATAL_ERROR "This program requires the fftw3 library, and will not be compiled.")
  else()
    message(STATUS "Library ifopt found: ${IFOPT}")
    message(STATUS "Library gmp found: ${GMP}")
    message(STATUS "Library mpsolve found: ${MPSOLVE}")
    message(STATUS "Library fftw3 found: ${FFTW3}")
  endif(NOT IFOPT)
endif(DISABLE_NLP_ORACLES)

include("../../external/cmake-files/Eigen.cmake")
GetEigen()

include("../../external/cmake-files/Boost.cmake")
GetBoost()

include("../../external/cmake-files/LPSolve.cmake")
GetLPSolve()

# Find the lp_solve library
find_library(LP_SOLVE NAMES liblpsolve55.so PATHS /usr/lib/lp_solve)

if (NOT LP_SOLVE)
  message(FATAL_ERROR "This program requires the lp_solve library, and will not be compiled.")
else ()
  message(STATUS "Library lp_solve found: ${LP_SOLVE}")

  set(CMAKE_EXPORT_COMPILE_COMMANDS "ON")

  include_directories (BEFORE ../../external)
  include_directories (BEFORE ../../include)

  # for Eigen
  if (${CMAKE_VERSION} VERSION_LESS "3.12.0")
    add_compile_options(-D "EIGEN_NO_DEBUG")
  else ()
    add_compile_definitions("EIGEN_NO_DEBUG")
  endif ()

  add_definitions(${CMAKE_CXX_FLAGS} "-std=c++17")  # enable C++17 standard
  add_definitions(${CMAKE_CXX_FLAGS} "-O3")         # compiler optimization
  add_definitions(${CXX_COVERAGE_COMPILE_FLAGS} "-lm")
  add_definitions(${CXX_COVERAGE_COMPILE_FLAGS} "-ldl")
  add_definitions(${CXX_COVERAGE_COMPILE_FLAGS} "-DBOOST_NO_AUTO_PTR")

  add_executable (shakeandbake shakeandbake.cpp)
  TARGET_LINK_LIBRARIES(shakeandbake ${LP_SOLVE})

endif()

examples/sb_sampling/README.md

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
## Explanation

`shake_and_bake_walk.hpp` implements the Running variant of the Shake-and-Bake class of boundary sampling algorithms, following the steps described in [1]. The walk can be run via `shakeandbake.cpp`. Additionally, to test uniformity, the scaling ratio test (`scaling_ratio.hpp`) and a random-facet 2D projection for generic polytopes (cube, simplex, Birkhoff), together with the corresponding plots, are provided in `sbtest.py` (along with the generated txt files).

[1] C. G. E. Boender, R. J. Caron, J. F. McDonald, A. H. G. Rinnooy Kan, H. E. Romeijn, R. L. Smith, J. Telgen and A. C. F. Vorst, *Shake-And-Bake Algorithms for Generating Uniform Points on the Boundary of Bounded Polyhedra*, 1991. Available at: https://doi.org/10.1016/0166-218X(91)90006-7
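
For a quick start, the condensed driver below mirrors what `shakeandbake.cpp` in this commit does (same headers, types, and calls); the polytope choice, walk length, and sample count are illustrative values only, not fixed defaults.

```cpp
#include <boost/random.hpp>
#include <Eigen/Eigen>

#include "cartesian_geom/cartesian_kernel.h"
#include "preprocess/feasible_point.hpp"
#include "convex_bodies/hpolytope.h"
#include "random_walks/random_walks.hpp"
#include "random_walks/shake_and_bake_walk.hpp"
#include "generators/known_polytope_generators.h"

using NT     = double;
using Kernel = Cartesian<NT>;
using Point  = Kernel::Point;
using RNG    = BoostRandomNumberGenerator<boost::random::mt19937, NT>;
using HPoly  = HPolytope<Point>;
using Walk   = ShakeAndBakeWalk::Walk<HPoly, RNG>;

int main() {
    HPoly P = generate_cube<HPoly>(5, false);   // 5-dimensional cube in H-representation
    RNG rng(P.dimension());

    // Start the walk from a boundary point and the index of its facet.
    auto [p0, facet] = compute_boundary_point<Point>(P, rng, Walk::kDefaultEpsilon);
    Walk walk(P, p0, facet, rng, Walk::kDefaultEpsilon);

    // Each apply() performs walk_len Shake-and-Bake steps; getCurrentPoint()
    // returns the current boundary sample.
    const unsigned walk_len = 20 * P.dimension();
    for (int i = 0; i < 1000; ++i) {
        walk.apply(P, walk_len, rng);
        Point q = walk.getCurrentPoint();
        // ... store or write q ...
    }
    return 0;
}
```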
## Original and Limping Variant

The implemented Shake-and-Bake variant is Running SB, because it performs significantly better than its Original and Limping counterparts. To experiment with those variants anyway, the snippet below can be added to the `apply` function, together with some simple enum switch / branch logic (a sketch of that scaffolding follows the snippet). It is also important to use `_v = GetDirection<Point>::apply(n, rng);` (corresponding to Step 1 of Original and Limping SB in the paper) instead of `Point v = get_direction(P, rng);` (corresponding to Step 1 of Running SB).
```cpp
NT beta;
if (mode_ == Mode::Original) {
    NT den = dot_r - dot_k;
    if (std::abs(den) < eps) continue;
    beta = std::clamp(dot_r / den, NT(0), NT(1));
}
else {
    beta = -dot_k;
}

if (beta > NT(0) && beta <= NT(1) &&
    rng.sample_urdist() < beta)
{
    p_ = y;
    facet_idx_ = facet_new;
    A_row_k_ = A_row_r;
    Ar_.noalias() -= lambda_hit * Av_;
}
```
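
The `Mode` enum and `mode_` member referenced in the snippet are not part of the committed walk; the following is only a minimal sketch of how that switch could be wired up, with hypothetical names apart from `Mode::Original`, which appears in the snippet above.

```cpp
// Hypothetical scaffolding for selecting the Shake-and-Bake variant.
// Only the Running variant exists in shake_and_bake_walk.hpp; Original and
// Limping would additionally use the acceptance snippet shown above.
enum class Mode { Running, Original, Limping };

struct VariantConfig {
    Mode mode_ = Mode::Running;   // default: the committed Running variant
};

// Inside apply(), the direction step would branch on mode_, e.g.:
//   if (mode_ == Mode::Running)
//       Point v = get_direction(P, rng);              // Step 1, Running SB
//   else
//       _v = GetDirection<Point>::apply(n, rng);      // Step 1, Original / Limping SB
// and the acceptance test would then use the beta computation above.
```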

examples/sb_sampling/sbtest.py

Lines changed: 97 additions & 0 deletions
@@ -0,0 +1,97 @@
# VolEsti (volume computation and sampling library)

# Copyright (c) 2012-2025 Vissarion Fisikopoulos
# Copyright (c) 2018-2025 Apostolos Chalkis
# Copyright (c) 2025-2025 Iva Janković

# Contributed and/or modified by Iva Janković, as part of Google Summer of Code 2025 program.

# Licensed under GNU LGPL.3, see LICENCE file

import numpy as np
import matplotlib.pyplot as plt

shape = input("Name of the polytope ('cube', 'simplex', 'birkhoff', or anything else): ").strip()
dim = int(input("Number of dimensions: ").strip())

# Loading the data
filename = f"build/sb_{shape}_{dim}_run.txt"
data = np.loadtxt(filename)
n, d = data.shape
eps = 1e-7

# Finding the facet
if shape == "cube":
    facet_col = np.random.randint(0, d)
    facet_val = np.random.choice([+1.0, -1.0])
    desc = f"x_{facet_col}{facet_val}"
    mask = np.abs(data[:, facet_col] - facet_val) < eps

elif shape in ("birkhoff", "simplex"):
    facet_col = np.random.randint(0, d)
    facet_val = 0.0
    desc = f"x_{facet_col}≈0"
    mask = np.abs(data[:, facet_col] - facet_val) < eps

else:
    facet_col = None
    desc = "full cloud"
    mask = np.ones(n, dtype=bool)

facet_pts = data[mask]

# "Center" of the facet
centroid = facet_pts.mean(axis=0)

# Projection on two random axes
axes = [i for i in range(d) if i != facet_col] or list(range(d))
i, j = np.random.choice(axes, size=2, replace=False)

# Scatter of points in the facet
plt.figure(figsize=(6, 6))
plt.scatter(facet_pts[:, i], facet_pts[:, j], s=12, alpha=0.7, label="Samples")
plt.scatter(centroid[i], centroid[j], c='red', marker='*', s=150, label='Center')
plt.xlabel(f"x_{i}")
plt.ylabel(f"x_{j}")
plt.title(f"{shape.capitalize()} {dim}D — facet {desc} with {len(facet_pts)} points")
plt.legend()
plt.grid(True)
plt.tight_layout()
plt.show()

# Scaling ratio testing
coverage = {}
filename = f"build/sb_{shape}_{dim}_coverage.txt"
with open(filename) as f:
    for line in f:
        line = line.strip()
        if not line:
            continue
        parts = line.split()
        if parts[0] != 'Facet':
            continue
        facet = int(parts[1])
        tail = ' '.join(parts[4:])
        pairs = [p.strip().rstrip(',') for p in tail.split(',') if p.strip()]
        xs, covs = [], []
        for p in pairs:
            x_str, cov_str = p.split(':', 1)
            x = float(x_str)
            cov = float(cov_str)
            xs.append(x)
            covs.append(cov)
        coverage[facet] = (np.array(xs), np.array(covs))

plt.figure(figsize=(8, 6))
plt.grid(True)
for facet, (xs, covs) in coverage.items():
    plt.plot(xs, covs, label=f'Facet {facet}')

plt.xlabel(r'$x^{\,d}$')
plt.ylabel('Coverage ratio')
plt.grid(True)
plt.title('Scaling coverage per facet (vs $x^{d}$)')
plt.legend(bbox_to_anchor=(1.05, 1), loc='upper left')
plt.tight_layout()
plt.show()
Lines changed: 159 additions & 0 deletions
@@ -0,0 +1,159 @@
// VolEsti (volume computation and sampling library)

// Copyright (c) 2012-2025 Vissarion Fisikopoulos
// Copyright (c) 2018-2025 Apostolos Chalkis
// Copyright (c) 2025-2025 Iva Janković

// Contributed and/or modified by Iva Janković, as part of Google Summer of Code 2025 program.

// Licensed under GNU LGPL.3, see LICENCE file

#include <iostream>
#include <fstream>
#include <iomanip>   // std::setw, std::setprecision
#include <cmath>     // std::isnan
#include <string>
#include <vector>
#include <Eigen/Eigen>
#include <boost/random.hpp>

#include "cartesian_geom/cartesian_kernel.h"
#include "preprocess/feasible_point.hpp"
#include "convex_bodies/hpolytope.h"
#include "random_walks/random_walks.hpp"
#include "random_walks/shake_and_bake_walk.hpp"
#include "generators/known_polytope_generators.h"
#include "diagnostics/scaling_ratio.hpp"


using NT = double;
using Kernel = Cartesian<NT>;
using Point = Kernel::Point;
using RNG = BoostRandomNumberGenerator<boost::random::mt19937, NT>;
using HPoly = HPolytope<Point>;
using Walker1 = ShakeAndBakeWalk::Walk<HPoly, RNG>;


int main(int argc, char* argv[])
{
    if (argc < 3) {
        std::cerr << "Usage: " << argv[0]
                  << " <cube|simplex|birkhoff> <dimension> [epsilon]\n";
        return 1;
    }

    std::string shape = argv[1];
    unsigned cli_n = std::stoi(argv[2]);
    NT eps_cli = (argc > 3)
                 ? static_cast<NT>(std::stod(argv[3]))
                 : Walker1::kDefaultEpsilon; // default value if not set manually

    // Generating the polytope
    HPoly P;
    if (shape == "cube")          P = generate_cube<HPoly>(cli_n, false);
    else if (shape == "simplex")  P = generate_simplex<HPoly>(cli_n, false);
    else if (shape == "birkhoff") P = generate_birkhoff<HPoly>(cli_n);
    else {
        std::cerr << "Unknown polytope type: " << shape << '\n';
        return 1;
    }

    // Walk parameter adjustments
    const unsigned true_dim = P.dimension();
    unsigned walk_len, n_samples, burn_in_iters;

    int mode = (shape == "cube" || shape == "simplex") ? 0
             : (shape == "birkhoff")                   ? 1
             : 2;

    switch (mode) {
        case 0: // cube or simplex
            walk_len = 20 * true_dim;
            n_samples = 500 * true_dim;
            burn_in_iters = 5 * true_dim;
            break;

        case 1: // birkhoff
            walk_len = 100 * true_dim;
            n_samples = 2000 * true_dim;
            burn_in_iters = 10 * true_dim;
            break;

        default:
            walk_len = 20 * true_dim;
            n_samples = 100 * true_dim;
            burn_in_iters = 20 * true_dim;
            break;
    }

    std::cout << "Parameters: walk_len=" << walk_len
              << ", n_samples=" << n_samples
              << ", burn_in_iters=" << burn_in_iters
              << " (dim=" << true_dim
              << ") eps=" << eps_cli << '\n';

    // Initializing the walk
    RNG rng(true_dim);

    auto [boundary_pt, facet_idx] = compute_boundary_point<Point>(P, rng, eps_cli);
    Walker1 walk1(P, boundary_pt, facet_idx, rng, eps_cli);
    const NT tol = walk1.get_epsilon();

    const std::string base = "sb_" + shape + "_" + std::to_string(cli_n);
    std::ofstream out(base + "_run.txt");

    Eigen::Matrix<NT, Eigen::Dynamic, Eigen::Dynamic> samples1(true_dim, n_samples);
    std::vector<int> facet_id1(n_samples, -1);

    // Burn-in
    for (int i = 0; i < burn_in_iters; ++i)
        walk1.apply(P, walk_len, rng);

    // Sampling
    for (int i = 0; i < n_samples; ++i) {
        walk1.apply(P, walk_len, rng);
        const Point& q = walk1.getCurrentPoint();
        samples1.col(i) = q.getCoefficients();

        // Write the sample to the output file
        for (unsigned d = 0; d < true_dim; ++d)
            out << q[d] << (d + 1 < true_dim ? ' ' : '\n');
    }
    out.close();

    std::cout << "Generated " << n_samples << " samples in "
              << walk_len << " steps each.\n";


    // Scaling ratio test
    auto [scales, coverage, max_dev, avg_dev] = scaling_ratio_boundary_test(P, samples1, tol);

    std::cout << "Scaling factors:\n";
    for (double s : scales) {
        std::cout << s << " ";
    }
    std::cout << "\n\nCoverage matrix (each row = one facet):\n";
    for (int f = 0; f < coverage.rows(); ++f) {
        std::cout << "Facet " << f << ": ";
        for (int k = 0; k < coverage.cols(); ++k) {
            double cov = coverage(f, k);
            if (std::isnan(cov))
                std::cout << "NaN ";
            else
                std::cout << cov << " ";
        }
        std::cout << "\n";
    }

    // Uniformity deviation analysis
    std::cout << "\n";
    std::cout << "Facet  Max deviation (%)  Avg deviation (%)\n";
    for (int f = 0; f < max_dev.size(); ++f) {
        std::cout << std::setw(6) << f << " "
                  << std::fixed << std::setprecision(2)
                  << std::setw(18) << max_dev[f] << " "
                  << std::setw(22) << avg_dev[f]
                  << "\n";
    }

    return 0;
}
