Skip to content

Commit 47d4556

Browse files
committed
Apply clippy --fix
1 parent b7b2001 commit 47d4556

34 files changed: +273 additions, −321 deletions

splashsurf/src/allocator.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ unsafe impl<A: GlobalAlloc> GlobalAlloc for CountingAllocator<A> {
2121
self.peak_allocation
2222
.fetch_max(current_allocation, Ordering::AcqRel);
2323
}
24-
return ret;
24+
ret
2525
}
2626

2727
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {

splashsurf/src/convert.rs

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -136,13 +136,11 @@ fn convert_mesh(cmd_args: &ConvertSubcommandArgs) -> Result<(), anyhow::Error> {
136136

137137
/// Returns an error if the file already exists but overwrite is disabled
138138
fn overwrite_check(cmd_args: &ConvertSubcommandArgs) -> Result<(), anyhow::Error> {
139-
if !cmd_args.overwrite {
140-
if cmd_args.output_file.exists() {
141-
return Err(anyhow!(
142-
"Aborting: Output file \"{}\" already exists. Use overwrite flag to ignore this.",
143-
cmd_args.output_file.display()
144-
));
145-
}
139+
if !cmd_args.overwrite && cmd_args.output_file.exists() {
140+
return Err(anyhow!(
141+
"Aborting: Output file \"{}\" already exists. Use overwrite flag to ignore this.",
142+
cmd_args.output_file.display()
143+
));
146144
}
147145

148146
Ok(())

splashsurf/src/io.rs

Lines changed: 10 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -22,15 +22,9 @@ pub struct FormatParameters {
2222
}
2323

2424
/// File format parameters for input files
25-
#[derive(Clone, Debug)]
25+
#[derive(Clone, Debug, Default)]
2626
pub struct InputFormatParameters {}
2727

28-
impl Default for InputFormatParameters {
29-
fn default() -> Self {
30-
Self {}
31-
}
32-
}
33-
3428
/// File format parameters for output files
3529
#[derive(Clone, Debug)]
3630
pub struct OutputFormatParameters {
@@ -107,7 +101,7 @@ pub fn read_particle_positions_with_attributes<R: Real, P: AsRef<Path>>(
107101

108102
let vtk_pieces = VtkFile::load_file(input_file)
109103
.map(|f| f.into_pieces())
110-
.with_context(|| format!("Failed to load particle positions from file"))?;
104+
.with_context(|| "Failed to load particle positions from file".to_string())?;
111105

112106
if vtk_pieces.len() > 1 {
113107
warn!("VTK file contains more than one \"piece\". Only the first one will be loaded.");
@@ -176,13 +170,13 @@ pub fn write_particle_positions<R: Real, P: AsRef<Path>>(
176170
.ok_or(anyhow!("Invalid extension of output file"))?;
177171

178172
match extension.to_lowercase().as_str() {
179-
"vtk" => vtk_format::particles_to_vtk(particles, &output_file),
173+
"vtk" => vtk_format::particles_to_vtk(particles, output_file),
180174
"bgeo" => bgeo_format::particles_to_bgeo(
181175
particles,
182-
&output_file,
176+
output_file,
183177
format_params.enable_compression,
184178
),
185-
"json" => json_format::particles_to_json(particles, &output_file),
179+
"json" => json_format::particles_to_json(particles, output_file),
186180
_ => Err(anyhow!(
187181
"Unsupported file format extension \"{}\" for writing particles",
188182
extension
@@ -214,8 +208,8 @@ pub fn read_surface_mesh<R: Real, P: AsRef<Path>>(
214208
.ok_or(anyhow!("Invalid extension of input file"))?;
215209

216210
match extension.to_lowercase().as_str() {
217-
"vtk" => vtk_format::surface_mesh_from_vtk(&input_file),
218-
"ply" => ply_format::surface_mesh_from_ply(&input_file),
211+
"vtk" => vtk_format::surface_mesh_from_vtk(input_file),
212+
"ply" => ply_format::surface_mesh_from_ply(input_file),
219213
_ => Err(anyhow!(
220214
"Unsupported file format extension \"{}\" for reading surface meshes",
221215
extension
@@ -261,9 +255,9 @@ where
261255
.ok_or(anyhow!("Invalid extension of output file"))?;
262256

263257
match extension.to_lowercase().as_str() {
264-
"vtk" => vtk_format::write_vtk(mesh, &output_file, "mesh"),
265-
"ply" => ply_format::mesh_to_ply(mesh, &output_file),
266-
"obj" => obj_format::mesh_to_obj(mesh, &output_file),
258+
"vtk" => vtk_format::write_vtk(mesh, output_file, "mesh"),
259+
"ply" => ply_format::mesh_to_ply(mesh, output_file),
260+
"obj" => obj_format::mesh_to_obj(mesh, output_file),
267261
_ => Err(anyhow!(
268262
"Unsupported file format extension \"{}\"",
269263
extension,

splashsurf/src/logging.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -22,15 +22,15 @@ impl<T: Write + Send> ProgressHandler<T> {
2222
fn handle<F: FnOnce(&mut Self) -> R, R>(&mut self, inner_function: F) -> R {
2323
let handle = get_progress_bar();
2424

25-
return match handle {
26-
Some(pb) => pb.suspend(|| return inner_function(self)),
25+
match handle {
26+
Some(pb) => pb.suspend(|| inner_function(self)),
2727
None => inner_function(self),
28-
};
28+
}
2929
}
3030

3131
/// Create a new instance of "Self"
3232
pub fn new(pipe: T) -> Self {
33-
return Self(pipe);
33+
Self(pipe)
3434
}
3535
}
3636

@@ -44,12 +44,12 @@ impl<T: Write + Send + 'static> ProgressHandler<T> {
4444
impl<T: Write + Send> Write for ProgressHandler<T> {
4545
#[inline]
4646
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
47-
return self.handle(|this| this.0.write(buf));
47+
self.handle(|this| this.0.write(buf))
4848
}
4949

5050
#[inline]
5151
fn flush(&mut self) -> std::io::Result<()> {
52-
return self.handle(|this| this.0.flush());
52+
self.handle(|this| this.0.flush())
5353
}
5454
}
5555

splashsurf/src/main.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ fn run_splashsurf() -> Result<(), anyhow::Error> {
9797
splashsurf_lib::profiling::write_to_string()
9898
.unwrap()
9999
.split("\n")
100-
.filter(|l| l.len() > 0)
100+
.filter(|l| !l.is_empty())
101101
.for_each(|l| info!("{}", l));
102102

103103
// Print memory stats if available

splashsurf/src/reconstruction.rs

Lines changed: 18 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -375,27 +375,30 @@ pub fn reconstruct_subcommand(cmd_args: &ReconstructSubcommandArgs) -> Result<()
375375
path.input_file.display()
376376
)
377377
})
378-
.map_err(|err| {
378+
.inspect_err(|err| {
379379
// Already log the error in case there are multiple errors
380-
logging::log_error(&err);
381-
err
380+
logging::log_error(err);
382381
})
383-
.and_then(|_| {
384-
logging::get_progress_bar().map(|pb| pb.inc(1));
385-
Ok(())
382+
.map(|_| {
383+
if let Some(pb) = logging::get_progress_bar() {
384+
pb.inc(1)
385+
}
386386
})
387387
})
388388
} else {
389389
paths.iter().try_for_each(|path| {
390-
reconstruction_pipeline(path, &args).and_then(|_| {
391-
logging::get_progress_bar().map(|pb| pb.inc(1));
392-
Ok(())
390+
reconstruction_pipeline(path, &args).map(|_| {
391+
if let Some(pb) = logging::get_progress_bar() {
392+
pb.inc(1)
393+
}
393394
})
394395
})
395396
};
396397

397398
if paths.len() > 1 {
398-
logging::get_progress_bar().map(|pb| pb.finish());
399+
if let Some(pb) = logging::get_progress_bar() {
400+
pb.finish()
401+
}
399402
logging::set_progress_bar(None);
400403
}
401404

@@ -922,7 +925,7 @@ pub(crate) fn reconstruction_pipeline_generic<I: Index, R: Real>(
922925
.parent()
923926
// Add a trailing separator if the parent is non-empty
924927
.map(|p| p.join(""))
925-
.unwrap_or_else(PathBuf::new);
928+
.unwrap_or_default();
926929
let output_filename = format!(
927930
"raw_{}",
928931
paths.output_file.file_name().unwrap().to_string_lossy()
@@ -1028,7 +1031,7 @@ pub(crate) fn reconstruction_pipeline_generic<I: Index, R: Real>(
10281031
// Global neighborhood search
10291032
let nl = reconstruction
10301033
.particle_neighbors()
1031-
.map(|nl| Cow::Borrowed(nl))
1034+
.map(Cow::Borrowed)
10321035
.unwrap_or_else(||
10331036
{
10341037
let search_radius = params.compact_support_radius;
@@ -1059,8 +1062,8 @@ pub(crate) fn reconstruction_pipeline_generic<I: Index, R: Real>(
10591062
.map(|j| {
10601063
let dist =
10611064
(particle_positions[i] - particle_positions[j]).norm_squared();
1062-
let weight = R::one() - (dist / squared_r).clamp(R::zero(), R::one());
1063-
return weight;
1065+
1066+
R::one() - (dist / squared_r).clamp(R::zero(), R::one())
10641067
})
10651068
.fold(R::zero(), R::add)
10661069
})
@@ -1073,7 +1076,7 @@ pub(crate) fn reconstruction_pipeline_generic<I: Index, R: Real>(
10731076
.expect("interpolator is required")
10741077
.interpolate_scalar_quantity(
10751078
weighted_ncounts.as_slice(),
1076-
&mesh_with_data.vertices(),
1079+
mesh_with_data.vertices(),
10771080
true,
10781081
)
10791082
};

splashsurf_lib/benches/benches/bench_neighborhood.rs

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,13 @@ static COMPACT_SUPPORT_RADIUS: f64 = 4.0 * PARTICLE_RADIUS;
1010
//static NUM_PARTICLES: Option<usize> = Some(800);
1111
static NUM_PARTICLES: Option<usize> = None;
1212

13-
static PARTICLE_FILE: &'static str = "../data/bunny_frame_14_7705_particles.vtk";
13+
static PARTICLE_FILE: &str = "../data/bunny_frame_14_7705_particles.vtk";
1414

1515
fn particle_subset(particle_positions: &[Vector3<f32>]) -> &[Vector3<f32>] {
16-
if let Some(n_particles) = NUM_PARTICLES.clone() {
16+
if let Some(n_particles) = NUM_PARTICLES {
1717
&particle_positions[0..n_particles]
1818
} else {
19-
&particle_positions[..]
19+
particle_positions
2020
}
2121
}
2222

@@ -35,7 +35,7 @@ pub fn neighborhood_search_naive(c: &mut Criterion) {
3535
b.iter(|| {
3636
neighborhood_lists.clear();
3737
neighborhood_search::neighborhood_search_naive(
38-
&particle_positions,
38+
particle_positions,
3939
COMPACT_SUPPORT_RADIUS as f32,
4040
&mut neighborhood_lists,
4141
);
@@ -64,7 +64,7 @@ pub fn neighborhood_search_spatial_hashing(c: &mut Criterion) {
6464
neighborhood_lists.clear();
6565
neighborhood_search::neighborhood_search_spatial_hashing::<i32, f32>(
6666
&domain,
67-
&particle_positions,
67+
particle_positions,
6868
COMPACT_SUPPORT_RADIUS as f32,
6969
&mut neighborhood_lists,
7070
);
@@ -93,7 +93,7 @@ pub fn neighborhood_search_spatial_hashing_parallel(c: &mut Criterion) {
9393
neighborhood_lists.clear();
9494
neighborhood_search::neighborhood_search_spatial_hashing_parallel::<i32, f32>(
9595
&domain,
96-
&particle_positions,
96+
particle_positions,
9797
COMPACT_SUPPORT_RADIUS as f32,
9898
&mut neighborhood_lists,
9999
);

splashsurf_lib/benches/benches/bench_subdomain_grid.rs

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ fn parameters_canyon() -> Parameters<f32> {
1212
let compact_support_radius = 4.0 * particle_radius;
1313
let cube_size = 1.5 * particle_radius;
1414

15-
let parameters = Parameters {
15+
Parameters {
1616
particle_radius,
1717
rest_density: 1000.0,
1818
compact_support_radius,
@@ -27,9 +27,7 @@ fn parameters_canyon() -> Parameters<f32> {
2727
},
2828
)),
2929
global_neighborhood_list: false,
30-
};
31-
32-
parameters
30+
}
3331
}
3432

3533
pub fn grid_canyon(c: &mut Criterion) {

splashsurf_lib/examples/minimal_levelset.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ pub fn marching_cubes<R: Real, L: MarchingCubesLevelSet<R>>(
8888
let vertex_index = *edge_to_vertex.entry(edge).or_insert_with(|| {
8989
let vertex_index = vertices.len();
9090

91-
let origin_coords = grid.point_coordinates(&edge.origin());
91+
let origin_coords = grid.point_coordinates(edge.origin());
9292
let target_coords = grid.point_coordinates(&edge.target());
9393

9494
let origin_value = level_set.evaluate(&origin_coords);

splashsurf_lib/src/aabb.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,9 @@ where
3030
if points.is_empty() {
3131
Self::zeros()
3232
} else if points.len() == 1 {
33-
Self::from_point(points[0].clone())
33+
Self::from_point(points[0])
3434
} else {
35-
let initial_aabb = Self::from_point(points[0].clone());
35+
let initial_aabb = Self::from_point(points[0]);
3636
points[1..]
3737
.par_iter()
3838
.fold(
@@ -73,7 +73,7 @@ where
7373
#[inline(always)]
7474
pub fn from_point(point: SVector<R, D>) -> Self {
7575
Self {
76-
min: point.clone(),
76+
min: point,
7777
max: point,
7878
}
7979
}

0 commit comments

Comments (0)