
Commit 613f3dd

Support reading attributes from BGEO files
1 parent 590c171 commit 613f3dd

File tree

3 files changed (+189, -64 lines)

splashsurf/src/io.rs

Lines changed: 97 additions & 53 deletions
@@ -82,63 +82,107 @@ pub fn read_particle_positions_with_attributes<R: Real, P: AsRef<Path>>(
 
     profile!("loading particle positions and attributes");
 
-    // Check file extension: only VTK is supported for reading attributes at the moment
-    {
-        let extension = input_file.extension().ok_or(anyhow!(
-            "Unable to detect file format of particle input file (file name has to end with supported extension)",
-        ))?.to_str().ok_or(anyhow!("Invalid extension of input file"))?.to_lowercase();
-
-        match extension.as_str() {
-            "vtk" | "vtu" => {}
-            _ => {
-                return Err(anyhow!(
-                    "Unsupported file format extension \"{}\" for reading particles and attributes",
-                    extension
-                ));
-            }
-        }
-    }
-
-    let vtk_pieces = VtkFile::load_file(input_file)
-        .map(|f| f.into_pieces())
-        .with_context(|| "Failed to load particle positions from file".to_string())?;
-
-    if vtk_pieces.len() > 1 {
-        warn!("VTK file contains more than one \"piece\". Only the first one will be loaded.");
-    }
-
-    let first_piece = vtk_pieces
-        .into_iter()
-        .next()
-        .ok_or(anyhow!("VTK file does not contain a supported \"piece\"."))?;
-
-    // Load particles
-    let particle_positions = first_piece.load_as_particles()?;
-
-    // Load attributes that should be interpolated
-    let attributes = {
-        // Check if all attributes to interpolate are present in the input file
-        {
-            let attributes_to_interpolate = attribute_names.iter().cloned().collect::<HashSet<_>>();
-            let attributes = first_piece
-                .point_attribute_names()
-                .into_iter()
-                .collect::<HashSet<_>>();
-
-            let missing_attributes = attributes_to_interpolate
-                .difference(&attributes)
-                .cloned()
-                .collect::<Vec<_>>();
-            if !missing_attributes.is_empty() {
-                return Err(anyhow!(
-                    "Missing attribute(s) \"{}\" in input file",
-                    missing_attributes.join("\", \""),
-                ));
-            }
-        }
-
-        first_piece.load_point_attributes::<R>(attribute_names)
-    }?;
+    // Check file extension: only VTK and BGEO is supported for reading attributes at the moment
+    let extension = input_file.extension().ok_or(anyhow!(
+        "Unable to detect file format of particle input file (file name has to end with supported extension)",
+    ))?.to_str().ok_or(anyhow!("Invalid extension of input file"))?.to_lowercase();
+
+    let attributes_to_interpolate = attribute_names.iter().cloned().collect::<HashSet<_>>();
+
+    let (particle_positions, attributes) = match extension.as_str() {
+        "vtk" | "vtu" => {
+            let vtk_pieces = VtkFile::load_file(input_file)
+                .map(|f| f.into_pieces())
+                .with_context(|| "Error while loading VTK file".to_string())?;
+
+            if vtk_pieces.len() > 1 {
+                warn!(
+                    "VTK file contains more than one \"piece\". Only the first one will be loaded."
+                );
+            }
+
+            let first_piece = vtk_pieces
+                .into_iter()
+                .next()
+                .ok_or(anyhow!("VTK file does not contain a supported \"piece\"."))?;
+
+            // Load particles
+            let particle_positions = first_piece.load_as_particles()?;
+
+            // Load attributes that should be interpolated
+            let attributes = {
+                // Check if all attributes to interpolate are present in the input file
+                {
+                    let available_attributes = first_piece
+                        .point_attribute_names()
+                        .into_iter()
+                        .collect::<HashSet<_>>();
+
+                    let missing_attributes = attributes_to_interpolate
+                        .difference(&available_attributes)
+                        .cloned()
+                        .collect::<Vec<_>>();
+                    if !missing_attributes.is_empty() {
+                        return Err(anyhow!(
+                            "Missing attribute(s) \"{}\" in input file",
+                            missing_attributes.join("\", \""),
+                        ));
+                    }
+                }
+
+                first_piece.load_point_attributes::<R>(attribute_names)
+            }?;
+
+            (particle_positions, attributes)
+        }
+        "bgeo" => {
+            let bgeo_file = bgeo_format::load_bgeo_file(input_file)
+                .with_context(|| "Error while loading BGEO file".to_string())?;
+
+            let particle_positions = bgeo_format::particles_from_bgeo_file::<R>(&bgeo_file)
+                .with_context(|| {
+                    "Error while loading particle positions from BGEO file".to_string()
+                })?;
+
+            // Load attributes that should be interpolated
+            let attributes = {
+                // Check if all attributes to interpolate are present in the input file
+                {
+                    let available_attributes = bgeo_file
+                        .attribute_definitions
+                        .iter()
+                        .map(|a| a.name.clone())
+                        .collect::<HashSet<_>>();
+
+                    let missing_attributes = attributes_to_interpolate
+                        .difference(&available_attributes)
+                        .cloned()
+                        .collect::<Vec<_>>();
+                    if !missing_attributes.is_empty() {
+                        return Err(anyhow!(
+                            "Missing attribute(s) \"{}\" in input file",
+                            missing_attributes.join("\", \""),
+                        ));
+                    }
+                }
+
+                let attributes = bgeo_format::attributes_from_bgeo_file(
+                    &bgeo_file,
+                    &Vec::from_iter(attributes_to_interpolate),
+                )?;
+
+                attributes
+            };
+
+            (particle_positions, attributes)
+        }
+        _ => {
+            return Err(anyhow!(
+                "Unsupported file format extension \"{}\" for reading particles and attributes",
+                extension
+            ));
+        }
+    };
 
     info!(
         "Successfully loaded point {} attribute(s): \"{}\"",
splashsurf/src/reconstruction.rs

Lines changed: 2 additions & 2 deletions
@@ -165,8 +165,8 @@ pub struct ReconstructSubcommandArgs {
         require_equals = true
     )]
     pub output_raw_normals: Switch,
-    /// List of point attribute field names from the input file that should be interpolated to the reconstructed surface. Currently this is only supported for VTK and VTU input files.
-    #[arg(help_heading = ARGS_INTERP, long)]
+    /// Interpolate a point attribute field with the given name from the input file to the reconstructed surface. Currently, this is only supported for BGEO, VTK and VTU input files. Specify the argument multiple times for each attribute that should be interpolated.
+    #[arg(help_heading = ARGS_INTERP, long = "interpolate_attribute", short = 'a', value_name = "ATTRIBUTE_NAME")]
     pub interpolate_attributes: Vec<String>,
 
     /// Enable MC specific mesh decimation/simplification which removes bad quality triangles typically generated by MC
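
The changed argument collects each occurrence of the flag into one entry of the Vec<String> field. A stand-alone sketch of that clap pattern (assuming clap v4 with the derive feature; this is not the actual ReconstructSubcommandArgs struct, and the help_heading used by splashsurf is omitted):

use clap::Parser;

// Hypothetical minimal CLI with only the repeatable attribute flag,
// e.g. `prog --interpolate_attribute velocity -a density`.
#[derive(Parser)]
struct Args {
    /// Interpolate a point attribute field with the given name
    #[arg(long = "interpolate_attribute", short = 'a', value_name = "ATTRIBUTE_NAME")]
    interpolate_attributes: Vec<String>,
}

fn main() {
    let args = Args::parse();
    // Each `-a`/`--interpolate_attribute` occurrence appends one name to the vector.
    println!("attributes to interpolate: {:?}", args.interpolate_attributes);
}

For splashsurf itself this corresponds roughly to an invocation like `splashsurf reconstruct <input>.bgeo -a velocity ...`, with the remaining reconstruction parameters omitted here.
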

splashsurf_lib/src/io/bgeo_format.rs

Lines changed: 90 additions & 9 deletions
@@ -14,32 +14,33 @@ use std::io;
 use std::io::{BufWriter, Read};
 use std::path::Path;
 
+use crate::mesh::{AttributeData, MeshAttribute};
 use parser::bgeo_parser;
-
 // TODO: Find out why there is a 1.0 float value between position vector and id int in splishsplash output
 // TODO: Better error messages, skip nom errors
 
-/// Convenience function for loading particles from a BGEO file
+/// Convenience function for loading particles from a BGEO file path
 pub fn particles_from_bgeo<R: Real, P: AsRef<Path>>(
     bgeo_file: P,
 ) -> Result<Vec<Vector3<R>>, anyhow::Error> {
     // Load positions from BGEO file
     let bgeo_file = load_bgeo_file(bgeo_file).context("Error while loading BGEO file")?;
-    particles_from_bgeo_impl(bgeo_file)
+    particles_from_bgeo_file(&bgeo_file)
 }
 
-fn particles_from_bgeo_impl<R: Real>(
-    bgeo_file: BgeoFile,
+/// Returns particle positions from a loaded BGEO file
+pub fn particles_from_bgeo_file<R: Real>(
+    bgeo_file: &BgeoFile,
 ) -> Result<Vec<Vector3<R>>, anyhow::Error> {
     let position_storage = {
-        let storage = bgeo_file.positions;
+        let storage = &bgeo_file.positions;
 
         if let AttributeStorage::Vector(dim, storage) = storage {
-            assert_eq!(dim, 3);
+            assert_eq!(*dim, 3);
             assert_eq!(storage.len() % dim, 0);
             storage
         } else {
-            panic!("Positions are not stored as vectors");
+            return Err(anyhow!("Positions are not stored as vectors in BGEO file"));
         }
     };
 
@@ -58,6 +59,28 @@ fn particles_from_bgeo_impl<R: Real>(
     Ok(positions)
 }
 
+/// Convenience function that converts the point attributes with the given names from the BGEO file into mesh attributes
+pub fn attributes_from_bgeo_file<R: Real>(
+    bgeo_file: &BgeoFile,
+    names: &[String],
+) -> Result<Vec<MeshAttribute<R>>, anyhow::Error> {
+    let mut mesh_attributes = Vec::new();
+
+    'fields: for field_name in names {
+        for (name, storage) in bgeo_file.attribute_data.iter() {
+            if name == field_name {
+                let attribute_data = storage
+                    .try_into_attribute_data::<R>()
+                    .context(anyhow!("Failed to convert attribute \"{name}\""))?;
+                mesh_attributes.push(MeshAttribute::new(field_name.clone(), attribute_data));
+                continue 'fields;
+            }
+        }
+    }
+
+    Ok(mesh_attributes)
+}
+
 /// Loads and parses a BGEO file to memory
 pub fn load_bgeo_file<P: AsRef<Path>>(bgeo_file: P) -> Result<BgeoFile, anyhow::Error> {
     let mut buf = Vec::new();
@@ -306,6 +329,64 @@ impl AttributeStorage {
             AttributeStorage::Vector(n, v) => v.len() / n,
         }
     }
+
+    /// Tries to convert this BGEO attribute storage into a mesh [`AttributeData`] storage
+    fn try_into_attribute_data<R: Real>(&self) -> Result<AttributeData<R>, anyhow::Error> {
+        // TODO: Simplify error handling and de-duplicate with e.g. VTK code
+        match self {
+            AttributeStorage::Int(data) => {
+                let data = data
+                    .iter()
+                    .map(|v| u64::try_from(*v))
+                    .collect::<Result<Vec<_>, _>>()
+                    .context(anyhow!(
+                        "Failed to convert an attribute value from i32 to u64 type"
+                    ))?;
+                Ok(AttributeData::ScalarU64(data))
+            }
+            AttributeStorage::Float(data) => {
+                let data = data
+                    .iter()
+                    .map(|v| {
+                        R::from_f32(*v).ok_or_else(|| {
+                            anyhow!("Cannot convert an attribute value from f32 to Real type")
+                        })
+                    })
+                    .collect::<Result<Vec<_>, _>>()
+                    .context(anyhow!(
+                        "Failed to convert an attribute value from f32 to Real type"
+                    ))?;
+                Ok(AttributeData::ScalarReal(data))
+            }
+            AttributeStorage::Vector(n, data) => {
+                if *n == 3 {
+                    let data = data
+                        .chunks_exact(3)
+                        .map(|v| {
+                            Some(Vector3::new(
+                                R::from_f32(v[0])?,
+                                R::from_f32(v[1])?,
+                                R::from_f32(v[2])?,
+                            ))
+                        })
+                        .map(|vec| {
+                            vec.ok_or_else(|| {
+                                anyhow!(
+                                    "Failed to convert an attribute vector from f32 to Real type"
+                                )
+                            })
+                        })
+                        .collect::<Result<Vec<_>, _>>()
+                        .context(anyhow!(
+                            "Failed to convert an attribute vector from f32 to Real type"
+                        ))?;
+                    Ok(AttributeData::Vector3Real(data))
+                } else {
+                    Err(anyhow!("Unsupported vector attribute dimension: {}", n))
+                }
+            }
+        }
+    }
 }
 
 /// Parsers used to parse the BGEO format
@@ -863,7 +944,7 @@ fn test_bgeo_write_dam_break() {
         .context("Error while parsing the BGEO file contents")
         .unwrap();
 
-    let particles_read = particles_from_bgeo_impl(bgeo_read).unwrap();
+    let particles_read = particles_from_bgeo_file(&bgeo_read).unwrap();
 
     assert_eq!(particles_read.len(), 6859);
     assert_eq!(particles, particles_read);
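
Taken together, the changes split BGEO handling into one parse step (load_bgeo_file) and separate extraction steps for positions and attributes, so the file is read only once even when both are needed. A hedged usage sketch based on the signatures above; the module path splashsurf_lib::io::bgeo_format, the f32 choice for the Real parameter, and the "velocity" attribute name are assumptions for illustration and may differ from the actual public API:

use std::path::Path;

use splashsurf_lib::io::bgeo_format; // assumed public module path

fn load_bgeo_with_velocity(path: &Path) -> Result<(), anyhow::Error> {
    // Parse the BGEO file into memory once...
    let bgeo_file = bgeo_format::load_bgeo_file(path)?;
    // ...then pull out positions and the selected attributes from the parsed data,
    // without re-reading the file for each query.
    let positions = bgeo_format::particles_from_bgeo_file::<f32>(&bgeo_file)?;
    let attributes =
        bgeo_format::attributes_from_bgeo_file::<f32>(&bgeo_file, &["velocity".to_string()])?;
    println!("{} particles, {} attribute(s) loaded", positions.len(), attributes.len());
    Ok(())
}
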
