diff --git a/python/src/tiff.rs b/python/src/tiff.rs
index 861c519..e334778 100644
--- a/python/src/tiff.rs
+++ b/python/src/tiff.rs
@@ -37,7 +37,7 @@ impl PyTIFF {
             .map_err(|err| PyFileNotFoundError::new_err(err.to_string()))?;
         let mut metadata_reader = TiffMetadataReader::try_open(&metadata_fetch).await.unwrap();
         let ifds = metadata_reader
-            .read_all_ifds(&metadata_fetch)
+            .read_all_ifds(&metadata_fetch, Default::default())
            .await
            .unwrap();
        let tiff = TIFF::new(ifds);
diff --git a/src/cog.rs b/src/cog.rs
index 99a0f14..545e263 100644
--- a/src/cog.rs
+++ b/src/cog.rs
@@ -44,7 +44,7 @@ mod test {
            .await
            .unwrap();
        let ifds = metadata_reader
-            .read_all_ifds(&prefetch_reader)
+            .read_all_ifds(&prefetch_reader, Default::default())
            .await
            .unwrap();
        let tiff = TIFF::new(ifds);
diff --git a/src/geo/affine.rs b/src/geo/affine.rs
index e84989b..9fa1fbb 100644
--- a/src/geo/affine.rs
+++ b/src/geo/affine.rs
@@ -40,21 +40,21 @@ impl AffineTransform {
         self.5
     }
 
-    /// Construct a new Affine Transform from the IFD
-    pub fn from_ifd(ifd: &ImageFileDirectory) -> Option<Self> {
-        if let (Some(model_pixel_scale), Some(model_tiepoint)) =
-            (&ifd.model_pixel_scale, &ifd.model_tiepoint)
-        {
-            Some(Self::new(
-                model_pixel_scale[0],
-                0.0,
-                model_tiepoint[3],
-                0.0,
-                -model_pixel_scale[1],
-                model_tiepoint[4],
-            ))
-        } else {
-            None
-        }
-    }
+    // /// Construct a new Affine Transform from the IFD
+    // pub fn from_ifd(ifd: &ImageFileDirectory) -> Option<Self> {
+    //     if let (Some(model_pixel_scale), Some(model_tiepoint)) =
+    //         (&ifd.model_pixel_scale, &ifd.model_tiepoint)
+    //     {
+    //         Some(Self::new(
+    //             model_pixel_scale[0],
+    //             0.0,
+    //             model_tiepoint[3],
+    //             0.0,
+    //             -model_pixel_scale[1],
+    //             model_tiepoint[4],
+    //         ))
+    //     } else {
+    //         None
+    //     }
+    // }
 }
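For context, a minimal sketch of what `Default::default()` resolves to at the `read_all_ifds` call sites above. The `async_tiff` crate name and re-export paths are assumptions, not taken from this patch:

```rust
use async_tiff::metadata::{ExtraTagsRegistry, MetadataFetch, TiffMetadataReader};
use async_tiff::ImageFileDirectory;

// Equivalent to passing `Default::default()`: the default registry pre-registers the
// GeoTags parser, so geospatial tags are still captured for every IFD.
async fn read_ifds<F: MetadataFetch>(fetch: &F) -> Vec<ImageFileDirectory> {
    let mut reader = TiffMetadataReader::try_open(fetch).await.unwrap();
    let registry = ExtraTagsRegistry::default();
    reader.read_all_ifds(fetch, registry).await.unwrap()
}
```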
diff --git a/src/ifd.rs b/src/ifd.rs
index f0da9c1..e0c2338 100644
--- a/src/ifd.rs
+++ b/src/ifd.rs
@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::fmt::Debug;
 use std::ops::Range;
 
 use bytes::Bytes;
@@ -6,6 +7,7 @@ use num_enum::TryFromPrimitive;
 
 use crate::error::{AsyncTiffError, AsyncTiffResult};
 use crate::geo::{GeoKeyDirectory, GeoKeyTag};
+use crate::metadata::ExtraTagsRegistry;
 use crate::predictor::PredictorInfo;
 use crate::reader::{AsyncFileReader, Endianness};
 use crate::tiff::tags::{
@@ -133,10 +135,7 @@ pub struct ImageFileDirectory {
 
     pub(crate) copyright: Option<String>,
 
-    // Geospatial tags
-    pub(crate) geo_key_directory: Option<GeoKeyDirectory>,
-    pub(crate) model_pixel_scale: Option<Vec<f64>>,
-    pub(crate) model_tiepoint: Option<Vec<f64>>,
+    pub(crate) extra_tags: ExtraTagsRegistry,
 
     // GDAL tags
     // no_data
@@ -149,6 +148,7 @@ impl ImageFileDirectory {
     pub fn from_tags(
         tag_data: HashMap<Tag, Value>,
         endianness: Endianness,
+        extra_tags_registry: ExtraTagsRegistry,
     ) -> AsyncTiffResult<Self> {
         let mut new_subfile_type = None;
         let mut image_width = None;
@@ -183,11 +183,6 @@ impl ImageFileDirectory {
         let mut sample_format = None;
         let mut jpeg_tables = None;
         let mut copyright = None;
-        let mut geo_key_directory_data = None;
-        let mut model_pixel_scale = None;
-        let mut model_tiepoint = None;
-        let mut geo_ascii_params: Option<String> = None;
-        let mut geo_double_params: Option<Vec<f64>> = None;
 
         let mut other_tags = HashMap::new();
 
@@ -252,13 +247,6 @@ impl ImageFileDirectory {
             Tag::JPEGTables => jpeg_tables = Some(value.into_u8_vec()?.into()),
             Tag::Copyright => copyright = Some(value.into_string()?),
 
-            // Geospatial tags
-            // http://geotiff.maptools.org/spec/geotiff2.4.html
-            Tag::GeoKeyDirectoryTag => geo_key_directory_data = Some(value.into_u16_vec()?),
-            Tag::ModelPixelScaleTag => model_pixel_scale = Some(value.into_f64_vec()?),
-            Tag::ModelTiepointTag => model_tiepoint = Some(value.into_f64_vec()?),
-            Tag::GeoAsciiParamsTag => geo_ascii_params = Some(value.into_string()?),
-            Tag::GeoDoubleParamsTag => geo_double_params = Some(value.into_f64_vec()?),
             // Tag::GdalNodata
             // Tags for which the tiff crate doesn't have a hard-coded enum variant
             Tag::Unknown(DOCUMENT_NAME) => document_name = Some(value.into_string()?),
@@ -269,81 +257,6 @@ impl ImageFileDirectory {
             Ok::<_, TiffError>(())
         })?;
 
-        let mut geo_key_directory = None;
-
-        // We need to actually parse the GeoKeyDirectory after parsing all other tags because the
-        // GeoKeyDirectory relies on `GeoAsciiParamsTag` having been parsed.
-        if let Some(data) = geo_key_directory_data {
-            let mut chunks = data.chunks(4);
-
-            let header = chunks
-                .next()
-                .expect("If the geo key directory exists, a header should exist.");
-            let key_directory_version = header[0];
-            assert_eq!(key_directory_version, 1);
-
-            let key_revision = header[1];
-            assert_eq!(key_revision, 1);
-
-            let _key_minor_revision = header[2];
-            let number_of_keys = header[3];
-
-            let mut tags = HashMap::with_capacity(number_of_keys as usize);
-            for _ in 0..number_of_keys {
-                let chunk = chunks
-                    .next()
-                    .expect("There should be a chunk for each key.");
-
-                let key_id = chunk[0];
-                let tag_name =
-                    GeoKeyTag::try_from_primitive(key_id).expect("Unknown GeoKeyTag id: {key_id}");
-
-                let tag_location = chunk[1];
-                let count = chunk[2];
-                let value_offset = chunk[3];
-
-                if tag_location == 0 {
-                    tags.insert(tag_name, Value::Short(value_offset));
-                } else if Tag::from_u16_exhaustive(tag_location) == Tag::GeoAsciiParamsTag {
-                    // If the tag_location points to the value of Tag::GeoAsciiParamsTag, then we
-                    // need to extract a subslice from GeoAsciiParamsTag
-
-                    let geo_ascii_params = geo_ascii_params
-                        .as_ref()
-                        .expect("GeoAsciiParamsTag exists but geo_ascii_params does not.");
-                    let value_offset = value_offset as usize;
-                    let mut s = &geo_ascii_params[value_offset..value_offset + count as usize];
-
-                    // It seems that this string subslice might always include the final |
-                    // character?
-                    if s.ends_with('|') {
-                        s = &s[0..s.len() - 1];
-                    }
-
-                    tags.insert(tag_name, Value::Ascii(s.to_string()));
-                } else if Tag::from_u16_exhaustive(tag_location) == Tag::GeoDoubleParamsTag {
-                    // If the tag_location points to the value of Tag::GeoDoubleParamsTag, then we
-                    // need to extract a subslice from GeoDoubleParamsTag
-
-                    let geo_double_params = geo_double_params
-                        .as_ref()
-                        .expect("GeoDoubleParamsTag exists but geo_double_params does not.");
-                    let value_offset = value_offset as usize;
-                    let value = if count == 1 {
-                        Value::Double(geo_double_params[value_offset])
-                    } else {
-                        let x = geo_double_params[value_offset..value_offset + count as usize]
-                            .iter()
-                            .map(|val| Value::Double(*val))
-                            .collect();
-                        Value::List(x)
-                    };
-                    tags.insert(tag_name, value);
-                }
-            }
-            geo_key_directory = Some(GeoKeyDirectory::from_tags(tags)?);
-        }
-
         let samples_per_pixel = samples_per_pixel.expect("samples_per_pixel not found");
         let planar_configuration = if let Some(planar_configuration) = planar_configuration {
             planar_configuration
@@ -395,9 +308,7 @@ impl ImageFileDirectory {
                 .unwrap_or(vec![SampleFormat::Uint; samples_per_pixel as _]),
             copyright,
             jpeg_tables,
-            geo_key_directory,
-            model_pixel_scale,
-            model_tiepoint,
+            extra_tags: extra_tags_registry,
             other_tags,
         })
     }
@@ -616,23 +527,23 @@ impl ImageFileDirectory {
         self.copyright.as_deref()
     }
 
-    /// Geospatial tags
-    ///
-    pub fn geo_key_directory(&self) -> Option<&GeoKeyDirectory> {
-        self.geo_key_directory.as_ref()
-    }
+    // /// Geospatial tags
+    // ///
+    // pub fn geo_key_directory(&self) -> Option<&GeoKeyDirectory> {
+    //     self.geo_key_directory.as_ref()
+    // }
 
-    /// Used in interchangeable GeoTIFF files.
-    ///
-    pub fn model_pixel_scale(&self) -> Option<&[f64]> {
-        self.model_pixel_scale.as_deref()
-    }
+    // /// Used in interchangeable GeoTIFF files.
+    // ///
+    // pub fn model_pixel_scale(&self) -> Option<&[f64]> {
+    //     self.model_pixel_scale.as_deref()
+    // }
 
-    /// Used in interchangeable GeoTIFF files.
-    ///
-    pub fn model_tiepoint(&self) -> Option<&[f64]> {
-        self.model_tiepoint.as_deref()
-    }
+    // /// Used in interchangeable GeoTIFF files.
+    // ///
+    // pub fn model_tiepoint(&self) -> Option<&[f64]> {
+    //     self.model_tiepoint.as_deref()
+    // }
 
     /// Tags for which the tiff crate doesn't have a hard-coded enum variant.
     pub fn other_tags(&self) -> &HashMap<Tag, Value> {
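With the geospatial fields removed, every `ImageFileDirectory` now owns an `ExtraTagsRegistry` that is filled while its tags are parsed. A sketch of the new `from_tags` call shape; the module paths are assumptions, and `tag_data` must already contain the mandatory baseline TIFF tags or `from_tags` will panic (for example on `samples_per_pixel`):

```rust
use std::collections::HashMap;

use async_tiff::metadata::ExtraTagsRegistry;
use async_tiff::reader::Endianness;
use async_tiff::tiff::tags::Tag;
use async_tiff::tiff::Value;
use async_tiff::ImageFileDirectory;

fn ifd_from_tags(tag_data: HashMap<Tag, Value>) -> ImageFileDirectory {
    // One registry per IFD; parsed extra tags end up in `ifd.extra_tags`.
    let registry = ExtraTagsRegistry::default();
    ImageFileDirectory::from_tags(tag_data, Endianness::LittleEndian, registry).unwrap()
}
```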
diff --git a/src/metadata/extra_tags.rs b/src/metadata/extra_tags.rs
new file mode 100644
index 0000000..0ccbd45
--- /dev/null
+++ b/src/metadata/extra_tags.rs
@@ -0,0 +1,378 @@
+use crate::error::{AsyncTiffError, AsyncTiffResult};
+use crate::geo::{AffineTransform, GeoKeyDirectory, GeoKeyTag};
+use crate::tiff::tags::Tag;
+use crate::tiff::Value;
+use futures::stream::Once;
+use num_enum::TryFromPrimitive;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Debug;
+use std::sync::{Arc, LazyLock, OnceLock, RwLock};
+
+/// Trait to implement for custom tags, such as Geo, EXIF, OME, etc.
+/// Your type should also implement `Clone`.
+// Send + Sync are required for Python, where `dyn ExtraTags` needs `Send` and `Sync`
+pub trait ExtraTags: ExtraTagsBlankets + std::any::Any + Debug + Send + Sync {
+    /// A list of tags this entry processes,
+    /// e.g. for Geo this would be [34735, 34736, 34737]
+    fn tags(&self) -> &'static [Tag];
+
+    /// Process a single tag; use interior mutability if needed
+    fn process_tag(&self, tag: Tag, value: Value) -> AsyncTiffResult<()>;
+}
+
+// we need to do a little dance to do an object-safe deep clone
+// https://stackoverflow.com/a/30353928/14681457
+// also blanket impls for downcasting to any
+pub trait ExtraTagsBlankets {
+    fn clone_arc(&self) -> Arc<dyn ExtraTags>;
+    fn as_any_arc(self: Arc<Self>) -> Arc<dyn std::any::Any + Send + Sync>;
+}
+
+impl<T> ExtraTagsBlankets for T
+where
+    T: 'static + ExtraTags + Clone,
+{
+    fn clone_arc(&self) -> Arc<dyn ExtraTags> {
+        Arc::new(self.clone())
+    }
+
+    fn as_any_arc(self: Arc<Self>) -> Arc<dyn std::any::Any + Send + Sync> {
+        self
+    }
+}
+
+/// The registry in which extra tags (parsers) are registered.
+/// This is passed to `TiffMetadataReader::read_all_ifds` and, per IFD, to
+/// `ImageFileDirectory::from_tags`.
+#[derive(Debug, Clone)]
+pub struct ExtraTagsRegistry(HashMap<Tag, Arc<dyn ExtraTags>>);
+
+impl ExtraTagsRegistry {
+    /// Create a new, empty `ExtraTagsRegistry`
+    pub fn new() -> Self {
+        Self(HashMap::new())
+    }
+
+    /// Register an `ExtraTags` implementation so its tags are parsed and stored in the
+    /// IFD's `extra_tags`
+    pub fn register(&mut self, tags: Arc<dyn ExtraTags>) -> AsyncTiffResult<()> {
+        // check for duplicates
+        for tag in tags.tags() {
+            if self.0.contains_key(tag) {
+                return Err(AsyncTiffError::General(format!(
+                    "Tag {tag:?} already registered in {self:?}!"
+                )));
+            }
+        }
+        // add to self
+        for tag in tags.tags() {
+            self.0.insert(*tag, tags.clone());
+        }
+        Ok(())
+    }
+
+    /// Deep clone so we have different registries per IFD
+    pub(crate) fn deep_clone(&self) -> Self {
+        let mut new_registry = ExtraTagsRegistry::new();
+
+        // we need to do some magic, because we can have multiple tags pointing to the same arc
+        let mut seen = HashSet::new();
+        for extra_tags in self.0.values() {
+            // only add if this is the first encountered reference to this arc
+            // (using thin pointer equality: https://stackoverflow.com/a/67114787/14681457 ;
+            // https://github.com/rust-lang/rust/issues/46139#issuecomment-346971153)
+            if seen.insert(Arc::as_ptr(extra_tags) as *const ()) {
+                if let Err(e) = new_registry.register(extra_tags.clone_arc()) {
+                    panic!("{e}");
+                }
+            }
+        }
+
+        new_registry
+    }
+}
+
+impl Default for ExtraTagsRegistry {
+    fn default() -> Self {
+        let mut new = Self::new();
+        new.register(Arc::new(GeoTags::default())).unwrap();
+        new
+    }
+}
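A hedged sketch of what a downstream `ExtraTags` implementation could look like, modeled on the `TestyTag` test further down in this file: a nodata parser keyed on TIFF tag 42113 (`GDAL_NODATA`). The type, tag choice, and import paths are illustrative assumptions and are not part of this patch:

```rust
use std::sync::{Arc, LazyLock, OnceLock};

use async_tiff::error::AsyncTiffResult;
use async_tiff::metadata::{ExtraTags, ExtraTagsRegistry};
use async_tiff::tiff::tags::Tag;
use async_tiff::tiff::Value;

// Hypothetical example type; 42113 is the GDAL_NODATA TIFF tag.
static GDAL_NODATA_TAGS: LazyLock<Vec<Tag>> =
    LazyLock::new(|| vec![Tag::from_u16_exhaustive(42113)]);

#[derive(Debug, Clone, Default)]
struct GdalNodata(OnceLock<String>);

impl ExtraTags for GdalNodata {
    fn tags(&self) -> &'static [Tag] {
        &GDAL_NODATA_TAGS
    }

    fn process_tag(&self, _tag: Tag, value: Value) -> AsyncTiffResult<()> {
        // Interior mutability via OnceLock, mirroring the GeoTags parser below.
        let _ = self.0.set(value.into_string()?);
        Ok(())
    }
}

fn registry_with_nodata() -> ExtraTagsRegistry {
    // Start from the default registry (GeoTags pre-registered) and add our parser.
    let mut registry = ExtraTagsRegistry::default();
    registry.register(Arc::new(GdalNodata::default())).unwrap();
    registry
}
```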
+
+/// Built-in parser for GeoTIFF tags; registered by `ExtraTagsRegistry::default()`.
+#[derive(Debug, Clone, Default)]
+pub struct GeoTags {
+    // we use a bunch of `OnceLock`s here, because the alternative would be a
+    // state machine with an `RwLock` which isn't Clone
+    model_tiepoint: OnceLock<Vec<f64>>,
+    model_pixel_scale: OnceLock<[f64; 3]>,
+    model_transform: OnceLock<[f64; 16]>,
+    geo_dir: OnceLock<GeoKeyDirectory>,
+    geo_dir_data: OnceLock<Vec<u16>>,
+    geo_ascii_params: OnceLock<String>,
+    geo_double_params: OnceLock<Vec<f64>>,
+}
+
+impl GeoTags {
+    /// The affine geotransform for this IFD, if the relevant tags were present.
+    pub fn affine(&self) -> Option<AffineTransform> {
+        if let Some(transform) = self.model_transform.get() {
+            todo!("implement https://docs.ogc.org/is/19-008r4/19-008r4.html#_geotiff_tags_for_coordinate_transformations")
+        } else if let (Some(model_tiepoint), Some(model_pixel_scale)) =
+            (self.model_tiepoint.get(), self.model_pixel_scale.get())
+        {
+            Some(AffineTransform::new(
+                model_pixel_scale[0],
+                0.0,
+                model_tiepoint[3],
+                0.0,
+                -model_pixel_scale[1],
+                model_tiepoint[4],
+            ))
+        } else {
+            None
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum MaybePartialGeoKeyDirectory {
+    Partial {
+        geo_key_directory_data: OnceLock<Vec<u16>>,
+        geo_ascii_params: OnceLock<String>,
+        geo_double_params: OnceLock<Vec<f64>>,
+    },
+    Parsed(GeoKeyDirectory),
+}
+
+impl Default for MaybePartialGeoKeyDirectory {
+    fn default() -> Self {
+        Self::Partial {
+            geo_key_directory_data: OnceLock::new(),
+            geo_ascii_params: OnceLock::new(),
+            geo_double_params: OnceLock::new(),
+        }
+    }
+}
+
+impl MaybePartialGeoKeyDirectory {
+    /// Maybe parse self; returns `Ok(true)` if parsed this call
+    fn maybe_parse(&mut self) -> AsyncTiffResult<bool> {
+        match self {
+            Self::Partial {
+                geo_key_directory_data,
+                geo_ascii_params,
+                geo_double_params,
+            } if geo_key_directory_data.get().is_some()
+                && geo_ascii_params.get().is_some()
+                && geo_double_params.get().is_some() =>
+            {
+                // take out the data so we can mutate self
+                let data = geo_key_directory_data.take().unwrap();
+                let geo_ascii_params = geo_ascii_params.take().unwrap();
+                let geo_double_params = geo_double_params.take().unwrap();
+
+                let mut chunks = data.chunks(4);
+
+                let header = chunks
+                    .next()
+                    .expect("If the geo key directory exists, a header should exist.");
+                let key_directory_version = header[0];
+                assert_eq!(key_directory_version, 1);
+
+                let key_revision = header[1];
+                assert_eq!(key_revision, 1);
+
+                let _key_minor_revision = header[2];
+                let number_of_keys = header[3];
+
+                let mut tags = HashMap::with_capacity(number_of_keys as usize);
+                for _ in 0..number_of_keys {
+                    let chunk = chunks
+                        .next()
+                        .expect("There should be a chunk for each key.");
+
+                    let key_id = chunk[0];
+                    let tag_name = GeoKeyTag::try_from_primitive(key_id)
+                        .expect("Unknown GeoKeyTag id: {key_id}");
+
+                    let tag_location = chunk[1];
+                    let count = chunk[2];
+                    let value_offset = chunk[3];
+
+                    if tag_location == 0 {
+                        tags.insert(tag_name, Value::Short(value_offset));
+                    } else if Tag::from_u16_exhaustive(tag_location) == Tag::GeoAsciiParamsTag {
+                        // If the tag_location points to the value of
+                        // Tag::GeoAsciiParamsTag, then we need to extract a
+                        // subslice from GeoAsciiParamsTag
+                        let value_offset = value_offset as usize;
+                        let mut s = &geo_ascii_params[value_offset..value_offset + count as usize];
+
+                        // It seems that this string subslice might always
+                        // include the final | character?
+                        if s.ends_with('|') {
+                            s = &s[0..s.len() - 1];
+                        }
+
+                        tags.insert(tag_name, Value::Ascii(s.to_string()));
+                    } else if Tag::from_u16_exhaustive(tag_location) == Tag::GeoDoubleParamsTag {
+                        // If the tag_location points to the value of
+                        // Tag::GeoDoubleParamsTag, then we need to extract a
+                        // subslice from GeoDoubleParamsTag
+                        let value_offset = value_offset as usize;
+                        let value = if count == 1 {
+                            Value::Double(geo_double_params[value_offset])
+                        } else {
+                            let x = geo_double_params[value_offset..value_offset + count as usize]
+                                .iter()
+                                .map(|val| Value::Double(*val))
+                                .collect();
+                            Value::List(x)
+                        };
+                        tags.insert(tag_name, value);
+                    }
+                }
+                *self = MaybePartialGeoKeyDirectory::Parsed(GeoKeyDirectory::from_tags(tags)?);
+                Ok(true)
+            }
+            _ => Ok(false),
+        }
+    }
+}
+
+static GEO_TAGS: LazyLock<Vec<Tag>> = LazyLock::new(|| {
+    vec![
+        Tag::ModelTiepointTag,
+        Tag::ModelPixelScaleTag,
+        Tag::ModelTransformationTag,
+        Tag::GeoKeyDirectoryTag,
+        Tag::GeoAsciiParamsTag,
+        Tag::GeoDoubleParamsTag,
+    ]
+});
+
+impl ExtraTags for GeoTags {
+    fn tags(&self) -> &'static [Tag] {
+        &GEO_TAGS
+    }
+
+    fn process_tag(&self, tag: Tag, value: Value) -> AsyncTiffResult<()> {
+        match tag {
+            Tag::ModelTiepointTag => {
+                // https://docs.ogc.org/is/19-008r4/19-008r4.html#_requirements_class_modeltiepointtag
+                self.model_tiepoint.set(value.into_f64_vec()?).unwrap()
+            }
+            Tag::ModelPixelScaleTag => {
+                // unwrapping on non-spec compliance?
+                // https://docs.ogc.org/is/19-008r4/19-008r4.html#_requirements_class_modelpixelscaletag
+                self.model_pixel_scale
+                    .set(value.into_f64_vec()?.try_into().unwrap())
+                    .unwrap()
+            }
+            Tag::ModelTransformationTag => {
+                // unwrapping on non-spec compliance?
+                // https://docs.ogc.org/is/19-008r4/19-008r4.html#_requirements_class_modeltransformationtag
+                self.model_transform
+                    .set(value.into_f64_vec()?.try_into().unwrap())
+                    .unwrap()
+            }
+            Tag::GeoKeyDirectoryTag => {
+                self.geo_dir_data.set(value.into_u16_vec()?).unwrap();
+            }
+            Tag::GeoAsciiParamsTag => {
+                self.geo_ascii_params.set(value.into_string()?).unwrap();
+            }
+            Tag::GeoDoubleParamsTag => {
+                self.geo_double_params.set(value.into_f64_vec()?).unwrap();
+            }
+            _ => unreachable!(),
+        }
+        // self.geo_dir.maybe_parse()?;
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::sync::LazyLock;
+
+    #[derive(Debug, Clone, PartialEq)]
+    struct TestyTag;
+
+    static TESTY_TAGS: LazyLock<Vec<Tag>> = LazyLock::new(|| {
+        vec![
+            Tag::from_u16_exhaustive(u16::MAX),
+            Tag::from_u16_exhaustive(u16::MAX - 1),
+        ]
+    });
+
+    impl ExtraTags for TestyTag {
+        fn tags(&self) -> &'static [Tag] {
+            &TESTY_TAGS
+        }
+
+        fn process_tag(
+            &self,
+            tag: Tag,
+            value: crate::tiff::Value,
+        ) -> crate::error::AsyncTiffResult<()> {
+            println!("received {tag:?}: {value:?}");
+            Ok(())
+        }
+    }
+
+    #[test]
+    fn test_register() {
+        let mut registry = ExtraTagsRegistry::new();
+        assert!(registry.0.is_empty());
+        let a1: Arc<dyn ExtraTags> = Arc::new(TestyTag);
+        registry.register(a1.clone()).unwrap();
+        assert_eq!(registry.0.len(), TestyTag.tags().len());
+        for tag in a1.tags() {
+            // very strict equality check
+            assert!(Arc::ptr_eq(&registry.0[tag], &a1));
+        }
+    }
+
+    #[test]
+    fn test_overlap_err() {
+        let mut registry = ExtraTagsRegistry::new();
+        assert!(registry.0.is_empty());
+        registry.register(Arc::new(TestyTag)).unwrap();
+        assert!(matches!(
+            registry.register(Arc::new(TestyTag)).unwrap_err(),
+            AsyncTiffError::General(_)
+        ));
+    }
+
+    #[test]
+    fn test_deep_clone() {
+        let mut registry = ExtraTagsRegistry::new();
+        let a1: Arc<dyn ExtraTags> = Arc::new(TestyTag);
+        registry.register(a1.clone()).unwrap();
+        let r2 = registry.deep_clone();
+        for tags in a1.tags().windows(2) {
+            // all should refer to the same Arc
+            assert!(Arc::ptr_eq(&r2.0[&tags[0]], &r2.0[&tags[1]]));
+            // which is different from the previous
+            assert!(!Arc::ptr_eq(&a1, &r2.0[&tags[0]]));
+            assert!(!Arc::ptr_eq(&a1, &r2.0[&tags[1]]));
+        }
+    }
+
+    #[test]
+    fn test_geo() {
+        let registry = ExtraTagsRegistry::default();
+        // create a sample hashmap
+        let hmap = HashMap::from([
+            (
+                Tag::ModelTiepointTag,
+                Value::List(
+                    [0.0, 0.0, 0.0, 350807.4, 5316081.3, 0.0f64]
+                        .map(|v| Value::Double(v))
+                        .into(),
+                ),
+            ),
+            (
+                Tag::ModelPixelScaleTag,
+                Value::List([100.0, 100.0, 0.0f64].map(|v| Value::Double(v)).into()),
+            ),
+            (
+                Tag::GeoKeyDirectoryTag,
+                Value::List(
+                    [
+                        1, 0, 2, 4,
+                        1024, 0, 1, 1,
+                        1025, 0, 1, 1,
+                        3072, 0, 1, 32660,
+                        3073, 34737, 25, 0,
+                    ]
+                    .map(|v| Value::Unsigned(v))
+                    .into(),
+                ),
+            ),
+            (
+                Tag::GeoAsciiParamsTag,
+                Value::Ascii("UTM Zone 60 N with WGS 84|".into()),
+            ),
+        ]);
+        for (k, v) in hmap {
+            if let Some(ref entry) = registry.0.get(&k) {
+                entry.process_tag(k, v).unwrap();
+            }
+        }
+        let geo_dir = registry.0[&GEO_TAGS[0]]
+            .clone()
+            .as_any_arc()
+            .downcast::<GeoTags>()
+            .unwrap();
+    }
+}
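Putting the public pieces together, a hedged sketch of reading a TIFF with a registry that adds the hypothetical `GdalNodata` parser from the earlier sketch on top of the default `GeoTags` registration (crate name, re-export paths, and the `registry_with_nodata` helper are assumptions):

```rust
use async_tiff::metadata::{MetadataFetch, TiffMetadataReader};
use async_tiff::ImageFileDirectory;

async fn read_with_custom_registry<F: MetadataFetch>(fetch: &F) -> Vec<ImageFileDirectory> {
    // Default registry plus our own parser, built by the helper sketched earlier.
    let registry = registry_with_nodata();

    let mut reader = TiffMetadataReader::try_open(fetch).await.unwrap();
    // Internally, each IFD receives a deep clone of `registry`.
    reader.read_all_ifds(fetch, registry).await.unwrap()
}
```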
diff --git a/src/metadata/mod.rs b/src/metadata/mod.rs
index 3592014..f2792c7 100644
--- a/src/metadata/mod.rs
+++ b/src/metadata/mod.rs
@@ -40,7 +40,7 @@
 //!
 //! // Read all IFDs out of the source.
 //! let ifds = metadata_reader
-//!     .read_all_ifds(&prefetch_reader)
+//!     .read_all_ifds(&prefetch_reader, Default::default())
 //!     .await
 //!     .unwrap();
 //! # })
@@ -58,8 +58,10 @@
 //! fetches the first `N` bytes out of a file.
 //!
 
+mod extra_tags;
 mod fetch;
 mod reader;
 
+pub use extra_tags::{ExtraTags, ExtraTagsRegistry};
 pub use fetch::{MetadataFetch, PrefetchBuffer};
 pub use reader::{ImageFileDirectoryReader, TiffMetadataReader};
diff --git a/src/metadata/reader.rs b/src/metadata/reader.rs
index 6a72ff9..34c67c1 100644
--- a/src/metadata/reader.rs
+++ b/src/metadata/reader.rs
@@ -5,6 +5,7 @@ use bytes::Bytes;
 
 use crate::error::{AsyncTiffError, AsyncTiffResult};
 use crate::metadata::fetch::MetadataCursor;
+use crate::metadata::ExtraTagsRegistry;
 use crate::metadata::MetadataFetch;
 use crate::reader::Endianness;
 use crate::tiff::tags::{Tag, Type};
@@ -110,12 +111,13 @@ impl TiffMetadataReader {
     pub async fn read_next_ifd<F: MetadataFetch>(
         &mut self,
         fetch: &F,
+        extra_tags_registry: ExtraTagsRegistry,
     ) -> AsyncTiffResult<Option<ImageFileDirectory>> {
         if let Some(ifd_start) = self.next_ifd_offset {
             let ifd_reader =
                 ImageFileDirectoryReader::open(fetch, ifd_start, self.bigtiff, self.endianness)
                     .await?;
-            let ifd = ifd_reader.read(fetch).await?;
+            let ifd = ifd_reader.read(fetch, extra_tags_registry).await?;
             let next_ifd_offset = ifd_reader.finish(fetch).await?;
             self.next_ifd_offset = next_ifd_offset;
             Ok(Some(ifd))
@@ -128,9 +130,14 @@ impl TiffMetadataReader {
     pub async fn read_all_ifds<F: MetadataFetch>(
         &mut self,
         fetch: &F,
+        extra_tags_registry: ExtraTagsRegistry,
     ) -> AsyncTiffResult<Vec<ImageFileDirectory>> {
         let mut ifds = vec![];
-        while let Some(ifd) = self.read_next_ifd(fetch).await? {
+        // deep clone the extra_tags_registry so we can have different values
+        while let Some(ifd) = self
+            .read_next_ifd(fetch, extra_tags_registry.deep_clone())
+            .await?
+        {
             ifds.push(ifd);
         }
         Ok(ifds)
@@ -220,13 +227,17 @@ impl ImageFileDirectoryReader {
     ///
     /// Keep in mind that you'll still need to call [`finish`][Self::finish] to get the byte offset
     /// of the next IFD.
-    pub async fn read<F: MetadataFetch>(&self, fetch: &F) -> AsyncTiffResult<ImageFileDirectory> {
+    pub async fn read<F: MetadataFetch>(
+        &self,
+        fetch: &F,
+        extra_tags_registry: ExtraTagsRegistry,
+    ) -> AsyncTiffResult<ImageFileDirectory> {
         let mut tags = HashMap::with_capacity(self.tag_count as usize);
         for tag_idx in 0..self.tag_count {
             let (tag, value) = self.read_tag(fetch, tag_idx).await?;
             tags.insert(tag, value);
         }
-        ImageFileDirectory::from_tags(tags, self.endianness)
+        ImageFileDirectory::from_tags(tags, self.endianness, extra_tags_registry)
     }
 
     /// Finish this reader, reading the byte offset of the next IFD
diff --git a/tests/image_tiff/util.rs b/tests/image_tiff/util.rs
index 0b7e0c3..b00a4ad 100644
--- a/tests/image_tiff/util.rs
+++ b/tests/image_tiff/util.rs
@@ -14,6 +14,9 @@ pub(crate) async fn open_tiff(filename: &str) -> TIFF {
     let reader = Arc::new(ObjectReader::new(store.clone(), path.as_str().into()))
         as Arc<dyn AsyncFileReader>;
     let mut metadata_reader = TiffMetadataReader::try_open(&reader).await.unwrap();
-    let ifds = metadata_reader.read_all_ifds(&reader).await.unwrap();
+    let ifds = metadata_reader
+        .read_all_ifds(&reader, Default::default())
+        .await
+        .unwrap();
     TIFF::new(ifds)
 }
diff --git a/tests/util/mod.rs b/tests/util/mod.rs
index 8b3fa07..7eae8b1 100644
--- a/tests/util/mod.rs
+++ b/tests/util/mod.rs
@@ -17,7 +17,7 @@ pub(crate) async fn open_remote_tiff(url: &str) -> TIFF {
         .await
         .unwrap();
     let ifds = metadata_reader
-        .read_all_ifds(&prefetch_reader)
+        .read_all_ifds(&prefetch_reader, Default::default())
         .await
         .unwrap();
     TIFF::new(ifds)
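For callers that want per-IFD control, `read_next_ifd` now takes a registry per call; `read_all_ifds` does this internally via `deep_clone`, which this patch keeps crate-private. Note also that `ImageFileDirectory::extra_tags` stays `pub(crate)` and `GeoTags` is not re-exported, so reading parsed values back out of an IFD will need an accessor added separately. A sketch under the same path assumptions as the earlier examples:

```rust
use async_tiff::metadata::{ExtraTagsRegistry, MetadataFetch, TiffMetadataReader};
use async_tiff::ImageFileDirectory;

async fn read_ifds_one_by_one<F: MetadataFetch>(fetch: &F) -> Vec<ImageFileDirectory> {
    let mut reader = TiffMetadataReader::try_open(fetch).await.unwrap();
    let mut ifds = vec![];
    // `read_all_ifds` deep-clones one registry per IFD internally; since `deep_clone`
    // is crate-private, an external caller builds a fresh registry per IFD instead.
    while let Some(ifd) = reader
        .read_next_ifd(fetch, ExtraTagsRegistry::default())
        .await
        .unwrap()
    {
        ifds.push(ifd);
    }
    ifds
}
```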