Skip to content

Commit deb5f3b

Browse files
add nodata, scales and offsets attributes to ImageData and PointData classes (#818)
1 parent 9531cbb commit deb5f3b

File tree

6 files changed

+128
-23
lines changed

6 files changed

+128
-23
lines changed

CHANGES.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,11 +53,17 @@ with Reader("tests/fixtures/cog_tags.tif") as src:
5353
```
5454

5555
* only cast data to `uint8` if colormap values are of type `uint8`
56+
5657
* add `alpha_mask` attribute to `ImageData` class
58+
5759
* allow partial alpha values from alpha band
60+
5861
* better handle non-uint8 alpha band
62+
5963
* remove deprecated `force_binary_mask` option in `reader.read` function **breaking change**
6064

65+
* add `nodata`, `scales` and `offsets` attributes to `ImageData` and `PointData` classes
66+
6167
# 7.8.1 (2025-06-16)
6268

6369
* apply scale/offset to dataset statistics in ImageData object (used for automatic rescaling)

rio_tiler/io/xarray.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -312,6 +312,7 @@ def tile(
312312
crs=dst_crs,
313313
dataset_statistics=stats,
314314
band_names=band_names,
315+
nodata=da.rio.nodata,
315316
)
316317

317318
def part(
@@ -405,6 +406,7 @@ def part(
405406
crs=da.rio.crs,
406407
dataset_statistics=stats,
407408
band_names=band_names,
409+
nodata=da.rio.nodata,
408410
)
409411

410412
output_height = height or img.height
@@ -498,6 +500,7 @@ def preview(
498500
crs=da.rio.crs,
499501
dataset_statistics=stats,
500502
band_names=band_names,
503+
nodata=da.rio.nodata,
501504
)
502505

503506
output_height = height or img.height
@@ -566,6 +569,7 @@ def point(
566569
crs=coord_crs,
567570
band_names=band_names,
568571
pixel_location=(x, y),
572+
nodata=da.rio.nodata,
569573
)
570574

571575
def feature(

rio_tiler/models.py

Lines changed: 61 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
ColorMapType,
3434
GDALColorMapType,
3535
IntervalTuple,
36+
NoData,
3637
NumType,
3738
RIOResampling,
3839
WarpResampling,
@@ -159,6 +160,9 @@ class PointData:
159160
crs: Optional[CRS] = attr.ib(default=None, kw_only=True)
160161
assets: Optional[List] = attr.ib(default=None, kw_only=True)
161162
metadata: Optional[Dict] = attr.ib(factory=dict, kw_only=True)
163+
nodata: Optional[NoData] = attr.ib(default=None, kw_only=True)
164+
scales: Optional[List[NumType]] = attr.ib(kw_only=True)
165+
offsets: Optional[List[NumType]] = attr.ib(kw_only=True)
162166
pixel_location: Optional[Tuple[NumType, NumType]] = attr.ib(
163167
default=None, kw_only=True
164168
)
@@ -179,6 +183,14 @@ def _validate_coordinates(self, attribute, value):
179183
def _default_names(self):
180184
return [f"b{ix + 1}" for ix in range(self.count)]
181185

186+
@scales.default
187+
def _default_scales(self):
188+
return [1.0] * self.count
189+
190+
@offsets.default
191+
def _default_offsets(self):
192+
return [0.0] * self.count
193+
182194
@property
183195
def data(self) -> numpy.ndarray:
184196
"""Return data part of the masked array."""
@@ -236,6 +248,10 @@ def create_from_list(cls, data: Sequence["PointData"]) -> Self:
236248
itertools.chain.from_iterable([pt.band_names for pt in data if pt.band_names])
237249
)
238250

251+
scales = list(itertools.chain.from_iterable([pt.scales for pt in data]))
252+
253+
offsets = list(itertools.chain.from_iterable([pt.offsets for pt in data]))
254+
239255
metadata = dict(
240256
itertools.chain.from_iterable(
241257
[pt.metadata.items() for pt in data if pt.metadata]
@@ -246,6 +262,8 @@ def create_from_list(cls, data: Sequence["PointData"]) -> Self:
246262
arr,
247263
assets=assets,
248264
band_names=band_names,
265+
offsets=offsets,
266+
scales=scales,
249267
coordinates=data[0].coordinates,
250268
crs=data[0].crs,
251269
metadata=metadata,
@@ -311,6 +329,9 @@ class ImageData:
311329
)
312330
crs: Optional[CRS] = attr.ib(default=None, kw_only=True)
313331
metadata: Optional[Dict] = attr.ib(factory=dict, kw_only=True)
332+
nodata: Optional[NoData] = attr.ib(default=None, kw_only=True)
333+
scales: Optional[List[NumType]] = attr.ib(kw_only=True)
334+
offsets: Optional[List[NumType]] = attr.ib(kw_only=True)
314335
band_names: Optional[List[str]] = attr.ib(kw_only=True)
315336
dataset_statistics: Optional[Sequence[Tuple[float, float]]] = attr.ib(
316337
default=None, kw_only=True
@@ -322,6 +343,14 @@ class ImageData:
322343
def _default_names(self):
323344
return [f"b{ix + 1}" for ix in range(self.count)]
324345

346+
@scales.default
347+
def _default_scales(self):
348+
return [1.0] * self.count
349+
350+
@offsets.default
351+
def _default_offsets(self):
352+
return [0.0] * self.count
353+
325354
@alpha_mask.validator
326355
def _check_alpha_mask(self, attribute, value):
327356
"""Make sure alpha mask has valid shape and datatype."""
@@ -411,7 +440,14 @@ def from_bytes(cls, data: bytes) -> Self:
411440
array,
412441
crs=dataset.crs,
413442
bounds=dataset.bounds,
443+
band_names=[
444+
dataset.descriptions[idx - 1] or f"b{idx}" for idx in indexes
445+
],
414446
dataset_statistics=dataset_statistics,
447+
nodata=dataset.nodata,
448+
scales=list(dataset.scales),
449+
offsets=list(dataset.offsets),
450+
metadata=dataset.tags(),
415451
)
416452

417453
@classmethod
@@ -464,6 +500,10 @@ def create_from_list(cls, data: Sequence["ImageData"]) -> Self:
464500
)
465501
)
466502

503+
scales = list(itertools.chain.from_iterable([img.scales for img in data]))
504+
505+
offsets = list(itertools.chain.from_iterable([img.offsets for img in data]))
506+
467507
stats = list(
468508
itertools.chain.from_iterable(
469509
[img.dataset_statistics for img in data if img.dataset_statistics]
@@ -486,6 +526,8 @@ def create_from_list(cls, data: Sequence["ImageData"]) -> Self:
486526
dataset_statistics=dataset_statistics,
487527
cutline_mask=cutline_mask,
488528
metadata=metadata,
529+
scales=scales,
530+
offsets=offsets,
489531
)
490532

491533
def data_as_image(self) -> numpy.ndarray:
@@ -540,6 +582,10 @@ def rescale(
540582
out_range=dtype_ranges[out_dtype],
541583
).astype(out_dtype)
542584

585+
# reset scales/offsets
586+
self.scales = [1.0] * self.count
587+
self.offsets = [0.0] * self.count
588+
543589
return self
544590

545591
def apply_color_formula(self, color_formula: Optional[str]) -> Self:
@@ -560,6 +606,10 @@ def apply_color_formula(self, color_formula: Optional[str]) -> Self:
560606
self.alpha_mask, in_range=dtype_ranges[str(self.alpha_mask.dtype)]
561607
).astype("uint8")
562608

609+
# reset scales/offsets
610+
self.scales = [1.0] * self.count
611+
self.offsets = [0.0] * self.count
612+
563613
return self
564614

565615
def apply_colormap(self, colormap: ColorMapType) -> "ImageData":
@@ -643,6 +693,9 @@ def resize(
643693
crs=self.crs,
644694
bounds=self.bounds,
645695
band_names=self.band_names,
696+
nodata=self.nodata,
697+
scales=self.scales,
698+
offsets=self.offsets,
646699
metadata=self.metadata,
647700
dataset_statistics=self.dataset_statistics,
648701
alpha_mask=alpha_mask,
@@ -660,6 +713,9 @@ def clip(self, bbox: BBox) -> "ImageData":
660713
crs=self.crs,
661714
bounds=bbox,
662715
band_names=self.band_names,
716+
nodata=self.nodata,
717+
scales=self.scales,
718+
offsets=self.offsets,
663719
metadata=self.metadata,
664720
dataset_statistics=self.dataset_statistics,
665721
alpha_mask=self.alpha_mask[row_slice, col_slice].copy()
@@ -777,14 +833,15 @@ def to_raster(self, dst_path: str, *, driver: str = "GTIFF", **kwargs: Any) -> N
777833
if "crs" not in kwargs and self.crs:
778834
kwargs.update({"crs": self.crs})
779835

780-
write_nodata = "nodata" in kwargs
836+
write_nodata = self.nodata is not None
781837
count, height, width = self.array.shape
782838

783839
output_profile = {
784840
"dtype": self.array.dtype,
785841
"count": count if write_nodata else count + 1,
786842
"height": height,
787843
"width": width,
844+
"nodata": self.nodata,
788845
}
789846
output_profile.update(kwargs)
790847

@@ -930,6 +987,9 @@ def reproject(
930987
crs=dst_crs,
931988
bounds=bounds,
932989
band_names=self.band_names,
990+
nodata=self.nodata,
991+
scales=self.scales,
992+
offsets=self.offsets,
933993
metadata=self.metadata,
934994
dataset_statistics=self.dataset_statistics,
935995
alpha_mask=alpha_mask,

rio_tiler/reader.py

Lines changed: 28 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -276,33 +276,33 @@ def read(
276276
# We only add dataset statistics if we have them for all the indexes
277277
dataset_statistics = stats if len(stats) == len(indexes) else None
278278

279+
scales = numpy.array(dataset.scales)[numpy.array(indexes) - 1]
280+
offsets = numpy.array(dataset.offsets)[numpy.array(indexes) - 1]
279281
if unscale:
280282
data = data.astype("float32", casting="unsafe")
281-
282-
# reshaped to match data
283-
scales = numpy.array(dataset.scales)[numpy.array(indexes) - 1].reshape(
284-
(-1, 1, 1)
285-
)
286-
offsets = numpy.array(dataset.offsets)[numpy.array(indexes) - 1].reshape(
287-
(-1, 1, 1)
288-
)
289-
290-
numpy.multiply(data, scales, out=data, casting="unsafe")
291-
numpy.add(data, offsets, out=data, casting="unsafe")
283+
numpy.multiply(data, scales.reshape((-1, 1, 1)), out=data, casting="unsafe")
284+
numpy.add(data, offsets.reshape((-1, 1, 1)), out=data, casting="unsafe")
292285

293286
# apply scale/offsets to stats
294287
if dataset_statistics:
295-
scales = numpy.array(dataset.scales)[numpy.array(indexes) - 1].reshape(
296-
(-1, 1)
288+
stats_array = numpy.array(dataset_statistics)
289+
numpy.multiply(
290+
stats_array,
291+
scales.reshape((-1, 1)),
292+
out=stats_array,
293+
casting="unsafe",
297294
)
298-
offsets = numpy.array(dataset.offsets)[numpy.array(indexes) - 1].reshape(
299-
(-1, 1)
295+
numpy.add(
296+
stats_array,
297+
offsets.reshape((-1, 1)),
298+
out=stats_array,
299+
casting="unsafe",
300300
)
301-
stats_array = numpy.array(dataset_statistics)
302-
numpy.multiply(stats_array, scales, out=stats_array, casting="unsafe")
303-
numpy.add(stats_array, offsets, out=stats_array, casting="unsafe")
304301
dataset_statistics = [tuple(s) for s in stats_array.tolist()]
305302

303+
scales = numpy.zeros(len(indexes)) + 1.0
304+
offsets = numpy.zeros(len(indexes))
305+
306306
if post_process:
307307
data = post_process(data)
308308

@@ -317,6 +317,9 @@ def read(
317317
band_names=[dataset.descriptions[idx - 1] or f"b{idx}" for idx in indexes],
318318
dataset_statistics=dataset_statistics,
319319
metadata=dataset.tags(),
320+
nodata=nodata,
321+
scales=scales.tolist(),
322+
offsets=offsets.tolist(),
320323
)
321324

322325

@@ -522,6 +525,9 @@ def part(
522525
bounds=bounds,
523526
crs=img.crs,
524527
band_names=img.band_names,
528+
nodata=img.nodata,
529+
scales=img.scales,
530+
offsets=img.offsets,
525531
dataset_statistics=img.dataset_statistics,
526532
metadata=img.metadata,
527533
)
@@ -660,6 +666,9 @@ def point(
660666
band_names=img.band_names,
661667
coordinates=coordinates,
662668
crs=coord_crs,
663-
metadata=dataset.tags(),
664669
pixel_location=(col, row),
670+
nodata=img.nodata,
671+
scales=img.scales,
672+
offsets=img.offsets,
673+
metadata=img.metadata,
665674
)

tests/test_io_rasterio.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1177,13 +1177,17 @@ def test_unscale_stats():
11771177
"""check if scale/offset were applied on stats."""
11781178
with Reader(COG_SCALE_STATS) as src:
11791179
img = src.read(unscale=True)
1180+
assert img.scales == [1.0, 1.0]
1181+
assert img.offsets == [0.0, 0.0]
11801182
stats = img.statistics()
11811183
minb1, maxb1 = stats["b1"].min, stats["b1"].max
11821184

11831185
assert pytest.approx(img.dataset_statistics[0][0]) == pytest.approx(minb1)
11841186
assert pytest.approx(img.dataset_statistics[0][1]) == pytest.approx(maxb1)
11851187

11861188
img = src.read()
1189+
assert img.scales == [0.0001, 0.001]
1190+
assert img.offsets == [1000.0, 2000.0]
11871191
stats = img.statistics()
11881192
minb1, maxb1 = stats["b1"].min, stats["b1"].max
11891193

0 commit comments

Comments
 (0)