
Commit 368145f

normalize TypeError when check_scalar fails, and add tests for it
1 parent 2540eab · commit 368145f


19 files changed: +136 / -48 lines


examples/custom_dtype.py

Lines changed: 4 additions & 1 deletion
@@ -169,7 +169,10 @@ def cast_scalar(self, data: object) -> ml_dtypes.int2:
         """
         if self._check_scalar(data):
             return ml_dtypes.int2(data)
-        msg = f"Cannot convert object with type {type(data)} to a 2-bit integer."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> ml_dtypes.int2:
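
Every cast_scalar method touched by this commit now builds its error with the same template. A standalone sketch of what the normalized message looks like, using only builtins; the "Int2()" repr below is a placeholder, not taken from the example file:

    data = object()
    dtype_repr = "Int2()"  # placeholder for {self}, i.e. the dtype's own repr
    msg = (
        f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
        f"data type {dtype_repr}."
    )
    print(msg)
    # Cannot convert object <object object at 0x...> with type <class 'object'>
    # to a scalar compatible with the data type Int2().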

src/zarr/core/dtype/npy/bool.py

Lines changed: 5 additions & 2 deletions
@@ -246,8 +246,11 @@ def cast_scalar(self, data: object) -> np.bool_:
         """
         if self._check_scalar(data):
             return np.bool_(data)
-        msg = f"Cannot convert object with type {type(data)} to a numpy boolean."
-        raise TypeError(msg)
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
+        raise TypeError(msg)  # pragma: no cover
 
     def default_scalar(self) -> np.bool_:
         """

src/zarr/core/dtype/npy/bytes.py

Lines changed: 12 additions & 3 deletions
@@ -458,7 +458,10 @@ def cast_scalar(self, data: object) -> np.bytes_:
 
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a NumPy bytes scalar."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> np.bytes_:
@@ -849,7 +852,10 @@ def cast_scalar(self, data: object) -> np.void:
         """
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a NumPy void scalar."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> np.void:
@@ -1263,5 +1269,8 @@ def cast_scalar(self, data: object) -> bytes:
 
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to bytes."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)

src/zarr/core/dtype/npy/complex.py

Lines changed: 4 additions & 1 deletion
@@ -277,7 +277,10 @@ def cast_scalar(self, data: object) -> TComplexScalar_co:
         """
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a numpy float scalar."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> TComplexScalar_co:

src/zarr/core/dtype/npy/float.py

Lines changed: 5 additions & 3 deletions
@@ -11,7 +11,6 @@
     DTypeJSON,
     HasEndianness,
     HasItemSize,
-    ScalarTypeValidationError,
     check_dtype_spec_v2,
 )
 from zarr.core.dtype.npy.common import (
@@ -235,8 +234,11 @@ def cast_scalar(self, data: object) -> TFloatScalar_co:
         """
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a NumPy float scalar."
-        raise ScalarTypeValidationError(msg)
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
+        raise TypeError(msg)
 
     def default_scalar(self) -> TFloatScalar_co:
         """

src/zarr/core/dtype/npy/int.py

Lines changed: 4 additions & 1 deletion
@@ -166,7 +166,10 @@ def cast_scalar(self, data: object) -> TIntScalar_co:
 
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a NumPy integer."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> TIntScalar_co:
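
With the float change above swapping ScalarTypeValidationError for a plain TypeError, every cast_scalar path now fails with the same exception type and message prefix. A self-contained pytest sketch of the kind of check the new tests could make; _DemoDType below is a stand-in written for illustration, not the commit's actual test code:

    import pytest

    class _DemoDType:
        # Stand-in for a zarr dtype whose _check_scalar rejects the input,
        # so we can exercise the normalized failure path in isolation.
        def _check_scalar(self, data: object) -> bool:
            return False

        def cast_scalar(self, data: object) -> object:
            if self._check_scalar(data):
                return data
            msg = (
                f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
                f"data type {self}."
            )
            raise TypeError(msg)

    def test_cast_scalar_rejects_incompatible_object() -> None:
        # TypeError, not a custom validation error, is what callers should expect.
        with pytest.raises(TypeError, match="Cannot convert object"):
            _DemoDType().cast_scalar(object())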

src/zarr/core/dtype/npy/string.py

Lines changed: 14 additions & 10 deletions
@@ -344,7 +344,7 @@ def from_json_scalar(self, data: JSON, *, zarr_format: ZarrFormat) -> np.str_:
             return self.to_native_dtype().type(data)
         raise TypeError(f"Invalid type: {data}. Expected a string.")  # pragma: no cover
 
-    def _check_scalar(self, data: object) -> TypeGuard[str | np.str_ | bytes | int]:
+    def _check_scalar(self, data: object) -> TypeGuard[SupportsStr]:
         """
         Check that the input is a valid scalar value for this data type.
 
@@ -355,11 +355,11 @@ def _check_scalar(self, data: object) -> TypeGuard[str | np.str_ | bytes | int]:
 
         Returns
         -------
-        TypeGuard[str | np.str_ | bytes | int]
+        TypeGuard[SupportsStr]
             Whether the input is a valid scalar value for this data type.
         """
         # this is generous for backwards compatibility
-        return isinstance(data, str | np.str_ | bytes | int)
+        return isinstance(data, SupportsStr)
 
     def cast_scalar(self, data: object) -> np.str_:
         """
@@ -383,13 +383,13 @@ def cast_scalar(self, data: object) -> np.str_:
         # >>> x.dtype
         # dtype('U11')
 
-            if isinstance(data, int):
-                return self.to_native_dtype().type(str(data)[: self.length])
-            else:
-                return self.to_native_dtype().type(data[: self.length])
-        raise TypeError(
-            f"Cannot convert object with type {type(data)} to a NumPy unicode string scalar."
+            return self.to_native_dtype().type(str(data)[: self.length])
+
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
         )
+        raise TypeError(msg)  # pragma: no-cover
 
     @property
     def item_size(self) -> int:
@@ -711,7 +711,10 @@ def cast_scalar(self, data: object) -> str:
         """
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        raise TypeError(f"Cannot convert object with type {type(data)} to a Python string.")
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
+        raise TypeError(msg)
 
 
 if _NUMPY_SUPPORTS_VLEN_STRING:
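
The widened _check_scalar relies on a SupportsStr type being usable with isinstance, which implies a runtime-checkable protocol. A minimal sketch of what such a protocol could look like; the real definition lives elsewhere in the zarr codebase and may differ:

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class SupportsStr(Protocol):
        # Anything that implements __str__ qualifies, which is what makes the
        # new _check_scalar "generous for backwards compatibility".
        def __str__(self) -> str: ...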

src/zarr/core/dtype/npy/structured.py

Lines changed: 4 additions & 1 deletion
@@ -399,7 +399,10 @@ def cast_scalar(self, data: object) -> np.void:
 
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a NumPy structured scalar."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> np.void:

src/zarr/core/dtype/npy/time.py

Lines changed: 8 additions & 2 deletions
@@ -554,7 +554,10 @@ def cast_scalar(self, data: object) -> np.timedelta64:
         """
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a numpy timedelta64 scalar."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> np.timedelta64:
@@ -838,7 +841,10 @@ def cast_scalar(self, data: object) -> np.datetime64:
         """
         if self._check_scalar(data):
             return self._cast_scalar_unchecked(data)
-        msg = f"Cannot convert object with type {type(data)} to a numpy datetime scalar."
+        msg = (
+            f"Cannot convert object {data!r} with type {type(data)} to a scalar compatible with the "
+            f"data type {self}."
+        )
         raise TypeError(msg)
 
     def default_scalar(self) -> np.datetime64:

tests/test_dtype/conftest.py

Lines changed: 1 addition & 4 deletions
@@ -65,7 +65,4 @@ class TestB(TestExample):
     for fixture_name in metafunc.fixturenames:
         if hasattr(metafunc.cls, fixture_name):
             params = getattr(metafunc.cls, fixture_name)
-            if len(params) == 0:
-                msg = f"{metafunc.cls}.{fixture_name} is empty. Please provide a non-empty sequence of values."
-                raise ValueError(msg)
-            metafunc.parametrize(fixture_name, params, scope="class")
+            metafunc.parametrize(fixture_name, params, scope="class", ids=str)
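
Passing ids=str to parametrize makes pytest derive each test ID from str(value) instead of an auto-generated index, which gives dtype-heavy parametrizations readable names. A small self-contained illustration of the same option on an ordinary parametrized test; the test name and values here are invented for the example:

    import pytest

    @pytest.mark.parametrize("value", [1, 2.5, "a"], ids=str)
    def test_roundtrip(value) -> None:
        # Test IDs become test_roundtrip[1], test_roundtrip[2.5], test_roundtrip[a]
        assert value == value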
