Skip to content

Commit 334da76

Browse files
fix: address PR #1311 review comments

- Replace bare except: with except ImportError: in diagram.py
- Replace assert statements with explicit raises in blob.py
- Replace assert False with explicit raises in expression.py and declare.py
- Implement hash verification in objectref.py

Co-Authored-By: Claude Opus 4.5 <[email protected]>
1 parent 861e273 commit 334da76

File tree

5 files changed

+16
-11
lines changed

5 files changed

+16
-11
lines changed

src/datajoint/blob.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,8 @@ def unpack(self, blob):
159159
self._pos += len(prefix)
160160
blob_size = self.read_value()
161161
blob = compression[prefix](self._blob[self._pos :])
162-
assert len(blob) == blob_size
162+
if len(blob) != blob_size:
163+
raise DataJointError(f"Blob size mismatch: expected {blob_size}, got {len(blob)}")
163164
self._blob = blob
164165
self._pos = 0
165166
blob_format = self.read_zero_terminated_string()
@@ -363,7 +364,8 @@ def read_int(self):
363364
@staticmethod
364365
def pack_int(v):
365366
n_bytes = v.bit_length() // 8 + 1
366-
assert 0 < n_bytes <= 0xFFFF, "Integers are limited to 65535 bytes"
367+
if not (0 < n_bytes <= 0xFFFF):
368+
raise DataJointError("Integers are limited to 65535 bytes")
367369
return b"\x0a" + np.uint16(n_bytes).tobytes() + v.to_bytes(n_bytes, byteorder="little", signed=True)
368370

369371
def read_bool(self):

src/datajoint/declare.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -673,7 +673,7 @@ def substitute_special_type(match: dict, category: str, foreign_key_sql: list[st
673673
match["type"] = sql_type
674674
# else: type passes through as-is (json, date, datetime, char, varchar, enum)
675675
else:
676-
assert False, f"Unknown special type: {category}"
676+
raise DataJointError(f"Unknown special type: {category}")
677677

678678

679679
def compile_attribute(line: str, in_key: bool, foreign_key_sql: list[str], context: dict) -> tuple[str, str, str | None]:

src/datajoint/diagram.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,14 +23,14 @@
2323
from matplotlib import pyplot as plt
2424

2525
plot_active = True
26-
except:
26+
except ImportError:
2727
plot_active = False
2828

2929
try:
3030
from networkx.drawing.nx_pydot import pydot_layout
3131

3232
diagram_active = True
33-
except:
33+
except ImportError:
3434
diagram_active = False
3535

3636

src/datajoint/expression.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1085,12 +1085,12 @@ def make_sql(self):
10851085
return "({sql1}) UNION ({sql2})".format(sql1=sql1, sql2=sql2)
10861086

10871087
def from_clause(self):
1088-
"""The union does not use a FROM clause"""
1089-
assert False
1088+
"""The union does not use a FROM clause."""
1089+
raise NotImplementedError("Union does not use a FROM clause")
10901090

10911091
def where_clause(self):
1092-
"""The union does not use a WHERE clause"""
1093-
assert False
1092+
"""The union does not use a WHERE clause."""
1093+
raise NotImplementedError("Union does not use a WHERE clause")
10941094

10951095
def __len__(self):
10961096
return self.connection.query(

src/datajoint/objectref.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import fsspec
1616

1717
from .errors import DataJointError
18+
from .hash_registry import compute_hash
1819
from .storage import StorageBackend
1920

2021

@@ -366,8 +367,10 @@ def _verify_file(self) -> bool:
366367

367368
# Check hash if available
368369
if self.hash:
369-
# TODO: Implement hash verification
370-
pass
370+
content = self._backend.get_buffer(self.path)
371+
actual_hash = compute_hash(content)
372+
if actual_hash != self.hash:
373+
raise IntegrityError(f"Hash mismatch for {self.path}: expected {self.hash}, got {actual_hash}")
371374

372375
return True
373376

0 commit comments

Comments (0)