
Commit 08838f6

Fix ruff-format: code formatting adjustments
Apply ruff formatter changes for consistent code style.
1 parent 095753f · commit 08838f6
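
For context: a commit like this one is typically produced by running the ruff formatter over the source tree. A minimal reproduction sketch, assuming ruff is installed and on PATH:

# Minimal sketch (assumes ruff is installed and on PATH).
import subprocess

# Report files that would be reformatted, without modifying anything.
subprocess.run(["ruff", "format", "--check", "src/datajoint/"])

# Rewrite files in place, producing changes like the ones below.
subprocess.run(["ruff", "format", "src/datajoint/"])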

File tree

4 files changed: +19 −21 lines changed


src/datajoint/heading.py

Lines changed: 3 additions & 3 deletions

@@ -138,7 +138,8 @@ def blobs(self):
     @property
     def non_blobs(self):
         return [
-            k for k, v in self.attributes.items()
+            k
+            for k, v in self.attributes.items()
             if not (v.is_blob or v.is_attachment or v.is_filepath or v.is_object or v.json)
         ]

@@ -348,8 +349,7 @@ def _init_from_database(self):
             )
         ):
             raise DataJointError(
-                "Json, Blob, attachment, filepath, or object attributes "
-                "are not allowed in the primary key"
+                "Json, Blob, attachment, filepath, or object attributes " "are not allowed in the primary key"
             )

         if attr["string"] and attr["default"] is not None and attr["default"] not in sql_literals:
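
A note on the joined error message: Python concatenates adjacent string literals at compile time, so the one-line form the formatter produces builds exactly the same string as the original two-line form. A small sketch using the literals from the hunk above:

# Adjacent string literals are concatenated at compile time, so both
# layouts produce the identical message string.
split_form = (
    "Json, Blob, attachment, filepath, or object attributes "
    "are not allowed in the primary key"
)
joined_form = "Json, Blob, attachment, filepath, or object attributes " "are not allowed in the primary key"
assert split_form == joined_form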

src/datajoint/settings.py

Lines changed: 10 additions & 9 deletions

@@ -394,8 +394,7 @@ def get_object_storage_spec(self) -> dict[str, Any]:
         supported_protocols = ("file", "s3", "gcs", "azure")
         if protocol not in supported_protocols:
             raise DataJointError(
-                f"Invalid object_storage.protocol: {protocol}. "
-                f'Supported protocols: {", ".join(supported_protocols)}'
+                f"Invalid object_storage.protocol: {protocol}. " f'Supported protocols: {", ".join(supported_protocols)}'
             )

         # Build spec dict

@@ -413,13 +412,15 @@ def get_object_storage_spec(self) -> dict[str, Any]:
                 raise DataJointError("object_storage.endpoint and object_storage.bucket are required for S3")
             if not os_settings.access_key or not os_settings.secret_key:
                 raise DataJointError("object_storage.access_key and object_storage.secret_key are required for S3")
-            spec.update({
-                "endpoint": os_settings.endpoint,
-                "bucket": os_settings.bucket,
-                "access_key": os_settings.access_key,
-                "secret_key": os_settings.secret_key.get_secret_value() if os_settings.secret_key else None,
-                "secure": os_settings.secure,
-            })
+            spec.update(
+                {
+                    "endpoint": os_settings.endpoint,
+                    "bucket": os_settings.bucket,
+                    "access_key": os_settings.access_key,
+                    "secret_key": os_settings.secret_key.get_secret_value() if os_settings.secret_key else None,
+                    "secure": os_settings.secure,
+                }
+            )
         elif protocol == "gcs":
             if not os_settings.bucket:
                 raise DataJointError("object_storage.bucket is required for GCS")
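
The spec.update() reflow is likewise behavior-neutral: both layouts pass the same dict literal to dict.update. A small sketch with hypothetical values (the endpoint and bucket names are made up for illustration):

# Both call layouts are identical at runtime; only the source layout differs.
spec = {"protocol": "s3"}
spec.update(
    {
        "endpoint": "minio.example.com",  # hypothetical endpoint
        "bucket": "my-bucket",            # hypothetical bucket
        "secure": True,
    }
)
assert spec["bucket"] == "my-bucket"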

src/datajoint/storage.py

Lines changed: 1 addition & 1 deletion

@@ -59,7 +59,7 @@ def encode_pk_value(value: Any) -> str:
     # String handling
     s = str(value)
     # Check if path-safe (no special characters)
-    unsafe_chars = "/\\:*?\"<>|"
+    unsafe_chars = '/\\:*?"<>|'
     if any(c in s for c in unsafe_chars) or len(s) > 100:
         # URL-encode unsafe strings or truncate long ones
         if len(s) > 100:
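
The storage.py change is ruff's quote normalization: the formatter prefers the quote style that avoids escape sequences, so the escaped double quote becomes a plain one inside single quotes. The two literals are equal character for character:

# The two spellings denote the same nine characters; only the escaping differs.
assert "/\\:*?\"<>|" == '/\\:*?"<>|'
assert len('/\\:*?"<>|') == 9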

src/datajoint/table.py

Lines changed: 5 additions & 8 deletions

@@ -307,9 +307,7 @@ def _process_object_value(self, name: str, value, row: dict) -> str:
         # Extract primary key values from row
         primary_key = {k: row[k] for k in self.primary_key if k in row}
         if not primary_key:
-            raise DataJointError(
-                "Primary key values must be provided before object attributes for insert."
-            )
+            raise DataJointError("Primary key values must be provided before object attributes for insert.")

         # Determine input type and extract extension
         is_dir = False

@@ -337,8 +335,7 @@ def _process_object_value(self, name: str, value, row: dict) -> str:
             size = source_path.stat().st_size
         else:
             raise DataJointError(
-                f"Invalid value type for object attribute {name}. "
-                "Expected file path, folder path, or (ext, stream) tuple."
+                f"Invalid value type for object attribute {name}. " "Expected file path, folder path, or (ext, stream) tuple."
             )

         # Get storage spec for path building

@@ -913,8 +910,7 @@ def __make_placeholder(self, name, value, ignore_extra_fields=False, row=None):
             # Object type - upload to object storage and return JSON metadata
             if row is None:
                 raise DataJointError(
-                    f"Object attribute {name} requires full row context for insert. "
-                    "This is an internal error."
+                    f"Object attribute {name} requires full row context for insert. " "This is an internal error."
                 )
             value = self._process_object_value(name, value, row)
         elif attr.numeric:

@@ -960,7 +956,8 @@ def check_fields(fields):
             row_dict = dict(row)
             attributes = [
                 self.__make_placeholder(name, row[name], ignore_extra_fields, row=row_dict)
-                for name in self.heading if name in row
+                for name in self.heading
+                if name in row
             ]
         else:  # positional
             try:
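
Implicit concatenation also works between an f-string and a plain string literal, which is why the joined error messages above still interpolate correctly. A small sketch with a hypothetical attribute name:

# An f-string followed by a plain literal concatenates like any other
# adjacent string literals; the f-string part still interpolates.
name = "recording"  # hypothetical attribute name
message = f"Invalid value type for object attribute {name}. " "Expected file path, folder path, or (ext, stream) tuple."
assert message.startswith("Invalid value type for object attribute recording.")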
