@@ -307,9 +307,7 @@ def _process_object_value(self, name: str, value, row: dict) -> str:
         # Extract primary key values from row
         primary_key = {k: row[k] for k in self.primary_key if k in row}
         if not primary_key:
-            raise DataJointError(
-                "Primary key values must be provided before object attributes for insert."
-            )
+            raise DataJointError("Primary key values must be provided before object attributes for insert.")

         # Determine input type and extract extension
         is_dir = False
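
An aside on the guard above: the dict comprehension collects whichever primary-key attributes are already present in the row, and the insert fails fast if none precede the object attribute. A minimal sketch of that extraction, using a hypothetical key list and row (not taken from this PR):

```python
# Hypothetical stand-in for a table's primary-key definition.
primary_key_attrs = ["subject_id", "session_id"]

row = {"subject_id": 1, "session_id": 2, "photo": "/tmp/img.png"}

# Same comprehension shape as in _process_object_value above.
primary_key = {k: row[k] for k in primary_key_attrs if k in row}
assert primary_key == {"subject_id": 1, "session_id": 2}

# A row carrying only the object attribute yields an empty dict,
# which is the condition that raises DataJointError in the diff.
row_without_key = {"photo": "/tmp/img.png"}
assert not {k: row_without_key[k] for k in primary_key_attrs if k in row_without_key}
```
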
@@ -337,8 +335,7 @@ def _process_object_value(self, name: str, value, row: dict) -> str:
             size = source_path.stat().st_size
         else:
             raise DataJointError(
-                f"Invalid value type for object attribute {name}. "
-                "Expected file path, folder path, or (ext, stream) tuple."
+                f"Invalid value type for object attribute {name}. " "Expected file path, folder path, or (ext, stream) tuple."
             )

         # Get storage spec for path building
@@ -913,8 +910,7 @@ def __make_placeholder(self, name, value, ignore_extra_fields=False, row=None):
             # Object type - upload to object storage and return JSON metadata
             if row is None:
                 raise DataJointError(
-                    f"Object attribute {name} requires full row context for insert. "
-                    "This is an internal error."
+                    f"Object attribute {name} requires full row context for insert. " "This is an internal error."
                 )
             value = self._process_object_value(name, value, row)
         elif attr.numeric:
@@ -960,7 +956,8 @@ def check_fields(fields):
             row_dict = dict(row)
             attributes = [
                 self.__make_placeholder(name, row[name], ignore_extra_fields, row=row_dict)
-                for name in self.heading if name in row
+                for name in self.heading
+                if name in row
             ]
         else:  # positional
             try:
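
A note on why the joined error messages in these hunks are safe: Python concatenates adjacent string literals at compile time, so collapsing the two literals onto one line (as the formatter did here) produces exactly the same message. A minimal sketch using one of the strings from the diff:

```python
name = "photo"  # illustrative attribute name, not from the diff

# Two adjacent literals on one line...
single_line = f"Invalid value type for object attribute {name}. " "Expected file path, folder path, or (ext, stream) tuple."

# ...are identical to the original two-line form.
multi_line = (
    f"Invalid value type for object attribute {name}. "
    "Expected file path, folder path, or (ext, stream) tuple."
)
assert single_line == multi_line
```
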