@@ -68,26 +68,15 @@ def key_source(self):
 
         def _rename_attributes(table, props):
             return (
-                table.proj(
-                    **{
-                        attr: ref
-                        for attr, ref in props["attr_map"].items()
-                        if attr != ref
-                    }
-                )
+                table.proj(**{attr: ref for attr, ref in props["attr_map"].items() if attr != ref})
                 if props["aliased"]
                 else table.proj()
             )
 
         if self._key_source is None:
-            parents = self.target.parents(
-                primary=True, as_objects=True, foreign_key_info=True
-            )
+            parents = self.target.parents(primary=True, as_objects=True, foreign_key_info=True)
             if not parents:
-                raise DataJointError(
-                    "A table must have dependencies "
-                    "from its primary key for auto-populate to work"
-                )
+                raise DataJointError("A table must have dependencies " "from its primary key for auto-populate to work")
             self._key_source = _rename_attributes(*parents[0])
             for q in parents[1:]:
                 self._key_source *= _rename_attributes(*q)
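
Note on what the hunk above computes: key_source is assembled by joining the primary-key projections of the target's primary parents, after renaming any aliased foreign-key attributes. A minimal sketch of how this surfaces in user code, with hypothetical table and schema names:

import datajoint as dj

schema = dj.Schema("demo")  # hypothetical schema name

@schema
class Session(dj.Manual):
    definition = """
    session_id : int
    """

@schema
class Stats(dj.Computed):
    definition = """
    -> Session        # primary dependency: Session's key becomes part of key_source
    ---
    mean_rate : float
    """

    def make(self, key):
        # called once per key in (key_source - Stats) during populate()
        self.insert1(dict(key, mean_rate=0.0))

# Stats.key_source is the join of the renamed primary-key projections of all
# primary parents -- here just Session.proj() -- which populate() iterates over.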
@@ -139,11 +128,7 @@ def make(self, key):
         :raises NotImplementedError: If the derived class does not implement the required methods.
         """
 
-        if not (
-            hasattr(self, "make_fetch")
-            and hasattr(self, "make_insert")
-            and hasattr(self, "make_compute")
-        ):
+        if not (hasattr(self, "make_fetch") and hasattr(self, "make_insert") and hasattr(self, "make_compute")):
             # user must implement `make`
             raise NotImplementedError(
                 "Subclasses of AutoPopulate must implement the method `make` "
@@ -189,8 +174,7 @@ def _jobs_to_do(self, restrictions):
         """
         if self.restriction:
             raise DataJointError(
-                "Cannot call populate on a restricted table. "
-                "Instead, pass conditions to populate() as arguments."
+                "Cannot call populate on a restricted table. " "Instead, pass conditions to populate() as arguments."
             )
         todo = self.key_source
 
@@ -206,11 +190,7 @@ def _jobs_to_do(self, restrictions):
             raise DataJointError(
                 "The populate target lacks attribute %s "
                 "from the primary key of key_source"
-                % next(
-                    name
-                    for name in todo.heading.primary_key
-                    if name not in self.target.heading
-                )
+                % next(name for name in todo.heading.primary_key if name not in self.target.heading)
             )
         except StopIteration:
             pass
@@ -259,12 +239,8 @@ def populate(
 
         valid_order = ["original", "reverse", "random"]
         if order not in valid_order:
-            raise DataJointError(
-                "The order argument must be one of %s" % str(valid_order)
-            )
-        jobs = (
-            self.connection.schemas[self.target.database].jobs if reserve_jobs else None
-        )
+            raise DataJointError("The order argument must be one of %s" % str(valid_order))
+        jobs = self.connection.schemas[self.target.database].jobs if reserve_jobs else None
 
         if reserve_jobs:
             # Define a signal handler for SIGTERM
@@ -275,16 +251,12 @@ def handler(signum, frame):
             old_handler = signal.signal(signal.SIGTERM, handler)
 
         if keys is None:
-            keys = (self._jobs_to_do(restrictions) - self.target).fetch(
-                "KEY", limit=limit
-            )
+            keys = (self._jobs_to_do(restrictions) - self.target).fetch("KEY", limit=limit)
 
         # exclude "error", "ignore" or "reserved" jobs
         if reserve_jobs:
             exclude_key_hashes = (
-                jobs
-                & {"table_name": self.target.table_name}
-                & 'status in ("error", "ignore", "reserved")'
+                jobs & {"table_name": self.target.table_name} & 'status in ("error", "ignore", "reserved")'
             ).fetch("key_hash")
             keys = [key for key in keys if key_hash(key) not in exclude_key_hashes]
 
@@ -311,11 +283,7 @@ def handler(signum, frame):
         )
 
         if processes == 1:
-            for key in (
-                tqdm(keys, desc=self.__class__.__name__)
-                if display_progress
-                else keys
-            ):
+            for key in tqdm(keys, desc=self.__class__.__name__) if display_progress else keys:
                 status = self._populate1(key, jobs, **populate_kwargs)
                 if status is True:
                     success_list.append(1)
@@ -328,14 +296,8 @@ def handler(signum, frame):
             self.connection.close()  # disconnect parent process from MySQL server
             del self.connection._conn.ctx  # SSLContext is not pickleable
             with (
-                mp.Pool(
-                    processes, _initialize_populate, (self, jobs, populate_kwargs)
-                ) as pool,
-                (
-                    tqdm(desc="Processes: ", total=nkeys)
-                    if display_progress
-                    else contextlib.nullcontext()
-                ) as progress_bar,
+                mp.Pool(processes, _initialize_populate, (self, jobs, populate_kwargs)) as pool,
+                tqdm(desc="Processes: ", total=nkeys) if display_progress else contextlib.nullcontext() as progress_bar,
             ):
                 for status in pool.imap(_call_populate1, keys, chunksize=1):
                     if status is True:
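
For reference, the two branches above map onto populate() calls like the following. This is a usage sketch: Stats is the hypothetical table from the earlier example, while the keyword arguments are populate() parameters that appear in this file.

# single process, reserving jobs in the schema's jobs table and showing a progress bar
Stats.populate(reserve_jobs=True, display_progress=True, limit=100)

# multiple worker processes; the parent connection is closed and each worker reconnects
Stats.populate(reserve_jobs=True, processes=4, suppress_errors=True)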
@@ -357,9 +319,7 @@ def handler(signum, frame):
             "error_list": error_list,
         }
 
-    def _populate1(
-        self, key, jobs, suppress_errors, return_exception_objects, make_kwargs=None
-    ):
+    def _populate1(self, key, jobs, suppress_errors, return_exception_objects, make_kwargs=None):
         """
         populates table for one source key, calling self.make inside a transaction.
         :param jobs: the jobs table or None if not reserve_jobs
@@ -372,9 +332,7 @@ def _populate1(
         # use the legacy `_make_tuples` callback.
         make = self._make_tuples if hasattr(self, "_make_tuples") else self.make
 
-        if jobs is not None and not jobs.reserve(
-            self.target.table_name, self._job_key(key)
-        ):
+        if jobs is not None and not jobs.reserve(self.target.table_name, self._job_key(key)):
             return False
 
         # if make is a generator, its transaction can be delayed until the final stage
@@ -399,23 +357,16 @@ def _populate1(
                 # tripartite make - transaction is delayed until the final stage
                 gen = make(dict(key), **(make_kwargs or {}))
                 fetched_data = next(gen)
-                fetch_hash = deepdiff.DeepHash(
-                    fetched_data, ignore_iterable_order=False
-                )[fetched_data]
+                fetch_hash = deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[fetched_data]
                 computed_result = next(gen)  # perform the computation
                 # fetch and insert inside a transaction
                 self.connection.start_transaction()
                 gen = make(dict(key), **(make_kwargs or {}))  # restart make
                 fetched_data = next(gen)
                 if (
-                    fetch_hash
-                    != deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[
-                        fetched_data
-                    ]
+                    fetch_hash != deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[fetched_data]
                 ):  # raise error if fetched data has changed
-                    raise DataJointError(
-                        "Referential integrity failed! The `make_fetch` data has changed"
-                    )
+                    raise DataJointError("Referential integrity failed! The `make_fetch` data has changed")
                 gen.send(computed_result)  # insert
 
         except (KeyboardInterrupt, SystemExit, Exception) as error:
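
The generator protocol in this hunk backs the tripartite make: when a subclass defines make_fetch, make_compute, and make_insert instead of make, the fetched inputs are hashed with deepdiff.DeepHash, the computation runs outside the transaction, and make_fetch is replayed inside the transaction so the hash comparison can detect inputs that changed in the meantime. A hedged sketch of such a subclass follows; the class and attribute names are illustrative, and the exact argument unpacking of the three methods is an assumption.

@schema
class SlowStats(dj.Computed):
    definition = """
    -> Session
    ---
    result : float
    """

    def make_fetch(self, key):
        # gather every input the computation needs; the returned value is hashed
        return ((Session & key).fetch1(),)

    def make_compute(self, key, session_row):
        # long-running work happens here, outside the open transaction
        return (0.0,)

    def make_insert(self, key, result):
        # runs inside the transaction, after the fetch hash has been re-verified
        self.insert1(dict(key, result=result))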
@@ -427,9 +378,7 @@ def _populate1(
                 exception=error.__class__.__name__,
                 msg=": " + str(error) if str(error) else "",
             )
-            logger.debug(
-                f"Error making {key} -> {self.target.full_table_name} - {error_message}"
-            )
+            logger.debug(f"Error making {key} -> {self.target.full_table_name} - {error_message}")
             if jobs is not None:
                 # show error name and error message (if any)
                 jobs.error(
@@ -468,9 +417,7 @@ def progress(self, *restrictions, display=False):
                     total - remaining,
                     total,
                     100 - 100 * remaining / (total + 1e-12),
-                    datetime.datetime.strftime(
-                        datetime.datetime.now(), "%Y-%m-%d %H:%M:%S"
-                    ),
+                    datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S"),
                 ),
             )
         return remaining, total