Skip to content

Commit 1aa30f4

Browse files
committed
lint with ruff
1 parent a321f92 commit 1aa30f4

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

60 files changed

+641
-2048
lines changed

src/datajoint/admin.py

Lines changed: 7 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -20,18 +20,14 @@ def set_password(new_password=None, connection=None, update_config=None):
2020
logger.warning("Failed to confirm the password! Aborting password change.")
2121
return
2222

23-
if version.parse(
24-
connection.query("select @@version;").fetchone()[0]
25-
) >= version.parse("5.7"):
23+
if version.parse(connection.query("select @@version;").fetchone()[0]) >= version.parse("5.7"):
2624
# SET PASSWORD is deprecated as of MySQL 5.7 and removed in 8+
2725
connection.query("ALTER USER user() IDENTIFIED BY '%s';" % new_password)
2826
else:
2927
connection.query("SET PASSWORD = PASSWORD('%s')" % new_password)
3028
logger.info("Password updated.")
3129

32-
if update_config or (
33-
update_config is None and user_choice("Update local setting?") == "yes"
34-
):
30+
if update_config or (update_config is None and user_choice("Update local setting?") == "yes"):
3531
config["database.password"] = new_password
3632
config.save_local(verbose=True)
3733

@@ -67,17 +63,10 @@ def kill(restriction=None, connection=None, order_by=None):
6763
while True:
6864
print(" ID USER HOST STATE TIME INFO")
6965
print("+--+ +----------+ +-----------+ +-----------+ +-----+")
70-
cur = (
71-
{k.lower(): v for k, v in elem.items()}
72-
for elem in connection.query(query, as_dict=True)
73-
)
66+
cur = ({k.lower(): v for k, v in elem.items()} for elem in connection.query(query, as_dict=True))
7467
for process in cur:
7568
try:
76-
print(
77-
"{id:>4d} {user:<12s} {host:<12s} {state:<12s} {time:>7d} {info}".format(
78-
**process
79-
)
80-
)
69+
print("{id:>4d} {user:<12s} {host:<12s} {state:<12s} {time:>7d} {info}".format(**process))
8170
except TypeError:
8271
print(process)
8372
response = input('process to kill or "q" to quit > ')
@@ -111,15 +100,11 @@ def kill_quick(restriction=None, connection=None):
111100
if connection is None:
112101
connection = conn()
113102

114-
query = (
115-
"SELECT * FROM information_schema.processlist WHERE id <> CONNECTION_ID()"
116-
+ ("" if restriction is None else " AND (%s)" % restriction)
103+
query = "SELECT * FROM information_schema.processlist WHERE id <> CONNECTION_ID()" + (
104+
"" if restriction is None else " AND (%s)" % restriction
117105
)
118106

119-
cur = (
120-
{k.lower(): v for k, v in elem.items()}
121-
for elem in connection.query(query, as_dict=True)
122-
)
107+
cur = ({k.lower(): v for k, v in elem.items()} for elem in connection.query(query, as_dict=True))
123108
nkill = 0
124109
for process in cur:
125110
connection.query("kill %d" % process["id"])

src/datajoint/attribute_adapter.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -45,20 +45,14 @@ def get_adapter(context, adapter_name):
4545
try:
4646
adapter = context[adapter_name]
4747
except KeyError:
48-
raise DataJointError(
49-
"Attribute adapter '{adapter_name}' is not defined.".format(
50-
adapter_name=adapter_name
51-
)
52-
)
48+
raise DataJointError("Attribute adapter '{adapter_name}' is not defined.".format(adapter_name=adapter_name))
5349
if not isinstance(adapter, AttributeAdapter):
5450
raise DataJointError(
5551
"Attribute adapter '{adapter_name}' must be an instance of datajoint.AttributeAdapter".format(
5652
adapter_name=adapter_name
5753
)
5854
)
59-
if not isinstance(adapter.attribute_type, str) or not re.match(
60-
r"^\w", adapter.attribute_type
61-
):
55+
if not isinstance(adapter.attribute_type, str) or not re.match(r"^\w", adapter.attribute_type):
6256
raise DataJointError(
6357
"Invalid attribute type {type} in attribute adapter '{adapter_name}'".format(
6458
type=adapter.attribute_type, adapter_name=adapter_name

src/datajoint/autopopulate.py

Lines changed: 20 additions & 73 deletions
Original file line numberDiff line numberDiff line change
@@ -68,26 +68,15 @@ def key_source(self):
6868

6969
def _rename_attributes(table, props):
7070
return (
71-
table.proj(
72-
**{
73-
attr: ref
74-
for attr, ref in props["attr_map"].items()
75-
if attr != ref
76-
}
77-
)
71+
table.proj(**{attr: ref for attr, ref in props["attr_map"].items() if attr != ref})
7872
if props["aliased"]
7973
else table.proj()
8074
)
8175

8276
if self._key_source is None:
83-
parents = self.target.parents(
84-
primary=True, as_objects=True, foreign_key_info=True
85-
)
77+
parents = self.target.parents(primary=True, as_objects=True, foreign_key_info=True)
8678
if not parents:
87-
raise DataJointError(
88-
"A table must have dependencies "
89-
"from its primary key for auto-populate to work"
90-
)
79+
raise DataJointError("A table must have dependencies " "from its primary key for auto-populate to work")
9180
self._key_source = _rename_attributes(*parents[0])
9281
for q in parents[1:]:
9382
self._key_source *= _rename_attributes(*q)
@@ -139,11 +128,7 @@ def make(self, key):
139128
:raises NotImplementedError: If the derived class does not implement the required methods.
140129
"""
141130

142-
if not (
143-
hasattr(self, "make_fetch")
144-
and hasattr(self, "make_insert")
145-
and hasattr(self, "make_compute")
146-
):
131+
if not (hasattr(self, "make_fetch") and hasattr(self, "make_insert") and hasattr(self, "make_compute")):
147132
# user must implement `make`
148133
raise NotImplementedError(
149134
"Subclasses of AutoPopulate must implement the method `make` "
@@ -189,8 +174,7 @@ def _jobs_to_do(self, restrictions):
189174
"""
190175
if self.restriction:
191176
raise DataJointError(
192-
"Cannot call populate on a restricted table. "
193-
"Instead, pass conditions to populate() as arguments."
177+
"Cannot call populate on a restricted table. " "Instead, pass conditions to populate() as arguments."
194178
)
195179
todo = self.key_source
196180

@@ -206,11 +190,7 @@ def _jobs_to_do(self, restrictions):
206190
raise DataJointError(
207191
"The populate target lacks attribute %s "
208192
"from the primary key of key_source"
209-
% next(
210-
name
211-
for name in todo.heading.primary_key
212-
if name not in self.target.heading
213-
)
193+
% next(name for name in todo.heading.primary_key if name not in self.target.heading)
214194
)
215195
except StopIteration:
216196
pass
@@ -259,12 +239,8 @@ def populate(
259239

260240
valid_order = ["original", "reverse", "random"]
261241
if order not in valid_order:
262-
raise DataJointError(
263-
"The order argument must be one of %s" % str(valid_order)
264-
)
265-
jobs = (
266-
self.connection.schemas[self.target.database].jobs if reserve_jobs else None
267-
)
242+
raise DataJointError("The order argument must be one of %s" % str(valid_order))
243+
jobs = self.connection.schemas[self.target.database].jobs if reserve_jobs else None
268244

269245
if reserve_jobs:
270246
# Define a signal handler for SIGTERM
@@ -275,16 +251,12 @@ def handler(signum, frame):
275251
old_handler = signal.signal(signal.SIGTERM, handler)
276252

277253
if keys is None:
278-
keys = (self._jobs_to_do(restrictions) - self.target).fetch(
279-
"KEY", limit=limit
280-
)
254+
keys = (self._jobs_to_do(restrictions) - self.target).fetch("KEY", limit=limit)
281255

282256
# exclude "error", "ignore" or "reserved" jobs
283257
if reserve_jobs:
284258
exclude_key_hashes = (
285-
jobs
286-
& {"table_name": self.target.table_name}
287-
& 'status in ("error", "ignore", "reserved")'
259+
jobs & {"table_name": self.target.table_name} & 'status in ("error", "ignore", "reserved")'
288260
).fetch("key_hash")
289261
keys = [key for key in keys if key_hash(key) not in exclude_key_hashes]
290262

@@ -311,11 +283,7 @@ def handler(signum, frame):
311283
)
312284

313285
if processes == 1:
314-
for key in (
315-
tqdm(keys, desc=self.__class__.__name__)
316-
if display_progress
317-
else keys
318-
):
286+
for key in tqdm(keys, desc=self.__class__.__name__) if display_progress else keys:
319287
status = self._populate1(key, jobs, **populate_kwargs)
320288
if status is True:
321289
success_list.append(1)
@@ -328,14 +296,8 @@ def handler(signum, frame):
328296
self.connection.close() # disconnect parent process from MySQL server
329297
del self.connection._conn.ctx # SSLContext is not pickleable
330298
with (
331-
mp.Pool(
332-
processes, _initialize_populate, (self, jobs, populate_kwargs)
333-
) as pool,
334-
(
335-
tqdm(desc="Processes: ", total=nkeys)
336-
if display_progress
337-
else contextlib.nullcontext()
338-
) as progress_bar,
299+
mp.Pool(processes, _initialize_populate, (self, jobs, populate_kwargs)) as pool,
300+
tqdm(desc="Processes: ", total=nkeys) if display_progress else contextlib.nullcontext() as progress_bar,
339301
):
340302
for status in pool.imap(_call_populate1, keys, chunksize=1):
341303
if status is True:
@@ -357,9 +319,7 @@ def handler(signum, frame):
357319
"error_list": error_list,
358320
}
359321

360-
def _populate1(
361-
self, key, jobs, suppress_errors, return_exception_objects, make_kwargs=None
362-
):
322+
def _populate1(self, key, jobs, suppress_errors, return_exception_objects, make_kwargs=None):
363323
"""
364324
populates table for one source key, calling self.make inside a transaction.
365325
:param jobs: the jobs table or None if not reserve_jobs
@@ -372,9 +332,7 @@ def _populate1(
372332
# use the legacy `_make_tuples` callback.
373333
make = self._make_tuples if hasattr(self, "_make_tuples") else self.make
374334

375-
if jobs is not None and not jobs.reserve(
376-
self.target.table_name, self._job_key(key)
377-
):
335+
if jobs is not None and not jobs.reserve(self.target.table_name, self._job_key(key)):
378336
return False
379337

380338
# if make is a generator, its transaction can be delayed until the final stage
@@ -399,23 +357,16 @@ def _populate1(
399357
# tripartite make - transaction is delayed until the final stage
400358
gen = make(dict(key), **(make_kwargs or {}))
401359
fetched_data = next(gen)
402-
fetch_hash = deepdiff.DeepHash(
403-
fetched_data, ignore_iterable_order=False
404-
)[fetched_data]
360+
fetch_hash = deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[fetched_data]
405361
computed_result = next(gen) # perform the computation
406362
# fetch and insert inside a transaction
407363
self.connection.start_transaction()
408364
gen = make(dict(key), **(make_kwargs or {})) # restart make
409365
fetched_data = next(gen)
410366
if (
411-
fetch_hash
412-
!= deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[
413-
fetched_data
414-
]
367+
fetch_hash != deepdiff.DeepHash(fetched_data, ignore_iterable_order=False)[fetched_data]
415368
): # raise error if fetched data has changed
416-
raise DataJointError(
417-
"Referential integrity failed! The `make_fetch` data has changed"
418-
)
369+
raise DataJointError("Referential integrity failed! The `make_fetch` data has changed")
419370
gen.send(computed_result) # insert
420371

421372
except (KeyboardInterrupt, SystemExit, Exception) as error:
@@ -427,9 +378,7 @@ def _populate1(
427378
exception=error.__class__.__name__,
428379
msg=": " + str(error) if str(error) else "",
429380
)
430-
logger.debug(
431-
f"Error making {key} -> {self.target.full_table_name} - {error_message}"
432-
)
381+
logger.debug(f"Error making {key} -> {self.target.full_table_name} - {error_message}")
433382
if jobs is not None:
434383
# show error name and error message (if any)
435384
jobs.error(
@@ -468,9 +417,7 @@ def progress(self, *restrictions, display=False):
468417
total - remaining,
469418
total,
470419
100 - 100 * remaining / (total + 1e-12),
471-
datetime.datetime.strftime(
472-
datetime.datetime.now(), "%Y-%m-%d %H:%M:%S"
473-
),
420+
datetime.datetime.strftime(datetime.datetime.now(), "%Y-%m-%d %H:%M:%S"),
474421
),
475422
)
476423
return remaining, total

0 commit comments

Comments
 (0)