Skip to content

Commit dd3a249

Browse files
committed
Merge branch 'master' of https://github.com/jverswijver/datajoint-python into remove_checksum
2 parents eac203f + 8f9e81c commit dd3a249

File tree

12 files changed

+54
-74
lines changed

12 files changed

+54
-74
lines changed

.github/workflows/development.yaml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,6 @@ jobs:
3636
include:
3737
- py_ver: "3.7"
3838
mysql_ver: "5.7"
39-
- py_ver: "3.6"
40-
mysql_ver: "5.7"
4139
steps:
4240
- uses: actions/checkout@v2
4341
- name: Set up Python ${{matrix.py_ver}}
@@ -106,4 +104,4 @@ jobs:
106104
with:
107105
branch: gh-pages
108106
directory: gh-pages
109-
github_token: ${{secrets.GITHUB_TOKEN}}
107+
github_token: ${{secrets.GITHUB_TOKEN}}

CHANGELOG.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
11
## Release notes
22

3-
### 0.13.6 -- TBD
3+
### 0.13.6 -- Jun 13, 2022
44
* Add - Config option to set threshold for when to stop using checksums for filepath stores. PR #1025
5-
* Add - unified package level logger for package
6-
* Update - swap various datajoint messages, warnings, ect. to use the new logger.
5+
* Add - unified package level logger for package (#667) PR #1031
6+
* Update - swap various datajoint messages, warnings, etc. to use the new logger. (#667) PR #1031
77

88
### 0.13.5 -- May 19, 2022
99
* Update - Import ABC from collections.abc for Python 3.10 compatibility
1010
* Bugfix - Fix multiprocessing value error (#1013) PR #1026
1111

12-
### 0.13.4 -- March, 28 2022
12+
### 0.13.4 -- Mar, 28 2022
1313
* Add - Allow reading blobs produced by legacy 32-bit compiled mYm library for matlab. PR #995
1414
* Bugfix - Add missing `jobs` argument for multiprocessing PR #997
1515
* Add - Test for multiprocessing PR #1008

datajoint/autopopulate.py

Lines changed: 21 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from .errors import DataJointError, LostConnectionError
1010
import signal
1111
import multiprocessing as mp
12+
import contextlib
1213

1314
# noinspection PyExceptionInherit,PyCallingNonCallable
1415

@@ -158,7 +159,7 @@ def populate(
158159
max_calls=None,
159160
display_progress=False,
160161
processes=1,
161-
make_kwargs=None
162+
make_kwargs=None,
162163
):
163164
"""
164165
``table.populate()`` calls ``table.make(key)`` for every primary key in
@@ -206,14 +207,14 @@ def handler(signum, frame):
206207
elif order == "random":
207208
random.shuffle(keys)
208209

209-
logger.info("Found %d keys to populate" % len(keys))
210+
logger.debug("Found %d keys to populate" % len(keys))
210211

211212
keys = keys[:max_calls]
212213
nkeys = len(keys)
213214
if not nkeys:
214215
return
215216

216-
processes = min(*(_ for _ in (processes, nkeys, mp.cpu_count()) if _))
217+
processes = min(_ for _ in (processes, nkeys, mp.cpu_count()) if _)
217218

218219
error_list = []
219220
populate_kwargs = dict(
@@ -235,17 +236,16 @@ def handler(signum, frame):
235236
del self.connection._conn.ctx # SSLContext is not pickleable
236237
with mp.Pool(
237238
processes, _initialize_populate, (self, jobs, populate_kwargs)
238-
) as pool:
239-
if display_progress:
240-
with tqdm(desc="Processes: ", total=nkeys) as pbar:
241-
for error in pool.imap(_call_populate1, keys, chunksize=1):
242-
if error is not None:
243-
error_list.append(error)
244-
pbar.update()
245-
else:
246-
for error in pool.imap(_call_populate1, keys):
247-
if error is not None:
248-
error_list.append(error)
239+
) as pool, (
240+
tqdm(desc="Processes: ", total=nkeys)
241+
if display_progress
242+
else contextlib.nullcontext()
243+
) as progress_bar:
244+
for error in pool.imap(_call_populate1, keys, chunksize=1):
245+
if error is not None:
246+
error_list.append(error)
247+
if display_progress:
248+
progress_bar.update()
249249
self.connection.connect() # reconnect parent process to MySQL server
250250

251251
# restore original signal handler:
@@ -275,7 +275,7 @@ def _populate1(
275275
if jobs is not None:
276276
jobs.complete(self.target.table_name, self._job_key(key))
277277
else:
278-
logger.debug("Populating: " + str(key))
278+
logger.debug(f"Making {key} -> {self.target.full_table_name}")
279279
self.__class__._allow_insert = True
280280
try:
281281
make(dict(key), **(make_kwargs or {}))
@@ -288,6 +288,9 @@ def _populate1(
288288
exception=error.__class__.__name__,
289289
msg=": " + str(error) if str(error) else "",
290290
)
291+
logger.debug(
292+
f"Error making {key} -> {self.target.full_table_name} - {error_message}"
293+
)
291294
if jobs is not None:
292295
# show error name and error message (if any)
293296
jobs.error(
@@ -303,6 +306,9 @@ def _populate1(
303306
return key, error if return_exception_objects else error_message
304307
else:
305308
self.connection.commit_transaction()
309+
logger.debug(
310+
f"Success making {key} -> {self.target.full_table_name}"
311+
)
306312
if jobs is not None:
307313
jobs.complete(self.target.table_name, self._job_key(key))
308314
finally:

datajoint/connection.py

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,9 @@
2121
query_log_max_length = 300
2222

2323

24+
cache_key = "query_cache" # the key to lookup the query_cache folder in dj.config
25+
26+
2427
def get_host_hook(host_input):
2528
if "://" in host_input:
2629
plugin_name = host_input.split("://")[0]
@@ -220,7 +223,7 @@ def connect(self):
220223
k: v
221224
for k, v in self.conn_info.items()
222225
if k not in ["ssl_input", "host_input"]
223-
}
226+
},
224227
)
225228
except client.err.InternalError:
226229
self._conn = client.connect(
@@ -236,7 +239,7 @@ def connect(self):
236239
or k == "ssl"
237240
and self.conn_info["ssl_input"] is None
238241
)
239-
}
242+
},
240243
)
241244
self._conn.autocommit(True)
242245

@@ -254,13 +257,12 @@ def set_query_cache(self, query_cache=None):
254257
def purge_query_cache(self):
255258
"""Purges all query cache."""
256259
if (
257-
"query_cache" in config
258-
and isinstance(config["query_cache"], str)
259-
and pathlib.Path(config["query_cache"]).is_dir()
260+
isinstance(config.get(cache_key), str)
261+
and pathlib.Path(config[cache_key]).is_dir()
260262
):
261-
path_iter = pathlib.Path(config["query_cache"]).glob("**/*")
262-
for path in path_iter:
263-
path.unlink()
263+
for path in pathlib.Path(config[cache_key]).iterdir():
264+
if not path.is_dir():
265+
path.unlink()
264266

265267
def close(self):
266268
self._conn.close()
@@ -313,15 +315,15 @@ def query(
313315
"Only SELECT queries are allowed when query caching is on."
314316
)
315317
if use_query_cache:
316-
if not config["query_cache"]:
318+
if not config[cache_key]:
317319
raise errors.DataJointError(
318-
"Provide filepath dj.config['query_cache'] when using query caching."
320+
f"Provide filepath dj.config['{cache_key}'] when using query caching."
319321
)
320322
hash_ = uuid_from_buffer(
321323
(str(self._query_cache) + re.sub(r"`\$\w+`", "", query)).encode()
322324
+ pack(args)
323325
)
324-
cache_path = pathlib.Path(config["query_cache"]) / str(hash_)
326+
cache_path = pathlib.Path(config[cache_key]) / str(hash_)
325327
try:
326328
buffer = cache_path.read_bytes()
327329
except FileNotFoundError:

datajoint/diagram.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
from .errors import DataJointError
1111
from .table import lookup_class_name
1212

13-
logger = logging.getLogger(__name__.split(".")[0])
1413

1514
try:
1615
from matplotlib import pyplot as plt
@@ -27,6 +26,7 @@
2726
diagram_active = False
2827

2928

29+
logger = logging.getLogger(__name__.split(".")[0])
3030
user_table_classes = (Manual, Lookup, Computed, Imported, Part)
3131

3232

datajoint/logging.py

Lines changed: 2 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,9 @@
55

66
logger = logging.getLogger(__name__.split(".")[0])
77

8-
log_level = os.environ.get("DJ_LOG_LEVEL", "warning").upper()
8+
log_level = os.getenv("DJ_LOG_LEVEL", "info").upper()
99

10-
log_format = logging.Formatter(
11-
"[%(asctime)s][%(funcName)s][%(levelname)s]: %(message)s"
12-
)
10+
log_format = logging.Formatter("[%(asctime)s][%(levelname)s]: %(message)s")
1311

1412
stream_handler = logging.StreamHandler() # default handler
1513
stream_handler.setFormatter(log_format)
@@ -32,26 +30,3 @@ def excepthook(exc_type, exc_value, exc_traceback):
3230

3331

3432
sys.excepthook = excepthook
35-
36-
37-
# https://github.com/tqdm/tqdm/issues/313#issuecomment-267959111
38-
class TqdmToLogger(io.StringIO):
39-
"""
40-
Output stream for TQDM which will output to logger module instead of
41-
the StdOut.
42-
"""
43-
44-
logger = None
45-
level = None
46-
buf = ""
47-
48-
def __init__(self, logger, level=None):
49-
super(TqdmToLogger, self).__init__()
50-
self.logger = logger
51-
self.level = level or logging.INFO
52-
53-
def write(self, buf):
54-
self.buf = buf.strip("\r\n\t ")
55-
56-
def flush(self):
57-
self.logger.log(self.level, self.buf)

datajoint/schemas.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ def activate(
134134
)
135135
)
136136
# create database
137-
logger.info("Creating schema `{name}`.".format(name=schema_name))
137+
logger.debug("Creating schema `{name}`.".format(name=schema_name))
138138
try:
139139
self.connection.query(
140140
"CREATE DATABASE `{name}`".format(name=schema_name)
@@ -360,12 +360,12 @@ def drop(self, force=False):
360360
)
361361
== "yes"
362362
):
363-
logger.info("Dropping `{database}`.".format(database=self.database))
363+
logger.debug("Dropping `{database}`.".format(database=self.database))
364364
try:
365365
self.connection.query(
366366
"DROP DATABASE `{database}`".format(database=self.database)
367367
)
368-
logger.info(
368+
logger.debug(
369369
"Schema `{database}` was dropped successfully.".format(
370370
database=self.database
371371
)

datajoint/settings.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -241,8 +241,8 @@ def __getitem__(self, key):
241241
return self._conf[key]
242242

243243
def __setitem__(self, key, value):
244-
logger.log(
245-
logging.INFO, "Setting {0:s} to {1:s}".format(str(key), str(value))
244+
logger.debug(
245+
logging.DEBUG, "Setting {0:s} to {1:s}".format(str(key), str(value))
246246
)
247247
if validators[key](value):
248248
self._conf[key] = value

datajoint/table.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -310,7 +310,7 @@ def update1(self, row):
310310
raise DataJointError("Update cannot be applied to a restricted table.")
311311
key = {k: row[k] for k in self.primary_key}
312312
if len(self & key) != 1:
313-
raise DataJointError("Update entry must exist.")
313+
raise DataJointError("Update can only be applied to one existing entry.")
314314
# UPDATE query
315315
row = [
316316
self.__make_placeholder(k, v)

docs-parts/intro/Releases_lang1.rst

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,15 @@
1-
0.13.6 -- TBD
1+
0.13.6 -- Jun 13, 2022
22
----------------------
33
* Add - Config option to set threshold for when to stop using checksums for filepath stores. PR #1025
4-
* Add - unified package level logger for package
5-
* Update - swap various datajoint messages, warnings, ect. to use the new logger.
4+
* Add - unified package level logger for package (#667) PR #1031
5+
* Update - swap various datajoint messages, warnings, etc. to use the new logger. (#667) PR #1031
66

77
0.13.5 -- May 19, 2022
88
----------------------
99
* Update - Import ABC from collections.abc for Python 3.10 compatibility
1010
* Bugfix - Fix multiprocessing value error (#1013) PR #1026
1111

12-
0.13.4 -- March 28, 2022
12+
0.13.4 -- Mar 28, 2022
1313
----------------------
1414
* Add - Allow reading blobs produced by legacy 32-bit compiled mYm library for matlab. PR #995
1515
* Bugfix - Add missing ``jobs`` argument for multiprocessing PR #997

0 commit comments

Comments (0)