Skip to content

Commit e30d7b3

Browse files
Merge pull request #868 from dimitri-yatsenko/cascade-delete
Adding plugin support
2 parents dd45f61 + a1b7df0 commit e30d7b3

39 files changed

+321
-170
lines changed

.github/workflows/development.yaml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,6 @@ jobs:
2121
mysql_ver: "5.7"
2222
- py_ver: "3.6"
2323
mysql_ver: "5.7"
24-
- py_ver: "3.5"
25-
mysql_ver: "5.7"
2624
steps:
2725
- uses: actions/checkout@v2
2826
- name: Set up Python ${{matrix.py_ver}}
@@ -50,4 +48,4 @@ jobs:
5048
- name: Run style tests
5149
run: |
5250
flake8 --ignore=E121,E123,E126,E226,E24,E704,W503,W504,E722,F401,W605 datajoint \
53-
--count --max-complexity=62 --max-line-length=127 --statistics
51+
--count --max-complexity=62 --max-line-length=127 --statistics

CHANGELOG.md

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,17 @@
11
## Release notes
22

3-
### 0.13.0 -- Jan 11, 2020
3+
### 0.13.0 -- Feb 15, 2021
44
* Re-implement query transpilation into SQL, fixing issues (#386, #449, #450, #484). PR #754
55
* Re-implement cascading deletes for better performance. PR #839.
6-
* Add table method `.update1` to update an existing row in its table.
6+
* Add table method `.update1` to update a row in the table with new values. PR #763
77
* Python datatypes are now enabled by default in blobs (#761). PR #785
88
* Added permissive join and restriction operators `@` and `^` (#785) PR #754
9+
* Support DataJoint datatype and connection plugins (#715, #729) PR #730, #735
10+
* add `dj.key_hash` alias to `dj.hash.key_hash`
11+
* Default `enable_python_native_blobs` to True
12+
* Drop support for Python 3.5
913

10-
### 0.12.8 -- Dec 22, 2020
14+
### 0.12.8 -- Jan 12, 2021
1115
* table.children, .parents, .descendants, and .ancestors can return queryable objects. PR #833
1216
* Load dependencies before querying dependencies. (#179) PR #833
1317
* Fix display of part tables in `schema.save`. (#821) PR #833

README.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,10 @@ Some Python datatypes such as dicts were coerced into numpy recarrays and then f
3232
However, since some Python types were coerced into MATLAB types, old blobs and new blobs may now be fetched as different types of objects even if they were inserted the same way.
3333
For example, new `dict` objects will be returned as `dict` while the same types of objects inserted with `datajoint 0.11` will be recarrays.
3434

35-
Since this is a big change, we chose to disable full blob support by default as a temporary precaution, which will be removed in version 0.13.
35+
Since this is a big change, we chose to temporarily disable this feature by default in DataJoint for Python 0.12.x, allowing users to adjust their code if necessary.
36+
From 0.13.x, the flag will default to True (on), and will ultimately be removed when corresponding decode support for the new format is added to datajoint-matlab (see: datajoint-matlab #222, datajoint-python #765).
3637

37-
You may enable it by setting the `enable_python_native_blobs` flag in `dj.config`.
38+
The flag is configured by setting the `enable_python_native_blobs` flag in `dj.config`.
3839

3940
```python
4041
import datajoint as dj
@@ -68,7 +69,7 @@ as structured arrays, whereas new record inserted in DataJoint 0.12 with
6869
appropriate native python type (dict, etc).
6970
Furthermore, DataJoint for MATLAB does not yet support unpacking native Python datatypes.
7071

71-
With `dj.config["enable_python_native_blobs"]` set to `False` (default),
72+
With `dj.config["enable_python_native_blobs"]` set to `False`,
7273
any attempt to insert any datatype other than a numpy array will result in an exception.
7374
This is meant to get users to read this message in order to allow proper testing
7475
and migration of pre-0.12 pipelines to 0.12 in a safe manner.

datajoint.pub

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
-----BEGIN PUBLIC KEY-----
2+
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDUMOo2U7YQ1uOrKU/IreM3AQP2
3+
AXJC3au+S9W+dilxHcJ3e98bRVqrFeOofcGeRPoNc38fiLmLDUiBskJeVrpm29Wo
4+
AkH6yhZWk1o8NvGMhK4DLsJYlsH6tZuOx9NITKzJuOOH6X1I5Ucs7NOSKnmu7g5g
5+
WTT5kCgF5QAe5JN8WQIDAQAB
6+
-----END PUBLIC KEY-----

datajoint/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
'Not', 'AndList', 'U', 'Diagram', 'Di', 'ERD',
2525
'set_password', 'kill',
2626
'MatCell', 'MatStruct', 'AttributeAdapter',
27-
'errors', 'DataJointError', 'key']
27+
'errors', 'DataJointError', 'key', 'key_hash']
2828

2929
from .version import __version__
3030
from .settings import config
@@ -38,6 +38,7 @@
3838
from .admin import set_password, kill
3939
from .blob import MatCell, MatStruct
4040
from .fetch import key
41+
from .hash import key_hash
4142
from .attribute_adapter import AttributeAdapter
4243
from . import errors
4344
from .errors import DataJointError

datajoint/attribute_adapter.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import re
22
from .errors import DataJointError, _support_adapted_types
3+
from .plugin import type_plugins
34

45

56
class AttributeAdapter:
@@ -38,10 +39,11 @@ def get_adapter(context, adapter_name):
3839
raise DataJointError('Support for Adapted Attribute types is disabled.')
3940
adapter_name = adapter_name.lstrip('<').rstrip('>')
4041
try:
41-
adapter = context[adapter_name]
42+
adapter = (context[adapter_name] if adapter_name in context
43+
else type_plugins[adapter_name]['object'].load())
4244
except KeyError:
4345
raise DataJointError(
44-
"Attribute adapter '{adapter_name}' is not defined.".format(adapter_name=adapter_name)) from None
46+
"Attribute adapter '{adapter_name}' is not defined.".format(adapter_name=adapter_name))
4547
if not isinstance(adapter, AttributeAdapter):
4648
raise DataJointError(
4749
"Attribute adapter '{adapter_name}' must be an instance of datajoint.AttributeAdapter".format(

datajoint/autopopulate.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def _rename_attributes(table, props):
4040
parents = self.target.parents(primary=True, as_objects=True, foreign_key_info=True)
4141
if not parents:
4242
raise DataJointError(
43-
'A relation must have primary dependencies for auto-populate to work') from None
43+
'A relation must have primary dependencies for auto-populate to work')
4444
self._key_source = _rename_attributes(*parents[0])
4545
for q in parents[1:]:
4646
self._key_source *= _rename_attributes(*q)

datajoint/blob.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,10 @@
1111
import uuid
1212
import numpy as np
1313
from .errors import DataJointError
14-
from .utils import OrderedDict
1514
from .settings import config
1615

1716

18-
mxClassID = OrderedDict((
17+
mxClassID = dict((
1918
# see http://www.mathworks.com/help/techdoc/apiref/mxclassid.html
2019
('mxUNKNOWN_CLASS', None),
2120
('mxCELL_CLASS', None),
@@ -346,8 +345,8 @@ def pack_set(self, t):
346345
len_u64(it) + it for it in (self.pack_blob(i) for i in t))
347346

348347
def read_dict(self):
349-
return OrderedDict((self.read_blob(self.read_value()), self.read_blob(self.read_value()))
350-
for _ in range(self.read_value()))
348+
return dict((self.read_blob(self.read_value()), self.read_blob(self.read_value()))
349+
for _ in range(self.read_value()))
351350

352351
def pack_dict(self, d):
353352
return b"\4" + len_u64(d) + b"".join(

datajoint/connection.py

Lines changed: 44 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,36 @@
1515
from .dependencies import Dependencies
1616
from .blob import pack, unpack
1717
from .hash import uuid_from_buffer
18+
from .plugin import connection_plugins
1819

1920
logger = logging.getLogger(__name__)
2021
query_log_max_length = 300
2122

2223

24+
def get_host_hook(host_input):
25+
if '://' in host_input:
26+
plugin_name = host_input.split('://')[0]
27+
try:
28+
return connection_plugins[plugin_name]['object'].load().get_host(host_input)
29+
except KeyError:
30+
raise errors.DataJointError(
31+
"Connection plugin '{}' not found.".format(plugin_name))
32+
else:
33+
return host_input
34+
35+
36+
def connect_host_hook(connection_obj):
37+
if '://' in connection_obj.conn_info['host_input']:
38+
plugin_name = connection_obj.conn_info['host_input'].split('://')[0]
39+
try:
40+
connection_plugins[plugin_name]['object'].load().connect_host(connection_obj)
41+
except KeyError:
42+
raise errors.DataJointError(
43+
"Connection plugin '{}' not found.".format(plugin_name))
44+
else:
45+
connection_obj.connect()
46+
47+
2348
def translate_query_error(client_error, query):
2449
"""
2550
Take client error and original query and return the corresponding DataJoint exception.
@@ -82,7 +107,8 @@ def conn(host=None, user=None, password=None, *, init_fun=None, reset=False, use
82107
#encrypted-connection-options).
83108
"""
84109
if not hasattr(conn, 'connection') or reset:
85-
host = host if host is not None else config['database.host']
110+
host_input = host if host is not None else config['database.host']
111+
host = get_host_hook(host_input)
86112
user = user if user is not None else config['database.user']
87113
password = password if password is not None else config['database.password']
88114
if user is None: # pragma: no cover
@@ -91,7 +117,8 @@ def conn(host=None, user=None, password=None, *, init_fun=None, reset=False, use
91117
password = getpass(prompt="Please enter DataJoint password: ")
92118
init_fun = init_fun if init_fun is not None else config['connection.init_function']
93119
use_tls = use_tls if use_tls is not None else config['database.use_tls']
94-
conn.connection = Connection(host, user, password, None, init_fun, use_tls)
120+
conn.connection = Connection(host, user, password, None, init_fun, use_tls,
121+
host_input=host_input)
95122
return conn.connection
96123

97124

@@ -113,6 +140,10 @@ def fetchall(self):
113140
def fetchone(self):
114141
return next(self._iter)
115142

143+
@property
144+
def rowcount(self):
145+
return len(self._data)
146+
116147

117148
class Connection:
118149
"""
@@ -128,7 +159,9 @@ class Connection:
128159
:param init_fun: connection initialization function (SQL)
129160
:param use_tls: TLS encryption option
130161
"""
131-
def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None):
162+
163+
def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None,
164+
host_input=None):
132165
if ':' in host:
133166
# the port in the hostname overrides the port argument
134167
host, port = host.split(':')
@@ -139,11 +172,12 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None)
139172
if use_tls is not False:
140173
self.conn_info['ssl'] = use_tls if isinstance(use_tls, dict) else {'ssl': {}}
141174
self.conn_info['ssl_input'] = use_tls
175+
self.conn_info['host_input'] = host_input
142176
self.init_fun = init_fun
143177
print("Connecting {user}@{host}:{port}".format(**self.conn_info))
144178
self._conn = None
145179
self._query_cache = None
146-
self.connect()
180+
connect_host_hook(self)
147181
if self.is_connected:
148182
logger.info("Connected {user}@{host}:{port}".format(**self.conn_info))
149183
self.connection_id = self.query('SELECT connection_id()').fetchone()[0]
@@ -172,15 +206,15 @@ def connect(self):
172206
"STRICT_ALL_TABLES,NO_ENGINE_SUBSTITUTION",
173207
charset=config['connection.charset'],
174208
**{k: v for k, v in self.conn_info.items()
175-
if k != 'ssl_input'})
209+
if k not in ['ssl_input', 'host_input']})
176210
except client.err.InternalError:
177211
self._conn = client.connect(
178212
init_command=self.init_fun,
179213
sql_mode="NO_ZERO_DATE,NO_ZERO_IN_DATE,ERROR_FOR_DIVISION_BY_ZERO,"
180214
"STRICT_ALL_TABLES,NO_ENGINE_SUBSTITUTION",
181215
charset=config['connection.charset'],
182216
**{k: v for k, v in self.conn_info.items()
183-
if not(k == 'ssl_input' or
217+
if not(k in ['ssl_input', 'host_input'] or
184218
k == 'ssl' and self.conn_info['ssl_input'] is None)})
185219
self._conn.autocommit(True)
186220

@@ -223,7 +257,7 @@ def _execute_query(cursor, query, args, suppress_warnings):
223257
warnings.simplefilter("ignore")
224258
cursor.execute(query, args)
225259
except client.err.Error as err:
226-
raise translate_query_error(err, query) from None
260+
raise translate_query_error(err, query)
227261

228262
def query(self, query, args=(), *, as_dict=False, suppress_warnings=True, reconnect=None):
229263
"""
@@ -247,7 +281,7 @@ def query(self, query, args=(), *, as_dict=False, suppress_warnings=True, reconn
247281
try:
248282
buffer = cache_path.read_bytes()
249283
except FileNotFoundError:
250-
pass # proceed to the normal query
284+
pass # proceed to query the database
251285
else:
252286
return EmulatedCursor(unpack(buffer))
253287

@@ -262,10 +296,10 @@ def query(self, query, args=(), *, as_dict=False, suppress_warnings=True, reconn
262296
if not reconnect:
263297
raise
264298
warnings.warn("MySQL server has gone away. Reconnecting to the server.")
265-
self.connect()
299+
connect_host_hook(self)
266300
if self._in_transaction:
267301
self.cancel_transaction()
268-
raise errors.LostConnectionError("Connection was lost during a transaction.") from None
302+
raise errors.LostConnectionError("Connection was lost during a transaction.")
269303
logger.debug("Re-executing")
270304
cursor = self._conn.cursor(cursor=cursor_class)
271305
self._execute_query(cursor, query, args, suppress_warnings)

datajoint/declare.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,6 @@
99
from .errors import DataJointError, _support_filepath_types, FILEPATH_FEATURE_SWITCH
1010
from .attribute_adapter import get_adapter
1111

12-
from .utils import OrderedDict
13-
1412
UUID_DATA_TYPE = 'binary(16)'
1513
MAX_TABLE_NAME_LENGTH = 64
1614
CONSTANT_LITERALS = {'CURRENT_TIMESTAMP'} # SQL literals to be used without quotes (case insensitive)
@@ -46,7 +44,7 @@ def match_type(attribute_type):
4644
try:
4745
return next(category for category, pattern in TYPE_PATTERN.items() if pattern.match(attribute_type))
4846
except StopIteration:
49-
raise DataJointError("Unsupported attribute type {type}".format(type=attribute_type)) from None
47+
raise DataJointError("Unsupported attribute type {type}".format(type=attribute_type))
5048

5149

5250
logger = logging.getLogger(__name__)
@@ -136,7 +134,7 @@ def compile_foreign_key(line, context, attributes, primary_key, attr_sql, foreig
136134
try:
137135
result = foreign_key_parser_old.parseString(line)
138136
except pp.ParseBaseException as err:
139-
raise DataJointError('Parsing error in line "%s". %s.' % (line, err)) from None
137+
raise DataJointError('Parsing error in line "%s". %s.' % (line, err))
140138
else:
141139
obsolete = True
142140
try:
@@ -303,7 +301,7 @@ def _make_attribute_alter(new, old, primary_key):
303301
name_regexp = re.compile(r"^`(?P<name>\w+)`")
304302
original_regexp = re.compile(r'COMMENT "{\s*(?P<name>\w+)\s*}')
305303
matched = ((name_regexp.match(d), original_regexp.search(d)) for d in new)
306-
new_names = OrderedDict((d.group('name'), n and n.group('name')) for d, n in matched)
304+
new_names = dict((d.group('name'), n and n.group('name')) for d, n in matched)
307305
old_names = [name_regexp.search(d).group('name') for d in old]
308306

309307
# verify that original names are only used once
@@ -433,7 +431,7 @@ def compile_attribute(line, in_key, foreign_key_sql, context):
433431
match = attribute_parser.parseString(line + '#', parseAll=True)
434432
except pp.ParseException as err:
435433
raise DataJointError('Declaration error in position {pos} in line:\n {line}\n{msg}'.format(
436-
line=err.args[0], pos=err.args[1], msg=err.args[2])) from None
434+
line=err.args[0], pos=err.args[1], msg=err.args[2]))
437435
match['comment'] = match['comment'].rstrip('#')
438436
if 'default' not in match:
439437
match['default'] = ''

0 commit comments

Comments
 (0)