Skip to content

Commit 2e513c8

Browse files
authored
Resolve params to dict for Connection.execute() & deprecation of bind and autoload parameters (#615)
* Resolve params to dict for Connection.execute()
* Alter MetaData usage to bind to the right object or function instead, as MetaData no longer supports bind
* Ensure autoload parameters have been replaced with autoload_with by configuring the warning filter to raise an error
* Update errors and changelog
* Resolve autocommit with DML by using begin for an explicit transaction
1 parent 7adae7b commit 2e513c8

File tree

7 files changed

+61
-44
lines changed

7 files changed

+61
-44
lines changed

doc/changes/unreleased.md

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,10 @@ This allows us to use the latest dependencies, which do not have open vulnerabil
99

1010
## Refactoring
1111

12-
- #610: Altered string input into `Connection.execute()` to be handled properly
12+
- #610: Altered string input into `Connection.execute()` to be handled properly with `sql.text()`
13+
- #614: Altered params input into `Connection.execute()` to be handled properly with `dict`
14+
- #616: Altered usage of MetaData which was binding to a connection to instead bind in the needed object or function
15+
- #617: Enacted warning for the deprecation of the `autoload` parameter and requirement of `bind`
1316

1417
## Internal
1518

pyproject.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,10 @@ addopts = "--tb native -v -r fxX"
9494
filterwarnings = [
9595
# before end of #413, switch this from a text specific issue to a general error on a warning to ensure known deprecation issues have been resolved
9696
"error:.*Passing a string.*",
97+
"error:.* a single dictionary.*",
98+
"error:.*The MetaData.bind argument.*",
99+
"error:.*The autoload parameter.*",
100+
"error:.*schema methods that invoke SQL against.*",
97101
# this is used for turbodbc and pyodbc as historically we'd like to remove them.
98102
# thus far, it seems like this is not a strict requirement for the migration,
99103
# so we will ignore them.

sqlalchemy_exasol/base.py

Lines changed: 18 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,6 @@
5353
from sqlalchemy import (
5454
event,
5555
schema,
56-
select,
5756
sql,
5857
)
5958
from sqlalchemy import types as sqltypes
@@ -910,7 +909,7 @@ def get_table_names(self, connection, schema, **kw):
910909
else:
911910
sql_statement += ":schema ORDER BY table_name"
912911
result = connection.execute(
913-
sql.text(sql_statement), schema=self.denormalize_name(schema)
912+
sql.text(sql_statement), {"schema": self.denormalize_name(schema)}
914913
)
915914
tables = [self.normalize_name(row[0]) for row in result]
916915
return tables
@@ -944,7 +943,7 @@ def get_view_names(self, connection, schema=None, **kw):
944943
else:
945944
sql_statement += ":schema ORDER BY view_name"
946945
result = connection.execute(
947-
sql.text(sql_statement), schema=self.denormalize_name(schema)
946+
sql.text(sql_statement), {"schema": self.denormalize_name(schema)}
948947
)
949948
return [self.normalize_name(row[0]) for row in result]
950949

@@ -958,8 +957,10 @@ def get_view_definition(self, connection, view_name, schema=None, **kw):
958957
sql_stmnt += ":schema"
959958
result = connection.execute(
960959
sql.text(sql_stmnt),
961-
view_name=self.denormalize_name(view_name),
962-
schema=self.denormalize_name(schema),
960+
{
961+
"view_name": self.denormalize_name(view_name),
962+
"schema": self.denormalize_name(schema),
963+
},
963964
).scalar()
964965
return result if result else None
965966

@@ -999,8 +1000,10 @@ def _get_columns(self, connection, table_name, schema=None, **kw):
9991000
)
10001001
result = connection.execute(
10011002
sql.text(sql_statement),
1002-
schema=self.denormalize_name(schema),
1003-
table=self.denormalize_name(table_name),
1003+
{
1004+
"schema": self.denormalize_name(schema),
1005+
"table": self.denormalize_name(table_name),
1006+
},
10041007
)
10051008
return list(result)
10061009

@@ -1103,8 +1106,10 @@ def _get_pk_constraint(self, connection, table_name, schema, **kw):
11031106
)
11041107
result = connection.execute(
11051108
sql.text(sql_statement),
1106-
schema=self.denormalize_name(schema),
1107-
table=table_name,
1109+
{
1110+
"schema": self.denormalize_name(schema),
1111+
"table": self.denormalize_name(table_name),
1112+
},
11081113
)
11091114
pkeys = []
11101115
constraint_name = None
@@ -1132,8 +1137,10 @@ def _get_foreign_keys(self, connection, table_name, schema=None, **kw):
11321137
)
11331138
result = connection.execute(
11341139
sql.text(sql_statement),
1135-
schema=self.denormalize_name(schema),
1136-
table=self.denormalize_name(table_name),
1140+
{
1141+
"schema": self.denormalize_name(schema),
1142+
"table": self.denormalize_name(table_name),
1143+
},
11371144
)
11381145
return list(result)
11391146

test/integration/exasol/test_exasol.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,8 @@ def define_tables(cls, metadata):
9191

9292
def test_distribute_by_constraint(self):
9393
try:
94-
Table("t", MetaData(testing.db), autoload=True)
94+
with testing.db.connect() as conn:
95+
Table("t", MetaData(), autoload_with=conn)
9596
except:
9697
assert False
9798
# TODO: check that reflected table object is identical

test/integration/exasol/test_large_metadata.py

Lines changed: 22 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -54,24 +54,25 @@ def test_reflect_table_object(self):
5454
)
5555
c.execute(sql.text(table_ddl))
5656

57-
meta = MetaData(bind=config.db)
58-
table_name = self.get_table_name(table_count, column_count, 0)
59-
start = time.time()
60-
# insert into table
61-
Table(table_name, meta, autoload=True, schema=self.schema)
62-
end = time.time()
63-
print(
64-
"table load timer: attempt: 1, table_count: %s, column_count: %s, time: %s"
65-
% (table_count, column_count, (end - start))
66-
)
67-
start = time.time()
68-
# insert into table
69-
Table(table_name, meta, autoload=True, schema=self.schema)
70-
end = time.time()
71-
print(
72-
"table load timer: attempt: 2, table_count: %s, column_count: %s, time: %s"
73-
% (table_count, column_count, (end - start))
74-
)
57+
with config.db.connect() as conn:
58+
meta = MetaData()
59+
table_name = self.get_table_name(table_count, column_count, 0)
60+
start = time.time()
61+
# insert into table
62+
Table(table_name, meta, autoload_with=conn, schema=self.schema)
63+
end = time.time()
64+
print(
65+
"table load timer: attempt: 1, table_count: %s, column_count: %s, time: %s"
66+
% (table_count, column_count, (end - start))
67+
)
68+
start = time.time()
69+
# insert into table
70+
Table(table_name, meta, autoload_with=conn, schema=self.schema)
71+
end = time.time()
72+
print(
73+
"table load timer: attempt: 2, table_count: %s, column_count: %s, time: %s"
74+
% (table_count, column_count, (end - start))
75+
)
7576

7677
def test_reflect_metadata_object(self):
7778
for table_count in table_counts:
@@ -89,9 +90,10 @@ def test_reflect_metadata_object(self):
8990
self.schema, table_name, column_count
9091
)
9192
c.execute(sql.text(table_ddl))
92-
meta = MetaData(bind=config.db)
93+
meta = MetaData()
9394
start = time.time()
94-
meta.reflect()
95+
with config.db.connect() as conn:
96+
meta.reflect(bind=conn)
9597
end = time.time()
9698
print(
9799
"all tables (MetaData.reflect) load timer: table_count: %s, column_count: %s, time: %s"

test/integration/exasol/test_regression.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -115,10 +115,11 @@ def teardown_class(cls):
115115
engine = config.db
116116

117117
def _drop_tables(schema):
118-
metadata = MetaData(engine, schema=schema)
119-
metadata.reflect()
120-
to_be_deleted = [metadata.tables[name] for name in metadata.tables]
121-
metadata.drop_all(engine, to_be_deleted)
118+
metadata = MetaData(schema=schema)
119+
with engine.connect() as conn:
120+
metadata.reflect(bind=conn)
121+
to_be_deleted = [metadata.tables[name] for name in metadata.tables]
122+
metadata.drop_all(engine, to_be_deleted)
122123

123124
def _drop_views(schema, views):
124125
with engine.connect() as conn:

test/integration/sqlalchemy/test_suite.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,8 @@ class RowFetchTest(_RowFetchTest):
2929
RATIONAL = cleandoc(
3030
"""
3131
PyExasol currently does not support/allow duplicate names in the results set.
32-
33-
See also:
32+
33+
See also:
3434
* pyexasol.statement.ExaStatement._check_duplicate_col_names
3535
"""
3636
)
@@ -66,7 +66,7 @@ def define_views(cls, metadata):
6666
RATIONALE = cleandoc(
6767
"""
6868
The Exasol dialect does not check against views for `has_table`, see also `Inspector.has_table()`.
69-
69+
7070
This behaviour is subject to change with sqlalchemy 2.0.
7171
See also:
7272
* https://github.com/sqlalchemy/sqlalchemy/blob/3fc6c40ea77c971d3067dab0fdf57a5b5313b69b/lib/sqlalchemy/engine/reflection.py#L415
@@ -116,7 +116,7 @@ class RowCountTest(_RowCountTest):
116116
the cursor result always will be set to the rowcount = -1 in this case.
117117
This also is a valid behaviour according to the python DBAPI specification.
118118
For more details see also:
119-
* https://peps.python.org/pep-0249/
119+
* https://peps.python.org/pep-0249/
120120
* https://peps.python.org/pep-0249/#rowcount
121121
* https://peps.python.org/pep-0249/#id21
122122
* https://peps.python.org/pep-0249/#executemany
@@ -126,7 +126,7 @@ class RowCountTest(_RowCountTest):
126126
TURBODBC_RATIONALE = cleandoc(
127127
"""
128128
The currently used turbodbc driver returns invalid results.
129-
Attention:
129+
Attention:
130130
* turbodbc maintenance is paused until if it is clear if there is still demand for it
131131
* If this tests will succeed in the future consider repining the turbodbc driver
132132
dependency in order to provide support for this "features".
@@ -292,9 +292,8 @@ def test_integrity_error(self):
292292
@requirements.duplicate_key_raises_integrity_error
293293
def test_integrity_error_raw_sql(self):
294294
insert = text("INSERT INTO MANUAL_PK VALUES (1, 'd1')")
295-
with config.db.connect() as conn:
295+
with config.db.begin() as conn:
296296
conn.execute(insert)
297-
298297
assert_raises(exc.IntegrityError, conn.execute, insert)
299298

300299

0 commit comments

Comments
 (0)