
Commit 22e5aaa

Author: Jesse
SQLAlchemy: fix has_table so it honours schema= argument (#174)
Signed-off-by: Jesse Whitehouse <[email protected]>
1 parent 207dd7c commit 22e5aaa

3 files changed, +51 -3 lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -3,10 +3,12 @@
 ## 2.7.x (Unreleased)
 
 - Add support for Cloud Fetch (#146, #151, #154)
+- SQLAlchemy has_table function now honours schema= argument and adds catalog= argument (#174)
 - Fix: Revised SQLAlchemy dialect and examples for compatibility with SQLAlchemy==1.3.x (#173)
 - Fix: oauth would fail if expired credentials appeared in ~/.netrc (#122)
 - Fix: Python HTTP proxies were broken after switch to urllib3 (#158)
 - Other: Connector now logs operation handle guids as hexadecimal instead of bytes (#170)
+- Add support for Cloud Fetch
 
 ## 2.7.0 (2023-06-26)
 

src/databricks/sqlalchemy/dialect/__init__.py

Lines changed: 8 additions & 3 deletions
@@ -267,17 +267,22 @@ def do_rollback(self, dbapi_connection):
         # Databricks SQL Does not support transactions
         pass
 
-    def has_table(self, connection, table_name, schema=None, **kwargs) -> bool:
+    def has_table(
+        self, connection, table_name, schema=None, catalog=None, **kwargs
+    ) -> bool:
         """SQLAlchemy docstrings say dialect providers must implement this method"""
 
-        schema = schema or "default"
+        _schema = schema or self.schema
+        _catalog = catalog or self.catalog
 
         # DBR >12.x uses underscores in error messages
         DBR_LTE_12_NOT_FOUND_STRING = "Table or view not found"
         DBR_GT_12_NOT_FOUND_STRING = "TABLE_OR_VIEW_NOT_FOUND"
 
         try:
-            res = connection.execute(f"DESCRIBE TABLE {table_name}")
+            res = connection.execute(
+                f"DESCRIBE TABLE {_catalog}.{_schema}.{table_name}"
+            )
             return True
         except DatabaseError as e:
             if DBR_GT_12_NOT_FOUND_STRING in str(
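
The hunk above is truncated mid-condition where the diff's context window ends. As a hedged sketch (not code from this commit), the not-found check plausibly completes by testing both marker strings, reduced here to a standalone helper:

# Sketch only: illustrates the error-string matching that has_table relies on.
# DBR <= 12.x and DBR > 12.x word the missing-table error differently, so both
# markers are checked. The helper name is hypothetical, not from the commit.
DBR_LTE_12_NOT_FOUND_STRING = "Table or view not found"
DBR_GT_12_NOT_FOUND_STRING = "TABLE_OR_VIEW_NOT_FOUND"

def is_table_not_found(error: Exception) -> bool:
    """Return True when a DatabaseError indicates a missing table or view."""
    message = str(error)
    return (
        DBR_GT_12_NOT_FOUND_STRING in message
        or DBR_LTE_12_NOT_FOUND_STRING in message
    )

With a predicate like this, has_table can return False when either DBR error format reports a missing table or view, and re-raise anything unrelated.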

tests/e2e/sqlalchemy/test_basic.py

Lines changed: 41 additions & 0 deletions
@@ -340,3 +340,44 @@ def test_get_table_names_smoke_test(samples_engine: Engine):
     with samples_engine.connect() as conn:
         _names = samples_engine.table_names(schema="nyctaxi", connection=conn)
         _names is not None, "get_table_names did not succeed"
+
+
+def test_has_table_across_schemas(db_engine: Engine, samples_engine: Engine):
+    """For this test to pass these conditions must be met:
+    - Table samples.nyctaxi.trips must exist
+    - Table samples.tpch.customer must exist
+    - The `catalog` and `schema` environment variables must be set and valid
+    """
+
+    with samples_engine.connect() as conn:
+
+        # 1) Check for table within schema declared at engine creation time
+        assert samples_engine.dialect.has_table(connection=conn, table_name="trips")
+
+        # 2) Check for table within another schema in the same catalog
+        assert samples_engine.dialect.has_table(
+            connection=conn, table_name="customer", schema="tpch"
+        )
+
+        # 3) Check for a table within a different catalog
+        other_catalog = os.environ.get("catalog")
+        other_schema = os.environ.get("schema")
+
+        # Create a table in a different catalog
+        with db_engine.connect() as conn:
+            conn.execute("CREATE TABLE test_has_table (numbers_are_cool INT);")
+
+            try:
+                # Verify that this table is not found in the samples catalog
+                assert not samples_engine.dialect.has_table(
+                    connection=conn, table_name="test_has_table"
+                )
+                # Verify that this table is found in a separate catalog
+                assert samples_engine.dialect.has_table(
+                    connection=conn,
+                    table_name="test_has_table",
+                    schema=other_schema,
+                    catalog=other_catalog,
+                )
+            finally:
+                conn.execute("DROP TABLE test_has_table;")
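
The test above exercises the resolution order introduced in the dialect change: explicit catalog= and schema= arguments win, and otherwise has_table falls back to the catalog and schema the engine was created with. A minimal usage sketch under stated assumptions follows; the connection URL shape matches the connector's token/http_path form, but the environment variable names and the values they hold are placeholders, not from this commit:

import os

from sqlalchemy import create_engine

# Placeholder credentials pulled from the environment; names are illustrative.
samples_engine = create_engine(
    f"databricks://token:{os.environ['access_token']}@{os.environ['host']}"
    f"?http_path={os.environ['http_path']}&catalog=samples&schema=nyctaxi"
)

with samples_engine.connect() as conn:
    # Falls back to the engine defaults: DESCRIBE TABLE samples.nyctaxi.trips
    assert samples_engine.dialect.has_table(connection=conn, table_name="trips")

    # Explicit arguments override the defaults:
    # DESCRIBE TABLE samples.tpch.customer
    assert samples_engine.dialect.has_table(
        connection=conn, table_name="customer", schema="tpch", catalog="samples"
    )

Declaring catalog and schema at engine creation keeps call sites short, while the new keyword arguments allow one-off lookups across catalogs without building a second engine.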
