diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 681432fa..676c2e0f 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -24,5 +24,3 @@ updates: - "*" exclude-patterns: - "sqlalchemy" - - "turbodbc" - diff --git a/.github/workflows/slow-checks.yml b/.github/workflows/slow-checks.yml index ea2d96e6..fd38ff9d 100644 --- a/.github/workflows/slow-checks.yml +++ b/.github/workflows/slow-checks.yml @@ -11,7 +11,7 @@ jobs: contents: read tests: - name: Integration-Tests (${{matrix.connector}}, Python-${{ matrix.python-version }}, Exasol-${{ matrix.exasol-version}}) + name: Integration-Tests (Python-${{ matrix.python-version }}, test:${{ matrix.integration-group }}, Exasol-${{ matrix.exasol-version}}, ${{matrix.connector}}) needs: [ build-matrix ] runs-on: ubuntu-24.04 permissions: @@ -31,17 +31,20 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Install via apt - run: sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev + - name: Start test db + run: poetry run -- nox -s db:start -- --db-version ${{ matrix.exasol-version }} - name: Run Tests and Collect Coverage run: | export SQLALCHEMY_WARN_20=1 - poetry run -- nox -s test:integration -- --coverage --db-version ${{ matrix.exasol-version }} + poetry run -- nox -s test:${{ matrix.integration-group }} - name: Upload Artifacts uses: actions/upload-artifact@v5 with: - name: coverage-python${{ matrix.python-version }}-${{matrix.connector}}-exasol${{ matrix.exasol-version }}slow + name: coverage-python${{ matrix.python-version }}-${{ matrix.integration-group }}-exasol${{ matrix.exasol-version }}-${{matrix.connector}}-slow path: .coverage include-hidden-files: true + + - name: Stop test db + run: poetry run -- nox -s db:stop diff --git a/.gitignore b/.gitignore index 2203f7d3..fe93ed04 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ .lint* .test-results.db .html-documentation -odbcconfig/odbcinst.ini _build/ diff --git a/README.rst b/README.rst index 16529385..3f085ee1 100644 --- a/README.rst +++ b/README.rst @@ -42,32 +42,15 @@ SQLAlchemy Dialect for EXASOL DB :target: https://pypi.org/project/sqlalchemy_exasol :alt: PyPI - Downloads - Getting Started with SQLAlchemy-Exasol -------------------------------------- -SQLAlchemy-Exasol supports multiple dialects, primarily differentiated by whether they are ODBC or Websocket-based. - -Choosing a Dialect -++++++++++++++++++ - -We recommend using the Websocket-based dialect due to its simplicity. -ODBC-based dialects demand a thorough understanding of (Unix)ODBC, and the setup is considerably more complex. - -.. warning:: - - The maintenance of Turbodbc & pyodbc support is currently paused, and it is planned to be phased out in future versions. - +SQLAlchemy-Exasol supports a Websocket-based dialect. System Requirements ------------------- -- Exasol >= 7.1 (e.g. `docker-db `_ or a `cloud instance `_) +- Exasol >= 7.1 (e.g. `docker-db `_ or an `Exasol SaaS instance `_) - Python >= 3.10 -.. note:: - - For ODBC-Based Dialects, additional libraries required for ODBC are necessary - (for further details, checkout the `developer guide`_). - Features -------- @@ -80,5 +63,4 @@ Check out sqlalchemy-exasol's [User Guide](https://exasol.github.io/sqlalchemy-e .. _developer guide: https://github.com/exasol/sqlalchemy-exasol/blob/master/doc/developer_guide/developer_guide.rst .. _test_docker_image: https://github.com/exasol/docker-db -..
_odbc_driver: https://docs.exasol.com/db/latest/connect_exasol/drivers/odbc/odbc_linux.htm .. _test_drive: https://cloud.exasol.com/signup diff --git a/doc/changes/unreleased.md b/doc/changes/unreleased.md index 79e701b8..69d612a4 100644 --- a/doc/changes/unreleased.md +++ b/doc/changes/unreleased.md @@ -1 +1,27 @@ # Unreleased + +In this release, the ODBC-based dialects pyodbc and Turbodbc were dropped. Please +switch to the websocket dialect. Connection +strings should be altered to start with `exa+websocket://` (a migration +sketch appears at the end of this changeset). + +## Refactoring + +- #621: Added `future=True` to `create_engine` to use the 2.0 API +- #623: Started switch to `sqlalchemy` 2.x (CI tested with 2.0.43) + - All unit, `exasol`, and `regression` tests are working + - Several tests from `sqlalchemy` are failing, have been marked as skipped, and require investigation + - Reinstated the ArgSignatureTest, which ensures that all visit_XYZ() in `_sql.Compiler` subclasses have `**kw` +- #626: Reinstated `sqlalchemy` tests: + - `TrueDivTest.test_floordiv_integer` and `TrueDivTest.test_floordiv_integer_bound` by providing an override in `EXACompiler.visit_floordiv_binary` + - `TrueDivTest.test_truediv_numeric` by providing `ExaDecimal` to the `EXADialect_pyodbc.colspecs` list + - a few tests from `ComponentReflectionTest`, as `define_reflected_tables` is overridden based on what Exasol supports +- #631: Updated `EXADialect.has_table` to search for both tables and views, fixed passing of schema=None to dialect methods, and reinstated `sqlalchemy` tests: + - `ReturningGuardsTest` is used to indicate that the Exasol dialect, which does not natively support the [RETURNING clause](https://docs.sqlalchemy.org/en/20/glossary.html#term-RETURNING), is set up per the API specifications + - `ComponentReflectionTest.test_not_existing_table` is used to indicate that specific `EXADialect` methods (e.g. `get_columns`) check whether the requested table/view exists and, if not, now raise a `NoSuchTableError` exception +- #403: Dropped support for Turbodbc +- #404: Dropped support for pyodbc +- #654: Reinstated `sqlalchemy` tests after minor modifications to work for Exasol: + - `ComponentReflectionTest.test_get_multi_columns` + - `ComponentReflectionTest.test_get_multi_foreign_keys` + - `ComponentReflectionTest.test_get_multi_pk_constraint` + - `ComponentReflectionTest.test_get_view_definition_does_not_exist` diff --git a/doc/developer_guide/developer_guide.rst b/doc/developer_guide/developer_guide.rst index e2dc2ea9..0144582d 100644 --- a/doc/developer_guide/developer_guide.rst +++ b/doc/developer_guide/developer_guide.rst @@ -1,6 +1,8 @@ Development ============ This chapter contains information helpful when you want to engage in development of this project. +``sqlalchemy-exasol`` provides an Exasol dialect for ``sqlalchemy``. For further information +on the creation of a dialect, see `SQLAlchemy's README.dialects.rst `__. Prerequisites ------------- @@ -16,25 +18,9 @@ Tools * Prerequisites_ -Libraries +++++++++ -* unixodbc -* unixodbc-dev -* libboost-date-time-dev -* libboost-locale-dev -* libboost-system-dev - - -Example: Install of required system libraries on Ubuntu - -.. code-block:: - - sudo apt-get install unixodbc unixodbc-dev libboost-date-time-dev libboost-locale-dev libboost-system-dev - - Locale +++++++ -Make sure the local is setup appropriately. +Make sure the locale is set up appropriately. Example: Setting up an English locale @@ -123,28 +109,10 @@ Tests ..
code-block:: - # make sure you are using the virtual environment poetry has setup for this project + # make sure you are using the virtual environment poetry has set up for this project poetry shell -#. Run all tests with `pyodbc` connector - - .. code-block:: - - nox - - or - - .. code-block:: - - nox -s "verify(connector='pyodbc')" - -#. Run all tests with `turbodbc` connector - - .. code-block:: - - nox -s "verify(connector='turbodbc')" - .. attention:: If something still is not working or unclear, you may want to look into the CI/CD action_ files. diff --git a/doc/index.rst b/doc/index.rst index 681b446a..e76fc250 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -22,14 +22,13 @@ Documentation of SQLAlchemy-Exasol :link: faq :link-type: ref - Frequently asked questsions. + Frequently asked questions. .. toctree:: :maxdepth: 1 :hidden: - user_guide + user_guide/index developer_guide/index faq changes/changelog - diff --git a/doc/user_guide.rst b/doc/user_guide/index.rst similarity index 72% rename from doc/user_guide.rst rename to doc/user_guide/index.rst index d6e1ebf2..d3feafdd 100644 --- a/doc/user_guide.rst +++ b/doc/user_guide/index.rst @@ -46,29 +46,20 @@ Getting Started # sqlalchemy 2.x, be passed through `sqlalchemy.sql.text`. result = con.execute(sql.text("select 42 from dual")).fetchall() - # engine.begin() is for DML & DDL, as we don't want to rely on autocommit + # For easier transaction handling, you can use engine.begin() for DML & DDL + # if you don't want to rely on autocommit. Alternatively, with + # `engine.connect()` you can commit as you go via `con.commit()` or, for + # more complex scenarios, use: + # with engine.connect() as con: + # with con.begin(): + # ... with engine.begin() as con: ... -.. warning:: - - To use an ODBC-based dialect, you must specify it as an extra during installation. - Maintenance for these ODBC-based dialects is paused, and it is planned to remove them - in future versions. - - .. code-block:: shell - - pip install "sqlalchemy-exasol[pydobc]" - pip install "sqlalchemy-exasol[turbodbc]" - - Further Examples ~~~~~~~~~~~~~~~~ -Websocket-based Dialect ------------------------- - .. code-block:: python from sqlalchemy import create_engine, sql @@ -117,50 +108,6 @@ Websocket-based Dialect ... -.. note:: Validation with fingerprint is only supported in the Websocket-based dialect, and not - the ODBC-based dialects (Pyodbc, Turbodbc). - - -Pyodbc-based Dialect --------------------- - -.. code-block:: python - - from sqlalchemy import create_engine, sql - - user = "sys" - password = "exasol" - host = "127.0.0.1" - port = "8563 - schema = "my_schema - - url = f"exa+pyodbc://{user}:{password}@{host}:{port}/{schema}?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC" - engine = create_engine(url) - query = "select 42 from dual" - with engine.connect() as con: - result = con.execute(sql.text(query)).fetchall() - -Turbo-based Dialect --------------------- - ..
code-block:: python - - from sqlalchemy import create_engine, sql - - user = "sys" - password = "exasol" - host = "127.0.0.1" - port = "8563 - schema = "my_schema - - url = f"exa+turbodbc://{user}:{password}@{host}:{port}/{schema}?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC" - engine = create_engine(url) - query = "select 42 from dual" - with engine.connect() as con: - result = con.execute(sql.text(query)).fetchall() - General Notes ~~~~~~~~~~~~~ diff --git a/driver/libexaodbc-uo2214lv2.so b/driver/libexaodbc-uo2214lv2.so deleted file mode 100755 index 4723af9e..00000000 Binary files a/driver/libexaodbc-uo2214lv2.so and /dev/null differ diff --git a/exasol/odbc.py b/exasol/odbc.py deleted file mode 100644 index ace1e625..00000000 --- a/exasol/odbc.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import annotations - -import os -from collections.abc import ( - Iterable, - Iterator, - MutableMapping, -) -from contextlib import contextmanager -from pathlib import Path -from tempfile import TemporaryDirectory -from textwrap import dedent - -from pyodbc import Connection - -PROJECT_ROOT = Path(__file__).parent / ".." - -ODBC_DRIVER = PROJECT_ROOT / "driver" / "libexaodbc-uo2214lv2.so" - -ODBCINST_INI_TEMPLATE = dedent( - """ - [ODBC] - #Trace = yes - #TraceFile =~/odbc.trace - - [EXAODBC] - #Driver location will be appended in build environment: - DRIVER={driver} - - """ -) - - -def transaction(connection: Connection, sql_statements: Iterable[str]) -> None: - cur = connection.cursor() - for statement in sql_statements: - cur.execute(statement) - cur.commit() - cur.close() - - -@contextmanager -def environment(env_vars: dict[str, str]) -> Iterator[MutableMapping[str, str]]: - _env = os.environ.copy() - os.environ.update(env_vars) - yield os.environ - os.environ.clear() - os.environ.update(_env) - - -@contextmanager -def temporary_odbc_config(config: str) -> Iterator[Path]: - with TemporaryDirectory() as tmp_dir: - config_dir = Path(tmp_dir) / "odbcconfig" - config_dir.mkdir(exist_ok=True) - config_file = config_dir / "odbcinst.ini" - with open(config_file, "w") as f: - f.write(config) - yield config_file - - -@contextmanager -def odbcconfig(driver: Path) -> Iterator[tuple[Path, MutableMapping[str, str]]]: - with temporary_odbc_config(ODBCINST_INI_TEMPLATE.format(driver=driver)) as cfg: - env_vars = {"ODBCSYSINI": f"{cfg.parent.resolve()}"} - with environment(env_vars) as env: - yield cfg, env diff --git a/noxconfig.py b/noxconfig.py index 18f876d0..128a3439 100644 --- a/noxconfig.py +++ b/noxconfig.py @@ -51,7 +51,7 @@ class Config(BaseConfig): environment_name: str = "test" db_port: int = 8563 bucketfs_port: int = 2580 - connectors: list[str] = ["pyodbc", "turbodbc", "websocket"] + connectors: list[str] = ["websocket"] plugins: list = [StartDB, StopDB] diff --git a/noxfile.py b/noxfile.py index f37c40e5..541782b4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,16 +18,11 @@ import nox -from nox import Session -from nox.sessions import SessionRunner - -from exasol.odbc import ( - ODBC_DRIVER, - odbcconfig, -) # imports all nox task provided by the toolbox from exasol.toolbox.nox.tasks import * # type: ignore +from nox import Session +from nox.sessions import SessionRunner # default actions to be run if nothing is explicitly specified with the -s option nox.options.sessions = ["project:fix"] @@ -127,19 +122,17 @@ def parser() -> ArgumentParser: ) return p - with odbcconfig(ODBC_DRIVER) as (config, env): - args = parser().parse_args(session.posargs) - connector = args.connector - session.run( - 
*_coverage_command(), - "pytest", - "--dropfirst", - "--db", - f"exasol-{connector}", - f"{PROJECT_ROOT / 'test' / 'integration' / 'sqlalchemy'}", - external=True, - env=env, - ) + args = parser().parse_args(session.posargs) + connector = args.connector + session.run( + *_coverage_command(), + "pytest", + "--dropfirst", + "--db", + f"exasol-{connector}", + f"{PROJECT_ROOT / 'test' / 'integration' / 'sqlalchemy'}", + external=True, + ) @nox.session(name="test:exasol", python=False) @@ -158,19 +151,17 @@ def parser() -> ArgumentParser: ) return p - with odbcconfig(ODBC_DRIVER) as (config, env): - args = parser().parse_args(session.posargs) - connector = args.connector - session.run( - *_coverage_command(), - "pytest", - "--dropfirst", - "--db", - f"exasol-{connector}", - f"{PROJECT_ROOT / 'test' / 'integration' / 'exasol'}", - external=True, - env=env, - ) + args = parser().parse_args(session.posargs) + connector = args.connector + session.run( + *_coverage_command(), + "pytest", + "--dropfirst", + "--db", + f"exasol-{connector}", + f"{PROJECT_ROOT / 'test' / 'integration' / 'exasol'}", + external=True, + ) @nox.session(name="test:regression", python=False) @@ -241,28 +232,26 @@ def report_skipped(session: Session) -> None: with TemporaryDirectory() as tmp_dir: for connector in PROJECT_CONFIG.connectors: report = Path(tmp_dir) / f"test-report{connector}.json" - with odbcconfig(ODBC_DRIVER) as (config, env): - session.run( - "pytest", - "--dropfirst", - "--db", - f"exasol-{connector}", - f"{PROJECT_ROOT / 'test' / 'integration' / 'sqlalchemy'}", - "--json-report", - f"--json-report-file={report}", - external=True, - env=env, - ) - session.run( - "python", - f"{SCRIPTS / 'report.py'}", - "-f", - "csv", - "--output", - f"skipped-tests-{connector}.csv", - f"{connector}", - f"{report}", - ) + session.run( + "pytest", + "--dropfirst", + "--db", + f"exasol-{connector}", + f"{PROJECT_ROOT / 'test' / 'integration' / 'sqlalchemy'}", + "--json-report", + f"--json-report-file={report}", + external=True, + ) + session.run( + "python", + f"{SCRIPTS / 'report.py'}", + "-f", + "csv", + "--output", + f"skipped-tests-{connector}.csv", + f"{connector}", + f"{report}", + ) def _connector_matrix(config: Config): @@ -291,4 +280,5 @@ def full_matrix(session: Session) -> None: matrix = _python_matrix(PROJECT_CONFIG) matrix.update(_exasol_matrix(PROJECT_CONFIG)) matrix.update(_connector_matrix(PROJECT_CONFIG)) + matrix["integration-group"] = ["exasol", "regression", "sqla"] print(json.dumps(matrix)) diff --git a/odbcconfig/odbcinst.ini.template b/odbcconfig/odbcinst.ini.template deleted file mode 100644 index 1e259a6a..00000000 --- a/odbcconfig/odbcinst.ini.template +++ /dev/null @@ -1,7 +0,0 @@ -[ODBC] -#Trace=yes -#TraceFile=~/odbc.trace - -[EXAODBC] -#Driver location needs to be added in build environment: -DRIVER= diff --git a/poetry.lock b/poetry.lock index 0cf5c92f..3cb4474b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3100,80 +3100,6 @@ cffi = [ docs = ["sphinx (<7)", "sphinx_rtd_theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] -[[package]] -name = "pyodbc" -version = "5.3.0" -description = "DB API module for ODBC" -optional = false -python-versions = ">=3.9" -groups = ["main", "dev"] -files = [ - {file = "pyodbc-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6682cdec78f1302d0c559422c8e00991668e039ed63dece8bf99ef62173376a5"}, - {file = "pyodbc-5.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9cd3f0a9796b3e1170a9fa168c7e7ca81879142f30e20f46663b882db139b7d2"}, - {file = "pyodbc-5.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46185a1a7f409761716c71de7b95e7bbb004390c650d00b0b170193e3d6224bb"}, - {file = "pyodbc-5.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:349a9abae62a968b98f6bbd23d2825151f8d9de50b3a8f5f3271b48958fdb672"}, - {file = "pyodbc-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ac23feb7ddaa729f6b840639e92f83ff0ccaa7072801d944f1332cd5f5b05f47"}, - {file = "pyodbc-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8aa396c6d6af52ccd51b8c8a5bffbb46fd44e52ce07ea4272c1d28e5e5b12722"}, - {file = "pyodbc-5.3.0-cp310-cp310-win32.whl", hash = "sha256:46869b9a6555ff003ed1d8ebad6708423adf2a5c88e1a578b9f029fb1435186e"}, - {file = "pyodbc-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:705903acf6f43c44fc64e764578d9a88649eb21bf7418d78677a9d2e337f56f2"}, - {file = "pyodbc-5.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:c68d9c225a97aedafb7fff1c0e1bfe293093f77da19eaf200d0e988fa2718d16"}, - {file = "pyodbc-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ebc3be93f61ea0553db88589e683ace12bf975baa954af4834ab89f5ee7bf8ae"}, - {file = "pyodbc-5.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9b987a25a384f31e373903005554230f5a6d59af78bce62954386736a902a4b3"}, - {file = "pyodbc-5.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676031723aac7dcbbd2813bddda0e8abf171b20ec218ab8dfb21d64a193430ea"}, - {file = "pyodbc-5.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5c30c5cd40b751f77bbc73edd32c4498630939bcd4e72ee7e6c9a4b982cc5ca"}, - {file = "pyodbc-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2035c7dfb71677cd5be64d3a3eb0779560279f0a8dc6e33673499498caa88937"}, - {file = "pyodbc-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5cbe4d753723c8a8f65020b7a259183ef5f14307587165ce37e8c7e251951852"}, - {file = "pyodbc-5.3.0-cp311-cp311-win32.whl", hash = "sha256:d255f6b117d05cfc046a5201fdf39535264045352ea536c35777cf66d321fbb8"}, - {file = "pyodbc-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:f1ad0e93612a6201621853fc661209d82ff2a35892b7d590106fe8f97d9f1f2a"}, - {file = "pyodbc-5.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:0df7ff47fab91ea05548095b00e5eb87ed88ddf4648c58c67b4db95ea4913e23"}, - {file = "pyodbc-5.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5ebf6b5d989395efe722b02b010cb9815698a4d681921bf5db1c0e1195ac1bde"}, - {file = "pyodbc-5.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:197bb6ddafe356a916b8ee1b8752009057fce58e216e887e2174b24c7ab99269"}, - {file = "pyodbc-5.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6ccb5315ec9e081f5cbd66f36acbc820ad172b8fa3736cf7f993cdf69bd8a96"}, - {file = "pyodbc-5.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5dd3d5e469f89a3112cf8b0658c43108a4712fad65e576071e4dd44d2bd763c7"}, - {file = "pyodbc-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b180bc5e49b74fd40a24ef5b0fe143d0c234ac1506febe810d7434bf47cb925b"}, - {file = "pyodbc-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e3c39de3005fff3ae79246f952720d44affc6756b4b85398da4c5ea76bf8f506"}, - {file = "pyodbc-5.3.0-cp312-cp312-win32.whl", hash = 
"sha256:d32c3259762bef440707098010035bbc83d1c73d81a434018ab8c688158bd3bb"}, - {file = "pyodbc-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe77eb9dcca5fc1300c9121f81040cc9011d28cff383e2c35416e9ec06d4bc95"}, - {file = "pyodbc-5.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:afe7c4ac555a8d10a36234788fc6cfc22a86ce37fc5ba88a1f75b3e6696665dc"}, - {file = "pyodbc-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7e9ab0b91de28a5ab838ac4db0253d7cc8ce2452efe4ad92ee6a57b922bf0c24"}, - {file = "pyodbc-5.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6132554ffbd7910524d643f13ce17f4a72f3a6824b0adef4e9a7f66efac96350"}, - {file = "pyodbc-5.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1629af4706e9228d79dabb4863c11cceb22a6dab90700db0ef449074f0150c0d"}, - {file = "pyodbc-5.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ceaed87ba2ea848c11223f66f629ef121f6ebe621f605cde9cfdee4fd9f4b68"}, - {file = "pyodbc-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3cc472c8ae2feea5b4512e23b56e2b093d64f7cbc4b970af51da488429ff7818"}, - {file = "pyodbc-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c79df54bbc25bce9f2d87094e7b39089c28428df5443d1902b0cc5f43fd2da6f"}, - {file = "pyodbc-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c2eb0b08e24fe5c40c7ebe9240c5d3bd2f18cd5617229acee4b0a0484dc226f2"}, - {file = "pyodbc-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:01166162149adf2b8a6dc21a212718f205cabbbdff4047dc0c415af3fd85867e"}, - {file = "pyodbc-5.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:363311bd40320b4a61454bebf7c38b243cd67c762ed0f8a5219de3ec90c96353"}, - {file = "pyodbc-5.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3f1bdb3ce6480a17afaaef4b5242b356d4997a872f39e96f015cabef00613797"}, - {file = "pyodbc-5.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7713c740a10f33df3cb08f49a023b7e1e25de0c7c99650876bbe717bc95ee780"}, - {file = "pyodbc-5.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf18797a12e70474e1b7f5027deeeccea816372497e3ff2d46b15bec2d18a0cc"}, - {file = "pyodbc-5.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:08b2439500e212625471d32f8fde418075a5ddec556e095e5a4ba56d61df2dc6"}, - {file = "pyodbc-5.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:729c535341bb09c476f219d6f7ab194bcb683c4a0a368010f1cb821a35136f05"}, - {file = "pyodbc-5.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c67e7f2ce649155ea89beb54d3b42d83770488f025cf3b6f39ca82e9c598a02e"}, - {file = "pyodbc-5.3.0-cp314-cp314-win32.whl", hash = "sha256:a48d731432abaee5256ed6a19a3e1528b8881f9cb25cb9cf72d8318146ea991b"}, - {file = "pyodbc-5.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:58635a1cc859d5af3f878c85910e5d7228fe5c406d4571bffcdd281375a54b39"}, - {file = "pyodbc-5.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:754d052030d00c3ac38da09ceb9f3e240e8dd1c11da8906f482d5419c65b9ef5"}, - {file = "pyodbc-5.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f927b440c38ade1668f0da64047ffd20ec34e32d817f9a60d07553301324b364"}, - {file = "pyodbc-5.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:25c4cfb2c08e77bc6e82f666d7acd52f0e52a0401b1876e60f03c73c3b8aedc0"}, - {file = "pyodbc-5.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:bc834567c2990584b9726cba365834d039380c9dbbcef3030ddeb00c6541b943"}, - {file = "pyodbc-5.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8339d3094858893c1a68ee1af93efc4dff18b8b65de54d99104b99af6306320d"}, - {file = "pyodbc-5.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74528fe148980d0c735c0ebb4a4dc74643ac4574337c43c1006ac4d09593f92d"}, - {file = "pyodbc-5.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d89a7f2e24227150c13be8164774b7e1f9678321a4248f1356a465b9cc17d31e"}, - {file = "pyodbc-5.3.0-cp314-cp314t-win32.whl", hash = "sha256:af4d8c9842fc4a6360c31c35508d6594d5a3b39922f61b282c2b4c9d9da99514"}, - {file = "pyodbc-5.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bfeb3e34795d53b7d37e66dd54891d4f9c13a3889a8f5fe9640e56a82d770955"}, - {file = "pyodbc-5.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:13656184faa3f2d5c6f19b701b8f247342ed581484f58bf39af7315c054e69db"}, - {file = "pyodbc-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0263323fc47082c2bf02562f44149446bbbfe91450d271e44bffec0c3143bfb1"}, - {file = "pyodbc-5.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:452e7911a35ee12a56b111ac5b596d6ed865b83fcde8427127913df53132759e"}, - {file = "pyodbc-5.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b35b9983ad300e5aea82b8d1661fc9d3afe5868de527ee6bd252dd550e61ecd6"}, - {file = "pyodbc-5.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e981db84fee4cebec67f41bd266e1e7926665f1b99c3f8f4ea73cd7f7666e381"}, - {file = "pyodbc-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:25b6766e56748eb1fc1d567d863e06cbb7b7c749a41dfed85db0031e696fa39a"}, - {file = "pyodbc-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2eb7151ed0a1959cae65b6ac0454f5c8bbcd2d8bafeae66483c09d58b0c7a7fc"}, - {file = "pyodbc-5.3.0-cp39-cp39-win32.whl", hash = "sha256:fc5ac4f2165f7088e74ecec5413b5c304247949f9702c8853b0e43023b4187e8"}, - {file = "pyodbc-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:c25dc9c41f61573bdcf61a3408c34b65e4c0f821b8f861ca7531b1353b389804"}, - {file = "pyodbc-5.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:101313a21d2654df856a60e4a13763e4d9f6c5d3fd974bcf3fc6b4e86d1bbe8e"}, - {file = "pyodbc-5.3.0.tar.gz", hash = "sha256:2fe0e063d8fb66efd0ac6dc39236c4de1a45f17c33eaded0d553d21c199f4d05"}, -] - [[package]] name = "pyparsing" version = "3.2.5" @@ -4264,81 +4190,99 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.4.54" +version = "2.0.44" description = "Database Abstraction Library" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"}, - {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"}, - {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"}, - {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"}, - {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"}, - {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"}, - {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"}, - {file = "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"}, - {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"}, - {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"}, - {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"}, - {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"}, - {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"}, - {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"}, - {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"}, - {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"}, - {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"}, - {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"}, - {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"}, - {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"}, - {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"}, - {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = "sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"}, - {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"}, - {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"}, - {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"}, - {file = "sqlalchemy-1.4.54.tar.gz", hash = 
"sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:471733aabb2e4848d609141a9e9d56a427c0a038f4abf65dd19d7a21fd563632"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48bf7d383a35e668b984c805470518b635d48b95a3c57cb03f37eaa3551b5f9f"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf4bb6b3d6228fcf3a71b50231199fb94d2dd2611b66d33be0578ea3e6c2726"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:e998cf7c29473bd077704cea3577d23123094311f59bdc4af551923b168332b1"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ebac3f0b5732014a126b43c2b7567f2f0e0afea7d9119a3378bde46d3dcad88e"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win32.whl", hash = "sha256:3255d821ee91bdf824795e936642bbf43a4c7cedf5d1aed8d24524e66843aa74"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win_amd64.whl", hash = "sha256:78e6c137ba35476adb5432103ae1534f2f5295605201d946a4198a0dea4b38e7"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = 
"sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fc44e5965ea46909a416fff0af48a219faefd5773ab79e5f8a5fcd5d62b2667"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dc8b3850d2a601ca2320d081874033684e246d28e1c5e89db0864077cfc8f5a9"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d733dec0614bb8f4bcb7c8af88172b974f685a31dc3a65cca0527e3120de5606"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22be14009339b8bc16d6b9dc8780bacaba3402aa7581658e246114abbd2236e3"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:357bade0e46064f88f2c3a99808233e67b0051cdddf82992379559322dfeb183"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4848395d932e93c1595e59a8672aa7400e8922c39bb9b0668ed99ac6fa867822"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win32.whl", hash = 
"sha256:2f19644f27c76f07e10603580a47278abb2a70311136a7f8fd27dc2e096b9013"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win_amd64.whl", hash = "sha256:1df4763760d1de0dfc8192cc96d8aa293eb1a44f8f7a5fbe74caf1b551905c5e"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7027414f2b88992877573ab780c19ecb54d3a536bef3397933573d6b5068be4"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fe166c7d00912e8c10d3a9a0ce105569a31a3d0db1a6e82c4e0f4bf16d5eca9"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3caef1ff89b1caefc28f0368b3bde21a7e3e630c2eddac16abd9e47bd27cc36a"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc2856d24afa44295735e72f3c75d6ee7fdd4336d8d3a8f3d44de7aa6b766df2"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11bac86b0deada30b6b5f93382712ff0e911fe8d31cb9bf46e6b149ae175eff0"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d18cd0e9a0f37c9f4088e50e3839fcb69a380a0ec957408e0b57cff08ee0a26"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win32.whl", hash = "sha256:9e9018544ab07614d591a26c1bd4293ddf40752cc435caf69196740516af7100"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win_amd64.whl", hash = "sha256:8e0e4e66fd80f277a8c3de016a81a554e76ccf6b8d881ee0b53200305a8433f6"}, + {file = "sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05"}, + {file = "sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = ">=1", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0) ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\""] -aiosqlite = ["aiosqlite ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\"", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17) ; python_version >= \"3\""] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4) ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\""] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2) ; python_version >= \"3\"", "mariadb (>=1.0.1,!=1.1.2) ; python_version >= \"3\""] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql", "pymssql"] -mssql-pyodbc = ["pyodbc", "pyodbc"] -mypy = ["mypy (>=0.910) ; python_version >= \"3\"", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0) ; python_version >= \"3\"", "mysqlclient (>=1.4.0,<2) ; python_version < \"3\""] -mysql-connector = 
["mysql-connector-python", "mysql-connector-python"] -oracle = ["cx_oracle (>=7) ; python_version >= \"3\"", "cx_oracle (>=7,<8) ; python_version < \"3\""] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg ; python_version >= \"3\"", "asyncpg ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\"", "greenlet (!=0.4.17) ; python_version >= \"3\""] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0) ; python_version >= \"3\"", "pg8000 (>=1.16.6,!=1.29.0) ; python_version >= \"3\""] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql (<1) ; python_version < \"3\"", "pymysql ; python_version >= \"3\""] -sqlcipher = ["sqlcipher3_binary ; python_version >= \"3\""] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "standard-imghdr" @@ -4508,22 +4452,6 @@ files = [ {file = "tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0"}, ] -[[package]] -name = "turbodbc" -version = "4.5.4" -description = "turbodbc is a Python DB API 2.0 compatible ODBC driver" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"turbodbc\"" -files = [ - {file = "turbodbc-4.5.4.tar.gz", hash = "sha256:ceb717749cc85f845d2564bfdb993480428bbc32bf80464c1ddb34a1b419ae7f"}, -] - -[package.extras] -arrow = ["pyarrow (>=1.0,<8.1.0)"] -numpy = ["numpy (>=1.19.0)"] - [[package]] name = "twine" version = "6.2.0" @@ -4594,7 +4522,6 @@ files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] -markers = {main = "python_version == \"3.10\""} [[package]] name = "typing-inspection" @@ -4833,11 +4760,7 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] -[extras] -pyodbc = ["pyodbc"] -turbodbc = ["turbodbc"] - [metadata] lock-version = "2.1" python-versions = ">=3.10,<4" -content-hash = "f98f676c0803836de45f4ec55f7938b1b0de2fdcec788bf68bde0dbc6e03894c" +content-hash = "95a92f66ec9ac4c9ac92bb267ee28818477e43ad23b29ef43fb51bcf0446e6e1" diff --git a/pyproject.toml b/pyproject.toml index 6e6e2d75..80659a52 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,8 +4,8 @@ version = "5.2.0" requires-python = ">=3.10,<4" description = "EXASOL dialect for SQLAlchemy" authors = [ - {name="Exasol AG", email="opensource@exasol.com"}, - {name="Blue Yonder GmbH"}, + { name = "Exasol AG", email = "opensource@exasol.com" }, + { name = "Blue Yonder GmbH" }, ] readme = "README.rst" license = "BSD" @@ -33,7 +33,6 @@ build-backend = "poetry.core.masonry.api" requires-poetry = ">=2.1.0" packages = [ { include = "sqlalchemy_exasol" }, - { include = "exasol" } ] classifiers = [ "Development Status :: 5 - Production/Stable", @@ -58,46 +57,32 @@ include = [ ] exclude = [] - 
[tool.poetry.dependencies] packaging = ">=21.3" pyexasol = ">=0.26.0,<2" -sqlalchemy = ">=1.4,<2" - -turbodbc = { version = "==4.5.4", optional = true } -pyodbc = { version = ">=4.0.34,<6", optional = true } - -[tool.poetry.extras] -pyodbc = ["pyodbc"] -turbodbc = ["turbodbc"] +sqlalchemy = "^2.0.0" [tool.poetry.group.dev.dependencies] nox = ">=2022.1.7" pytest-json-report = ">=1.5.0" -# The excluded versions mirror the excluded versions of sqla 1.3.X. -# The limitation/issue pytest <6 is tracked in https://github.com/exasol/sqlalchemy-exasol/issues/144 pytest = ">=6,<9" rich = "^13.3.1" exasol-integration-test-docker-environment = "^4.0.0" -pyodbc = ">=4.0.34,<6" pytest-exasol-backend = "^1.0.0" exasol-toolbox = "^1.12.0" [tool.poetry.plugins."sqlalchemy.dialects"] -"exa.pyodbc" = "sqlalchemy_exasol.pyodbc:EXADialect_pyodbc" -"exa.turbodbc" = "sqlalchemy_exasol.turbodbc:EXADialect_turbodbc" "exa.websocket" = "sqlalchemy_exasol.websocket:EXADialect_websocket" [tool.pytest.ini_options] addopts = "--tb native -v -r fxX" filterwarnings = [ -"error", -"ignore::pytest.PytestUnraisableExceptionWarning", -# this is used for turbodbc and pyodbc as historically we'd like to remove them. -# thus far, it seems like this is not a strict requirement for the migration, -# so we will ignore them. -"ignore::sqlalchemy_exasol.warnings.SqlaExasolDeprecationWarning" + "error", + "ignore::pytest.PytestUnraisableExceptionWarning", +] +markers = [ + "backend: backend tests.", ] [tool.black] @@ -110,7 +95,7 @@ profile = "black" force_grid_wrap = 2 [tool.pylint.master] -fail-under = 5.4 +fail-under = 4.9 [tool.pylint.format] max-line-length = 88 @@ -118,11 +103,6 @@ max-module-lines = 800 [[tool.mypy.overrides]] module = [ - "exasol.odbc", - "sqlalchemy_exasol.base", - "sqlalchemy_exasol.pyodbc", - # not supporting mypy > 1.10.0; https://docs.sqlalchemy.org/en/20/orm/extensions/mypy.html - "sqlalchemy.*", "test.*", ] ignore_errors = true diff --git a/scripts/report.py b/scripts/report.py index d5c049a0..f069425f 100644 --- a/scripts/report.py +++ b/scripts/report.py @@ -13,7 +13,6 @@ ) from typing import ( Any, - Dict, TextIO, ) @@ -41,7 +40,7 @@ def skipped_test_from(obj: dict[str, Any]) -> Test: filename=filename, lineno=obj["lineno"], ) - except KeyError as ex: + except KeyError: pass # Assumption: Every skipped test should have at least one 'longrepr' in a stage assert False @@ -49,7 +48,7 @@ def skipped_test_from(obj: dict[str, Any]) -> Test: def _create_parser() -> ArgumentParser: parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter) - parser.add_argument("connector", choices=["pyodbc", "turbodbc"]) + parser.add_argument("connector", choices=["websocket"]) parser.add_argument("test_results") parser.add_argument("-o", "--output", type=FileType("w"), default="-") parser.add_argument("-f", "--format", choices=["human", "csv"], default="csv") diff --git a/setup.cfg b/setup.cfg index 2495b781..6ea5c9f4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,9 +1,6 @@ [db] -exasol-pyodbc=exa+pyodbc://sys:exasol@localhost:8563/TEST?CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE -exasol-turbodbc=exa+turbodbc://sys:exasol@localhost:8563/TEST?CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE -exasol-websocket=exa+websocket://sys:exasol@localhost:8563/TEST?CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC&SSLCertificate=SSL_VERIFY_NONE +exasol-websocket=exa+websocket://sys:exasol@localhost:8563/TEST?CONNECTIONCALL=en_US.UTF-8&SSLCertificate=SSL_VERIFY_NONE [sqla_testing] 
requirement_cls=sqlalchemy_exasol.requirements:Requirements profile_file=.profiles.txt - diff --git a/sqlalchemy_exasol/__init__.py b/sqlalchemy_exasol/__init__.py index a56a430c..630a1c21 100644 --- a/sqlalchemy_exasol/__init__.py +++ b/sqlalchemy_exasol/__init__.py @@ -1,13 +1,13 @@ from sqlalchemy_exasol import ( base, - pyodbc, + websocket, ) from sqlalchemy_exasol.version import VERSION __version__ = VERSION # default dialect -base.dialect = pyodbc.dialect # type: ignore +base.dialect = websocket.dialect # type: ignore __all__ = ( "BLOB", diff --git a/sqlalchemy_exasol/base.py b/sqlalchemy_exasol/base.py index 1c32507b..4a238162 100644 --- a/sqlalchemy_exasol/base.py +++ b/sqlalchemy_exasol/base.py @@ -1,4 +1,4 @@ -"""Support for the EXASOL database. +"""Support for the Exasol database. Auto Increment Behavior ----------------------- @@ -30,24 +30,29 @@ Identifier Casing ----------------- -EXASol mimics the behavior of Oracle. Thus, for this dialect implementation +Exasol mimics the behavior of Oracle. Thus, for this dialect implementation the Oracle dialect was taken as a reference. -In EXASol, the data dictionary represents all case insensitive identifier names +In Exasol, the data dictionary represents all case-insensitive identifier names using UPPERCASE text. SQLAlchemy, on the other hand, considers all-lowercase -identifiers to be case insensitive. The Oracle dialect converts identifier to +identifiers to be case-insensitive. The Oracle dialect converts identifiers to and from those two formats during schema level communication, such as reflection of tables and indexes. It is recommended to work with all lowercase identifiers on the SQLAlchemy side. -These are treated as case insensitve identifiers by SQLAlchemy. The EXASol -dialect takes care of converting them to the internal case insensitive +These are treated as case-insensitive identifiers by SQLAlchemy. The Exasol +dialect takes care of converting them to the internal case-insensitive representation (all uppercase). """ import logging import re +from collections.abc import MutableMapping from contextlib import closing +from typing import ( + Any, + Type, +) import sqlalchemy.exc from sqlalchemy import ( @@ -69,6 +74,7 @@ ) from sqlalchemy.sql import compiler from sqlalchemy.sql.elements import quoted_name +from sqlalchemy.sql.type_api import TypeEngine from .constraints import DistributeByConstraint @@ -540,7 +546,7 @@ "zone", } -colspecs = {} +colspecs: MutableMapping[type[TypeEngine[Any]], type[TypeEngine[Any]]] = {} ischema_names = { "BOOLEAN": sqltypes.BOOLEAN, @@ -549,7 +555,7 @@ "DATE": sqltypes.DATE, "DECIMAL": sqltypes.DECIMAL, "DOUBLE": sqltypes.FLOAT, - # EXASOL mapps DOUBLE, DOUBLE PRECISION, FLOAT to DOUBLE PRECISION + # Exasol maps DOUBLE, DOUBLE PRECISION, FLOAT to DOUBLE PRECISION # internally but returns 'DOUBLE' as type when asking the DB catalog # INTERVAL DAY [(p)] TO SECOND [(fp)] TODO: missing support for EXA Datatype, check Oracle Engine # INTERVAL YEAR[(p)] TO MONTH TODO: missing support for EXA Datatype, check Oracle Engine @@ -590,7 +596,7 @@ def limit_clause(self, select, **kw): return text - def for_update_clause(self, select): + def for_update_clause(self, select, **kw): # Exasol has no "FOR UPDATE" util.warn("EXASolution does not support SELECT ...
FOR UPDATE") return "" @@ -601,9 +607,14 @@ def default_from(self): """ return " FROM DUAL" - def visit_empty_set_expr(self, type_): + def visit_empty_set_expr(self, type_, **kw): return "SELECT 1 FROM DUAL WHERE 1!=1" + def visit_floordiv_binary(self, binary, operator, **kw): + return "FLOOR(%s)" % ( + self.process(binary.left, **kw) + " / " + self.process(binary.right, **kw) + ) + class EXADDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): @@ -627,7 +638,7 @@ def get_column_specification(self, column, **kwargs): return colspec def create_table_constraints(self, table, _include_foreign_key_constraints=None): - # EXASOL does not support FK constraints that reference + # Exasol does not support FK constraints that reference # the table being created. Thus, these need to be created # via ALTER TABLE after table creation # TODO: FKs that reference other tables could be inlined @@ -640,37 +651,35 @@ def create_table_constraints(self, table, _include_foreign_key_constraints=None) event.listen(table, "after_create", AddConstraint(c)) return super().create_table_constraints(table) - def visit_add_constraint(self, create): + def visit_add_constraint(self, create, **kw): if isinstance(create.element, DistributeByConstraint): return "ALTER TABLE {} {}".format( self.preparer.format_table(create.element.table), self.process(create.element), ) - else: - return super().visit_add_constraint(create) + return super().visit_add_constraint(create) - def visit_drop_constraint(self, drop): + def visit_drop_constraint(self, drop, **kw): if isinstance(drop.element, DistributeByConstraint): return "ALTER TABLE %s DROP DISTRIBUTION KEYS" % ( self.preparer.format_table(drop.element.table) ) - else: - return super().visit_drop_constraint(drop) + return super().visit_drop_constraint(drop) - def visit_distribute_by_constraint(self, constraint): + def visit_distribute_by_constraint(self, constraint, **kw): return "DISTRIBUTE BY " + ",".join(c.name for c in constraint.columns) def define_constraint_remote_table(self, constraint, table, preparer): """Format the remote table clause of a CREATE CONSTRAINT clause.""" return preparer.format_table(table, use_schema=True) - def visit_create_index(self, create): + def visit_create_index(self, create, **kw): """Exasol manages indexes internally""" raise sqlalchemy.exc.CompileError( "Not Supported: " + self.visit_create_index.__doc__ ) - def visit_drop_index(self, drop): + def visit_drop_index(self, drop, **kw): """Exasol manages indexes internally""" raise sqlalchemy.exc.CompileError( "Not Supported: " + self.visit_drop_index.__doc__ @@ -836,8 +845,8 @@ def _get_schema_from_url(self, connection, schema): def normalize_name(self, name): """ - Converting EXASol case insensitive identifiers (upper case) - to SQLAlchemy case insensitive identifiers (lower case) + Converting Exasol case-insensitive identifiers (upper case) + to SQLAlchemy case-insensitive identifiers (lower case) """ if name is None: return None @@ -852,8 +861,8 @@ def normalize_name(self, name): def denormalize_name(self, name): """ - Converting SQLAlchemy case insensitive identifiers (lower case) - to EXASol case insensitive identifiers (upper case) + Converting SQLAlchemy case-insensitive identifiers (lower case) + to Exasol case-insensitive identifiers (upper case) """ if name is None or len(name) == 0: return None @@ -880,6 +889,12 @@ def get_schema_names(self, connection, **kw): result = connection.execute(sql.text(sql_statement)) return [self.normalize_name(row[0]) for row 
in result] + @staticmethod + def _get_schema_replacement_string(schema_name) -> str: + if schema_name is None: + return "CURRENT_SCHEMA" + return ":schema" + def _get_schema_for_input_or_current(self, connection, schema): schema = self._get_schema_for_input(connection, schema) if schema is None: @@ -887,9 +902,14 @@ def _get_schema_for_input_or_current(self, connection, schema): return self.denormalize_name(schema) def _get_schema_for_input(self, connection, schema): - return self.denormalize_name( - schema or self._get_schema_from_url(connection, schema) - ) + if not schema: + backup_schema = self._get_schema_from_url(connection, schema) + if backup_schema: + # need to convert to str as quoted text cannot be modified with + # the denormalize_name operations + schema = str(backup_schema) + + return self.denormalize_name(schema) @staticmethod def _get_current_schema(connection): @@ -914,14 +934,18 @@ def get_table_names(self, connection, schema, **kw): tables = [self.normalize_name(row[0]) for row in result] return tables - def has_table(self, connection, table_name, schema=None, **kw): + @reflection.cache + def has_table(self, connection, table_name, schema=None, **kw) -> bool: + self._ensure_has_table_connection(connection) + schema = self._get_schema_for_input(connection, schema) sql_statement = ( - "SELECT table_name from SYS.EXA_ALL_TABLES " - "WHERE table_name = :table_name " + "SELECT OBJECT_NAME FROM SYS.EXA_ALL_OBJECTS " + "WHERE OBJECT_TYPE IN ('TABLE', 'VIEW') " + "AND OBJECT_NAME = :table_name " ) if schema is not None: - sql_statement += "AND table_schema = :schema" + sql_statement += "AND ROOT_NAME = :schema" result = connection.execute( sql.text(sql_statement), @@ -935,34 +959,38 @@ def has_table(self, connection, table_name, schema=None, **kw): @reflection.cache def get_view_names(self, connection, schema=None, **kw): - schema = self._get_schema_for_input(connection, schema) + schema_name = self._get_schema_for_input(connection, schema) sql_statement = "SELECT view_name FROM SYS.EXA_ALL_VIEWS WHERE view_schema = " - if schema is None: + if schema_name is None: sql_statement += "CURRENT_SCHEMA ORDER BY view_name" result = connection.execute(sql.text(sql_statement)) else: sql_statement += ":schema ORDER BY view_name" result = connection.execute( - sql.text(sql_statement), {"schema": self.denormalize_name(schema)} + sql.text(sql_statement), {"schema": self.denormalize_name(schema_name)} ) return [self.normalize_name(row[0]) for row in result] @reflection.cache def get_view_definition(self, connection, view_name, schema=None, **kw): - schema = self._get_schema_for_input(connection, schema) - sql_stmnt = "SELECT view_text FROM sys.exa_all_views WHERE view_name = :view_name AND view_schema = " - if schema is None: - sql_stmnt += "CURRENT_SCHEMA" + schema_name = self._get_schema_for_input(connection, schema) + sql_statement = "SELECT view_text FROM sys.exa_all_views WHERE view_name = :view_name AND view_schema = " + if schema_name is None: + sql_statement += "CURRENT_SCHEMA" else: - sql_stmnt += ":schema" + sql_statement += ":schema" result = connection.execute( - sql.text(sql_stmnt), + sql.text(sql_statement), { "view_name": self.denormalize_name(view_name), - "schema": self.denormalize_name(schema), + "schema": self.denormalize_name(schema_name), }, ).scalar() - return result if result else None + if result: + return result + raise sqlalchemy.exc.NoSuchTableError( + f"{schema_name}.{view_name}" if schema_name else view_name + ) @staticmethod def quote_string_value(string_value): 
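Taken together, the compiler and reflection changes above are observable through SQLAlchemy's public API. A minimal sketch, assuming the websocket dialect and a reachable test instance (host, credentials, and object names below are hypothetical):

    from sqlalchemy import column, create_engine, inspect, select
    from sqlalchemy.exc import NoSuchTableError
    from sqlalchemy_exasol.base import EXADialect

    # Floor division now compiles through visit_floordiv_binary to FLOOR(a / b).
    print(select(column("a") // column("b")).compile(dialect=EXADialect()))
    # roughly: SELECT FLOOR(a / b) AS anon_1 FROM DUAL

    # has_table() now matches tables *and* views via SYS.EXA_ALL_OBJECTS, and
    # get_view_definition() raises instead of returning None for unknown views.
    engine = create_engine("exa+websocket://sys:exasol@localhost:8563/TEST")
    inspector = inspect(engine)
    inspector.has_table("my_view")  # True for views as well as tables
    try:
        inspector.get_view_definition("no_such_view")
    except NoSuchTableError:
        pass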
@@ -990,19 +1018,31 @@ def get_column_sql_query_str():
             "ORDER BY column_ordinal_position"
         )
 
+    def _verify_table_exists(self, connection, table_name, schema_name):
+        if not self.has_table(
+            connection=connection, table_name=table_name, schema=schema_name
+        ):
+            raise sqlalchemy.exc.NoSuchTableError(
+                f"{schema_name}.{table_name}" if schema_name else table_name
+            )
+
     @reflection.cache
     def _get_columns(self, connection, table_name, schema=None, **kw):
-        schema = self._get_schema_for_input(connection, schema)
-        schema_str = "CURRENT_SCHEMA" if schema is None else ":schema"
-        table_name_str = ":table"
+        schema_name = self._get_schema_for_input(connection, schema)
+        table_name = self.denormalize_name(table_name)
+        self._verify_table_exists(
+            connection=connection, table_name=table_name, schema_name=schema_name
+        )
+
         sql_statement = self.get_column_sql_query_str().format(
-            schema=schema_str, table=table_name_str
+            schema=self._get_schema_replacement_string(schema_name=schema_name),
+            table=":table",
         )
         result = connection.execute(
             sql.text(sql_statement),
             {
-                "schema": self.denormalize_name(schema),
-                "table": self.denormalize_name(table_name),
+                "schema": self.denormalize_name(schema_name),
+                "table": table_name,
             },
         )
         return list(result)
@@ -1094,21 +1134,22 @@ def _get_constraint_sql_str(schema, table_name, contraint_type):
 
     @reflection.cache
     def _get_pk_constraint(self, connection, table_name, schema, **kw):
-        schema = self._get_schema_for_input(connection, schema)
+        schema_name = self._get_schema_for_input(connection, schema)
         table_name = self.denormalize_name(table_name)
-        table_name_string = ":table"
-        if schema is None:
-            schema_string = "CURRENT_SCHEMA "
-        else:
-            schema_string = ":schema "
+        self._verify_table_exists(
+            connection=connection, table_name=table_name, schema_name=schema_name
+        )
+
         sql_statement = self._get_constraint_sql_str(
-            schema_string, table_name_string, "PRIMARY KEY"
+            schema=self._get_schema_replacement_string(schema_name=schema_name),
+            table_name=":table",
+            contraint_type="PRIMARY KEY",
         )
         result = connection.execute(
             sql.text(sql_statement),
             {
-                "schema": self.denormalize_name(schema),
-                "table": self.denormalize_name(table_name),
+                "schema": schema_name,
+                "table": table_name,
             },
         )
         pkeys = []
@@ -1130,16 +1171,22 @@ def get_pk_constraint(self, connection, table_name, schema=None, **kw):
 
     @reflection.cache
     def _get_foreign_keys(self, connection, table_name, schema=None, **kw):
-        table_name_string = ":table"
-        schema_string = "CURRENT_SCHEMA " if schema is None else ":schema "
+        schema_name = self._get_schema_for_input(connection, schema)
+        table_name = self.denormalize_name(table_name)
+        self._verify_table_exists(
+            connection=connection, table_name=table_name, schema_name=schema_name
+        )
+
         sql_statement = self._get_constraint_sql_str(
-            schema_string, table_name_string, "FOREIGN KEY"
+            schema=self._get_schema_replacement_string(schema_name=schema_name),
+            table_name=":table",
+            contraint_type="FOREIGN KEY",
         )
         result = connection.execute(
             sql.text(sql_statement),
             {
-                "schema": self.denormalize_name(schema),
-                "table": self.denormalize_name(table_name),
+                "schema": schema_name,
+                "table": table_name,
             },
         )
         return list(result)
@@ -1197,5 +1244,5 @@ def fkey_rec():
 
     @reflection.cache
     def get_indexes(self, connection, table_name, schema=None, **kw):
-        """EXASolution has no explicit indexes"""
+        """Exasol has no explicit indexes"""
         return []
diff --git a/sqlalchemy_exasol/pyodbc.py b/sqlalchemy_exasol/pyodbc.py
deleted file mode 100644
index 5af85758..00000000
--- 
a/sqlalchemy_exasol/pyodbc.py +++ /dev/null @@ -1,303 +0,0 @@ -""" -Connect string:: - - exa+pyodbc://:@ - exa+pyodbc://:@:/? - -""" - -import logging -import re -import sys -from warnings import warn - -from packaging import version -from sqlalchemy import sql -from sqlalchemy.connectors.pyodbc import PyODBCConnector -from sqlalchemy.engine import reflection -from sqlalchemy.util.langhelpers import asbool - -from sqlalchemy_exasol.base import ( - EXADialect, - EXAExecutionContext, -) -from sqlalchemy_exasol.warnings import SqlaExasolDeprecationWarning - -logger = logging.getLogger("sqlalchemy_exasol") - - -class EXADialect_pyodbc(EXADialect, PyODBCConnector): - supports_statement_cache = False - execution_ctx_cls = EXAExecutionContext - driver_version = None - - def __init__(self, **kw): - message = ( - "'pyodbc' support in 'sqlalchemy_exasol' is deprecated and will be removed. " - "Please switch to the websocket driver. See documentation for details." - ) - warn(message, SqlaExasolDeprecationWarning) - super().__init__(**kw) - - def get_driver_version(self, connection): - # LooseVersion will also work with interim versions like '4.2.7dev1' or '5.0.rc4' - if self.driver_version is None: - self.driver_version = version.parse( - connection.connection.getinfo(self.dbapi.SQL_DRIVER_VER) or "2.0.0" - ) - return self.driver_version - - if sys.platform == "darwin": - - def connect(self, *cargs, **cparams): - # Get connection - conn = super().connect(*cargs, **cparams) - - # Set up encodings - conn.setdecoding(self.dbapi.SQL_CHAR, encoding="utf-8") - conn.setdecoding(self.dbapi.SQL_WCHAR, encoding="utf-8") - conn.setdecoding(self.dbapi.SQL_WMETADATA, encoding="utf-8") - - conn.setencoding(encoding="utf-8") - - # Return connection - return conn - - def create_connect_args(self, url): - """ - Connection strings are EXASolution specific. See EXASolution manual on Connection-String-Parameters. - - `ODBC Driver Settings `_ - """ - opts = url.translate_connect_args(username="user") - opts.update(url.query) - # always enable efficient conversion to Python types: see https://www.exasol.com/support/browse/EXASOL-898 - opts["INTTYPESINRESULTSIFPOSSIBLE"] = "y" - # Make sure the exasol odbc driver reports the expected error codes. - # see also: - # * https://docs.exasol.com/db/latest/connect_exasol/drivers/odbc/using_odbc.htm - # * https://github.com/exasol/sqlalchemy-exasol/issues/118 - opts["SQLSTATEMAPPINGACTIVE"] = "y" - opts["SQLSTATEMAPPINGS"] = "42X91:23000,27002:23000" - - keys = opts - query = url.query - - connect_args = {} - for param in ("ansi", "unicode_results", "autocommit"): - if param in keys: - connect_args[param.upper()] = asbool(keys.pop(param)) - - dsn_connection = "dsn" in keys or ("host" in keys and "port" not in keys) - if dsn_connection: - connectors = ["DSN=%s" % (keys.pop("dsn", "") or keys.pop("host", ""))] - else: - connectors = ["DRIVER={%s}" % keys.pop("driver", None)] - - port = "" - if "port" in keys and not "port" in query: - port = ":%d" % int(keys.pop("port")) - - connectors.extend( - [ - "EXAHOST={}{}".format(keys.pop("host", ""), port), - "EXASCHEMA=%s" % keys.pop("database", ""), - ] - ) - - user = keys.pop("user", None) - if user: - connectors.append("UID=%s" % user) - connectors.append("PWD=%s" % keys.pop("password", "")) - else: - connectors.append("Trusted_Connection=Yes") - - # if set to 'Yes', the ODBC layer will try to automagically - # convert textual data from your database encoding to your - # client encoding. 
This should obviously be set to 'No' if - # you query a cp1253 encoded database from a latin1 client... - if "odbc_autotranslate" in keys: - connectors.append("AutoTranslate=%s" % keys.pop("odbc_autotranslate")) - - connectors.extend([f"{k}={v}" for k, v in sorted(keys.items())]) - return [[";".join(connectors)], connect_args] - - def is_disconnect(self, e, connection, cursor): - if isinstance(e, self.dbapi.Error): - error_codes = { - "40004", # Connection lost. - "40009", # Connection lost after internal server error. - "40018", # Connection lost after system running out of memory. - "40020", # Connection lost after system running out of memory. - } - exasol_error_codes = { - "HY000": ( # Generic Exasol error code - re.compile(r"operation timed out", re.IGNORECASE), - re.compile(r"connection lost", re.IGNORECASE), - re.compile(r"Socket closed by peer", re.IGNORECASE), - ) - } - - error_code, error_msg = e.args[:2] - - # import pdb; pdb.set_trace() - if error_code in exasol_error_codes: - # Check exasol error - for msg_re in exasol_error_codes[error_code]: - if msg_re.search(error_msg): - return True - - return False - - # Check Pyodbc error - return error_code in error_codes - - return super().is_disconnect(e, connection, cursor) - - @staticmethod - def _is_sql_fallback_requested(**kwargs): - is_fallback_requested = kwargs.get("use_sql_fallback", False) - if is_fallback_requested: - logger.warning("Using sql fallback instead of odbc functions") - return is_fallback_requested - - @staticmethod - def _dbapi_connection(connection): - return connection.connection.driver_connection - - @reflection.cache - def _tables_for_schema(self, connection, schema, table_type=None, table_name=None): - schema = self._get_schema_for_input_or_current(connection, schema) - table_name = self.denormalize_name(table_name) - conn = self._dbapi_connection(connection) - with conn.cursor().tables( - schema=schema, tableType=table_type, table=table_name - ) as table_cursor: - return [row for row in table_cursor] - - @reflection.cache - def get_view_definition(self, connection, view_name, schema=None, **kw): - if self._is_sql_fallback_requested(**kw): - return super().get_view_definition(connection, view_name, schema, **kw) - if view_name is None: - return None - - tables = self._tables_for_schema( - connection, schema, table_type="VIEW", table_name=view_name - ) - if len(tables) != 1: - return None - - quoted_view_name_string = self.quote_string_value(tables[0][2]) - quoted_view_schema_string = self.quote_string_value(tables[0][1]) - sql_statement = ( - "/*snapshot execution*/ SELECT view_text " - f"FROM sys.exa_all_views WHERE view_name = {quoted_view_name_string} " - f"AND view_schema = {quoted_view_schema_string}" - ) - result = connection.execute(sql.text(sql_statement)).scalar() - return result if result else None - - @reflection.cache - def get_table_names(self, connection, schema, **kw): - if self._is_sql_fallback_requested(**kw): - return super().get_table_names(connection, schema, **kw) - tables = self._tables_for_schema(connection, schema, table_type="TABLE") - return [self.normalize_name(row.table_name) for row in tables] - - @reflection.cache - def get_view_names(self, connection, schema=None, **kw): - if self._is_sql_fallback_requested(**kw): - return super().get_view_names(connection, schema, **kw) - tables = self._tables_for_schema(connection, schema, table_type="VIEW") - return [self.normalize_name(row.table_name) for row in tables] - - def has_table(self, connection, table_name, schema=None, **kw): - 
if self._is_sql_fallback_requested(**kw): - return super().has_table(connection, table_name, schema, **kw) - tables = self.get_table_names( - connection=connection, schema=schema, table_name=table_name, **kw - ) - return self.normalize_name(table_name) in tables - - def _get_schema_names_query(self, connection, **kw): - if self._is_sql_fallback_requested(**kw): - return super()._get_schema_names_query(connection, **kw) - return "/*snapshot execution*/ " + super()._get_schema_names_query( - connection, **kw - ) - - @reflection.cache - def _get_columns(self, connection, table_name, schema=None, **kw): - if self._is_sql_fallback_requested(**kw): - return super()._get_columns(connection, table_name, schema, **kw) - - tables = self._tables_for_schema( - connection, schema=schema, table_name=table_name - ) - if len(tables) != 1: - return [] - - # get_columns_sql originally returned all columns of all tables if table_name is None, - # we follow this behavior here for compatibility. However, the documentation for Dialects - # does not mention this behavior: - # https://docs.sqlalchemy.org/en/13/core/internals.html#sqlalchemy.engine.interfaces.Dialect - quoted_schema_string = self.quote_string_value(tables[0].table_schem) - quoted_table_string = self.quote_string_value(tables[0].table_name) - sql_statement = "/*snapshot execution*/ {query}".format( - query=self.get_column_sql_query_str() - ) - sql_statement = sql_statement.format( - schema=quoted_schema_string, table=quoted_table_string - ) - response = connection.execute(sql.text(sql_statement)) - - return list(response) - - @reflection.cache - def _get_pk_constraint(self, connection, table_name, schema=None, **kw): - if self._is_sql_fallback_requested(**kw): - return super()._get_pk_constraint(connection, table_name, schema, **kw) - - conn = self._dbapi_connection(connection) - schema = self._get_schema_for_input_or_current(connection, schema) - table_name = self.denormalize_name(table_name) - with conn.cursor().primaryKeys(table=table_name, schema=schema) as cursor: - pkeys = [] - constraint_name = None - for row in cursor: - table, primary_key, constraint = row[2], row[3], row[5] - if table != table_name and table_name is not None: - continue - pkeys.append(self.normalize_name(primary_key)) - constraint_name = self.normalize_name(constraint) - return {"constrained_columns": pkeys, "name": constraint_name} - - @reflection.cache - def _get_foreign_keys(self, connection, table_name, schema=None, **kw): - if self._is_sql_fallback_requested(**kw): - return super()._get_foreign_keys(connection, table_name, schema, **kw) - - # Need to use a workaround, because SQLForeignKeys functions doesn't work for an unknown reason - tables = self._tables_for_schema( - connection=connection, - schema=schema, - table_name=table_name, - table_type="TABLE", - ) - if len(tables) == 0: - return [] - - quoted_schema_string = self.quote_string_value(tables[0].table_schem) - quoted_table_string = self.quote_string_value(tables[0].table_name) - sql_statement = "/*snapshot execution*/ {query}".format( - query=self._get_constraint_sql_str( - quoted_schema_string, quoted_table_string, "FOREIGN KEY" - ) - ) - response = connection.execute(sql.text(sql_statement)) - - return list(response) - - -dialect = EXADialect_pyodbc diff --git a/sqlalchemy_exasol/requirements.py b/sqlalchemy_exasol/requirements.py index ac00ba2f..b81ca35b 100644 --- a/sqlalchemy_exasol/requirements.py +++ b/sqlalchemy_exasol/requirements.py @@ -155,13 +155,6 @@ def bound_limit_offset(self): """ return 
exclusions.closed() - @property - def duplicate_key_raises_integrity_error(self): - return exclusions.only_on( - [lambda config: config.db.dialect.driver == "pyodbc"], - reason="Currently this is only supported by pyodbc based dialects", - ) - @property def independent_connections(self): return exclusions.open() diff --git a/sqlalchemy_exasol/turbodbc.py b/sqlalchemy_exasol/turbodbc.py deleted file mode 100644 index 5e402b3f..00000000 --- a/sqlalchemy_exasol/turbodbc.py +++ /dev/null @@ -1,151 +0,0 @@ -import decimal -from warnings import warn - -from sqlalchemy import types as sqltypes -from sqlalchemy import util - -from sqlalchemy_exasol.base import EXADialect -from sqlalchemy_exasol.warnings import SqlaExasolDeprecationWarning - -DEFAULT_CONNECTION_PARAMS = { - # always enable efficient conversion to Python types: - # see https://www.exasol.com/support/browse/EXASOL-898 - "inttypesinresultsifpossible": "y", -} - -DEFAULT_TURBODBC_PARAMS = {"read_buffer_size": 50} - -TURBODBC_TRANSLATED_PARAMS = { - "read_buffer_size", - "parameter_sets_to_buffer", - "use_async_io", - "varchar_max_character_limit", - "prefer_unicode", - "large_decimals_as_64_bit_types", - "limit_varchar_results_to_max", - "autocommit", -} - - -class _ExaDecimal(sqltypes.DECIMAL): - def bind_processor(self, dialect): - return super().bind_processor(dialect) - - def result_processor(self, dialect, coltype): - if self.asdecimal: - fstring = "%%.%df" % self._effective_decimal_return_scale - - def to_decimal(value): - if value is None: - return None - elif isinstance(value, decimal.Decimal): - return value - elif isinstance(value, float): - return decimal.Decimal(fstring % value) - else: - return decimal.Decimal(value) - - return to_decimal - else: - return None - - -class _ExaInteger(sqltypes.INTEGER): - def bind_processor(self, dialect): - return super().bind_processor(dialect) - - def result_processor(self, dialect, coltype): - def to_integer(value): - # cast if turbodbc returns a VARCHAR - if coltype == 30: - return int(value) - else: - return value - - return to_integer - - -class EXADialect_turbodbc(EXADialect): - driver = "turbodbc" - driver_version = None - supports_statement_cache = False - supports_native_decimal = False - supports_sane_multi_rowcount = False - - colspecs = {sqltypes.Numeric: _ExaDecimal, sqltypes.Integer: _ExaInteger} - - def __init__(self, **kw): - message = ( - "'turbodbc' support in 'sqlalchemy_exasol' is deprecated and will be removed. " - "Please switch to the websocket driver. See documentation for details." 
- ) - warn(message, SqlaExasolDeprecationWarning) - super().__init__(**kw) - - @classmethod - def dbapi(cls): - return __import__("turbodbc") - - def create_connect_args(self, url): - options = self._get_options_with_defaults(url) - self._translate_none(options) - self._interpret_destination(options) - - return [[options.pop("dsn", None)], options] - - @staticmethod - def _get_options_with_defaults(url): - user_options = url.translate_connect_args( - username="uid", password="pwd", database="exaschema", host="destination" - ) - user_options.update(url.query) - - options = { - key.lower(): value for (key, value) in DEFAULT_CONNECTION_PARAMS.items() - } - options.update( - {key.lower(): value for (key, value) in DEFAULT_TURBODBC_PARAMS.items()} - ) - for key in user_options.keys(): - options[key.lower()] = user_options[key] - - real_turbodbc = __import__("turbodbc") - turbodbc_options = {} - for param in TURBODBC_TRANSLATED_PARAMS: - if param in options: - raw = options.pop(param) - if param in { - "use_async_io", - "prefer_unicode", - "large_decimals_as_64_bit_types", - "limit_varchar_results_to_max", - "autocommit", - }: - value = util.asbool(raw) - elif param == "read_buffer_size": - value = real_turbodbc.Megabytes(util.asint(raw)) - else: - value = util.asint(raw) - turbodbc_options[param] = value - - options["turbodbc_options"] = real_turbodbc.make_options(**turbodbc_options) - - return options - - @staticmethod - def _interpret_destination(options): - if ("port" in options) or ("database" in options): - options["exahost"] = "{}:{}".format( - options.pop("destination"), options.pop("port") - ) - else: - options["dsn"] = options.pop("destination") - - @staticmethod - def _translate_none(options): - for key in options: - if options[key] == "None": - options[key] = None - - -dialect = EXADialect_turbodbc diff --git a/sqlalchemy_exasol/types.py b/sqlalchemy_exasol/types.py new file mode 100644 index 00000000..29cef60b --- /dev/null +++ b/sqlalchemy_exasol/types.py @@ -0,0 +1,30 @@ +import decimal + +from sqlalchemy.sql import sqltypes + + +class ExaDecimal(sqltypes.DECIMAL): + def bind_processor(self, dialect): + return super().bind_processor(dialect) + + def to_decimal(self, value): + fstring = "%%.%df" % self._effective_decimal_return_scale + + if value is None: + return None + if isinstance(value, decimal.Decimal): + return value + if isinstance(value, float): + return decimal.Decimal(fstring % value) + return decimal.Decimal(value) + + @staticmethod + def handle_not_as_decimal(value): + if value is None: + return None + return float(value) + + def result_processor(self, dialect, coltype): + if self.asdecimal: + return self.to_decimal + return self.handle_not_as_decimal diff --git a/sqlalchemy_exasol/util.py b/sqlalchemy_exasol/util.py index a52423c6..5b18f005 100644 --- a/sqlalchemy_exasol/util.py +++ b/sqlalchemy_exasol/util.py @@ -1,7 +1,7 @@ import datetime from sqlalchemy_exasol import base -from sqlalchemy_exasol.pyodbc import EXADialect_pyodbc +from sqlalchemy_exasol.websocket import EXADialect_websocket def raw_sql(query): @@ -14,7 +14,7 @@ def raw_sql(query): :returns: A string of raw SQL :rtype: string """ - dialect = EXADialect_pyodbc() + dialect = EXADialect_websocket() class LiteralCompiler(base.EXACompiler): def visit_bindparam( @@ -30,20 +30,19 @@ def visit_bindparam( def render_literal_value(self, value, type_): if value is None: return "NULL" - elif isinstance(value, bytes): + if isinstance(value, bytes): return "'{value}'".format(value=value.decode("utf-8")) - elif 
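# Illustrative sketch only (values hypothetical): the ExaDecimal result
# processor extracted to sqlalchemy_exasol/types.py above can be exercised
# directly; the dialect/coltype arguments are unused by the processor:
#
#     from sqlalchemy_exasol.types import ExaDecimal
#
#     to_dec = ExaDecimal(precision=10, scale=2).result_processor(None, None)
#     to_dec(1.5)    # Decimal('1.50') -- floats are formatted to the scale
#     to_dec("3.7")  # Decimal('3.7')
#     to_dec(None)   # None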
isinstance(value, str): + if isinstance(value, str): return f"'{value}'" - elif type(value) is datetime.date: + if type(value) is datetime.date: return "to_date('{value}', 'YYYY-MM-DD')".format( value=value.strftime("%Y-%m-%d") ) - elif type(value) is datetime.datetime: + if type(value) is datetime.datetime: return "to_timestamp('{value}', 'YYYY-MM-DD HH24:MI:SS.FF6')".format( value=value.strftime("%Y-%m-%d %H:%M:%S.%f") ) - else: - return f"{value}" + return f"{value}" compiler = LiteralCompiler(dialect, query) return compiler.string diff --git a/sqlalchemy_exasol/websocket.py b/sqlalchemy_exasol/websocket.py index 325b3172..b4bf6490 100644 --- a/sqlalchemy_exasol/websocket.py +++ b/sqlalchemy_exasol/websocket.py @@ -1,5 +1,4 @@ import datetime -import decimal import time from collections import ( ChainMap, @@ -11,6 +10,7 @@ from sqlalchemy.sql import sqltypes from sqlalchemy_exasol.base import EXADialect +from sqlalchemy_exasol.types import ExaDecimal from sqlalchemy_exasol.version import VERSION @@ -24,29 +24,6 @@ def to_integer(value): return to_integer -class Decimal(sqltypes.DECIMAL): - def bind_processor(self, dialect): - return super().bind_processor(dialect) - - def result_processor(self, dialect, coltype): - if not self.asdecimal: - return lambda value: None if value is None else float(value) - - fstring = "%%.%df" % self._effective_decimal_return_scale - - def to_decimal(value): - if value is None: - return None - elif isinstance(value, decimal.Decimal): - return value - elif isinstance(value, float): - return decimal.Decimal(fstring % value) - else: - return decimal.Decimal(value) - - return to_decimal - - class Date(sqltypes.DATE): def bind_processor(self, dialect): return super().bind_processor(dialect) @@ -90,13 +67,13 @@ class EXADialect_websocket(EXADialect): supports_statement_cache = False colspecs = { sqltypes.Integer: Integer, - sqltypes.Numeric: Decimal, + sqltypes.Numeric: ExaDecimal, sqltypes.Date: Date, sqltypes.DateTime: DateTime, } @classmethod - def dbapi(cls): + def import_dbapi(cls): return __import__( "exasol.driver.websocket.dbapi2", fromlist="exasol.driver.websocket" ) diff --git a/test/integration/exasol/conftest.py b/test/integration/exasol/conftest.py index 5f368df2..3427f2d9 100644 --- a/test/integration/exasol/conftest.py +++ b/test/integration/exasol/conftest.py @@ -5,8 +5,6 @@ import pytest from sqlalchemy.dialects import registry -registry.register("exa.pyodbc", "sqlalchemy_exasol.pyodbc", "EXADialect_pyodbc") -registry.register("exa.turbodbc", "sqlalchemy_exasol.turbodbc", "EXADialect_turbodbc") registry.register( "exa.websocket", "sqlalchemy_exasol.websocket", "EXADialect_websocket" ) diff --git a/test/integration/exasol/test_certificate.py b/test/integration/exasol/test_certificate.py index b1b57f8d..9159e181 100644 --- a/test/integration/exasol/test_certificate.py +++ b/test/integration/exasol/test_certificate.py @@ -9,14 +9,8 @@ sql, testing, ) -from sqlalchemy.testing.fixtures import ( - TestBase, - config, -) - -FINGERPRINT_SECURITY_RATIONALE = ( - "Only websocket supports giving a fingerprint in the connection" -) +from sqlalchemy.testing import config +from sqlalchemy.testing.fixtures import TestBase def get_fingerprint(dsn): @@ -62,9 +56,10 @@ def remove_ssl_settings(url): pass return url.set(query=query) - @pytest.mark.skipif( + @pytest.mark.xfail( testing.db.dialect.server_version_info < (7, 1, 0), reason="DB version(s) before 7.1.0 don't enforce ssl/tls", + strict=True, ) def 
test_db_connection_fails_with_default_settings_for_self_signed_certificates( self, @@ -81,10 +76,6 @@ def test_db_connection_fails_with_default_settings_for_self_signed_certificates( expected_substrings = ["self-signed certificate", "self signed certificate"] assert any([e in actual_message for e in expected_substrings]) - @pytest.mark.skipif( - "websocket" not in testing.db.dialect.driver, - reason="Only websocket supports passing on connect_args like this.", - ) def test_db_skip_certification_validation_passes(self): url = self.remove_ssl_settings(config.db.url) @@ -101,10 +92,6 @@ def test_db_with_ssl_verify_none_passes(self): result = self.perform_test_query(engine) assert result == [(42,)] - @pytest.mark.skipif( - "websocket" not in testing.db.dialect.driver, - reason=FINGERPRINT_SECURITY_RATIONALE, - ) def test_db_with_fingerprint_passes(self): url = self.remove_ssl_settings(config.db.url) connect_args = url.translate_connect_args(database="schema") @@ -118,10 +105,6 @@ def test_db_with_fingerprint_passes(self): result = self.perform_test_query(engine) assert result == [(42,)] - @pytest.mark.skipif( - "websocket" not in testing.db.dialect.driver, - reason=FINGERPRINT_SECURITY_RATIONALE, - ) def test_db_with_wrong_fingerprint_fails(self): url = self.remove_ssl_settings(config.db.url) connect_args = url.translate_connect_args(database="schema") diff --git a/test/integration/exasol/test_deadlock.py b/test/integration/exasol/test_deadlock.py index 609b5498..674feaf0 100644 --- a/test/integration/exasol/test_deadlock.py +++ b/test/integration/exasol/test_deadlock.py @@ -2,7 +2,6 @@ from threading import Thread import pytest -import sqlalchemy.testing as testing from sqlalchemy import ( create_engine, inspect, @@ -19,9 +18,8 @@ # TODO: get_schema_names, get_view_names and get_view_definition didn't cause deadlocks in this scenario -@pytest.mark.skipif( - "pyodbc" not in str(testing.db.url), - reason="We currently only support snapshot metadata requests in the pyodbc based dialect", +@pytest.mark.xfail( + reason="We do not currently support snapshot metadata", ) class MetadataTest(fixtures.TablesTest): __backend__ = True @@ -31,116 +29,48 @@ class MetadataTest(fixtures.TablesTest): def create_transaction(self, url, con_name): engine = create_engine( - config.db.url, echo=self.CONNECTION_ECHO, logging_name="engine" + con_name + config.db.url, + echo=self.CONNECTION_ECHO, + logging_name="engine" + con_name, ) session = engine.connect().execution_options(autocommit=False) return engine, session - def test_no_deadlock_for_get_table_names_without_fallback(self): - def without_fallback(session2, schema, table): + def test_no_deadlock_for_get_table_names(self): + def get_table_names(session2, schema, table): dialect = inspect(session2).dialect - dialect.get_table_names(session2, schema=schema, use_sql_fallback=False) + dialect.get_table_names(session2, schema=schema) - self.run_deadlock_for_table(without_fallback) + self.run_deadlock_for_table(get_table_names) - # NOTE: If a DB >= 7.1.0 still deadlocks here, it may due to the usage of an old ODBC driver version - @pytest.mark.skipif( - testing.db.dialect.server_version_info >= (7, 1, 0), - reason="DB version(s) after 7.1.0 should not deadlock here", - ) - def test_deadlock_for_get_table_names_with_fallback(self): - def with_fallback(session2, schema, table): + def test_no_deadlock_for_get_columns(self): + def get_columns(session2, schema, table): dialect = inspect(session2).dialect - dialect.get_table_names(session2, schema=schema, 
use_sql_fallback=True) + dialect.get_columns(session2, schema=schema, table_name=table) - with pytest.raises(Exception): - self.run_deadlock_for_table(with_fallback) + self.run_deadlock_for_table(get_columns) - @pytest.mark.skipif( - testing.db.dialect.server_version_info <= (7, 1, 0), - reason="DB version(s) before 7.1.0 are expected to deadlock here", - ) - def test_no_deadlock_for_get_table_names_with_fallback(self): - def with_fallback(session2, schema, table): + def test_no_deadlock_for_get_pk_constraint(self): + def get_pk_constraint(session2, schema, table): dialect = inspect(session2).dialect - dialect.get_table_names(session2, schema=schema, use_sql_fallback=True) + dialect.get_pk_constraint(session2, table_name=table, schema=schema) - self.run_deadlock_for_table(with_fallback) + self.run_deadlock_for_table(get_pk_constraint) - def test_no_deadlock_for_get_columns_without_fallback(self): - def without_fallback(session2, schema, table): + def test_no_deadlock_for_get_foreign_keys(self): + def get_foreign_keys(session2, schema, table): dialect = inspect(session2).dialect - dialect.get_columns( - session2, schema=schema, table_name=table, use_sql_fallback=False - ) - - self.run_deadlock_for_table(without_fallback) - - def test_no_deadlock_for_get_columns_with_fallback(self): - # TODO: Doesnt produce a deadlock anymore since last commit? - def with_fallback(session2, schema, table): - dialect = inspect(session2).dialect - dialect.get_columns( - session2, schema=schema, table_name=table, use_sql_fallback=True - ) - - self.run_deadlock_for_table(with_fallback) - - def test_no_deadlock_for_get_pk_constraint_without_fallback(self): - def without_fallback(session2, schema, table): - dialect = inspect(session2).dialect - dialect.get_pk_constraint( - session2, table_name=table, schema=schema, use_sql_fallback=False - ) + dialect.get_foreign_keys(session2, table_name=table, schema=schema) - self.run_deadlock_for_table(without_fallback) + self.run_deadlock_for_table(get_foreign_keys) - def test_no_deadlock_for_get_pk_constraint_with_fallback(self): - def with_fallback(session2, schema, table): - dialect = inspect(session2).dialect - dialect.get_pk_constraint( - session2, table_name=table, schema=schema, use_sql_fallback=True - ) - - self.run_deadlock_for_table(with_fallback) - - def test_no_deadlock_for_get_foreign_keys_without_fallback(self): - def without_fallback(session2, schema, table): - dialect = inspect(session2).dialect - dialect.get_foreign_keys( - session2, table_name=table, schema=schema, use_sql_fallback=False - ) - - self.run_deadlock_for_table(without_fallback) - - def test_no_deadlock_for_get_foreign_keys_with_fallback(self): - def with_fallback(session2, schema, table): - dialect = inspect(session2).dialect - dialect.get_foreign_keys( - session2, table_name=table, schema=schema, use_sql_fallback=True - ) - - self.run_deadlock_for_table(with_fallback) - - def test_no_deadlock_for_get_view_names_without_fallback(self): + def test_no_deadlock_for_get_view_names(self): # TODO: think of other scenarios where metadata deadlocks with view could happen - def without_fallback(session2, schema, table): + def get_view_names(session2, schema, table): dialect = inspect(session2).dialect - dialect.get_view_names( - session2, table_name=table, schema=schema, use_sql_fallback=False - ) - - self.run_deadlock_for_table(without_fallback) - - def test_no_deadlock_for_get_view_names_with_fallback(self): - # TODO: think of other scenarios where metadata deadlocks with view could happen - def 
with_fallback(session2, schema, table): - dialect = inspect(session2).dialect - dialect.get_view_names( - session2, table_name=table, schema=schema, use_sql_fallback=True - ) + dialect.get_view_names(session2, table_name=table, schema=schema) - self.run_deadlock_for_table(with_fallback) + self.run_deadlock_for_table(get_view_names) def watchdog(self, session0, schema): while self.watchdog_run: @@ -188,7 +118,7 @@ def run_deadlock_for_table(self, function): ) session0.execute(sql.text(f"INSERT INTO {schema}.deadlock_test1 VALUES 1")) session0.execute(sql.text(f"INSERT INTO {schema}.deadlock_test2 VALUES (1,1)")) - session0.execute(sql.text("commit")) + session0.commit() self.watchdog_run = True t1 = Thread(target=self.watchdog, args=(session0, schema)) t1.start() @@ -204,14 +134,14 @@ def run_deadlock_for_table(self, function): session3.execute( sql.text(f"DELETE FROM {schema}.deadlock_test2 WHERE false") ) - session3.execute(sql.text("commit")) + session3.commit() engine2, session2 = self.create_transaction(url, "transaction2") session2.execute(sql.text("SELECT 1")) function(session2, schema, "deadlock_test2") - session2.execute(sql.text("commit")) - session1.execute(sql.text("commit")) + session2.commit() + session1.commit() except Exception as e: self.watchdog_run = False t1.join() @@ -238,7 +168,7 @@ def run_deadlock_for_get_view_names(self, function): f"CREATE OR REPLACE VIEW {schema}.deadlock_test_view_1 AS SELECT * FROM {schema}.deadlock_test_table" ) ) - session0.execute(sql.text("commit")) + session0.commit() self.watchdog_run = True t1 = Thread(target=self.watchdog, args=(session0, schema)) t1.start() @@ -256,14 +186,14 @@ def run_deadlock_for_get_view_names(self, function): engine3, session3 = self.create_transaction(url, "transaction3") session3.execute(sql.text("SELECT 1")) session3.execute(sql.text(f"DROP VIEW {schema}.deadlock_test_view_1")) - session3.execute(sql.text("commit")) + session3.commit() engine2, session2 = self.create_transaction(url, "transaction2") session2.execute(sql.text("SELECT 1")) function(session2, schema) - session2.execute(sql.text("commit")) - session1.execute(sql.text("commit")) + session2.commit() + session1.commit() except Exception as e: self.watchdog_run = False t1.join() diff --git a/test/integration/exasol/test_exadialect_pyodbc.py b/test/integration/exasol/test_exadialect_pyodbc.py deleted file mode 100644 index dadeed47..00000000 --- a/test/integration/exasol/test_exadialect_pyodbc.py +++ /dev/null @@ -1,133 +0,0 @@ -from unittest.mock import Mock - -import pyodbc -import pytest -from sqlalchemy import testing -from sqlalchemy.engine import url as sa_url -from sqlalchemy.pool import _ConnectionFairy -from sqlalchemy.testing import fixtures - -from sqlalchemy_exasol.pyodbc import EXADialect_pyodbc - - -@pytest.mark.skipif( - "pyodbc" not in testing.db.dialect.driver, - reason="This tests are only relevant for the dialect Exasol pyodbc dialect", -) -class EXADialect_pyodbcTest(fixtures.TestBase): - def setup_method(self): - self.dialect = EXADialect_pyodbc() - self.dialect.dbapi = pyodbc - - def assert_parsed(self, dsn, expected_connector, expected_args): - url = sa_url.make_url(dsn) - connector, args = self.dialect.create_connect_args(url) - assert connector == expected_connector - assert args == expected_args - - def test_create_connect_args(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema?driver=EXAODBC", - [ - 
"DRIVER={EXAODBC};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;UID=scott;PWD=tiger;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {}, - ) - - def test_create_connect_args_with_driver(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema?driver=FOOBAR", - [ - "DRIVER={FOOBAR};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;UID=scott;PWD=tiger;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {}, - ) - - def test_create_connect_args_dsn(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@exa_test", - [ - "DSN=exa_test;EXAHOST=;EXASCHEMA=;UID=scott;PWD=tiger;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {}, - ) - - def test_create_connect_args_trusted(self): - self.assert_parsed( - "exa+pyodbc://192.168.1.2..8:1234/my_schema", - [ - "DRIVER={None};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;Trusted_Connection=Yes;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {}, - ) - - def test_create_connect_args_autotranslate(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema?odbc_autotranslate=Yes", - [ - "DRIVER={None};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;UID=scott;PWD=tiger;AutoTranslate=Yes;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {}, - ) - - def test_create_connect_args_with_param(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema?autocommit=true", - [ - "DRIVER={None};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;UID=scott;PWD=tiger;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {"AUTOCOMMIT": True}, - ) - - def test_create_connect_args_with_param_multiple(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema?autocommit=true&ansi=false&unicode_results=false", - [ - "DRIVER={None};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;UID=scott;PWD=tiger;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ], - {"AUTOCOMMIT": True, "ANSI": False, "UNICODE_RESULTS": False}, - ) - - def test_create_connect_args_with_unknown_params(self): - self.assert_parsed( - "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema?clientname=test&querytimeout=10", - [ - "DRIVER={None};EXAHOST=192.168.1.2..8:1234;EXASCHEMA=my_schema;UID=scott;PWD=tiger;INTTYPESINRESULTSIFPOSSIBLE=y" - ";SQLSTATEMAPPINGACTIVE=y" - ";SQLSTATEMAPPINGS=42X91:23000,27002:23000" - ";clientname=test;querytimeout=10" - ], - {}, - ) - - def test_is_disconnect(self): - connection = Mock(spec=_ConnectionFairy) - cursor = Mock(spec=pyodbc.Cursor) - - errors = [ - pyodbc.Error( - "HY000", - "[HY000] [EXASOL][EXASolution driver]Connection lost in socket read attempt. Operation timed out (-1) (SQLExecDirectW)", - ), - pyodbc.Error( - "HY000", "[HY000] [EXASOL][EXASolution driver]Socket closed by peer." 
- ), - ] - - for error in errors: - status = self.dialect.is_disconnect(error, connection, cursor) - assert status diff --git a/test/integration/exasol/test_exadialect_turbodbc.py b/test/integration/exasol/test_exadialect_turbodbc.py deleted file mode 100644 index 4aa89bc9..00000000 --- a/test/integration/exasol/test_exadialect_turbodbc.py +++ /dev/null @@ -1,146 +0,0 @@ -import pytest -from sqlalchemy import testing -from sqlalchemy.engine import url as sa_url -from sqlalchemy.testing import ( - eq_, - fixtures, -) - -from sqlalchemy_exasol.turbodbc import ( - DEFAULT_CONNECTION_PARAMS, - EXADialect_turbodbc, -) - - -@pytest.mark.skipif( - "turbodbc" not in testing.db.dialect.driver, - reason="This tests are only relevant for the dialect Exasol turbodbc dialect", -) -class EXADialectTurbodbcTest(fixtures.TestBase): - dialect = None - default_host_args = {"exahost": "192.168.1.2..8:1234", "exaschema": "my_schema"} - default_user_args = { - "uid": "scott", - "pwd": "tiger", - } - - @staticmethod - def _assert_connect_args(result, expected, expected_turbodbc): - eq_({k: v for k, v in result.items() if k != "turbodbc_options"}, expected) - assert result[ - "turbodbc_options" - ].read_buffer_size.megabytes == expected_turbodbc.get("read_buffer_size", 50) - assert result[ - "turbodbc_options" - ].parameter_sets_to_buffer == expected_turbodbc.get( - "parameter_sets_to_buffer", 1000 - ) - assert result["turbodbc_options"].use_async_io == expected_turbodbc.get( - "use_async_io", False - ) - - def setup_method(self): - self.dialect = EXADialect_turbodbc() - - def _get_connection_arguments(self, dsn): - url = sa_url.make_url(dsn) - _, args = self.dialect.create_connect_args(url) - return args - - def assert_parsed( - self, dsn, expected_connector, expected_args, expected_turbodbc_args - ): - url = sa_url.make_url(dsn) - connector, args = self.dialect.create_connect_args(url) - eq_(connector, expected_connector) - self._assert_connect_args(args, expected_args, expected_turbodbc_args) - - def test_create_connect_args(self): - expected = self.default_host_args.copy() - expected.update(self.default_user_args) - expected.update(DEFAULT_CONNECTION_PARAMS) - self.assert_parsed( - "exa+turbodbc://scott:tiger@192.168.1.2..8:1234/my_schema", - [None], - expected, - {}, - ) - - def test_create_connect_args_with_driver(self): - expected = self.default_host_args.copy() - expected.update(self.default_user_args) - expected.update(DEFAULT_CONNECTION_PARAMS) - expected["driver"] = "EXASolo" - self.assert_parsed( - "exa+turbodbc://scott:tiger@192.168.1.2..8:1234/my_schema?driver=EXASolo", - [None], - expected, - {}, - ) - - def test_create_connect_args_dsn_without_user(self): - self.assert_parsed( - "exa+turbodbc://exa_test", ["exa_test"], DEFAULT_CONNECTION_PARAMS, {} - ) - - def test_create_connect_args_dsn(self): - expected = self.default_user_args.copy() - expected.update(DEFAULT_CONNECTION_PARAMS) - self.assert_parsed( - "exa+pyodbc://scott:tiger@exa_test", ["exa_test"], expected, {} - ) - - def test_create_connect_args_with_turbodbc_args(self): - expected = self.default_host_args.copy() - expected.update(self.default_user_args) - expected.update(DEFAULT_CONNECTION_PARAMS) - self.assert_parsed( - "exa+turbodbc://scott:tiger@192.168.1.2..8:1234/my_schema?" 
- "read_buffer_size=4200¶meter_sets_to_buffer=111&use_async_io=True" - "&varchar_max_character_limit=99&prefer_unicode=True" - "&large_decimals_as_64_bit_types=True" - "&limit_varchar_results_to_max=True" - "&autocommit=True", - [None], - expected, - { - "read_buffer_size": 4200, - "parameter_sets_to_buffer": 111, - "use_async_io": True, - "varchar_max_character_limit": 99, - "prefer_unicode": True, - "large_decimals_as_64_bit_types": True, - "limit_varchar_results_to_max": True, - "autocommit": True, - }, - ) - - def test_create_connect_args_trusted(self): - expected = self.default_host_args.copy() - expected.update(DEFAULT_CONNECTION_PARAMS) - self.assert_parsed( - "exa+pyodbc://192.168.1.2..8:1234/my_schema", [None], expected, {} - ) - - def test_create_connect_args_with_custom_parameter(self): - base_url = "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema" - assert "custom" not in self._get_connection_arguments(base_url) - assert ( - self._get_connection_arguments(base_url + "?CUSTOM=something")["custom"] - == "something" - ) - - def test_create_connect_args_with_parameter_set_to_none(self): - base_url = "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema" - assert ( - self._get_connection_arguments(base_url + "?CUSTOM=None")["custom"] is None - ) - - def test_create_connect_args_overrides_default(self): - base_url = "exa+pyodbc://scott:tiger@192.168.1.2..8:1234/my_schema" - - before = self._get_connection_arguments(base_url)["inttypesinresultsifpossible"] - after = self._get_connection_arguments( - base_url + "?inttypesinresultsifpossible=custom" - )["inttypesinresultsifpossible"] - assert before != after diff --git a/test/integration/exasol/test_exasol.py b/test/integration/exasol/test_exasol.py index 7a440a55..c4a161f6 100644 --- a/test/integration/exasol/test_exasol.py +++ b/test/integration/exasol/test_exasol.py @@ -135,8 +135,6 @@ def test_alter_table_distribute_by(self): class UtilTest(fixtures.TablesTest): __backend__ = True - __engine_options__ = {"implicit_returning": False} - @classmethod def define_tables(cls, metadata): Table( diff --git a/test/integration/exasol/test_get_metadata_functions.py b/test/integration/exasol/test_get_metadata_functions.py index 67149ab7..ba394a1b 100644 --- a/test/integration/exasol/test_get_metadata_functions.py +++ b/test/integration/exasol/test_get_metadata_functions.py @@ -5,6 +5,7 @@ sql, ) from sqlalchemy.engine.url import URL +from sqlalchemy.exc import NoSuchTableError from sqlalchemy.sql.ddl import ( CreateSchema, DropSchema, @@ -108,244 +109,124 @@ def create_engine_with_database_name(cls, connection, new_database_name): engine = create_engine(url) return engine - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_schema_names(self, engine_name, use_sql_fallback): + def test_get_schema_names(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect - schema_names = dialect.get_schema_names( - connection=c, use_sql_fallback=use_sql_fallback - ) + schema_names = dialect.get_schema_names(connection=c) assert self.schema in schema_names and self.schema_2 in schema_names @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_compare_get_schema_names_for_sql_and_odbc(self, engine_name): - with self.engine_map[engine_name].begin() as c: - dialect = inspect(c).dialect - 
schema_names_fallback = dialect.get_schema_names( - connection=c, use_sql_fallback=True - ) - schema_names_odbc = dialect.get_schema_names(connection=c) - assert sorted(schema_names_fallback) == sorted(schema_names_odbc) - - @pytest.mark.parametrize("use_sql_fallback", [True, False]) - @pytest.mark.parametrize( - "engine_name", - [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], - ) - def test_get_table_names(self, use_sql_fallback, engine_name): + def test_get_table_names(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect - table_names = dialect.get_table_names( - connection=c, schema=self.schema, use_sql_fallback=use_sql_fallback - ) - assert "t" in table_names and "s" in table_names - - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) - @pytest.mark.parametrize( - "engine_name", - [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], - ) - def test_compare_get_table_names_for_sql_and_odbc(self, schema, engine_name): - with self.engine_map[engine_name].begin() as c: - if schema is None: - c.execute(sql.text("OPEN SCHEMA %s" % self.schema)) - dialect = inspect(c).dialect - table_names_fallback = dialect.get_table_names( - connection=c, schema=schema, use_sql_fallback=True - ) - table_names_odbc = dialect.get_table_names(connection=c, schema=schema) - assert table_names_fallback == table_names_odbc + table_names = dialect.get_table_names(connection=c, schema=self.schema) + assert table_names == ["s", "t"] - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_has_table_table_exists(self, use_sql_fallback, engine_name): + def test_has_table_table_exists(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect has_table = dialect.has_table( connection=c, schema=self.schema, table_name="t", - use_sql_fallback=use_sql_fallback, ) - assert has_table, "Table %s.T was not found, but should exist" % self.schema + assert has_table, f"Table {self.schema}.T was not found, but should exist" - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_has_table_table_exists_not(self, use_sql_fallback, engine_name): + def test_has_table_table_exists_not(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect has_table = dialect.has_table( connection=c, schema=self.schema, table_name="not_exist", - use_sql_fallback=use_sql_fallback, - ) - assert not has_table, ( - "Table %s.not_exist was found, but should not exist" % self.schema - ) - - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) - @pytest.mark.parametrize( - "engine_name", - [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], - ) - def test_compare_has_table_for_sql_and_odbc(self, schema, engine_name): - with self.engine_map[engine_name].begin() as c: - dialect = inspect(c).dialect - has_table_fallback = dialect.has_table( - connection=c, schema=schema, use_sql_fallback=True, table_name="t" - ) - has_table_odbc = dialect.has_table( - connection=c, schema=schema, table_name="t" - ) - assert has_table_fallback == has_table_odbc, ( - "Expected table %s.t with odbc and fallback" % schema ) + assert ( + not has_table + ), f"Table 
{self.schema}.not_exist was found, but should not exist" - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_view_names(self, use_sql_fallback, engine_name): + def test_get_view_names(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect - view_names = dialect.get_view_names( - connection=c, schema=self.schema, use_sql_fallback=use_sql_fallback - ) - assert "v" in view_names + view_names = dialect.get_view_names(connection=c, schema=self.schema) + assert view_names == ["v"] - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_view_names_for_sys(self, use_sql_fallback, engine_name): + def test_get_view_names_for_sys(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect - view_names = dialect.get_view_names( - connection=c, schema="sys", use_sql_fallback=use_sql_fallback - ) - assert len(view_names) == 0 + view_names = dialect.get_view_names(connection=c, schema="sys") + assert view_names == [] - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_view_definition(self, use_sql_fallback, engine_name): + def test_get_view_definition(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect view_definition = dialect.get_view_definition( connection=c, schema=self.schema, view_name="v", - use_sql_fallback=use_sql_fallback, - ) - assert self.view_defintion == view_definition - - @pytest.mark.parametrize("use_sql_fallback", [True, False]) - @pytest.mark.parametrize( - "engine_name", - [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], - ) - def test_get_view_definition_view_name_none(self, use_sql_fallback, engine_name): - with self.engine_map[engine_name].begin() as c: - dialect = inspect(c).dialect - view_definition = dialect.get_view_definition( - connection=c, - schema=self.schema, - view_name=None, - use_sql_fallback=use_sql_fallback, - ) - assert view_definition is None - - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) - @pytest.mark.parametrize( - "engine_name", - [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], - ) - def test_compare_get_view_names_for_sql_and_odbc(self, schema, engine_name): - with self.engine_map[engine_name].begin() as c: - dialect = inspect(c).dialect - c.execute(sql.text("OPEN SCHEMA %s" % self.schema)) - view_names_fallback = dialect.get_view_names( - connection=c, schema=schema, use_sql_fallback=True ) - view_names_odbc = dialect.get_view_names(connection=c, schema=schema) - assert view_names_fallback == view_names_odbc + assert view_definition == self.view_defintion - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_compare_get_view_definition_for_sql_and_odbc(self, schema, engine_name): + def test_get_view_definition_view_name_none(self, engine_name): with self.engine_map[engine_name].begin() as c: - if schema is None: - c.execute(sql.text("OPEN SCHEMA %s" % self.schema)) - view_name = "v" 
dialect = inspect(c).dialect - view_definition_fallback = dialect.get_view_definition( - connection=c, view_name=view_name, schema=schema, use_sql_fallback=True - ) - view_definition_odbc = dialect.get_view_definition( - connection=c, view_name=view_name, schema=schema - ) - assert view_definition_fallback == view_definition_odbc + with pytest.raises(NoSuchTableError): + dialect.get_view_definition( + connection=c, + schema=self.schema, + view_name=None, + ) - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) - @pytest.mark.parametrize("table", ["t", "s", "unknown"]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_compare_get_columns_for_sql_and_odbc(self, schema, table, engine_name): - with self.engine_map[engine_name].begin() as c: - dialect = inspect(c).dialect - if schema is None: - c.execute(sql.text("OPEN SCHEMA %s" % self.schema)) - columns_fallback = dialect.get_columns( - connection=c, table_name=table, schema=schema, use_sql_fallback=True - ) - columns_odbc = dialect.get_columns( - connection=c, table_name=table, schema=schema - ) - assert str(columns_fallback) == str( - columns_odbc - ) # object equality doesn't work for sqltypes - - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) @pytest.mark.parametrize( - "engine_name", - [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], + "schema,table", + [ + pytest.param( + TEST_GET_METADATA_FUNCTIONS_SCHEMA, "unknown", id="not existing table" + ), + pytest.param("NOT_A_SCHEMA", "s", id="not existing schema"), + ], ) - def test_compare_get_columns_none_table_for_sql_and_odbc(self, schema, engine_name): + def test_get_columns_raises_exception_for_no_table( + self, schema, table, engine_name + ): with self.engine_map[engine_name].begin() as c: - if schema is None: - c.execute(sql.text("OPEN SCHEMA %s" % self.schema)) dialect = inspect(c).dialect - table = None - columns_fallback = dialect.get_columns( - connection=c, table_name=table, schema=schema, use_sql_fallback=True - ) - columns_odbc = dialect.get_columns( - connection=c, table_name=table, schema=schema - ) - assert str(columns_odbc) == str(columns_fallback) + with pytest.raises(NoSuchTableError): + dialect.get_columns(connection=c, table_name=table, schema=schema) def make_columns_comparable( self, column_list @@ -355,19 +236,17 @@ def make_columns_comparable( key=lambda k: k["name"], ) - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_columns(self, use_sql_fallback, engine_name): + def test_get_columns(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect columns = dialect.get_columns( connection=c, schema=self.schema, table_name="t", - use_sql_fallback=use_sql_fallback, ) expected = [ { @@ -404,112 +283,104 @@ def test_get_columns(self, use_sql_fallback, engine_name): expected ) == self.make_columns_comparable(columns) - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_columns_table_name_none(self, use_sql_fallback, engine_name): + def test_get_columns_table_name_none(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect columns = dialect.get_columns( 
connection=c, schema=self.schema, table_name=None, - use_sql_fallback=use_sql_fallback, ) assert columns == [] - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) - @pytest.mark.parametrize("table", ["t", "s"]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_compare_get_pk_constraint_for_sql_and_odbc( + @pytest.mark.parametrize( + "schema,table", + [ + pytest.param( + TEST_GET_METADATA_FUNCTIONS_SCHEMA, "unknown", id="not existing table" + ), + pytest.param("NOT_A_SCHEMA", "s", id="not existing schema"), + ], + ) + def test_get_pk_constraint_raises_exception_for_no_table( self, schema, table, engine_name ): with self.engine_map[engine_name].begin() as c: - if schema is None: - c.execute(sql.text("OPEN SCHEMA %s" % self.schema)) dialect = inspect(c).dialect - pk_constraint_fallback = dialect.get_pk_constraint( - connection=c, table_name=table, schema=schema, use_sql_fallback=True - ) - pk_constraint_odbc = dialect.get_pk_constraint( - connection=c, table_name=table, schema=schema - ) - assert str(pk_constraint_fallback) == str(pk_constraint_odbc) + with pytest.raises(NoSuchTableError): + dialect.get_pk_constraint(connection=c, table_name=table, schema=schema) - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_pk_constraint(self, use_sql_fallback, engine_name): + def test_get_pk_constraint(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect pk_constraint = dialect.get_pk_constraint( connection=c, schema=self.schema, table_name="t", - use_sql_fallback=use_sql_fallback, ) assert pk_constraint["constrained_columns"] == [ "pid1", "pid2", ] and pk_constraint["name"].startswith("sys_") - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_pk_constraint_table_name_none(self, use_sql_fallback, engine_name): + def test_get_pk_constraint_table_name_none(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect pk_constraint = dialect.get_pk_constraint( connection=c, schema=self.schema, table_name=None, - use_sql_fallback=use_sql_fallback, ) assert pk_constraint is None - @pytest.mark.parametrize("table", ["t", "s"]) - @pytest.mark.parametrize("schema", [TEST_GET_METADATA_FUNCTIONS_SCHEMA, None]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_compare_get_foreign_keys_for_sql_and_odbc( + @pytest.mark.parametrize( + "schema,table", + [ + pytest.param( + TEST_GET_METADATA_FUNCTIONS_SCHEMA, "unknown", id="not existing table" + ), + pytest.param("NOT_A_SCHEMA", "s", id="not existing schema"), + ], + ) + def test_get_foreign_keys_raises_exception_for_no_table( self, schema, table, engine_name ): with self.engine_map[engine_name].begin() as c: - if schema is None: - c.execute(sql.text("OPEN SCHEMA %s" % self.schema_2)) dialect = inspect(c).dialect - foreign_keys_fallback = dialect.get_foreign_keys( - connection=c, table_name=table, schema=schema, use_sql_fallback=True - ) - foreign_keys_odbc = dialect.get_foreign_keys( - connection=c, table_name=table, schema=schema - ) - assert str(foreign_keys_fallback) == str(foreign_keys_odbc) + with 
pytest.raises(NoSuchTableError): + dialect.get_foreign_keys(connection=c, table_name=table, schema=schema) - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_foreign_keys(self, use_sql_fallback, engine_name): + def test_get_foreign_keys(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect foreign_keys = dialect.get_foreign_keys( connection=c, schema=self.schema, table_name="s", - use_sql_fallback=use_sql_fallback, ) expected = [ { @@ -523,18 +394,16 @@ def test_get_foreign_keys(self, use_sql_fallback, engine_name): assert foreign_keys == expected - @pytest.mark.parametrize("use_sql_fallback", [True, False]) @pytest.mark.parametrize( "engine_name", [ENGINE_NONE_DATABASE, ENGINE_SCHEMA_DATABASE, ENGINE_SCHEMA_2_DATABASE], ) - def test_get_foreign_keys_table_name_none(self, use_sql_fallback, engine_name): + def test_get_foreign_keys_table_name_none(self, engine_name): with self.engine_map[engine_name].begin() as c: dialect = inspect(c).dialect foreign_keys = dialect.get_foreign_keys( connection=c, schema=self.schema, table_name=None, - use_sql_fallback=use_sql_fallback, ) assert foreign_keys == [] diff --git a/test/integration/exasol/test_regression.py b/test/integration/exasol/test_regression.py index 31f3b451..c1a9790c 100644 --- a/test/integration/exasol/test_regression.py +++ b/test/integration/exasol/test_regression.py @@ -22,8 +22,10 @@ CreateSchema, DropSchema, ) -from sqlalchemy.testing import fixtures -from sqlalchemy.testing.fixtures import config +from sqlalchemy.testing import ( + config, + fixtures, +) class TranslateMap(fixtures.TestBase): diff --git a/test/integration/exasol/test_update.py b/test/integration/exasol/test_update.py index 930a64a2..3508c062 100644 --- a/test/integration/exasol/test_update.py +++ b/test/integration/exasol/test_update.py @@ -1,10 +1,9 @@ -import pytest from sqlalchemy import * from sqlalchemy.testing import ( + config, eq_, fixtures, ) -from sqlalchemy.testing.fixtures import config from sqlalchemy.testing.schema import ( Column, Table, @@ -81,9 +80,6 @@ def fixtures(cls): } -@pytest.mark.skipif( - config.db.dialect.driver == "turbodbc", reason="not supported by turbodbc" -) class UpdateTest(_UpdateTestBase, fixtures.TablesTest): __backend__ = True @@ -129,12 +125,7 @@ def test_update_executemany(self): with config.db.begin() as conn: result = conn.execute(stmt, values) - # Depending on the dialect it either reports that the affected rows information - # is not available (-1) or it reports the actual number of updated/affected rows(2) - expected_rowcount_odbc = -1 - expected_rowcount_wss = 2 - expected_rowcount = [expected_rowcount_odbc, expected_rowcount_wss] - assert result.rowcount in expected_rowcount + assert result.rowcount == 2 self._assert_users( users, [(7, "jack2"), (8, "ed"), (9, "fred2"), (10, "chuck")] diff --git a/test/integration/regression/test_regression_bug335.py b/test/integration/regression/test_regression_bug335.py index c6ac53bb..38af670e 100644 --- a/test/integration/regression/test_regression_bug335.py +++ b/test/integration/regression/test_regression_bug335.py @@ -14,19 +14,14 @@ DropSchema, ) -from exasol.odbc import ( - ODBC_DRIVER, - odbcconfig, -) - @pytest.fixture -def pyodbc_connection_string(exasol_config): +def connection_string(exasol_config): config = exasol_config return ( - 
f"exa+pyodbc://{config.username}:{config.password}@{config.host}:{config.port}/" + f"exa+websocket://{config.username}:{config.password}@{config.host}:{config.port}/" f"?DEFAULTPARAMSIZE=200&INTTYPESINRESULTSIFPOSSIBLE=y" - "&FINGERPRINT=NOCERTCHECK&CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC" + "&FINGERPRINT=NOCERTCHECK&CONNECTIONLCALL=en_US.UTF-8&AUTOCOMMIT=0" ) @@ -55,14 +50,14 @@ def users_table(pyexasol_connection, test_schema): def test_lastrowid_does_not_create_extra_commit( - exasol_config, users_table, pyodbc_connection_string + exasol_config, users_table, connection_string ): """ For further details on this regression see `Issue-335 `_. """ schema_name, table_name = users_table metadata = MetaData() - engine = create_engine(pyodbc_connection_string) + engine = create_engine(connection_string) table = Table( table_name, @@ -72,16 +67,15 @@ def test_lastrowid_does_not_create_extra_commit( schema=schema_name, ) - with odbcconfig(ODBC_DRIVER): - with engine.connect() as connection: - with connection.begin() as transaction: - # Insert without an explicit ID will trigger a call to `get_lastrowid` - # which in turn cause the unintended autocommit - insert_statement = insert(table).values(name="Gandalf") - connection.execute(insert_statement) - transaction.rollback() + with engine.connect() as connection: + with connection.begin() as transaction: + # Insert without an explicit ID will trigger a call to `get_lastrowid` + # which in turn cause the unintended autocommit + insert_statement = insert(table).values(name="Gandalf") + connection.execute(insert_statement) + transaction.rollback() - result = connection.execute( - sql.text(f"SELECT * FROM {schema_name}.{table_name};") - ).fetchall() + result = connection.execute( + sql.text(f"SELECT * FROM {schema_name}.{table_name};") + ).fetchall() assert len(result) == 0 diff --git a/test/integration/regression/test_regression_bug390.py b/test/integration/regression/test_regression_bug390.py index 256b7ba7..a4ba0a3d 100644 --- a/test/integration/regression/test_regression_bug390.py +++ b/test/integration/regression/test_regression_bug390.py @@ -10,9 +10,9 @@ def test_connection_with_block_cleans_up_properly(pytester, exasol_config): pytester.makepyfile( # fmt: off cleandoc( - f""" + f""" from sqlalchemy import create_engine, sql - + def test(): url = "exa+websocket://{{user}}:{{pw}}@{{host}}:{{port}}?SSLCertificate=SSL_VERIFY_NONE" url = url.format( @@ -33,8 +33,8 @@ def test(): actual = str(r.stderr) # We can't assert here actual != "", because runpytest_subprocess prints warnings - # that can't be caught for Python 3.13 independent of the backend (pyodbc, turboodbc, websockets) - # since we moved from pytest-itde plugin to the pytest-backend plugin. + # that can't be caught for Python 3.13 since we moved from pytest-itde plugin to + # the pytest-backend plugin. 
# The warnings look like the following:
     # :784: ResourceWarning: unclosed database in
     #
diff --git a/test/integration/sqlalchemy/conftest.py b/test/integration/sqlalchemy/conftest.py
index c72c8282..43526d4d 100644
--- a/test/integration/sqlalchemy/conftest.py
+++ b/test/integration/sqlalchemy/conftest.py
@@ -5,8 +5,6 @@
 import pytest
 from sqlalchemy.dialects import registry
 
-registry.register("exa.pyodbc", "sqlalchemy_exasol.pyodbc", "EXADialect_pyodbc")
-registry.register("exa.turbodbc", "sqlalchemy_exasol.turbodbc", "EXADialect_turbodbc")
 registry.register(
     "exa.websocket", "sqlalchemy_exasol.websocket", "EXADialect_websocket"
 )
diff --git a/test/integration/sqlalchemy/test_suite.py b/test/integration/sqlalchemy/test_suite.py
index a927ba41..310d7185 100644
--- a/test/integration/sqlalchemy/test_suite.py
+++ b/test/integration/sqlalchemy/test_suite.py
@@ -1,30 +1,92 @@
 # import all SQLAlchemy tests for this dialect
+from enum import Enum
 from inspect import cleandoc
 
 import pytest
-from sqlalchemy import (
-    create_engine,
-    testing,
+import sqlalchemy as sa
+from pyexasol import ExaQueryError
+from sqlalchemy import Inspector
+from sqlalchemy.schema import (
+    DDL,
+    Index,
 )
-from sqlalchemy.schema import DDL
+from sqlalchemy.sql import sqltypes
 from sqlalchemy.testing.suite import ComponentReflectionTest as _ComponentReflectionTest
 from sqlalchemy.testing.suite import CompoundSelectTest as _CompoundSelectTest
 from sqlalchemy.testing.suite import DifficultParametersTest as _DifficultParametersTest
 from sqlalchemy.testing.suite import ExceptionTest as _ExceptionTest
-from sqlalchemy.testing.suite import ExpandingBoundInTest as _ExpandingBoundInTest
 from sqlalchemy.testing.suite import HasIndexTest as _HasIndexTest
 from sqlalchemy.testing.suite import HasTableTest as _HasTableTest
 from sqlalchemy.testing.suite import InsertBehaviorTest as _InsertBehaviorTest
+from sqlalchemy.testing.suite import LongNameBlowoutTest as _LongNameBlowoutTest
 from sqlalchemy.testing.suite import NumericTest as _NumericTest
 from sqlalchemy.testing.suite import QuotedNameArgumentTest as _QuotedNameArgumentTest
+from sqlalchemy.testing.suite import ReturningGuardsTest as _ReturningGuardsTest
 from sqlalchemy.testing.suite import RowCountTest as _RowCountTest
 from sqlalchemy.testing.suite import RowFetchTest as _RowFetchTest
+from sqlalchemy.testing.suite.test_reflection import _multi_combination
+
+"""
+Here, all tests are imported from the testing suite of sqlalchemy to ensure that the
+Exasol dialect passes these expected tests. If a test fails, it is investigated and,
+if the underlying issue(s) cannot be resolved, the test is overridden with a rationale
+and an xfail marker for the test or the failing test condition.
+"""
 from sqlalchemy.testing.suite import *  # noqa: F403, F401
-from sqlalchemy.testing.suite.test_ddl import (
-    LongNameBlowoutTest as _LongNameBlowoutTest,
+from sqlalchemy.testing.suite import testing
+
+# Tests marked with xfail and this reason are failing after updating to SQLAlchemy 2.x.
+# We will investigate and fix as many as possible in upcoming PRs.
+BREAKING_CHANGES_SQL_ALCHEMY_2x = (
+    "Failing test after updating to SQLAlchemy 2.x. To be investigated."
 )
+
+
+class XfailRationale(str, Enum):
+    MANUAL_INDEX = cleandoc(
+        """sqlalchemy-exasol does not support manual indexes
+        (see https://docs.exasol.com/db/latest/performance/indexes.htm#Manualindexoperations).
+        Manual indexes are not recommended within the Exasol DB.
+        """
+    )
+    QUOTING = cleandoc(
+        """This suite was added to SQLAlchemy 1.3.19 on July 2020 to address
+        issues in other dialects related to object names that contain quotes
+        and double quotes. Since this feature is not relevant to the
+        Exasol dialect, the entire suite is set to xfail. For further info, see:
+        https://github.com/sqlalchemy/sqlalchemy/issues/5456"""
+    )
+    SELECT_LIST = cleandoc(
+        """Exasol does not allow EXISTS or IN predicates as part of the select list."""
+    )
+
+
+class ReturningGuardsTest(_ReturningGuardsTest):
+    """
+    Exasol does not support the RETURNING clause; this is already the default
+    assumption of SQLAlchemy's DefaultDialect.
+
+    The individual tests of class sqlalchemy.testing.suite.ReturningGuardsTest are
+    overridden, as they are written to send the request to the DB and expect an
+    error from the DB itself. For the websocket driver (based on PyExasol), the
+    exception raised is an ExaQueryError and not a DBAPIError.
+    """
+
+    @staticmethod
+    def _run_test(test_method, connection, run_stmt):
+        with pytest.raises(ExaQueryError):
+            test_method(connection, run_stmt)
+
+    def test_delete_single(self, connection, run_stmt):
+        self._run_test(super().test_delete_single, connection, run_stmt)
+
+    def test_insert_single(self, connection, run_stmt):
+        self._run_test(super().test_insert_single, connection, run_stmt)
+
+    def test_update_single(self, connection, run_stmt):
+        self._run_test(super().test_update_single, connection, run_stmt)
+
+
 class RowFetchTest(_RowFetchTest):
     RATIONAL = cleandoc(
         """
@@ -36,7 +98,7 @@ class RowFetchTest(_RowFetchTest):
     )
 
     @testing.config.requirements.duplicate_names_in_cursor_description
-    @pytest.mark.skipif("websocket" in testing.db.dialect.driver, reason=RATIONAL)
+    @pytest.mark.xfail(reason=RATIONAL, strict=True)
     def test_row_with_dupe_names(self, connection):
         super().test_row_with_dupe_names(connection)
 
 
 class HasTableTest(_HasTableTest):
     @classmethod
     def define_views(cls, metadata):
+        """
+        Default implementation of define_views in
+        class sqlalchemy.testing.suite.HasTableTest
+        needs to be overridden here as Exasol treats "data" as a reserved word and
+        requires quoting. Changes to the original implementation are marked with
+        'Note:'.
+        """
+        # Note: column name "data" needs to be quoted as "data" is a reserved word
         query = 'CREATE VIEW vv AS SELECT id, "data" FROM test_table'
         event.listen(metadata, "after_create", DDL(query))
         event.listen(metadata, "before_drop", DDL("DROP VIEW vv"))
 
         if testing.requires.schemas.enabled:
+            # Note: column name "data" needs to be quoted as "data" is a reserved word
             query = (
                 'CREATE VIEW {}.vv AS SELECT id, "data" FROM {}.test_table_s'.format(
                     config.test_schema,
@@ -63,44 +134,13 @@ def define_views(cls, metadata):
                 DDL("DROP VIEW %s.vv" % (config.test_schema)),
             )
 
-    RATIONALE = cleandoc(
-        """
-        The Exasol dialect does not check against views for `has_table`, see also `Inspector.has_table()`.
-
-        This behaviour is subject to change with sqlalchemy 2.0.
-        See also:
-        * https://github.com/sqlalchemy/sqlalchemy/blob/3fc6c40ea77c971d3067dab0fdf57a5b5313b69b/lib/sqlalchemy/engine/reflection.py#L415
-        * https://github.com/sqlalchemy/sqlalchemy/discussions/8678
-        * https://github.com/sqlalchemy/sqlalchemy/commit/f710836488162518dcf2dc1006d90ecd77a2a178
-        """
-    )
-
-    @pytest.mark.xfail(reason=RATIONALE, strict=True)
-    @testing.requires.views
-    def test_has_table_view(self, connection):
-        super().test_has_table_view(connection)
-
-    @pytest.mark.xfail(reason=RATIONALE, strict=True)
-    @testing.requires.views
-    @testing.requires.schemas
-    def test_has_table_view_schema(self, connection):
-        super().test_has_table_view_schema(connection)
+    @pytest.mark.xfail(reason=BREAKING_CHANGES_SQL_ALCHEMY_2x, strict=True)
+    def test_has_table_cache(self, connection):
+        super().test_has_table_cache(connection)
 
 
 class InsertBehaviorTest(_InsertBehaviorTest):
     @pytest.mark.xfail(
-        "turbodbc" in testing.db.dialect.driver,
-        reason=cleandoc(
-            """
-            This test is failing for turbodbc and haven't been investigated yet.
-            Attention:
-            * turbodbc maintenance is paused until if it is clear if there is still demand for it
-            """
-        ),
-        strict=True,
-    )
-    @pytest.mark.xfail(
-        "websocket" in testing.db.dialect.driver,
         reason="This currently isn't supported by the websocket protocol L3-1064.",
         strict=True,
     )
@@ -110,135 +150,321 @@ def test_empty_insert_multiple(self, connection):
 
 
 class RowCountTest(_RowCountTest):
-    PYODBC_RATIONALE = cleandoc(
-        """
-        pyodbc does not support returning the actual affected rows when executemany is used,
-        the cursor result always will be set to the rowcount = -1 in this case.
-        This also is a valid behaviour according to the python DBAPI specification.
-        For more details see also:
-        * https://peps.python.org/pep-0249/
-        * https://peps.python.org/pep-0249/#rowcount
-        * https://peps.python.org/pep-0249/#id21
-        * https://peps.python.org/pep-0249/#executemany
-        """
-    )
+    @pytest.mark.xfail(reason=BREAKING_CHANGES_SQL_ALCHEMY_2x, strict=True)
+    def test_non_rowcount_scenarios_no_raise(self):
+        # Fails with "cursor already closed", so this very likely needs a fix.
+        super().test_non_rowcount_scenarios_no_raise()
+
 
-    TURBODBC_RATIONALE = cleandoc(
+class ComponentReflectionTest(_ComponentReflectionTest):
+    @classmethod
+    def define_reflected_tables(cls, metadata, schema):
         """
-        The currently used turbodbc driver returns invalid results.
-        Attention:
-        * turbodbc maintenance is paused until if it is clear if there is still demand for it
-        * If this tests will succeed in the future consider repining the turbodbc driver
-          dependency in order to provide support for this "features".
+        Default implementation of define_reflected_tables in
+        class sqlalchemy.testing.suite.ComponentReflectionTest
+        needs to be overridden here as Exasol does not support column constraints
+        and manages indexes on its own. The code in this overriding class method
+        was copied directly from the parent implementation; the notes marked with
+        'Commented out' highlight the changed places.
         """
-    )
+        if schema:
+            schema_prefix = schema + "."
+ else: + schema_prefix = "" + + if testing.requires.self_referential_foreign_keys.enabled: + parent_id_args = ( + ForeignKey("%susers.user_id" % schema_prefix, name="user_id_fk"), + ) + else: + parent_id_args = () + users = Table( + "users", + metadata, + Column("user_id", sa.INT, primary_key=True), + Column("test1", sa.CHAR(5), nullable=False), + Column("test2", sa.Float(), nullable=False), + Column("parent_user_id", sa.Integer, *parent_id_args), + # Commented out, as Exasol does not support column constraints + # sa.CheckConstraint( + # "test2 > 0", + # name="zz_test2_gt_zero", + # comment="users check constraint", + # ), + # sa.CheckConstraint("test2 <= 1000"), + schema=schema, + test_needs_fk=True, + ) - @pytest.mark.xfail( - "turbodbc" in testing.db.dialect.driver, reason=TURBODBC_RATIONALE, strict=True - ) - @pytest.mark.skipif("pyodbc" in testing.db.dialect.driver, reason=PYODBC_RATIONALE) - @testing.requires.sane_multi_rowcount - def test_multi_update_rowcount(self, connection): - super().test_multi_update_rowcount(connection) + Table( + "dingalings", + metadata, + Column("dingaling_id", sa.Integer, primary_key=True), + Column( + "address_id", + sa.Integer, + ForeignKey( + "%semail_addresses.address_id" % schema_prefix, + name="zz_email_add_id_fg", + comment="di fk comment", + ), + ), + Column( + "id_user", + sa.Integer, + ForeignKey("%susers.user_id" % schema_prefix), + ), + # Commented out, as Exasol does not support unique constraints beyond primary keys + Column("data", sa.String(30)), # , unique=True), + # Commented out, as Exasol does not support column constraints + # sa.CheckConstraint( + # "address_id > 0 AND address_id < 1000", + # name="address_id_gt_zero", + # ), + # sa.UniqueConstraint( + # "address_id", + # "dingaling_id", + # name="zz_dingalings_multiple", + # comment="di unique comment", + # ), + schema=schema, + test_needs_fk=True, + ) + Table( + "email_addresses", + metadata, + Column("address_id", sa.Integer), + Column("remote_user_id", sa.Integer, ForeignKey(users.c.user_id)), + # Commented out, as Exasol manages indices internally + Column("email_address", sa.String(20)), # , index=True), + sa.PrimaryKeyConstraint( + "address_id", name="email_ad_pk", comment="ea pk comment" + ), + schema=schema, + test_needs_fk=True, + ) + Table( + "comment_test", + metadata, + Column("id", sa.Integer, primary_key=True, comment="id comment"), + Column("data", sa.String(20), comment="data % comment"), + Column( + "d2", + sa.String(20), + comment=r"""Comment types type speedily ' " \ '' Fun!""", + ), + Column("d3", sa.String(42), comment="Comment\nwith\rescapes"), + schema=schema, + comment=r"""the test % ' " \ table comment""", + ) + Table( + "no_constraints", + metadata, + Column("data", sa.String(20)), + schema=schema, + comment="no\nconstraints\rhas\fescaped\vcomment", + ) - @pytest.mark.xfail( - "turbodbc" in testing.db.dialect.driver, reason=TURBODBC_RATIONALE, strict=True - ) - @pytest.mark.skipif("pyodbc" in testing.db.dialect.driver, reason=PYODBC_RATIONALE) - @testing.requires.sane_multi_rowcount - def test_multi_delete_rowcount(self, connection): - super().test_multi_delete_rowcount(connection) + if testing.requires.cross_schema_fk_reflection.enabled: + if schema is None: + Table( + "local_table", + metadata, + Column("id", sa.Integer, primary_key=True), + Column("data", sa.String(20)), + Column( + "remote_id", + ForeignKey("%s.remote_table_2.id" % testing.config.test_schema), + ), + test_needs_fk=True, + schema=config.db.dialect.default_schema_name, + ) + else: + 
Table( + "remote_table", + metadata, + Column("id", sa.Integer, primary_key=True), + Column( + "local_id", + ForeignKey( + "%s.local_table.id" % config.db.dialect.default_schema_name + ), + ), + Column("data", sa.String(20)), + schema=schema, + test_needs_fk=True, + ) + Table( + "remote_table_2", + metadata, + Column("id", sa.Integer, primary_key=True), + Column("data", sa.String(20)), + schema=schema, + test_needs_fk=True, + ) - @pytest.mark.xfail( - "turbodbc" in testing.db.dialect.driver, reason=TURBODBC_RATIONALE, strict=True - ) - def test_update_rowcount1(self, connection): - super().test_update_rowcount1(connection) + if testing.requires.index_reflection.enabled: + Index("users_t_idx", users.c.test1, users.c.test2, unique=True) + Index("users_all_idx", users.c.user_id, users.c.test2, users.c.test1) + + if not schema: + # test_needs_fk is at the moment to force MySQL InnoDB + noncol_idx_test_nopk = Table( + "noncol_idx_test_nopk", + metadata, + Column("q", sa.String(5)), + test_needs_fk=True, + ) - @pytest.mark.xfail( - "turbodbc" in testing.db.dialect.driver, reason=TURBODBC_RATIONALE, strict=True - ) - def test_update_rowcount2(self, connection): - super().test_update_rowcount2(connection) + noncol_idx_test_pk = Table( + "noncol_idx_test_pk", + metadata, + Column("id", sa.Integer, primary_key=True), + Column("q", sa.String(5)), + test_needs_fk=True, + ) - @pytest.mark.xfail( - "turbodbc" in testing.db.dialect.driver, reason=TURBODBC_RATIONALE, strict=True - ) - def test_delete_rowcount(self, connection): - super().test_delete_rowcount(connection) + if ( + testing.requires.indexes_with_ascdesc.enabled + and testing.requires.reflect_indexes_with_ascdesc.enabled + ): + Index("noncol_idx_nopk", noncol_idx_test_nopk.c.q.desc()) + Index("noncol_idx_pk", noncol_idx_test_pk.c.q.desc()) - @pytest.mark.xfail( - "turbodbc" in testing.db.dialect.driver, reason=TURBODBC_RATIONALE, strict=True - ) - @testing.requires.sane_rowcount_w_returning - def test_update_rowcount_return_defaults(self, connection): - super().test_update_rowcount_return_defaults(connection) + if testing.requires.view_column_reflection.enabled: + cls.define_views(metadata, schema) + if not schema and testing.requires.temp_table_reflection.enabled: + cls.define_temp_tables(metadata) + @staticmethod + def _convert_view_nullable(expected_multi_output): + """ + Convert expected nullable to None -class DifficultParametersTest(_DifficultParametersTest): - tough_parameters = testing.combinations( - ("boring",), - ("per cent",), - ("per % cent",), - ("%percent",), - ("par(ens)",), - ("percent%(ens)yah",), - ("col:ons",), - ("_starts_with_underscore",), - ("more :: %colons%",), - ("_name",), - ("___name",), - ("[BracketsAndCase]",), - ("42numbers",), - ("percent%signs",), - ("has spaces",), - ("/slashes/",), - ("more/slashes",), - ("1param",), - ("1col:on",), - argnames="paramname", - ) + For columns of a view in Exasol, nullable is always NULL, + so the expected result needs to be modified. 
For more reference, see:
+        https://docs.exasol.com/saas/sql_references/system_tables/metadata/exa_all_columns.htm
+        """
+        for key, value_list in expected_multi_output.items():
+            schema, table_or_view = key
+            if not table_or_view.endswith("_v"):
+                continue
+            for column_def in value_list:
+                # Replace the expected nullable value with None
+                column_def["nullable"] = None
+
+    @pytest.mark.xfail(reason=XfailRationale.MANUAL_INDEX.value, strict=True)
+    def test_get_indexes(self, connection, use_schema):
+        super().test_get_indexes(connection, use_schema)
 
-    @tough_parameters
-    def test_round_trip_same_named_column(self, paramname, connection, metadata):
-        # dot_s and qmarks are currently disabled see https://github.com/exasol/sqlalchemy-exasol/issues/232
-        super().test_round_trip_same_named_column(paramname, connection, metadata)
 
+    @_multi_combination
+    def test_get_multi_columns(self, get_multi_exp, schema, scope, kind, use_filter):
+        """
+        The default implementation of test_get_multi_columns in
+        class sqlalchemy.testing.suite.ComponentReflectionTest
+        needs to be overridden here, as Exasol always reports nullable as NULL
+        for the columns of views. The code in this overriding class method was
+        copied directly from the parent implementation; the note marked with
+        'Added' highlights the changed place.
+        """
+        insp, kws, exp = get_multi_exp(
+            schema,
+            scope,
+            kind,
+            use_filter,
+            Inspector.get_columns,
+            self.exp_columns,
+        )
+
+        # Added to convert nullable for columns in views
+        self._convert_view_nullable(exp)
 
-class ComponentReflectionTest(_ComponentReflectionTest):
-    @pytest.mark.skip(reason="EXASOL has no explicit indexes")
-    def test_get_indexes(self, connection, use_schema):
-        super().test_get_indexes()
+        for kw in kws:
+            insp.clear_cache()
+            result = insp.get_multi_columns(**kw)
+            self._check_table_dict(result, exp, self._required_column_keys)
+
+    @_multi_combination
+    def test_get_multi_foreign_keys(
+        self, get_multi_exp, schema, scope, kind, use_filter
+    ):
+        """
+        The default implementation of test_get_multi_foreign_keys in
+        class sqlalchemy.testing.suite.ComponentReflectionTest
+        needs to be overridden here as Exasol does not support custom constraints.
+        The code in this overriding class method was copied directly from the parent
+        implementation; the note marked with 'Replaced' highlights the changed place.
+        """
+        def sort_entries_in_place(result_set):
+            for key, value_list in result_set.items():
+                result_set[key] = sorted(value_list, key=lambda x: x["referred_table"])
+
+        insp, kws, exp = get_multi_exp(
+            schema,
+            scope,
+            kind,
+            use_filter,
+            Inspector.get_foreign_keys,
+            self.exp_fks,
+        )
 
-class HasIndexTest(_HasIndexTest):
-    RATIONAL = """EXASOL does not support no explicit indexes"""
+        for kw in kws:
+            insp.clear_cache()
+            result = insp.get_multi_foreign_keys(**kw)
+
+            # Replaced as self._adjust_sort did not work, as some constraints
+            # cannot be added into an Exasol DB as described in define_reflected_tables
+            # self._adjust_sort(
+            #     result, exp, lambda d: tuple(d["referred_table"])
+            # )
+            sort_entries_in_place(exp)
+            sort_entries_in_place(result)
+
+            self._check_table_dict(
+                result,
+                exp,
+                {
+                    "name",
+                    "constrained_columns",
+                    "referred_schema",
+                },
+            )
 
-    @pytest.mark.skip(reason=RATIONAL)
+
+class HasIndexTest(_HasIndexTest):
+    @pytest.mark.xfail(reason=XfailRationale.MANUAL_INDEX.value, strict=True)
     def test_has_index(self):
         super().test_has_index()
 
-    @pytest.mark.skip(reason=RATIONAL)
+    @pytest.mark.xfail(reason=XfailRationale.MANUAL_INDEX.value, strict=True)
     @testing.requires.schemas
     def test_has_index_schema(self):
         super().test_has_index_schema()
 
 
 class LongNameBlowoutTest(_LongNameBlowoutTest):
-    @testing.combinations(
+    testing_parameters = testing.combinations(
         ("fk",),
         ("pk",),
-        # Manual indexes are not recommended within the Exasol DB,
-        # (see https://docs.exasol.com/db/latest/performance/best_practices.htm)
-        # therefore they are currently not supported by the sqlalchemy-exasol extension.
-        # ("ix",)
+        ("ix",),
         ("ck", testing.requires.check_constraint_reflection.as_skips()),
         ("uq", testing.requires.unique_constraint_reflection.as_skips()),
         argnames="type_",
     )
-    @testing.provide_metadata
-    def test_long_convention_name(self, type_, connection):
-        metadata = self.metadata
+
+    @testing_parameters
+    def test_long_convention_name(self, type_, metadata, connection):
+        """
+        The default implementation of test_long_convention_name in
+        class sqlalchemy.testing.suite.LongNameBlowoutTest needs to be
+        overridden here as Exasol does not support manually created indices.
+        Apart from the initial check for `type_ == "ix"`, the rest of the code
+        has been copied without modification.
+        """
+
+        if type_ == "ix":
+            pytest.xfail(reason=XfailRationale.MANUAL_INDEX.value)
 
         actual_name, reflected_name = getattr(self, type_)(metadata, connection)
 
@@ -253,119 +479,62 @@ def test_long_convention_name(self, type_, connection):
 
 
 class CompoundSelectTest(_CompoundSelectTest):
-    @pytest.mark.skip(
-        reason=cleandoc(
-            """Skip this test as EXASOL does not allow EXISTS or IN predicates
-            as part of the select list. Skipping is implemented by redefining
-            the method as proposed by SQLAlchemy docs for new dialects."""
-        )
-    )
-    def test_null_in_empty_set_is_false(self):
-        return
+    @pytest.mark.xfail(reason=XfailRationale.SELECT_LIST.value, strict=True)
+    def test_null_in_empty_set_is_false(self, connection):
+        super().test_null_in_empty_set_is_false(connection)
 
 
 class ExceptionTest(_ExceptionTest):
-    RATIONALE = (
-        "This is likely a driver issue. We will investigate it in "
-        "https://github.com/exasol/sqlalchemy-exasol/issues/539."
-    )
+    RATIONALE = """
+    The websocket-based dialect does not yet support raising an
+    error on a duplicate key. 
This was previously noted as an issue
+    for the deprecated ODBC-based dialects in:
+    - https://github.com/exasol/sqlalchemy-exasol/issues/539
+    - https://github.com/exasol/sqlalchemy-exasol/issues/120
+    """
 
-    @pytest.mark.xfail("odbc" in testing.db.dialect.driver, reason=RATIONALE)
+    @pytest.mark.xfail(reason=RATIONALE, strict=True)
     @requirements.duplicate_key_raises_integrity_error
     def test_integrity_error(self):
-        # Note: autocommit currently is needed to force error evaluation,
-        # otherwise errors will be swallowed.
-        # see also https://github.com/exasol/sqlalchemy-exasol/issues/120
-        engine = create_engine(config.db.url, connect_args={"autocommit": True})
-        with engine.connect() as conn:
-            trans = conn.begin()
-            conn.execute(self.tables.manual_pk.insert(), {"id": 1, "data": "d1"})
-
-            assert_raises(
-                exc.IntegrityError,
-                conn.execute,
-                self.tables.manual_pk.insert(),
-                {"id": 1, "data": "d1"},
-            )
-            trans.rollback()
+        super().test_integrity_error()
 
+    @pytest.mark.xfail(reason=RATIONALE, strict=True)
     @requirements.duplicate_key_raises_integrity_error
     def test_integrity_error_raw_sql(self):
+        """
+        An additional, related test added by our developers.
+        """
         insert = text("INSERT INTO MANUAL_PK VALUES (1, 'd1')")
         with config.db.begin() as conn:
             conn.execute(insert)
             assert_raises(exc.IntegrityError, conn.execute, insert)
 
 
-class ExpandingBoundInTest(_ExpandingBoundInTest):
-    @pytest.mark.skip(
-        reason=cleandoc(
-            """Skip this test as EXASOL does not allow EXISTS or IN predicates
-            as part of the select list. Skipping is implemented by redefining
-            the method as proposed by SQLAlchemy docs for new dialects."""
-        )
-    )
-    def test_null_in_empty_set_is_false(self):
-        return
-
-
-class NumericTest(_NumericTest):
-    @pytest.mark.skipif(
-        "pyodbc" in testing.db.dialect.driver,
-        reason=cleandoc(
-            """FIXME: test skipped to allow upgrading to SQLAlchemy 1.3.x due
-            to vulnerability in 1.2.x. Need to understand reason for this.
-            Hypothesis is that the data type is not correctly coerced between
-            EXASOL and pyodbc."""
-        ),
-    )
-    @testing.requires.implicit_decimal_binds
-    @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
-    def test_decimal_coerce_round_trip(self, connection):
-        super().test_decimal_coerce_round_trip(connection)
-
-
+@pytest.mark.xfail(reason=XfailRationale.QUOTING.value, strict=True)
 class QuotedNameArgumentTest(_QuotedNameArgumentTest):
-    RATIONAL = cleandoc(
-        """This suite was added to SQLAlchemy 1.3.19 on July 2020 to address
-        issues in other dialects related to object names that contain quotes
-        and double quotes. Since this feature is not relevant to the
-        Exasol dialect, the entire suite will be skipped. More info on fix:
-        https://github.com/sqlalchemy/sqlalchemy/issues/5456"""
-    )
-
-    @pytest.mark.skip(reason=RATIONAL)
-    def test_get_table_options(self, name):
-        return
-
-    @pytest.mark.skip(reason=RATIONAL)
-    def test_get_view_definition(self, name):
-        return
-
-    @pytest.mark.skip(reason=RATIONAL)
-    def test_get_columns(self, name):
-        return
+    pass
 
-    @pytest.mark.skip(reason=RATIONAL)
-    def test_get_pk_constraint(self, name):
-        return
 
-    @pytest.mark.skip(reason=RATIONAL)
-    def test_get_foreign_keys(self, name):
-        return
-
-    @pytest.mark.skip(reason=RATIONAL)
-    def test_get_indexes(self, name):
-        return
+class NumericTest(_NumericTest):
+    RATIONALE = """
+    The Exasol target backend maps Numeric to Decimal. Decimal is also used for both
+    Float and Double. 
+ """ - @pytest.mark.skip(reason=RATIONAL) - def test_get_unique_constraints(self, name): - return + @pytest.mark.xfail(reason=RATIONALE, strict=True) + @testing.combinations(sqltypes.Float, sqltypes.Double, argnames="cls_") + @testing.requires.float_is_numeric + def test_float_is_not_numeric(self, connection, cls_): + super().test_float_is_not_numeric() - @pytest.mark.skip(reason=RATIONAL) - def test_get_table_comment(self, name): - return - @pytest.mark.skip(reason=RATIONAL) - def test_get_check_constraints(self, name): - return +class DifficultParametersTest(_DifficultParametersTest): + @_DifficultParametersTest.tough_parameters + @config.requirements.unusual_column_name_characters + def test_round_trip_same_named_column(self, paramname, connection, metadata): + if testing.db.dialect.server_version_info <= (7, 1, 30): + # This does not work for Exasol DB versions <= 7.1.30. + # See: https://github.com/exasol/sqlalchemy-exasol/issues/232 + if paramname == "dot.s": + pytest.xfail(reason="dot.s does not work for <= 7.1.30") + super().test_round_trip_same_named_column(paramname, connection, metadata) diff --git a/test/unit/exasol/test_dbapi.py b/test/unit/exasol/test_dbapi.py index a4fb0b69..40b42dc9 100644 --- a/test/unit/exasol/test_dbapi.py +++ b/test/unit/exasol/test_dbapi.py @@ -6,7 +6,6 @@ import importlib import pytest - from exasol.driver.websocket._connection import _requires_connection from exasol.driver.websocket._cursor import ( MetaData, diff --git a/test/unit/exasol/test_deprectation_warnings.py b/test/unit/exasol/test_deprectation_warnings.py deleted file mode 100644 index f5b287b0..00000000 --- a/test/unit/exasol/test_deprectation_warnings.py +++ /dev/null @@ -1,12 +0,0 @@ -import pytest - -from sqlalchemy_exasol.pyodbc import EXADialect_pyodbc -from sqlalchemy_exasol.turbodbc import EXADialect_turbodbc - - -@pytest.mark.parametrize( - "klass,kwargs", [(EXADialect_pyodbc, {}), (EXADialect_turbodbc, {})] -) -def test_deprectation_warnings(klass, kwargs): - with pytest.deprecated_call(): - _ = EXADialect_pyodbc(**kwargs) diff --git a/test/unit/exasol/test_websocket.py b/test/unit/exasol/test_websocket.py index 1c1d2f8d..3f41bfa5 100644 --- a/test/unit/exasol/test_websocket.py +++ b/test/unit/exasol/test_websocket.py @@ -40,7 +40,7 @@ def set_kwargs( pytest.param( make_url( "exa+websocket://sys:exasol@localhost:8888/TEST?" - "CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC" + "CONNECTIONCALL=en_US.UTF-8" "&SSLCertificate=SSL_VERIFY_NONE" ), set_kwargs( @@ -54,7 +54,7 @@ def set_kwargs( pytest.param( make_url( "exa+websocket://sys:exasol@localhost:8888/TEST?" - "CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC" + "CONNECTIONCALL=en_US.UTF-8" "&SSLCertificate=SSL_VERIFY_NONE" "&ENCRYPTION=N" ), @@ -94,7 +94,7 @@ def test_create_connection_args(url, expected_kwargs): def test_raises_an_exception_for_invalid_arguments(): url = make_url( "exa+websocket://sys:exasol@localhost:8888/TEST?" - "CONNECTIONCALL=en_US.UTF-8&DRIVER=EXAODBC" + "CONNECTIONCALL=en_US.UTF-8" "&ENCRYPTION=N" )