Skip to content
Merged
11 changes: 9 additions & 2 deletions ci/test_linux.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
# - This is the script that test_docker.sh runs inside of the docker container

PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12 3.13 3.14}"
# Python versions where pyarrow (required by pandas extra) is not available
PANDAS_SKIP_VERSIONS="3.14"
THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SQLALCHEMY_DIR="$(dirname "${THIS_DIR}")"

Expand All @@ -19,7 +21,12 @@ for PYTHON_VERSION in ${PYTHON_VERSIONS}; do
echo "[Info] Testing with ${PYTHON_VERSION}"
SHORT_VERSION=$(python3 -c "print('${PYTHON_VERSION}'.replace('.', ''))")
SQLALCHEMY_WHL=$(ls $SQLALCHEMY_DIR/dist/snowflake_sqlalchemy-*-py3-none-any.whl | sort -r | head -n 1)
TEST_ENVLIST=fix_lint,py${SHORT_VERSION}-ci,py${SHORT_VERSION}-coverage,py${SHORT_VERSION}-pandas-ci,py${SHORT_VERSION}-pandas-coverage
TEST_ENVLIST=fix_lint,py${SHORT_VERSION}-ci
if [[ ! " ${PANDAS_SKIP_VERSIONS} " =~ " ${PYTHON_VERSION} " ]]; then
TEST_ENVLIST="${TEST_ENVLIST},py${SHORT_VERSION}-pandas-ci"
else
echo "[Info] Skipping pandas tests for Python ${PYTHON_VERSION} (pyarrow not available)"
fi
echo "[Info] Running tox for ${TEST_ENVLIST}"
python3 -m tox -e ${TEST_ENVLIST} --installpkg ${SQLALCHEMY_WHL}
python3 -m tox -p auto -e ${TEST_ENVLIST} --installpkg ${SQLALCHEMY_WHL}
done
8 changes: 6 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,6 @@ development = [
"pytest-xdist",
"pytz",
"numpy",
"mock",
"syrupy",
]
pandas = ["snowflake-connector-python[pandas]"]
Expand Down Expand Up @@ -85,7 +84,11 @@ installer = "uv"
[tool.hatch.envs.sa14]
installer = "uv"
builder = true
extra-dependencies = ["SQLAlchemy>=1.4.19,<2.0.0", "pandas>=2.1.1,<2.2", "numpy<2"]
extra-dependencies = [
"SQLAlchemy>=1.4.19,<2.0.0",
"pandas>=2.1.1,<2.2",
"numpy<2",
]
features = ["development", "pandas"]
python = "3.12"

Expand Down Expand Up @@ -142,4 +145,5 @@ markers = [
"external: tests that could but should only run on our external CI",
"feature_max_lob_size: tests that could but should only run on our external CI",
"feature_v20: tests that could but should only run on SqlAlchemy v20",
"mypy: typing tests",
]
11 changes: 11 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,17 @@ def on_public_ci():
return running_on_public_ci()


@pytest.fixture()
def default_warehouse(db_parameters, engine_testaccount):
    """Resolve the warehouse name tests should run against.

    Prefers an explicitly configured ``warehouse`` entry in
    ``db_parameters``; when absent, falls back to the warehouse active
    on the test account's current session. Fails the test outright if
    neither source provides one, so dependent tests get a clear error
    instead of emitting SQL with an empty warehouse identifier.
    """
    configured = db_parameters.get("warehouse")
    if configured is not None:
        return configured
    # No explicit configuration: ask Snowflake for the session default.
    with engine_testaccount.connect() as connection:
        current = connection.exec_driver_sql("SELECT CURRENT_WAREHOUSE()").scalar()
    if current is None:
        pytest.fail("No warehouse configured for the current user/session")
    return current


@pytest.fixture(scope="function")
def base_location(external_stage, engine_testaccount):
unique_id = str(uuid.uuid4())
Expand Down
15 changes: 6 additions & 9 deletions tests/custom_tables/test_create_dynamic_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@
)


def test_create_dynamic_table(engine_testaccount, db_parameters):
warehouse = db_parameters.get("warehouse", "default")
def test_create_dynamic_table(engine_testaccount, default_warehouse):
metadata = MetaData()
test_table_1 = Table(
"test_table_1", metadata, Column("id", Integer), Column("name", String)
Expand All @@ -38,7 +37,7 @@ def test_create_dynamic_table(engine_testaccount, db_parameters):
Column("id", Integer),
Column("name", String),
target_lag=(1, TimeUnit.HOURS),
warehouse=warehouse,
warehouse=default_warehouse,
as_query="SELECT id, name from test_table_1;",
refresh_mode=SnowflakeKeyword.FULL,
)
Expand All @@ -58,9 +57,8 @@ def test_create_dynamic_table(engine_testaccount, db_parameters):


def test_create_dynamic_table_without_dynamictable_class(
engine_testaccount, db_parameters, snapshot
engine_testaccount, default_warehouse, snapshot
):
warehouse = db_parameters.get("warehouse", "default")
metadata = MetaData()
test_table_1 = Table(
"test_table_1", metadata, Column("id", Integer), Column("name", String)
Expand All @@ -79,7 +77,7 @@ def test_create_dynamic_table_without_dynamictable_class(
metadata,
Column("id", Integer),
Column("name", String),
snowflake_warehouse=warehouse,
snowflake_warehouse=default_warehouse,
snowflake_as_query="SELECT id, name from test_table_1;",
prefixes=["DYNAMIC"],
)
Expand All @@ -90,9 +88,8 @@ def test_create_dynamic_table_without_dynamictable_class(


def test_create_dynamic_table_without_dynamictable_and_defined_options(
engine_testaccount, db_parameters, snapshot
engine_testaccount, default_warehouse, snapshot
):
warehouse = db_parameters.get("warehouse", "default")
metadata = MetaData()
test_table_1 = Table(
"test_table_1", metadata, Column("id", Integer), Column("name", String)
Expand All @@ -113,7 +110,7 @@ def test_create_dynamic_table_without_dynamictable_and_defined_options(
Column("name", String),
snowflake_target_lag=TargetLagOption.create((1, TimeUnit.HOURS)),
snowflake_warehouse=IdentifierOption.create(
TableOptionKey.WAREHOUSE, warehouse
TableOptionKey.WAREHOUSE, default_warehouse
),
snowflake_as_query=AsQueryOption.create("SELECT id, name from test_table_1;"),
prefixes=["DYNAMIC"],
Expand Down
14 changes: 8 additions & 6 deletions tests/custom_tables/test_reflect_dynamic_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,9 @@
from snowflake.sqlalchemy.custom_commands import NoneType


def test_simple_reflection_dynamic_table_as_table(engine_testaccount, db_parameters):
warehouse = db_parameters.get("warehouse", "default")
def test_simple_reflection_dynamic_table_as_table(
engine_testaccount, default_warehouse
):
metadata = MetaData()
test_table_1 = Table(
"test_table_1", metadata, Column("id", Integer), Column("name", String)
Expand All @@ -24,7 +25,7 @@ def test_simple_reflection_dynamic_table_as_table(engine_testaccount, db_paramet
create_table_sql = f"""
CREATE DYNAMIC TABLE dynamic_test_table (id INT, name VARCHAR)
TARGET_LAG = '20 minutes'
WAREHOUSE = {warehouse}
WAREHOUSE = {default_warehouse}
AS SELECT id, name from test_table_1;
"""
with engine_testaccount.connect() as connection:
Expand All @@ -46,8 +47,9 @@ def test_simple_reflection_dynamic_table_as_table(engine_testaccount, db_paramet
metadata.drop_all(engine_testaccount)


def test_simple_reflection_without_options_loading(engine_testaccount, db_parameters):
warehouse = db_parameters.get("warehouse", "default")
def test_simple_reflection_without_options_loading(
engine_testaccount, default_warehouse
):
metadata = MetaData()
test_table_1 = Table(
"test_table_1", metadata, Column("id", Integer), Column("name", String)
Expand All @@ -63,7 +65,7 @@ def test_simple_reflection_without_options_loading(engine_testaccount, db_parame
create_table_sql = f"""
CREATE DYNAMIC TABLE dynamic_test_table (id INT, name VARCHAR)
TARGET_LAG = '20 minutes'
WAREHOUSE = {warehouse}
WAREHOUSE = {default_warehouse}
AS SELECT id, name from test_table_1;
"""
with engine_testaccount.connect() as connection:
Expand Down
46 changes: 27 additions & 19 deletions tests/test_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -1898,31 +1898,39 @@ def test_normalize_name_empty_string_does_not_crash(engine_testaccount):
Note: Empty string column names are not tested because SQLAlchemy core
explicitly disallows empty column names (raises ArgumentError).
"""
schema = "test_normalize_empty"
schema = f"TEST_NORMALIZE_EMPTY_{random_string(5, choices=string.ascii_uppercase)}"
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Idea: I think it would be better if we had a random_id_with_prefix function as a common place for test ID generation, while still allowing users to use prefixes to spot which test is failing.

normalized_schema = schema.lower()
with engine_testaccount.connect() as conn:
conn.execute(text(f"CREATE OR REPLACE SCHEMA {schema}"))
conn.execute(text("CREATE OR REPLACE TABLE NORMAL_TABLE (ID INTEGER)"))
conn.execute(text('CREATE OR REPLACE TABLE "" (ID INTEGER, NAME STRING)'))
conn.execute(
text(f"CREATE OR REPLACE TABLE {schema}.NORMAL_TABLE (ID INTEGER)")
)
conn.execute(
text(f'CREATE OR REPLACE TABLE {schema}."" (ID INTEGER, NAME STRING)')
)

md = MetaData(schema=schema)
md.reflect(bind=engine_testaccount)
try:
md = MetaData(schema=normalized_schema)
md.reflect(bind=engine_testaccount)

table_keys = list(md.tables.keys())
table_keys = list(md.tables.keys())

assert any(
"normal_table" in key for key in table_keys
), f"Expected normal_table in {table_keys}"
assert any(
"normal_table" in key for key in table_keys
), f"Expected normal_table in {table_keys}"

empty_string_as_table_identifier = f"{schema}."
assert (
empty_string_as_table_identifier in table_keys
), f"Expected empty string table '{empty_string_as_table_identifier}' in {table_keys}"

empty_table = md.tables[empty_string_as_table_identifier]
assert empty_table.name == ""
col_names = [c.name.lower() for c in empty_table.columns]
assert "id" in col_names
assert "name" in col_names
empty_string_as_table_identifier = f"{normalized_schema}."
assert (
empty_string_as_table_identifier in table_keys
), f"Expected empty string table '{empty_string_as_table_identifier}' in {table_keys}"

empty_table = md.tables[empty_string_as_table_identifier]
assert empty_table.name == ""
col_names = [c.name.lower() for c in empty_table.columns]
assert "id" in col_names
assert "name" in col_names
finally:
conn.execute(text(f"DROP SCHEMA IF EXISTS {schema}"))


def test_empty_column_names_not_supported_by_sqlalchemy():
Expand Down
13 changes: 13 additions & 0 deletions tests/test_decfloat.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

import decimal
import re
import sys
import warnings
from decimal import Decimal

Expand Down Expand Up @@ -161,6 +162,10 @@ def test_decfloat_str_conversion(self):
class TestDECFLOATIntegration:
"""Integration tests for DECFLOAT type against real Snowflake database."""

@pytest.mark.skipif(
sys.version_info < (3, 9),
reason="DECFLOAT requires snowflake-connector-python >= 3.14.1",
)
def test_decfloat_precision_with_enable_decfloat_parameter(self, request):
"""Test that enable_decfloat dialect parameter sets decimal context.

Expand Down Expand Up @@ -204,6 +209,10 @@ def test_decfloat_precision_with_enable_decfloat_parameter(self, request):
), "enable_decfloat=True should preserve full precision"
assert result == value_38_digits

@pytest.mark.skipif(
sys.version_info < (3, 9),
reason="DECFLOAT requires snowflake-connector-python >= 3.14.1",
)
def test_decfloat_precision_depends_on_decimal_context(self, engine_testaccount):
"""Test that Python decimal context affects DECFLOAT precision from connector.

Expand Down Expand Up @@ -275,6 +284,10 @@ def test_create_table_with_decfloat(self, engine_testaccount):
finally:
test_table.drop(engine_testaccount)

@pytest.mark.skipif(
sys.version_info < (3, 9),
reason="DECFLOAT requires snowflake-connector-python >= 3.14.1",
)
def test_insert_and_select_decfloat_values(self, engine_testaccount):
"""Test inserting and selecting DECFLOAT values.

Expand Down
34 changes: 21 additions & 13 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
[tox]
min_version = 4.0.0
envlist = fix_lint,
py{38,39,310,311,312,313,314}{,-pandas},
py{38,39,310,311,312,313,314},
py{38,39,310,311,312,313}-pandas,
coverage,
skip_missing_interpreters = true

Expand Down Expand Up @@ -39,18 +40,25 @@ setenv =
COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}}
SQLALCHEMY_WARN_20 = 1
ci: SNOWFLAKE_PYTEST_OPTS = -vvv --tb=long
commands = pytest \
{env:SNOWFLAKE_PYTEST_OPTS:} \
--cov "snowflake.sqlalchemy" \
--junitxml {toxworkdir}/junit_{envname}.xml \
--ignore=tests/sqlalchemy_test_suite \
-n8 \
{posargs:tests}
pytest {env:SNOWFLAKE_PYTEST_OPTS:} \
--cov "snowflake.sqlalchemy" --cov-append \
--junitxml {toxworkdir}/junit_{envname}.xml \
-n8 \
{posargs:tests/sqlalchemy_test_suite}
commands =
!pandas: pytest \
!pandas: {env:SNOWFLAKE_PYTEST_OPTS:} \
!pandas: --cov "snowflake.sqlalchemy" \
!pandas: --junitxml {toxworkdir}/junit_{envname}.xml \
!pandas: --ignore=tests/sqlalchemy_test_suite \
!pandas: -n8 \
!pandas: {posargs:tests}
!pandas: pytest {env:SNOWFLAKE_PYTEST_OPTS:} \
!pandas: --cov "snowflake.sqlalchemy" --cov-append \
!pandas: --junitxml {toxworkdir}/junit_{envname}.xml \
!pandas: {posargs:tests/sqlalchemy_test_suite}
pandas: pytest \
pandas: {env:SNOWFLAKE_PYTEST_OPTS:} \
pandas: --cov "snowflake.sqlalchemy" --cov-append \
pandas: --junitxml {toxworkdir}/junit_{envname}.xml \
pandas: --ignore=tests/sqlalchemy_test_suite \
pandas: -n8 \
pandas: {posargs:tests/test_pandas.py tests/test_qmark.py tests/test_core.py::test_snowflake_sqlalchemy_as_valid_client_type}

[testenv:.pkg_external]
deps = build
Expand Down
Loading