Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 4 additions & 32 deletions .github/workflows/daily_precommit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -162,8 +162,7 @@ jobs:
run: uv pip install -U setuptools pip wheel --system
- name: Install tox
run: uv pip install tox --system
# SNOW-2213578: Re-enable the test for 3.13
- if: ${{ contains('macos', matrix.os.download_name) && matrix.python-version != '3.13' }}
- if: ${{ contains('macos', matrix.os.download_name) }}
name: Run doctests
run: python -m tox -e "py${PYTHON_VERSION}-doctest-notudf-ci"
env:
Expand All @@ -174,8 +173,7 @@ jobs:
# Specify SNOWFLAKE_IS_PYTHON_RUNTIME_TEST: 1 when adding >= python3.12 with no server-side support
# For example, see https://github.com/snowflakedb/snowpark-python/pull/681
shell: bash
- if: ${{ matrix.python-version != '3.13' }}
name: Run tests (excluding doctests)
- name: Run tests (excluding doctests)
run: python -m tox -e "py${PYTHON_VERSION/\./}-dailynotdoctest-ci"
env:
PYTHON_VERSION: ${{ matrix.python-version }}
Expand All @@ -185,18 +183,6 @@ jobs:
SNOWPARK_PYTHON_API_TEST_BUCKET_PATH: ${{ secrets.SNOWPARK_PYTHON_API_TEST_BUCKET_PATH }}
SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION: ${{ vars.SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION }}
shell: bash
# SNOW-2213578 : Remove the test below and run udf tests for 3.13
- if: ${{ matrix.python-version == '3.13' }}
name: Run tests (excluding udf, doctests)
run: python -m tox -e "py${PYTHON_VERSION/\./}-dailynotdoctestnotudf-ci"
env:
PYTHON_VERSION: ${{ matrix.python-version }}
cloud_provider: ${{ matrix.cloud-provider }}
PYTEST_ADDOPTS: --color=yes --tb=short
TOX_PARALLEL_NO_SPINNER: 1
SNOWPARK_PYTHON_API_TEST_BUCKET_PATH: ${{ secrets.SNOWPARK_PYTHON_API_TEST_BUCKET_PATH }}
SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION: ${{ vars.SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION }}
shell: bash
- name: Install MS ODBC Driver (Ubuntu only)
if: ${{ matrix.os.download_name == 'linux' }}
run: |
Expand Down Expand Up @@ -608,8 +594,7 @@ jobs:
run: uv pip install -U setuptools pip wheel --system
- name: Install tox
run: uv pip install tox --system
# SNOW-2213578: Re-enable the test for 3.13
- if: ${{ contains('macos', matrix.os.download_name) && matrix.python-version != '3.13' }}
- if: ${{ contains('macos', matrix.os.download_name) }}
name: Run doctests
run: python -m tox -e "py${PYTHON_VERSION}-doctest-notudf-ci"
env:
Expand All @@ -618,8 +603,7 @@ jobs:
PYTEST_ADDOPTS: --color=yes --tb=short --disable_cte_optimization
TOX_PARALLEL_NO_SPINNER: 1
shell: bash
- if: ${{ matrix.python-version != '3.13' }}
name: Run tests (excluding doctests)
- name: Run tests (excluding doctests)
run: python -m tox -e "py${PYTHON_VERSION/\./}-dailynotdoctest-ci"
env:
PYTHON_VERSION: ${{ matrix.python-version }}
Expand All @@ -629,18 +613,6 @@ jobs:
SNOWPARK_PYTHON_API_TEST_BUCKET_PATH: ${{ secrets.SNOWPARK_PYTHON_API_TEST_BUCKET_PATH }}
SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION: ${{ vars.SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION }}
shell: bash
# SNOW-2213578 : Remove the test below and run udf tests for 3.13
- if: ${{ matrix.python-version == '3.13' }}
name: Run tests (excluding udf, doctests)
run: python -m tox -e "py${PYTHON_VERSION/\./}-dailynotdoctestnotudf-ci"
env:
PYTHON_VERSION: ${{ matrix.python-version }}
cloud_provider: ${{ matrix.cloud-provider }}
PYTEST_ADDOPTS: --color=yes --tb=short --disable_cte_optimization
TOX_PARALLEL_NO_SPINNER: 1
SNOWPARK_PYTHON_API_TEST_BUCKET_PATH: ${{ secrets.SNOWPARK_PYTHON_API_TEST_BUCKET_PATH }}
SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION: ${{ vars.SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION }}
shell: bash
- name: Combine coverages
run: python -m tox -e coverage --skip-missing-interpreters false
shell: bash
Expand Down
18 changes: 2 additions & 16 deletions .github/workflows/precommit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ jobs:
# For example, see https://github.com/snowflakedb/snowpark-python/pull/681
shell: bash
# do not run other tests for macos
- if: ${{ matrix.os != 'macos-latest' && matrix.python-version != '3.13' }}
- if: ${{ matrix.os != 'macos-latest' }}
name: Run tests (excluding doctests)
run: python -m tox -e "py${PYTHON_VERSION/\./}-notdoctest-ci"
env:
Expand All @@ -194,19 +194,6 @@ jobs:
SNOWPARK_PYTHON_API_TEST_BUCKET_PATH: ${{ secrets.SNOWPARK_PYTHON_API_TEST_BUCKET_PATH }}
SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION: ${{ vars.SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION }}
shell: bash
# SNOW-2213578 : Remove the test below and run udf tests for 3.13
# for 3.13, skip udf, doctest
- if: ${{ matrix.os != 'macos-latest' && matrix.python-version == '3.13' }}
name: Run tests (excluding udf, doctests)
run: python -m tox -e "py${PYTHON_VERSION/\./}-notudfdoctest-ci"
env:
PYTHON_VERSION: ${{ matrix.python-version }}
cloud_provider: ${{ matrix.cloud-provider }}
PYTEST_ADDOPTS: --color=yes --tb=short
SNOWPARK_PYTHON_API_TEST_BUCKET_PATH: ${{ secrets.SNOWPARK_PYTHON_API_TEST_BUCKET_PATH }}
SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION: ${{ vars.SNOWPARK_PYTHON_API_S3_STORAGE_INTEGRATION }}
TOX_PARALLEL_NO_SPINNER: 1
shell: bash
- name: Install MS ODBC Driver (Ubuntu only)
if: ${{ contains(matrix.os, 'ubuntu') }}
run: |
Expand All @@ -217,8 +204,7 @@ jobs:
shell: bash
- name: Run data source tests
# psycopg2 is not supported on macos 3.9
# SNOW-2213578: Re-enable the test for 3.13
if: ${{ !(matrix.os == 'macos-latest' && matrix.python-version == '3.9') && !(matrix.python-version == '3.13') }}
if: ${{ !(matrix.os == 'macos-latest' && matrix.python-version == '3.9') }}
run: python -m tox -e datasource
env:
PYTHON_VERSION: ${{ matrix.python-version }}
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ cd snowpark-python
#### Install the library in edit mode and install its dependencies

- Create a new Python virtual environment with any Python version that we support.
- The Snowpark Python API supports **Python 3.9, Python 3.10, Python 3.11 and Python 3.12**.
- The Snowpark Python API supports **Python 3.9, Python 3.10, Python 3.11, Python 3.12 and Python 3.13**.
- The Snowpark pandas API supports **Python 3.9, Python 3.10, and Python 3.11**. Additionally, Snowpark pandas requires **Modin 0.36.x or 0.37.x**, and **pandas 2.2.x or 2.3.x**.

```bash
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ If you don't have a Snowflake account yet, you can [sign up for a 30-day free tr

### Create a Python virtual environment
You can use [miniconda][miniconda], [anaconda][anaconda], or [virtualenv][virtualenv]
to create a Python 3.9, 3.10, 3.11 or 3.12 virtual environment.
to create a Python 3.9, 3.10, 3.11, 3.12 or 3.13 virtual environment.

For Snowpark pandas, only Python 3.9, 3.10, or 3.11 is supported.

Expand Down
33 changes: 19 additions & 14 deletions tests/integ/test_packaging.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,16 +185,19 @@ def test_patch_on_get_available_versions_for_packages(session):
reason="numpy and pandas are required",
)
def test_add_packages(session, local_testing_mode):
    # Use numpy 2.3.1 for Python 3.13+, since numpy 1.26.3 doesn't support Python 3.13
numpy_version = "numpy==2.3.1" if sys.version_info >= (3, 13) else "numpy==1.26.3"

session.add_packages(
[
"numpy==1.26.3",
numpy_version,
"pandas==2.2.3",
"matplotlib",
"pyyaml",
]
)
assert session.get_packages() == {
"numpy": "numpy==1.26.3",
"numpy": numpy_version,
"pandas": "pandas==2.2.3",
"matplotlib": "matplotlib",
"pyyaml": "pyyaml",
Expand All @@ -210,8 +213,9 @@ def get_numpy_pandas_dateutil_version() -> str:
df = session.create_dataframe([None]).to_df("a")
res = df.select(call_udf(udf_name)).collect()[0][0]
# don't need to check the version of dateutil, as it can be changed on the server side
expected_numpy_ver = "2.3.1" if sys.version_info >= (3, 13) else "1.26.3"
assert (
res.startswith("1.26.3/2.2.3")
res.startswith(f"{expected_numpy_ver}/2.2.3")
if not local_testing_mode
else res == get_numpy_pandas_dateutil_version()
)
Expand Down Expand Up @@ -284,7 +288,7 @@ def extract_major_minor_patch(version_string):

@pytest.mark.udf
def test_add_packages_with_underscore(session):
packages = ["spacy-model-en_core_web_sm", "typing_extensions"]
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this package does not work in python 3.13

packages = ["huggingface_hub", "typing_extensions"]
count = (
session.table("information_schema.packages")
.where(col("package_name").in_(packages))
Expand All @@ -299,10 +303,9 @@ def test_add_packages_with_underscore(session):
@udf(name=udf_name, packages=packages)
def check_if_package_installed() -> bool:
try:
import spacy
import huggingface_hub # noqa: F401
import typing_extensions # noqa: F401

spacy.load("en_core_web_sm")
return True
except Exception:
return False
Expand Down Expand Up @@ -417,7 +420,7 @@ def test_add_requirements(session, resources_path, local_testing_mode):

session.add_requirements(test_files.test_requirements_file)
assert session.get_packages() == {
"numpy": "numpy==1.26.3",
"numpy": "numpy==2.3.1" if sys.version_info >= (3, 13) else "numpy==1.26.3",
"pandas": "pandas==2.2.3",
}

Expand All @@ -429,9 +432,10 @@ def get_numpy_pandas_version() -> str:

df = session.create_dataframe([None]).to_df("a")
res = df.select(call_udf(udf_name))
expected_numpy_ver = "2.3.1" if sys.version_info >= (3, 13) else "1.26.3"
Utils.check_answer(
res,
[Row("1.26.3/2.2.3")]
[Row(f"{expected_numpy_ver}/2.2.3")]
if not local_testing_mode
else [Row(f"{numpy.__version__}/{pandas.__version__}")],
)
Expand Down Expand Up @@ -690,7 +694,7 @@ def test_add_packages_with_native_dependency_without_force_push(session):
with pytest.raises(
RuntimeError, match="Your code depends on packages that contain native code"
):
session.add_packages(["catboost==1.2.3"])
session.add_packages(["catboost==1.2.8"])


@pytest.fixture(scope="function")
Expand Down Expand Up @@ -911,29 +915,30 @@ def test_add_requirements_with_empty_stage_as_cache_path(
}

session.add_requirements(test_files.test_requirements_file)
expected_numpy_ver = "2.3.1" if sys.version_info >= (3, 13) else "1.26.3"
assert session.get_packages() == {
"numpy": "numpy==1.26.3",
"numpy": f"numpy=={expected_numpy_ver}",
"pandas": "pandas==2.2.3",
}

udf_name = Utils.random_name_for_temp_object(TempObjectType.FUNCTION)

    # using a newer snowpark to create an old snowpark udf could lead to a cloudpickle conflict.
# e.g. using snowpark 1.27 with cloudpickle 3.0 to create udf using snowpark 1.8, this will leads to
    # e.g. using snowpark 1.39 with cloudpickle 3.0 to create a udf using snowpark 1.8 will lead to an
# error as cloudpickle 3.0 is specified in udf creation but unsupported in snowpark 1.8
# the solution is to downgrade to cloudpickle 2.2.1 in the env
# TODO: SNOW-1951792, improve error experience
# pin cloudpickle as 1.27.0 snowpark upper bounds it to <=3.0.0
    # pin cloudpickle because snowpark 1.39.0 upper-bounds it to <=3.0.0
@udf(
name=udf_name,
packages=["snowflake-snowpark-python==1.27.0", "cloudpickle==3.0.0"],
packages=["snowflake-snowpark-python==1.39.0", "cloudpickle==3.0.0"],
)
def get_numpy_pandas_version() -> str:
import snowflake.snowpark as snowpark

return f"{snowpark.__version__}"

Utils.check_answer(session.sql(f"select {udf_name}()"), [Row("1.27.0")])
Utils.check_answer(session.sql(f"select {udf_name}()"), [Row("1.39.0")])


@pytest.mark.udf
Expand Down
6 changes: 3 additions & 3 deletions tests/integ/test_stored_procedure.py
Original file line number Diff line number Diff line change
Expand Up @@ -2327,7 +2327,7 @@ def artifact_repo_test(session: snowflake.snowpark.Session) -> str:
@pytest.mark.parametrize(
"version_override, expect_warning",
[
("1.27.1", False), # Bugfix version - no warning
("1.39.1", False), # Bugfix version - no warning
("999.999.999", True), # Major version change - expect warning
],
)
Expand All @@ -2350,8 +2350,8 @@ def plus1(session_, x):
plus1,
return_type=IntegerType(),
input_types=[IntegerType()],
# pin cloudpickle as 1.27.0 snowpark upper bounds it to <=3.0.0
packages=["snowflake-snowpark-python==1.27.0", "cloudpickle==3.0.0"],
        # pin cloudpickle because snowpark 1.39.0 upper-bounds it to <=3.0.0
packages=["snowflake-snowpark-python==1.39.0", "cloudpickle==3.0.0"],
)
assert plus1_sp(lit(6)) == 7

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
numpy==2.3.1
pandas==2.2.3
4 changes: 4 additions & 0 deletions tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1633,6 +1633,10 @@ def test_pandas_udf_py_file(self):

@property
def test_requirements_file(self):
if sys.version_info >= (3, 13):
return os.path.join(
self.resources_path, "test_requirements_greater_than_python_3_12.txt"
)
return os.path.join(self.resources_path, "test_requirements.txt")

@property
Expand Down
Loading