diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index ebd334177e..72b103cd52 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -33,7 +33,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.9' - name: Display Python version run: python -c "import sys; import os; print(\"\n\".join(os.environ[\"PATH\"].split(os.pathsep))); print(sys.version); print(sys.executable);" - name: Upgrade setuptools, pip and wheel @@ -54,7 +54,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 - name: Set up Python @@ -77,13 +77,13 @@ jobs: id: manylinux_x86_64 - image: ubuntu-latest id: manylinux_aarch64 - - image: windows-2019 + - image: windows-latest id: win_amd64 - image: macos-latest id: macosx_x86_64 - image: macos-latest id: macosx_arm64 - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] name: Build ${{ matrix.os.id }}-py${{ matrix.python-version }} runs-on: ${{ matrix.os.image }} steps: @@ -102,7 +102,7 @@ jobs: platforms: all - uses: actions/checkout@v4 - name: Building wheel - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.21.3 env: CIBW_BUILD: cp${{ env.shortver }}-${{ matrix.os.id }} MACOSX_DEPLOYMENT_TARGET: 10.14 # Should be kept in sync with ci/build_darwin.sh @@ -129,10 +129,19 @@ jobs: download_name: manylinux_x86_64 - image_name: macos-latest download_name: macosx_x86_64 - - image_name: windows-2019 + - image_name: windows-latest download_name: win_amd64 - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] cloud-provider: [aws, azure, gcp] + # TODO: When there are prebuilt wheels accessible for our dependencies (i.e. 
numpy) + # for Python 3.13 windows runs can be re-enabled. Currently, according to numpy: + # "Numpy built with MINGW-W64 on Windows 64 bits is experimental, and only available for + # testing. You are advised not to use it for production." + exclude: + - os: + image_name: windows-latest + download_name: win_amd64 + python-version: "3.13" steps: - uses: actions/checkout@v4 - name: Set up Python @@ -196,9 +205,11 @@ jobs: fail-fast: false matrix: os: - - image_name: ubuntu-latest + # Because the old version 3.0.2 of snowflake-connector-python depends on oscrypto which causes conflicts with higher versions of libssl + # TODO: It can be changed to ubuntu-latest, when python sf connector version in tox is above 3.4.0 + - image_name: ubuntu-20.04 download_name: linux - python-version: [3.8] + python-version: [3.9] cloud-provider: [aws] steps: - uses: actions/checkout@v4 @@ -237,7 +248,7 @@ jobs: os: - image_name: ubuntu-latest download_name: linux - python-version: [3.8] + python-version: [3.9] cloud-provider: [aws] steps: - uses: actions/checkout@v4 @@ -260,7 +271,7 @@ shell: bash test-fips: - name: Test FIPS linux-3.8-${{ matrix.cloud-provider }} + name: Test FIPS linux-3.9-${{ matrix.cloud-provider }} needs: build runs-on: ubuntu-latest strategy: @@ -279,7 +290,7 @@ - name: Download wheel(s) uses: actions/download-artifact@v4 with: - name: manylinux_x86_64_py3.8 + name: manylinux_x86_64_py3.9 path: dist - name: Show wheels downloaded run: ls -lh dist @@ -287,7 +298,7 @@ - name: Run tests run: ./ci/test_fips_docker.sh env: - PYTHON_VERSION: 3.8 + PYTHON_VERSION: 3.9 cloud_provider: ${{ matrix.cloud-provider }} PYTEST_ADDOPTS: --color=yes --tb=short TOX_PARALLEL_NO_SPINNER: 1 @@ -295,7 +306,7 @@ - uses: actions/upload-artifact@v4 with: include-hidden-files: true - name: coverage_linux-fips-3.8-${{ matrix.cloud-provider }} + name: coverage_linux-fips-3.9-${{ matrix.cloud-provider }} path: | .coverage coverage.xml @@ -307,7 +318,7 @@ 
strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] cloud-provider: [aws] steps: - name: Set shortver @@ -360,7 +371,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.9' - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Upgrade setuptools and pip diff --git a/.github/workflows/create_req_files.yml b/.github/workflows/create_req_files.yml index 18b0043591..4aba9a598e 100644 --- a/.github/workflows/create_req_files.yml +++ b/.github/workflows/create_req_files.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v3 - name: Set up Python diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index daab94e49a..327fd4b2ef 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,7 +5,7 @@ repos: - id: check-hooks-apply - id: check-useless-excludes - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace exclude: > @@ -28,7 +28,7 @@ repos: hooks: - id: yesqa - repo: https://github.com/mgedmin/check-manifest - rev: "0.49" + rev: "0.50" hooks: - id: check-manifest - repo: https://github.com/PyCQA/isort @@ -43,18 +43,18 @@ repos: - --append-only files: ^src/snowflake/connector/.*\.py$ - repo: https://github.com/asottile/pyupgrade - rev: v3.15.2 + rev: v3.19.0 hooks: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/PyCQA/flake8 - rev: 7.0.0 + rev: 7.1.1 hooks: - id: flake8 additional_dependencies: - flake8-bugbear - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.10.0' + rev: 'v1.13.0' hooks: - id: mypy files: | @@ -87,14 +87,14 @@ repos: - types-pyOpenSSL - types-setuptools - repo: https://github.com/psf/black - rev: 
24.4.2 + rev: 24.10.0 hooks: - id: black args: - --safe language_version: python3 - repo: https://github.com/pre-commit/mirrors-clang-format - rev: v17.0.6 + rev: v19.1.3 hooks: - id: clang-format types_or: [c++, c] diff --git a/DESCRIPTION.md b/DESCRIPTION.md index 640e5c504a..54d3b33807 100644 --- a/DESCRIPTION.md +++ b/DESCRIPTION.md @@ -8,6 +8,9 @@ Source code is also available at: https://github.com/snowflakedb/snowflake-conne # Release Notes - v3.14.1(TBD) + - Added support for Python 3.13. + - NOTE: Windows 64 support is still experimental and should not yet be used for production environments. + - Dropped support for Python 3.8. - Basic decimal floating-point type support. - Added handling of PAT provided in `password` field. - Improved error message for client-side query cancellations due to timeouts. diff --git a/Jenkinsfile b/Jenkinsfile index 3e191c2bc1..bc16773aa4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -47,11 +47,13 @@ timestamps { println("Exception computing commit hash from: ${response}") } parallel ( - 'Test Python 38': { build job: 'RT-PyConnector38-PC',parameters: params}, 'Test Python 39': { build job: 'RT-PyConnector39-PC',parameters: params}, 'Test Python 310': { build job: 'RT-PyConnector310-PC',parameters: params}, 'Test Python 311': { build job: 'RT-PyConnector311-PC',parameters: params}, 'Test Python 312': { build job: 'RT-PyConnector312-PC',parameters: params}, + 'Test Python 313': { build job: 'RT-PyConnector313-PC',parameters: params}, + 'Test Python 39 OldDriver': { build job: 'RT-PyConnector39-OldDriver-PC',parameters: params}, + 'Test Python 39 FIPS': { build job: 'RT-FIPS-PyConnector39',parameters: params}, ) } } diff --git a/README.md b/README.md index 239f1b138f..70f958aa4f 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ using the Snowflake JDBC or ODBC drivers. The connector has **no** dependencies on JDBC or ODBC. 
It can be installed using ``pip`` on Linux, Mac OSX, and Windows platforms -where Python 3.8.0 (or higher) is installed. +where Python 3.9.0 (or higher) is installed. Snowflake Documentation is available at: https://docs.snowflake.com/ @@ -27,7 +27,7 @@ https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowfl ### Locally -Install Python 3.8.0 or higher. Clone the Snowflake Connector for Python repository, then run the following commands +Install a supported Python version. Clone the Snowflake Connector for Python repository, then run the following commands to create a wheel package using PEP-517 build: ```shell @@ -42,7 +42,7 @@ Find the `snowflake_connector_python*.whl` package in the `./dist` directory. ### In Docker Or use our Dockerized build script `ci/build_docker.sh` and find the built wheel files in `dist/repaired_wheels`. -Note: `ci/build_docker.sh` can be used to compile only certain versions, like this: `ci/build_docker.sh "3.8 3.9"` +Note: `ci/build_docker.sh` can be used to compile only certain versions, like this: `ci/build_docker.sh "3.9 3.10"` ## Code hygiene and other utilities These tools are integrated into `tox` to allow us to easily set them up universally on any computer. 
diff --git a/ci/build_darwin.sh b/ci/build_darwin.sh index 08214a357d..8065ee245a 100755 --- a/ci/build_darwin.sh +++ b/ci/build_darwin.sh @@ -2,13 +2,8 @@ # # Build Snowflake Python Connector on Mac # NOTES: -# - To compile only a specific version(s) pass in versions like: `./build_darwin.sh "3.8 3.9"` -arch=$(uname -m) -if [[ "$arch" == "arm64" ]]; then - PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" -else - PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" -fi +# - To compile only a specific version(s) pass in versions like: `./build_darwin.sh "3.9 3.10"` +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" CONNECTOR_DIR="$(dirname "${THIS_DIR}")" diff --git a/ci/build_docker.sh b/ci/build_docker.sh index f98dcc86dd..1c661ea3ac 100755 --- a/ci/build_docker.sh +++ b/ci/build_docker.sh @@ -2,7 +2,7 @@ # # Build Snowflake Python Connector in Docker # NOTES: -# - To compile only a specific version(s) pass in versions like: `./build_docker.sh "3.8 3.9"` +# - To compile only a specific version(s) pass in versions like: `./build_docker.sh "3.9 3.10"` set -o pipefail THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" diff --git a/ci/build_linux.sh b/ci/build_linux.sh index 1daad7ffb9..f12717ec40 100755 --- a/ci/build_linux.sh +++ b/ci/build_linux.sh @@ -3,11 +3,11 @@ # Build Snowflake Python Connector on Linux # NOTES: # - This is designed to ONLY be called in our build docker image -# - To compile only a specific version(s) pass in versions like: `./build_linux.sh "3.8 3.9"` +# - To compile only a specific version(s) pass in versions like: `./build_linux.sh "3.9 3.10"` set -o pipefail U_WIDTH=16 -PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" CONNECTOR_DIR="$(dirname "${THIS_DIR}")" DIST_DIR="${CONNECTOR_DIR}/dist" diff --git a/ci/build_windows.bat b/ci/build_windows.bat index 5e0f6ba23a..3835243c31 
100644 --- a/ci/build_windows.bat +++ b/ci/build_windows.bat @@ -6,14 +6,14 @@ SET SCRIPT_DIR=%~dp0 SET CONNECTOR_DIR=%~dp0\..\ -set python_versions= 3.8 3.9 3.10 3.11 3.12 +set python_versions= 3.9 3.10 3.11 3.12 3.13 cd %CONNECTOR_DIR% set venv_dir=%WORKSPACE%\venv-flake8 if %errorlevel% neq 0 goto :error -py -3.8 -m venv %venv_dir% +py -3.9 -m venv %venv_dir% if %errorlevel% neq 0 goto :error call %venv_dir%\scripts\activate diff --git a/ci/docker/connector_build/Dockerfile b/ci/docker/connector_build/Dockerfile index 263803feb0..fa1febc883 100644 --- a/ci/docker/connector_build/Dockerfile +++ b/ci/docker/connector_build/Dockerfile @@ -14,6 +14,4 @@ WORKDIR /home/user RUN chmod 777 /home/user RUN git clone https://github.com/matthew-brett/multibuild.git && cd /home/user/multibuild && git checkout bfc6d8b82d8c37b8ca1e386081fd800e81c6ab4a -ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin:/opt/python/cp39-cp39/bin:/opt/python/cp310-cp310/bin:/opt/python/cp311-cp311/bin:/opt/python/cp312-cp312/bin" - ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/docker/connector_test/Dockerfile b/ci/docker/connector_test/Dockerfile index 4117585d4c..d90705038f 100644 --- a/ci/docker/connector_test/Dockerfile +++ b/ci/docker/connector_test/Dockerfile @@ -3,6 +3,10 @@ FROM $BASE_IMAGE RUN yum install -y java-11-openjdk +# TODO: When there are prebuilt wheels accessible for our dependencies (i.e. numpy) +# for Python 3.13 this rust cargo install command can be removed. 
+RUN yum -y install rust cargo + # This is to solve permission issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ ARG GOSU_URL=https://github.com/tianon/gosu/releases/download/1.14/gosu-amd64 ENV GOSU_PATH $GOSU_URL @@ -14,6 +18,5 @@ RUN chmod +x /usr/local/bin/entrypoint.sh WORKDIR /home/user RUN chmod 777 /home/user -ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin/:/opt/python/cp39-cp39/bin/:/opt/python/cp310-cp310/bin/:/opt/python/cp311-cp311/bin/:/opt/python/cp312-cp312/bin/" ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/docker/connector_test_fips/Dockerfile b/ci/docker/connector_test_fips/Dockerfile index 7705dce471..06a5484b36 100644 --- a/ci/docker/connector_test_fips/Dockerfile +++ b/ci/docker/connector_test_fips/Dockerfile @@ -18,7 +18,7 @@ RUN sed -i s/mirror.centos.org/vault.centos.org/g /etc/yum.repos.d/*.repo && \ RUN yum clean all && \ yum install -y redhat-rpm-config gcc libffi-devel openssl openssl-devel && \ - yum install -y python38 python38-devel && \ + yum install -y python39 python39-devel && \ yum install -y java-11-openjdk && \ yum clean all && \ rm -rf /var/cache/yum diff --git a/ci/docker/connector_test_lambda/Dockerfile313 b/ci/docker/connector_test_lambda/Dockerfile313 new file mode 100644 index 0000000000..9b8d8d0f93 --- /dev/null +++ b/ci/docker/connector_test_lambda/Dockerfile313 @@ -0,0 +1,29 @@ +FROM public.ecr.aws/lambda/python:3.13-x86_64 + +WORKDIR /home/user/snowflake-connector-python + +# TODO: When there are prebuilt wheels accessible for our dependencies (i.e. numpy) +# for Python 3.13 all dnf ... commands installing building kits can be removed. 
+ +# Install necessary packages and compilers - we need to build numpy for newer version +# Update dnf and install development tools +RUN dnf -y update && \ + dnf -y install \ + gcc \ + gcc-c++ \ + make \ + python3-devel \ + openblas-devel \ + lapack-devel && \ + dnf clean all +RUN dnf -y install rust cargo +RUN dnf -y upgrade + + +RUN chmod 777 /home/user/snowflake-connector-python +ENV PATH="${PATH}:/opt/python/cp313-cp313/bin/" +ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" + +RUN pip3 install -U pip setuptools wheel tox>=4 + +CMD [ "app.handler" ] diff --git a/ci/docker/connector_test_lambda/Dockerfile38 b/ci/docker/connector_test_lambda/Dockerfile38 deleted file mode 100644 index 3d9d0c8120..0000000000 --- a/ci/docker/connector_test_lambda/Dockerfile38 +++ /dev/null @@ -1,12 +0,0 @@ -FROM public.ecr.aws/lambda/python:3.8-x86_64 - -RUN yum install -y git - -WORKDIR /home/user/snowflake-connector-python -RUN chmod 777 /home/user/snowflake-connector-python -ENV PATH="${PATH}:/opt/python/cp38-cp38/bin/" -ENV PYTHONPATH="${PYTHONPATH}:/home/user/snowflake-connector-python/ci/docker/connector_test_lambda/" - -RUN pip3 install -U pip setuptools wheel tox>=4 - -CMD [ "app.handler" ] diff --git a/ci/docker/connector_test_lambda/app.py b/ci/docker/connector_test_lambda/app.py index d5b2f26ce3..70fa95bb0f 100644 --- a/ci/docker/connector_test_lambda/app.py +++ b/ci/docker/connector_test_lambda/app.py @@ -7,7 +7,7 @@ LOGGER = logging.getLogger(__name__) REPO_PATH = "/home/user/snowflake-connector-python" -PY_SHORT_VER = f"{sys.version_info[0]}{sys.version_info[1]}" # 38, 39, 310, 311, 312 +PY_SHORT_VER = f"{sys.version_info[0]}{sys.version_info[1]}" # 39, 310, 311, 312, 313 ARCH = "x86" # x86, aarch64 diff --git a/ci/test_darwin.sh b/ci/test_darwin.sh index 81ea9911a0..b6139c76da 100755 --- a/ci/test_darwin.sh +++ b/ci/test_darwin.sh @@ -2,10 +2,10 @@ # # Test Snowflake Connector on a Darwin Jenkins slave # NOTES: -# 
- Versions to be tested should be passed in as the first argument, e.g: "3.8 3.9". If omitted 3.8-3.11 will be assumed. +# - Versions to be tested should be passed in as the first argument, e.g: "3.9 3.10". If omitted 3.9-3.13 will be assumed. # - This script uses .. to download the newest wheel files from S3 -PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" PARAMETERS_DIR="${CONNECTOR_DIR}/.github/workflows/parameters/public" diff --git a/ci/test_docker.sh b/ci/test_docker.sh index 073372366d..9da02c5887 100755 --- a/ci/test_docker.sh +++ b/ci/test_docker.sh @@ -1,13 +1,13 @@ #!/bin/bash -e # Test Snowflake Python Connector in Docker # NOTES: -# - By default this script runs Python 3.8 tests, as these are installed in dev vms -# - To compile only a specific version(s) pass in versions like: `./test_docker.sh "3.8 3.9"` +# - By default this script runs Python 3.9 tests, as these are installed in dev vms +# - To compile only a specific version(s) pass in versions like: `./test_docker.sh "3.9 3.10"` set -o pipefail # In case this is ran from dev-vm -PYTHON_ENV=${1:-3.8} +PYTHON_ENV=${1:-3.9} # Set constants THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" diff --git a/ci/test_fips.sh b/ci/test_fips.sh index f275dca944..219b3c4bc6 100755 --- a/ci/test_fips.sh +++ b/ci/test_fips.sh @@ -6,12 +6,12 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # shellcheck disable=SC1090 CONNECTOR_DIR="$( dirname "${THIS_DIR}")" -CONNECTOR_WHL="$(ls $CONNECTOR_DIR/dist/*cp38*manylinux2014*.whl | sort -r | head -n 1)" +CONNECTOR_WHL="$(ls $CONNECTOR_DIR/dist/*cp39*manylinux2014*.whl | sort -r | head -n 1)" # fetch wiremock curl https://repo1.maven.org/maven2/org/wiremock/wiremock-standalone/3.11.0/wiremock-standalone-3.11.0.jar --output "${CONNECTOR_DIR}/.wiremock/wiremock-standalone.jar" -python3.8 -m venv 
fips_env +python3 -m venv fips_env source fips_env/bin/activate pip install -U setuptools pip pip install "${CONNECTOR_WHL}[pandas,secure-local-storage,development]" diff --git a/ci/test_fips_docker.sh b/ci/test_fips_docker.sh index 4150296de5..46f3a1ed30 100755 --- a/ci/test_fips_docker.sh +++ b/ci/test_fips_docker.sh @@ -4,10 +4,10 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" # In case this is not run locally and not on Jenkins -if [[ ! -d "$CONNECTOR_DIR/dist/" ]] || [[ $(ls $CONNECTOR_DIR/dist/*cp38*manylinux2014*.whl) == '' ]]; then +if [[ ! -d "$CONNECTOR_DIR/dist/" ]] || [[ $(ls $CONNECTOR_DIR/dist/*cp39*manylinux2014*.whl) == '' ]]; then echo "Missing wheel files, going to compile Python connector in Docker..." - $THIS_DIR/build_docker.sh 3.8 - cp $CONNECTOR_DIR/dist/repaired_wheels/*cp38*manylinux2014*.whl $CONNECTOR_DIR/dist/ + $THIS_DIR/build_docker.sh 3.9 + cp $CONNECTOR_DIR/dist/repaired_wheels/*cp39*manylinux2014*.whl $CONNECTOR_DIR/dist/ fi cd $THIS_DIR/docker/connector_test_fips diff --git a/ci/test_lambda_docker.sh b/ci/test_lambda_docker.sh index e4869f125e..cc3c1fe9f9 100755 --- a/ci/test_lambda_docker.sh +++ b/ci/test_lambda_docker.sh @@ -2,7 +2,7 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" -PYTHON_VERSION="${1:-3.8}" +PYTHON_VERSION="${1:-3.9}" PYTHON_SHORT_VERSION="$(echo "$PYTHON_VERSION" | tr -d .)" # In case this is not run locally and not on Jenkins diff --git a/ci/test_linux.sh b/ci/test_linux.sh index 2984de3774..0c08eca14a 100755 --- a/ci/test_linux.sh +++ b/ci/test_linux.sh @@ -2,11 +2,11 @@ # # Test Snowflake Connector in Linux # NOTES: -# - Versions to be tested should be passed in as the first argument, e.g: "3.8 3.9". If omitted 3.7-3.11 will be assumed. +# - Versions to be tested should be passed in as the first argument, e.g: "3.9 3.10". If omitted 3.9-3.13 will be assumed. 
# - This script assumes that ../dist/repaired_wheels has the wheel(s) built for all versions to be tested # - This is the script that test_docker.sh runs inside of the docker container -PYTHON_VERSIONS="${1:-3.8 3.9 3.10 3.11 3.12}" +PYTHON_VERSIONS="${1:-3.9 3.10 3.11 3.12 3.13}" THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" CONNECTOR_DIR="$( dirname "${THIS_DIR}")" diff --git a/ci/test_windows.bat b/ci/test_windows.bat index 4c62329f39..643758abe9 100644 --- a/ci/test_windows.bat +++ b/ci/test_windows.bat @@ -30,7 +30,7 @@ gpg --quiet --batch --yes --decrypt --passphrase="%PARAMETERS_SECRET%" %PARAMS_F :: create tox execution virtual env set venv_dir=%WORKSPACE%\tox_venv -py -3.8 -m venv %venv_dir% +py -3.9 -m venv %venv_dir% if %errorlevel% neq 0 goto :error call %venv_dir%\scripts\activate diff --git a/setup.cfg b/setup.cfg index 344d18b891..c04e4c5eff 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,11 +20,11 @@ classifiers = Operating System :: OS Independent Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Programming Language :: SQL Topic :: Database Topic :: Scientific/Engineering :: Information Analysis @@ -40,7 +40,7 @@ project_urls = Changelog=https://github.com/snowflakedb/snowflake-connector-python/blob/main/DESCRIPTION.md [options] -python_requires = >=3.8 +python_requires = >=3.9 packages = find_namespace: install_requires = asn1crypto>0.24.0,<2.0.0 @@ -52,7 +52,6 @@ install_requires = pyjwt<3.0.0 pytz requests<3.0.0 - importlib-metadata; python_version < '3.8' packaging charset_normalizer>=2,<4 idna>=2.5,<4 @@ -84,7 +83,7 @@ development = Cython coverage more-itertools - numpy<1.27.0 + numpy<2.1.0 pendulum!=2.1.1 pexpect pytest<7.5.0 @@ -94,7 +93,7 @@ development = 
pytest-xdist pytzdata pandas = - pandas>=1.0.0,<3.0.0 + pandas>=2.1.2,<3.0.0 pyarrow<19.0.0 secure-local-storage = keyring>=23.1.0,<26.0.0 diff --git a/src/snowflake/connector/connection.py b/src/snowflake/connector/connection.py index a71565e2b7..5fccfe01bf 100644 --- a/src/snowflake/connector/connection.py +++ b/src/snowflake/connector/connection.py @@ -978,7 +978,7 @@ def execute_stream( remove_comments: bool = False, cursor_class: SnowflakeCursor = SnowflakeCursor, **kwargs, - ) -> Generator[SnowflakeCursor, None, None]: + ) -> Generator[SnowflakeCursor]: """Executes a stream of SQL statements. This is a non-standard convenient method.""" split_statements_list = split_statements( stream, remove_comments=remove_comments diff --git a/src/snowflake/connector/converter.py b/src/snowflake/connector/converter.py index dfc25d3cf8..ac42b12678 100644 --- a/src/snowflake/connector/converter.py +++ b/src/snowflake/connector/converter.py @@ -28,7 +28,7 @@ from .sfdatetime import sfdatetime_total_seconds_from_timedelta if TYPE_CHECKING: - from numpy import int64 + from numpy import bool_, int64 try: import numpy @@ -505,8 +505,8 @@ def _bytes_to_snowflake(self, value: bytes) -> bytes: _bytearray_to_snowflake = _bytes_to_snowflake - def _bool_to_snowflake(self, value: bool) -> bool: - return value + def _bool_to_snowflake(self, value: bool | bool_) -> bool: + return bool(value) def _bool__to_snowflake(self, value) -> bool: return bool(value) @@ -636,6 +636,9 @@ def _list_to_snowflake(self, value: list) -> list: def __numpy_to_snowflake(self, value): return value + def _float16_to_snowflake(self, value): + return float(value) + _int8_to_snowflake = __numpy_to_snowflake _int16_to_snowflake = __numpy_to_snowflake _int32_to_snowflake = __numpy_to_snowflake @@ -644,9 +647,8 @@ def __numpy_to_snowflake(self, value): _uint16_to_snowflake = __numpy_to_snowflake _uint32_to_snowflake = __numpy_to_snowflake _uint64_to_snowflake = __numpy_to_snowflake - _float16_to_snowflake = 
__numpy_to_snowflake - _float32_to_snowflake = __numpy_to_snowflake - _float64_to_snowflake = __numpy_to_snowflake + _float32_to_snowflake = _float16_to_snowflake + _float64_to_snowflake = _float16_to_snowflake def _datetime64_to_snowflake(self, value) -> str: return str(value) + "+00:00" diff --git a/src/snowflake/connector/gzip_decoder.py b/src/snowflake/connector/gzip_decoder.py index 6296d0ab53..6c370bc6df 100644 --- a/src/snowflake/connector/gzip_decoder.py +++ b/src/snowflake/connector/gzip_decoder.py @@ -67,7 +67,7 @@ def decompress_raw_data_by_zcat(raw_data_fd: IO, add_bracket: bool = True) -> by def decompress_raw_data_to_unicode_stream( raw_data_fd: IO, -) -> Generator[str, None, None]: +) -> Generator[str]: """Decompresses a raw data in file like object and yields a Unicode string. Args: diff --git a/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c b/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c index 975cf37cf5..371e198847 100644 --- a/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c +++ b/src/snowflake/connector/nanoarrow_cpp/ArrowIterator/nanoarrow_ipc.c @@ -17,15 +17,18 @@ flatbuffers_voffset_t id__tmp, *vt__tmp; \ FLATCC_ASSERT(t != 0 && "null pointer table access"); \ id__tmp = ID; \ - vt__tmp = (flatbuffers_voffset_t *)(( \ - uint8_t *)(t)-__flatbuffers_soffset_read_from_pe(t)); \ + vt__tmp = \ + (flatbuffers_voffset_t *)((uint8_t *)(t) - \ + __flatbuffers_soffset_read_from_pe(t)); \ if (__flatbuffers_voffset_read_from_pe(vt__tmp) >= \ sizeof(vt__tmp[0]) * (id__tmp + 3u)) { \ offset = __flatbuffers_voffset_read_from_pe(vt__tmp + id__tmp + 2); \ } \ } -#define __flatbuffers_field_present(ID, t) \ - { __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; } +#define __flatbuffers_field_present(ID, t) \ + { \ + __flatbuffers_read_vt(ID, offset__tmp, t) return offset__tmp != 0; \ + } #define __flatbuffers_scalar_field(T, ID, t) \ { \ __flatbuffers_read_vt(ID, offset__tmp, t) if 
(offset__tmp) { \ @@ -222,27 +225,27 @@ static inline flatbuffers_string_t flatbuffers_string_cast_from_union( const flatbuffers_union_t u__tmp) { return flatbuffers_string_cast_from_generic(u__tmp.value); } -#define __flatbuffers_define_union_field(NS, ID, N, NK, T, r) \ - static inline T##_union_type_t N##_##NK##_type_get(N##_table_t t__tmp) \ - __##NS##union_type_field(((ID)-1), t__tmp) static inline NS##generic_t \ - N##_##NK##_get(N##_table_t t__tmp) __##NS##table_field( \ - NS##generic_t, ID, t__tmp, r) static inline T##_union_type_t \ - N##_##NK##_type(N##_table_t t__tmp) __##NS##union_type_field( \ - ((ID)-1), t__tmp) static inline NS##generic_t \ - N##_##NK(N##_table_t t__tmp) __##NS##table_field( \ - NS##generic_t, ID, t__tmp, r) static inline int \ - N##_##NK##_is_present(N##_table_t t__tmp) \ - __##NS##field_present( \ - ID, t__tmp) static inline T##_union_t \ - N##_##NK##_union(N##_table_t t__tmp) { \ - T##_union_t u__tmp = {0, 0}; \ - u__tmp.type = N##_##NK##_type_get(t__tmp); \ - if (u__tmp.type == 0) return u__tmp; \ - u__tmp.value = N##_##NK##_get(t__tmp); \ - return u__tmp; \ - } \ - static inline NS##string_t N##_##NK##_as_string(N##_table_t t__tmp) { \ - return NS##string_cast_from_generic(N##_##NK##_get(t__tmp)); \ +#define __flatbuffers_define_union_field(NS, ID, N, NK, T, r) \ + static inline T##_union_type_t N##_##NK##_type_get(N##_table_t t__tmp) \ + __##NS##union_type_field(((ID) - 1), t__tmp) static inline NS##generic_t \ + N##_##NK##_get(N##_table_t t__tmp) __##NS##table_field( \ + NS##generic_t, ID, t__tmp, r) static inline T##_union_type_t \ + N##_##NK##_type(N##_table_t t__tmp) __##NS##union_type_field( \ + ((ID) - 1), t__tmp) static inline NS##generic_t \ + N##_##NK(N##_table_t t__tmp) __##NS##table_field( \ + NS##generic_t, ID, t__tmp, r) static inline int \ + N##_##NK##_is_present(N##_table_t t__tmp) \ + __##NS##field_present( \ + ID, t__tmp) static inline T##_union_t \ + N##_##NK##_union(N##_table_t t__tmp) { \ + T##_union_t 
u__tmp = {0, 0}; \ + u__tmp.type = N##_##NK##_type_get(t__tmp); \ + if (u__tmp.type == 0) return u__tmp; \ + u__tmp.value = N##_##NK##_get(t__tmp); \ + return u__tmp; \ + } \ + static inline NS##string_t N##_##NK##_as_string(N##_table_t t__tmp) { \ + return NS##string_cast_from_generic(N##_##NK##_get(t__tmp)); \ } #define __flatbuffers_define_union_vector_ops(NS, T) \ @@ -703,10 +706,14 @@ static inline int __flatbuffers_string_cmp(flatbuffers_string_t v, T##_mutable_vec_t v__tmp = (T##_mutable_vec_t)N##_##NK##_get(t); \ if (v__tmp) T##_vec_sort(v__tmp); \ } -#define __flatbuffers_sort_table_field(N, NK, T, t) \ - { T##_sort((T##_mutable_table_t)N##_##NK##_get(t)); } -#define __flatbuffers_sort_union_field(N, NK, T, t) \ - { T##_sort(T##_mutable_union_cast(N##_##NK##_union(t))); } +#define __flatbuffers_sort_table_field(N, NK, T, t) \ + { \ + T##_sort((T##_mutable_table_t)N##_##NK##_get(t)); \ + } +#define __flatbuffers_sort_union_field(N, NK, T, t) \ + { \ + T##_sort(T##_mutable_union_cast(N##_##NK##_union(t))); \ + } #define __flatbuffers_sort_table_vector_field_elements(N, NK, T, t) \ { \ T##_vec_t v__tmp = N##_##NK##_get(t); \ @@ -12006,7 +12013,9 @@ static inline size_t org_apache_arrow_flatbuf_Tensor_vec_len( #endif static const flatbuffers_voffset_t - __org_apache_arrow_flatbuf_TensorDim_required[] = {0}; + __org_apache_arrow_flatbuf_TensorDim_required[] = { + 0 + }; typedef flatbuffers_ref_t org_apache_arrow_flatbuf_TensorDim_ref_t; static org_apache_arrow_flatbuf_TensorDim_ref_t org_apache_arrow_flatbuf_TensorDim_clone( @@ -24265,7 +24274,9 @@ static inline size_t org_apache_arrow_flatbuf_Tensor_vec_len( #endif static const flatbuffers_voffset_t - __org_apache_arrow_flatbuf_TensorDim_required[] = {0}; + __org_apache_arrow_flatbuf_TensorDim_required[] = { + 0 + }; typedef flatbuffers_ref_t org_apache_arrow_flatbuf_TensorDim_ref_t; static org_apache_arrow_flatbuf_TensorDim_ref_t org_apache_arrow_flatbuf_TensorDim_clone( @@ -30667,7 +30678,9 @@ static inline 
size_t org_apache_arrow_flatbuf_Tensor_vec_len( #endif static const flatbuffers_voffset_t - __org_apache_arrow_flatbuf_TensorDim_required[] = {0}; + __org_apache_arrow_flatbuf_TensorDim_required[] = { + 0 + }; typedef flatbuffers_ref_t org_apache_arrow_flatbuf_TensorDim_ref_t; static org_apache_arrow_flatbuf_TensorDim_ref_t org_apache_arrow_flatbuf_TensorDim_clone( diff --git a/test/conftest.py b/test/conftest.py index c85f954c26..59b46690b8 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -55,7 +55,7 @@ def patch_connection( self, con: SnowflakeConnection, propagate: bool = True, - ) -> Generator[TelemetryCaptureHandler, None, None]: + ) -> Generator[TelemetryCaptureHandler]: original_telemetry = con._telemetry new_telemetry = TelemetryCaptureHandler( original_telemetry, diff --git a/test/integ/conftest.py b/test/integ/conftest.py index b7c1550b72..1bb4a235dd 100644 --- a/test/integ/conftest.py +++ b/test/integ/conftest.py @@ -163,7 +163,7 @@ def get_db_parameters(connection_name: str = "default") -> dict[str, Any]: @pytest.fixture(scope="session", autouse=True) -def init_test_schema(db_parameters) -> Generator[None, None, None]: +def init_test_schema(db_parameters) -> Generator[None]: """Initializes and destroys the schema specific to this pytest session. This is automatically called per test session. @@ -186,7 +186,7 @@ def init_test_schema(db_parameters) -> Generator[None, None, None]: def create_connection(connection_name: str, **kwargs) -> SnowflakeConnection: """Creates a connection using the parameters defined in parameters.py. - You can select from the different connections by supplying the appropiate + You can select from the different connections by supplying the appropriate connection_name parameter and then anything else supplied will overwrite the values from parameters.py. 
""" @@ -200,7 +200,7 @@ def create_connection(connection_name: str, **kwargs) -> SnowflakeConnection: def db( connection_name: str = "default", **kwargs, -) -> Generator[SnowflakeConnection, None, None]: +) -> Generator[SnowflakeConnection]: if not kwargs.get("timezone"): kwargs["timezone"] = "UTC" if not kwargs.get("converter_class"): @@ -216,7 +216,7 @@ def db( def negative_db( connection_name: str = "default", **kwargs, -) -> Generator[SnowflakeConnection, None, None]: +) -> Generator[SnowflakeConnection]: if not kwargs.get("timezone"): kwargs["timezone"] = "UTC" if not kwargs.get("converter_class"): diff --git a/test/integ/pandas/test_pandas_tools.py b/test/integ/pandas/test_pandas_tools.py index dd01bea817..e53afc5335 100644 --- a/test/integ/pandas/test_pandas_tools.py +++ b/test/integ/pandas/test_pandas_tools.py @@ -69,7 +69,7 @@ def assert_result_equals( def test_fix_snow_746341( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): cat = '"cat"' df = pandas.DataFrame([[1], [2]], columns=[f"col_'{cat}'"]) @@ -88,7 +88,7 @@ def test_fix_snow_746341( @pytest.mark.parametrize("auto_create_table", [True, False]) @pytest.mark.parametrize("index", [False]) def test_write_pandas_with_overwrite( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], quote_identifiers: bool, auto_create_table: bool, index: bool, @@ -230,7 +230,7 @@ def test_write_pandas_with_overwrite( @pytest.mark.parametrize("create_temp_table", [True, False]) @pytest.mark.parametrize("index", [False]) def test_write_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], db_parameters: dict[str, str], compression: str, chunk_size: int, @@ -301,7 +301,7 @@ def test_write_pandas( def test_write_non_range_index_pandas( - conn_cnx: Callable[..., 
Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], db_parameters: dict[str, str], ): compression = "gzip" @@ -381,7 +381,7 @@ def test_write_non_range_index_pandas( @pytest.mark.parametrize("table_type", ["", "temp", "temporary", "transient"]) def test_write_pandas_table_type( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], table_type: str, ): with conn_cnx() as cnx: @@ -413,7 +413,7 @@ def test_write_pandas_table_type( def test_write_pandas_create_temp_table_deprecation_warning( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): with conn_cnx() as cnx: table_name = random_string(5, "driver_versions_") @@ -441,7 +441,7 @@ def test_write_pandas_create_temp_table_deprecation_warning( @pytest.mark.parametrize("use_logical_type", [None, True, False]) def test_write_pandas_use_logical_type( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], use_logical_type: bool | None, ): table_name = random_string(5, "USE_LOCAL_TYPE_").upper() @@ -488,7 +488,7 @@ def test_write_pandas_use_logical_type( def test_invalid_table_type_write_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): with conn_cnx() as cnx: with pytest.raises(ValueError, match="Unsupported table type"): @@ -501,7 +501,7 @@ def test_invalid_table_type_write_pandas( def test_empty_dataframe_write_pandas( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): table_name = random_string(5, "empty_dataframe_") df = pandas.DataFrame([], columns=["name", "balance"]) @@ -725,7 +725,7 @@ def mocked_execute(*args, **kwargs): @pytest.mark.parametrize("quote_identifiers", 
[True, False]) def test_default_value_insertion( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], quote_identifiers: bool, ): """Tests whether default values can be successfully inserted with the pandas writeback.""" @@ -779,7 +779,7 @@ def test_default_value_insertion( @pytest.mark.parametrize("quote_identifiers", [True, False]) def test_autoincrement_insertion( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], quote_identifiers: bool, ): """Tests whether default values can be successfully inserted with the pandas writeback.""" @@ -833,7 +833,7 @@ def test_autoincrement_insertion( ], ) def test_special_name_quoting( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], auto_create_table: bool, column_names: list[str], ): @@ -880,7 +880,7 @@ def test_special_name_quoting( def test_auto_create_table_similar_column_names( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): """Tests whether similar names do not cause issues when auto-creating a table as expected.""" table_name = random_string(5, "numbas_") @@ -911,7 +911,7 @@ def test_auto_create_table_similar_column_names( def test_all_pandas_types( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): table_name = random_string(5, "all_types_") datetime_with_tz = datetime(1997, 6, 3, 14, 21, 32, 00, tzinfo=timezone.utc) @@ -984,7 +984,7 @@ def test_all_pandas_types( @pytest.mark.parametrize("object_type", ["STAGE", "FILE FORMAT"]) def test_no_create_internal_object_privilege_in_target_schema( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], 
caplog, object_type, ): @@ -1063,7 +1063,7 @@ def test__iceberg_config_statement_helper(): def test_write_pandas_with_on_error( - conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + conn_cnx: Callable[..., Generator[SnowflakeConnection]], ): """Tests whether overwriting table using a Pandas DataFrame works as expected.""" random_table_name = random_string(5, "userspoints_") diff --git a/test/integ/test_arrow_result.py b/test/integ/test_arrow_result.py index dc0fe21494..5cdd3bb341 100644 --- a/test/integ/test_arrow_result.py +++ b/test/integ/test_arrow_result.py @@ -116,7 +116,7 @@ pandas.NaT, pandas.Timestamp("2024-01-01 12:00:00+0000", tz="UTC"), ], - "NUMBER": [numpy.NAN, 1.0, 2.0, 3.0], + "NUMBER": [numpy.nan, 1.0, 2.0, 3.0], } PANDAS_STRUCTURED_REPRS = { diff --git a/test/integ/test_vendored_urllib.py b/test/integ/test_vendored_urllib.py index 3d6f27f9b3..bf178b214b 100644 --- a/test/integ/test_vendored_urllib.py +++ b/test/integ/test_vendored_urllib.py @@ -13,9 +13,7 @@ vendored_imported = False -@pytest.mark.skipif( - not vendored_imported, reason="vendored library is not imported for old driver" -) +@pytest.mark.skipolddriver(reason="vendored library is not imported for old driver") def test_local_fix_for_closed_socket_bug(): # https://github.com/urllib3/urllib3/issues/1878#issuecomment-641534573 http = urllib3.PoolManager(maxsize=1) diff --git a/test/unit/test_ocsp.py b/test/unit/test_ocsp.py index 02d83c8b3e..84f6b7b69c 100644 --- a/test/unit/test_ocsp.py +++ b/test/unit/test_ocsp.py @@ -117,19 +117,20 @@ def create_x509_cert(hash_algorithm): @pytest.fixture(autouse=True) def random_ocsp_response_validation_cache(): + RANDOM_FILENAME_SUFFIX_LEN = 10 file_path = { "linux": os.path.join( "~", ".cache", "snowflake", - f"ocsp_response_validation_cache{random_string()}", + f"ocsp_response_validation_cache{random_string(RANDOM_FILENAME_SUFFIX_LEN)}", ), "darwin": os.path.join( "~", "Library", "Caches", "Snowflake", - 
f"ocsp_response_validation_cache{random_string()}", + f"ocsp_response_validation_cache{random_string(RANDOM_FILENAME_SUFFIX_LEN)}", ), "windows": os.path.join( "~", @@ -137,7 +138,7 @@ def random_ocsp_response_validation_cache(): "Local", "Snowflake", "Caches", - f"ocsp_response_validation_cache{random_string()}", + f"ocsp_response_validation_cache{random_string(RANDOM_FILENAME_SUFFIX_LEN)}", ), } yield SFDictFileCache( diff --git a/tox.ini b/tox.ini index 1fe5fc8f4a..920cf3c79f 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ source = src/snowflake/connector [tox] minversion = 4 envlist = fix_lint, - py{37,38,39,310,311,312}-{extras,unit-parallel,integ,pandas,sso}, + py{39,310,311,312,313}-{extras,unit-parallel,integ,pandas,sso}, coverage skip_missing_interpreters = true @@ -67,14 +67,15 @@ commands = extras: python -m test.extras.run {posargs:} [testenv:olddriver] -basepython = python3.8 +basepython = python3.9 description = run the old driver tests with pytest under {basepython} deps = pip >= 19.3.1 - pyOpenSSL==22.1.0 - snowflake-connector-python==1.9.1 + pyOpenSSL<=25.0.0 + snowflake-connector-python==3.0.2 azure-storage-blob==2.1.0 - pandas + pandas==2.0.3 + numpy==1.26.4 pendulum!=2.1.1 pytest<6.1.0 pytest-cov @@ -91,7 +92,7 @@ commands = {env:SNOWFLAKE_PYTEST_CMD} --ignore=test/unit --ignore=test/pandas -m "not skipolddriver" -vvv {posargs:} test [testenv:noarrowextension] -basepython = python3.8 +basepython = python3.9 skip_install = True description = run import with no arrow extension under {basepython} setenv = SNOWFLAKE_DISABLE_COMPILE_ARROW_EXTENSIONS=1 @@ -113,9 +114,9 @@ commands = coverage combine coverage xml -o {env:COV_REPORT_DIR:{toxworkdir}}/coverage.xml coverage html -d {env:COV_REPORT_DIR:{toxworkdir}}/htmlcov ; diff-cover --compare-branch {env:DIFF_AGAINST:origin/master} {toxworkdir}/coverage.xml -depends = py37, py38, py39, py310, py311, py312 +depends = py39, py310, py311, py312, py313 -[testenv:py{37,38,39,310,311,312}-coverage] 
+[testenv:py{39,310,311,312,313}-coverage] # I hate doing this, but this env is for Jenkins, please keep it up-to-date with the one env above it if necessary description = [run locally after tests]: combine coverage data and create report specifically with {basepython} deps = {[testenv:coverage]deps} @@ -133,7 +134,7 @@ deps = flake8 commands = flake8 {posargs} [testenv:fix_lint] -basepython = python3.8 +basepython = python3.9 description = format the code base to adhere to our styles, and complain about what we cannot do automatically passenv = PROGRAMDATA @@ -149,7 +150,7 @@ deps = pip-tools skip_install = True commands = pip-compile setup.py -depends = py37, py38, py39, py310, py311, py312 +depends = py39, py310, py311, py312, py313 [pytest] log_level = info