
Commit d6c8779

chore: Add 3.14 and 3.14t builds: update the GHA matrix, bump uv and cibuildwheel to include 3.14rc2, handle the new pandas warning, and mark unsupported packages as < 3.14.
1 parent c42539c

File tree: 6 files changed, +31 −18 lines

.github/workflows/cleanup_pypi.yml
.github/workflows/coverage.yml
.github/workflows/packaging_sdist.yml
.github/workflows/packaging_wheels.yml
pyproject.toml
tests/pytest.ini

.github/workflows/cleanup_pypi.yml

Lines changed: 1 addition & 1 deletion

@@ -52,7 +52,7 @@ jobs:
       - name: Install Astral UV
         uses: astral-sh/setup-uv@v6
         with:
-          version: "0.7.14"
+          version: "0.8.16"

       - name: Run Cleanup
         env:

.github/workflows/coverage.yml

Lines changed: 1 addition & 1 deletion

@@ -70,7 +70,7 @@ jobs:
       - name: Install Astral UV and enable the cache
         uses: astral-sh/setup-uv@v6
         with:
-          version: "0.7.14"
+          version: "0.8.16"
           python-version: 3.9
           enable-cache: true
           cache-suffix: -${{ github.workflow }}

.github/workflows/packaging_sdist.yml

Lines changed: 1 addition & 1 deletion

@@ -58,7 +58,7 @@ jobs:
       - name: Install Astral UV
         uses: astral-sh/setup-uv@v6
         with:
-          version: "0.7.14"
+          version: "0.8.16"
           python-version: 3.11

       - name: Build sdist

.github/workflows/packaging_wheels.yml

Lines changed: 5 additions & 4 deletions

@@ -30,7 +30,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python: [ cp39, cp310, cp311, cp312, cp313 ]
+        python: [ cp39, cp310, cp311, cp312, cp313, cp314, cp314t ]
         platform:
           - { os: windows-2025, arch: amd64, cibw_system: win }
           - { os: ubuntu-24.04, arch: x86_64, cibw_system: manylinux }
@@ -79,16 +79,17 @@ jobs:
       # Install Astral UV, which will be used as build-frontend for cibuildwheel
       - uses: astral-sh/setup-uv@v6
         with:
-          version: "0.7.14"
+          version: "0.8.16"
           enable-cache: false
           cache-suffix: -${{ matrix.python }}-${{ matrix.platform.cibw_system }}_${{ matrix.platform.arch }}
+          python-version: ${{ matrix.python }}

       - name: Build${{ inputs.testsuite != 'none' && ' and test ' || ' ' }}wheels
-        uses: pypa/cibuildwheel@v3.0
+        uses: pypa/cibuildwheel@v3.1
         env:
           CIBW_ARCHS: ${{ matrix.platform.arch == 'amd64' && 'AMD64' || matrix.platform.arch }}
           CIBW_BUILD: ${{ matrix.python }}-${{ matrix.platform.cibw_system }}_${{ matrix.platform.arch }}
-
+          CIBW_ENVIRONMENT: PYTHON_GIL=1
       - name: Upload wheel
         uses: actions/upload-artifact@v4
         with:
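
The cp314 and cp314t identifiers add regular and free-threaded CPython 3.14 wheels to the build matrix, and CIBW_ENVIRONMENT: PYTHON_GIL=1 keeps the GIL enabled while those wheels are tested. As a minimal sketch (not part of this commit), a test run under cibuildwheel could confirm the GIL state it actually got; sys._is_gil_enabled() only exists on CPython 3.13+, so the fallback below assumes older interpreters always have a GIL:

# Sketch: report the runtime GIL state. With PYTHON_GIL=1 even a cp314t
# (free-threaded) wheel runs its tests with the GIL enabled.
import sys

# sys._is_gil_enabled() was added in CPython 3.13; older interpreters
# always have a GIL, hence the default of True.
gil_enabled = getattr(sys, "_is_gil_enabled", lambda: True)()
print(f"GIL enabled: {gil_enabled}")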

pyproject.toml

Lines changed: 21 additions & 11 deletions

@@ -47,8 +47,8 @@ all = [ # users can install duckdb with 'duckdb[all]', which will install this l
     "fsspec", # used in duckdb.filesystem
     "numpy", # used in duckdb.experimental.spark and in duckdb.fetchnumpy()
     "pandas", # used for pandas dataframes all over the place
-    "pyarrow", # used for pyarrow support
-    "adbc_driver_manager", # for the adbc driver (TODO: this should live under the duckdb package)
+    "pyarrow; python_version < '3.14'", # used for pyarrow support
+    "adbc_driver_manager; python_version < '3.14'", # for the adbc driver (TODO: this should live under the duckdb package)
 ]

 ######################################################################################################
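
The "; python_version < '3.14'" suffixes are standard environment markers (PEP 508): installers still resolve the extra, but skip pyarrow and adbc_driver_manager on 3.14, the two packages the commit marks as unsupported there. A small sketch, not part of this commit, of how such a marker evaluates, using the packaging library that already appears in the test dependency group:

# Sketch: evaluate the "python_version < '3.14'" marker the way an
# installer would, using packaging (already listed under the test group).
from packaging.markers import Marker

marker = Marker("python_version < '3.14'")

print(marker.evaluate())                            # against the running interpreter
print(marker.evaluate({"python_version": "3.14"}))  # against a 3.14 target -> False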
@@ -123,6 +123,13 @@ if.env.COVERAGE = false
 inherit.cmake.define = "append"
 cmake.define.DISABLE_UNITY = "1"

+[[tool.scikit-build.overrides]]
+# Windows Free-Threading
+if.platform-system = "^win32"
+if.abi-flags = "t"
+inherit.cmake.define = "append"
+cmake.define.CMAKE_C_FLAGS="/DPy_MOD_GIL_USED /DPy_GIL_DISABLED"
+cmake.define.CMAKE_CXX_FLAGS="/DPy_MOD_GIL_USED /DPy_GIL_DISABLED"

 [tool.scikit-build.sdist]
 include = [
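
This override appends /DPy_MOD_GIL_USED and /DPy_GIL_DISABLED only when building on Windows with a free-threaded (ABI flag "t") interpreter, presumably because the Windows headers do not set Py_GIL_DISABLED on their own for a free-threaded target, so the defines have to be passed explicitly. A rough Python restatement of the condition the override matches, offered as a sketch rather than what scikit-build-core actually executes:

# Sketch: roughly the condition the [[tool.scikit-build.overrides]] table
# above matches (free-threaded interpreter on Windows).
import sys
import sysconfig

# Py_GIL_DISABLED is 1 in the build configuration of free-threaded
# (cp313t/cp314t) interpreters and unset otherwise.
free_threaded = bool(sysconfig.get_config_var("Py_GIL_DISABLED"))

extra_cl_flags = []
if sys.platform == "win32" and free_threaded:
    # Mirrors cmake.define.CMAKE_C_FLAGS / CMAKE_CXX_FLAGS in the override.
    extra_cl_flags = ["/DPy_MOD_GIL_USED", "/DPy_GIL_DISABLED"]

print(extra_cl_flags)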
@@ -204,6 +211,7 @@ required-environments = [ # ... but do always resolve for all of them
     "python_version >= '3.9' and sys_platform == 'linux' and platform_machine == 'x86_64'",
     "python_version >= '3.9' and sys_platform == 'linux' and platform_machine == 'aarch64'",
 ]
+prerelease = "allow" # for 3.14

 # We just need pytorch for tests, wihtout GPU acceleration. PyPI doesn't host a cpu-only version for Linux, so we have
 # to configure the index url for cpu-only pytorch manually
@@ -220,30 +228,30 @@ torchvision = [ { index = "pytorch-cpu" } ]
 stubdeps = [ # dependencies used for typehints in the stubs
     "fsspec",
     "pandas",
-    "polars",
-    "pyarrow",
+    "polars; python_version < '3.14'",
+    "pyarrow; python_version < '3.14'",
 ]
 test = [ # dependencies used for running tests
     "pytest",
     "pytest-reraise",
     "pytest-timeout",
     "mypy",
     "coverage",
-    "gcovr",
+    "gcovr; python_version < '3.14'",
     "gcsfs",
     "packaging",
-    "polars",
+    "polars; python_version < '3.14'",
     "psutil",
     "py4j",
     "pyotp",
-    "pyspark",
+    "pyspark; python_version < '3.14'",
     "pytz",
     "requests",
     "urllib3",
     "fsspec>=2022.11.0",
     "pandas>=2.0.0",
-    "pyarrow>=18.0.0",
-    "torch>=2.2.2; sys_platform != 'darwin' or platform_machine != 'x86_64' or python_version < '3.13'",
+    "pyarrow>=18.0.0; python_version < '3.14'",
+    "torch>=2.2.2; python_version < '3.14' and (sys_platform != 'darwin' or platform_machine != 'x86_64' or python_version < '3.13')",
     "tensorflow==2.14.0; sys_platform == 'darwin' and python_version < '3.12'",
     "tensorflow-cpu>=2.14.0; sys_platform == 'linux' and platform_machine != 'aarch64' and python_version < '3.12'",
     "tensorflow-cpu>=2.14.0; sys_platform == 'win32' and python_version < '3.12'",
@@ -258,8 +266,8 @@ scripts = [ # dependencies used for running scripts
     "numpy",
     "pandas",
     "pcpp",
-    "polars",
-    "pyarrow",
+    "polars; python_version < '3.14'",
+    "pyarrow; python_version < '3.14'",
     "pytz"
 ]
 pypi = [ # dependencies used by the pypi cleanup script
@@ -305,6 +313,7 @@ filterwarnings = [
     # Pyspark is throwing these warnings
     "ignore:distutils Version classes are deprecated:DeprecationWarning",
     "ignore:is_datetime64tz_dtype is deprecated:DeprecationWarning",
+    "ignore:ChainedAssignmentError.*:FutureWarning"
 ]

 [tool.coverage.run]
@@ -379,6 +388,7 @@ manylinux-x86_64-image = "manylinux_2_28"
 manylinux-pypy_x86_64-image = "manylinux_2_28"
 manylinux-aarch64-image = "manylinux_2_28"
 manylinux-pypy_aarch64-image = "manylinux_2_28"
+enable = ["cpython-freethreading", "cpython-prerelease"]

 [tool.cibuildwheel.linux]
 before-build = ["yum install -y ccache"]

tests/pytest.ini

Lines changed: 2 additions & 0 deletions

@@ -2,6 +2,8 @@
 [pytest]
 filterwarnings =
     error
+    # Pandas ChainedAssignmentError warnings for 3.0
+    ignore:ChainedAssignmentError.*:FutureWarning
     ignore::UserWarning
     ignore::DeprecationWarning
     # Jupyter is throwing DeprecationWarnings
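
This entry and the one added to pyproject.toml silence the same warning: pandas 2.2+ emits a FutureWarning whose message begins with "ChainedAssignmentError" when code assigns through chained indexing, announcing the Copy-on-Write behaviour change coming in pandas 3.0. A sketch, not part of this commit, of the pattern that triggers it together with the programmatic equivalent of the ini filter:

# Sketch: chained assignment of the kind pandas 2.2+ warns about, plus the
# programmatic equivalent of "ignore:ChainedAssignmentError.*:FutureWarning".
import warnings

import pandas as pd

warnings.filterwarnings(
    "ignore", message="ChainedAssignmentError.*", category=FutureWarning
)

df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})

# Indexing twice and assigning to the intermediate object; under
# Copy-on-Write (the pandas 3.0 default) this no longer updates df,
# which is what the FutureWarning announces.
df[df["a"] > 1]["b"] = 0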
