From ec7d96fe98496358170e4ecb7f0de91f6f6d0f74 Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Sun, 17 Nov 2024 16:42:37 +0100 Subject: [PATCH 01/18] Add support for free-threading builds of CPython --- .github/workflows/test.yml | 21 +++-- c-ext/backend_c.c | 6 +- ci/requirements.freethreading.in | 12 +++ ci/requirements.freethreading.txt | 132 +++++++++++++++++++++++++++++ pyproject.toml | 2 +- tests/test_decompressor_fuzzing.py | 2 +- 6 files changed, 167 insertions(+), 8 deletions(-) create mode 100644 ci/requirements.freethreading.in create mode 100644 ci/requirements.freethreading.txt diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 75409092..fb1a0226 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,6 +20,7 @@ jobs: - '3.11' - '3.12' - '3.13' + - '3.13t' arch: - 'arm64' - 'x86' @@ -59,11 +60,10 @@ jobs: PYTHONDEVMODE: '1' steps: - name: Set up Python - uses: actions/setup-python@v5 + uses: Quansight-Labs/setup-python@v5 with: python-version: ${{ matrix.py }} architecture: ${{ matrix.arch }} - allow-prereleases: ${{ matrix.py == '3.13' && true || false }} - name: Install Rust if: matrix.arch == 'x64' @@ -74,7 +74,7 @@ jobs: - uses: actions/checkout@v4 - name: Install Dependencies - if: matrix.py != '3.13' + if: "!startsWith(matrix.py, '3.13')" shell: bash run: | python -m pip install --require-hashes -r ci/requirements.txt @@ -85,15 +85,20 @@ jobs: shell: bash run: | python -m pip install --require-hashes -r ci/requirements.313.txt + - name: Install Dependencies (Python 3.13t) + if: matrix.py == '3.13t' + shell: bash + run: | + python -m pip install --require-hashes -r ci/requirements.freethreading.txt # TODO enable once PyO3 supports 3.13. - name: Build (Rust) - if: matrix.arch == 'x64' && matrix.py != '3.13' + if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13') run: | python -m pip install --config-settings='--build-option=--rust-backend' -e . - name: Build (No Rust) - if: matrix.arch != 'x64' || matrix.py == '3.13' + if: matrix.arch != 'x64' || startsWith(matrix.py, '3.13') run: | python -m pip install -e . @@ -101,7 +106,13 @@ jobs: run: | pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} -v tests/ + - name: Test in Parallel + if: "endsWith(matrix.py, 't')" + run: | + pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} --parallel-threads=10 -v tests/ + - name: Test CFFI Backend + if: "!startsWith(matrix.py, '3.13')" # see pyproject.toml:4 env: PYTHON_ZSTANDARD_IMPORT_POLICY: 'cffi' run: | diff --git a/c-ext/backend_c.c b/c-ext/backend_c.c index aabe30bc..a01187ec 100644 --- a/c-ext/backend_c.c +++ b/c-ext/backend_c.c @@ -210,6 +210,10 @@ void zstd_module_init(PyObject *m) { Py_DECREF(feature); #endif +#ifdef Py_GIL_DISABLED + PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED); +#endif + if (PyObject_SetAttrString(m, "backend_features", features) == -1) { return; } @@ -313,7 +317,7 @@ size_t roundpow2(size_t i) { int safe_pybytes_resize(PyObject **obj, Py_ssize_t size) { PyObject *tmp; - if ((*obj)->ob_refcnt == 1) { + if (Py_REFCNT(*obj) == 1) { return _PyBytes_Resize(obj, size); } diff --git a/ci/requirements.freethreading.in b/ci/requirements.freethreading.in new file mode 100644 index 00000000..2ba31630 --- /dev/null +++ b/ci/requirements.freethreading.in @@ -0,0 +1,12 @@ +# This is a dependency of pytest on Windows but isn't picked up by pip-compile. 
+atomicwrites +cibuildwheel +#cffi +colorama +hypothesis +mypy +pycparser +pytest-xdist +pytest-run-parallel +pytest +wheel diff --git a/ci/requirements.freethreading.txt b/ci/requirements.freethreading.txt new file mode 100644 index 00000000..1b73e1f2 --- /dev/null +++ b/ci/requirements.freethreading.txt @@ -0,0 +1,132 @@ +# +# This file is autogenerated by pip-compile with Python 3.13 +# by the following command: +# +# pip-compile --generate-hashes --output-file=ci/requirements.freethreading.txt --pre ci/requirements.freethreading.in +# +atomicwrites==1.4.1 \ + --hash=sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11 + # via -r ci/requirements.freethreading.in +attrs==24.2.0 \ + --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ + --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 + # via hypothesis +bashlex==0.18 \ + --hash=sha256:5bb03a01c6d5676338c36fd1028009c8ad07e7d61d8a1ce3f513b7fff52796ee \ + --hash=sha256:91d73a23a3e51711919c1c899083890cdecffc91d8c088942725ac13e9dcfffa + # via cibuildwheel +bracex==2.5.post1 \ + --hash=sha256:12c50952415bfa773d2d9ccb8e79651b8cdb1f31a42f6091b804f6ba2b4a66b6 \ + --hash=sha256:13e5732fec27828d6af308628285ad358047cec36801598368cb28bc631dbaf6 + # via cibuildwheel +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 + # via cibuildwheel +cibuildwheel==2.21.3 \ + --hash=sha256:3ce23a9e5406b3eeb80039d7a6fdb218a2450932a8037c0bf76511cd88dfb74e \ + --hash=sha256:f1d036a13603a6ce4019d8b1bd52c296cf32461a3b3be8441434b60b8b378b80 + # via -r ci/requirements.freethreading.in +colorama==0.4.6 \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 + # via -r ci/requirements.freethreading.in +execnet==2.1.1 \ + --hash=sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc \ + --hash=sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3 + # via pytest-xdist +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 + # via cibuildwheel +hypothesis==6.116.0 \ + --hash=sha256:9c1ac9a2edb77aacae1950d8ded6b3f40dbf8483097c88336265c348d2132c71 \ + --hash=sha256:d30271214eae0d4758b72b408e9777405c7c7f687e14e8a42853adea887b2891 + # via -r ci/requirements.freethreading.in +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + # via pytest +mypy==1.13.0 \ + --hash=sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc \ + --hash=sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e \ + --hash=sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f \ + --hash=sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74 \ + --hash=sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a \ + --hash=sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2 \ + --hash=sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b \ + --hash=sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73 \ + 
--hash=sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e \ + --hash=sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d \ + --hash=sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d \ + --hash=sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6 \ + --hash=sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca \ + --hash=sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d \ + --hash=sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5 \ + --hash=sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62 \ + --hash=sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a \ + --hash=sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc \ + --hash=sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7 \ + --hash=sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb \ + --hash=sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7 \ + --hash=sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732 \ + --hash=sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80 \ + --hash=sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a \ + --hash=sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc \ + --hash=sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2 \ + --hash=sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0 \ + --hash=sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24 \ + --hash=sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7 \ + --hash=sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b \ + --hash=sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372 \ + --hash=sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8 + # via -r ci/requirements.freethreading.in +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 + # via mypy +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 + # via + # cibuildwheel + # pytest +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb + # via cibuildwheel +pluggy==1.5.0 \ + --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ + --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 + # via pytest +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc + # via -r ci/requirements.freethreading.in +pytest==8.3.3 \ + --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ + --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 + # via + # -r ci/requirements.freethreading.in + # pytest-run-parallel + # pytest-xdist +pytest-run-parallel==0.1.0 \ + --hash=sha256:13d8579d39d60d5d77695e6bc292daa3352a5974eb446819f52fba4e20bb0d0f \ + --hash=sha256:271854a2919aaff4e2a39bc2094bd2f96aa32fba9e51a995405ead35b74cc062 + # 
via -r ci/requirements.freethreading.in +pytest-xdist==3.6.1 \ + --hash=sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7 \ + --hash=sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d + # via -r ci/requirements.freethreading.in +sortedcontainers==2.4.0 \ + --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \ + --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 + # via hypothesis +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 + # via mypy +wheel==0.44.0 \ + --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ + --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 + # via -r ci/requirements.freethreading.in diff --git a/pyproject.toml b/pyproject.toml index b029290c..b2a1d0a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [build-system] requires = [ "cffi>=1.16.0; python_version <= '3.12'", - "cffi==1.17.0rc1; python_version >= '3.13'", + # "cffi==1.17.0rc1; python_version >= '3.13'", # ok for default, nok for free-threading # 69.0.0 breaks handling of --config-settings=--build-option, which our CI # relies on. So constrained to an older version until we figure out a # workaround. See comment at diff --git a/tests/test_decompressor_fuzzing.py b/tests/test_decompressor_fuzzing.py index 53ecf0cc..968fcf4f 100644 --- a/tests/test_decompressor_fuzzing.py +++ b/tests/test_decompressor_fuzzing.py @@ -531,7 +531,7 @@ def test_random_output_sizes( ), read_sizes=strategies.data(), ) - def test_read_across_frames_false( + def test_read_across_frames_false( # fails self, chunks, level, write_size, read_sizes ): cctx = zstd.ZstdCompressor(level=level) From 538ec17629d6579b23b6a50b67d77058944d69ee Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Tue, 19 Nov 2024 18:58:56 +0100 Subject: [PATCH 02/18] disable rust installation step for 3.13, because the tests are disabled anyways --- .github/workflows/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e7ba8ebf..00729870 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -66,7 +66,7 @@ jobs: architecture: ${{ matrix.arch }} - name: Install Rust - if: matrix.arch == 'x64' + if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13') uses: dtolnay/rust-toolchain@v1 with: toolchain: stable @@ -113,7 +113,7 @@ jobs: pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} -v tests/ - name: Test Rust Backend - if: matrix.arch == 'x64' + if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13') # Rust backend is currently experimental. So ignore failures in it. 
continue-on-error: true env: From 27278be36cd1348e316a9f7bfed12166da45e000 Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Sat, 23 Nov 2024 23:07:19 +0100 Subject: [PATCH 03/18] mark hypothesis tests as thread_unsafe --- tests/test_compressor_fuzzing.py | 27 +++++++++++++++++++++++++++ tests/test_data_structures_fuzzing.py | 5 +++++ tests/test_decompressor_fuzzing.py | 18 ++++++++++++++++++ 3 files changed, 50 insertions(+) diff --git a/tests/test_compressor_fuzzing.py b/tests/test_compressor_fuzzing.py index 032b4428..26ad96de 100644 --- a/tests/test_compressor_fuzzing.py +++ b/tests/test_compressor_fuzzing.py @@ -2,6 +2,9 @@ import os import unittest +import pytest + + try: import hypothesis import hypothesis.strategies as strategies @@ -15,6 +18,7 @@ @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_stream_reader_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -49,6 +53,7 @@ def test_stream_source_read( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -83,6 +88,7 @@ def test_buffer_source_read( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -116,6 +122,7 @@ def test_stream_source_read_variance( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -149,6 +156,7 @@ def test_buffer_source_read_variance( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -182,6 +190,7 @@ def test_stream_source_readinto( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -215,6 +224,7 @@ def test_buffer_source_readinto( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -250,6 +260,7 @@ def test_stream_source_readinto_variance( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -285,6 +296,7 @@ def test_buffer_source_readinto_variance( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -319,6 +331,7 @@ def test_stream_source_read1( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -353,6 +366,7 @@ def test_buffer_source_read1( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -386,6 +400,7 @@ def test_stream_source_read1_variance( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -419,6 +434,7 @@ def test_buffer_source_read1_variance( self.assertEqual(b"".join(chunks), ref_frame) + 
@pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -455,6 +471,7 @@ def test_stream_source_readinto1( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -491,6 +508,7 @@ def test_buffer_source_readinto1( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -526,6 +544,7 @@ def test_stream_source_readinto1_variance( self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -564,6 +583,7 @@ def test_buffer_source_readinto1_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_stream_writer_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -585,6 +605,7 @@ def test_write_size_variance(self, original, level, write_size): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_copy_stream_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -614,6 +635,7 @@ def test_read_write_size_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_compressobj_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -647,6 +669,7 @@ def test_random_input_sizes(self, original, level, chunk_sizes): self.assertEqual(b"".join(chunks), ref_frame) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -705,6 +728,7 @@ def test_flush_block(self, original, level, chunk_sizes, flushes): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_read_to_iter_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -738,6 +762,7 @@ def test_read_write_size_variance( "multi_compress_to_buffer not available", ) class TestCompressor_multi_compress_to_buffer_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.lists( strategies.sampled_from(random_input_data()), @@ -772,6 +797,7 @@ def test_data_equivalence(self, original, threads, use_dict): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_chunker_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.data_too_large, @@ -811,6 +837,7 @@ def test_random_input_sizes(self, original, level, chunk_size, input_sizes): self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1])) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, diff --git a/tests/test_data_structures_fuzzing.py b/tests/test_data_structures_fuzzing.py index 87c3b902..e533638f 100644 --- 
a/tests/test_data_structures_fuzzing.py +++ b/tests/test_data_structures_fuzzing.py @@ -1,6 +1,9 @@ import os import unittest +import pytest + + try: import hypothesis import hypothesis.strategies as strategies @@ -44,6 +47,7 @@ @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressionParametersHypothesis(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.given( s_windowlog, s_chainlog, @@ -73,6 +77,7 @@ def test_valid_init( strategy=strategy, ) + @pytest.mark.thread_unsafe @hypothesis.given( s_windowlog, s_chainlog, diff --git a/tests/test_decompressor_fuzzing.py b/tests/test_decompressor_fuzzing.py index 968fcf4f..03182465 100644 --- a/tests/test_decompressor_fuzzing.py +++ b/tests/test_decompressor_fuzzing.py @@ -1,6 +1,7 @@ import io import os import unittest +import pytest try: import hypothesis @@ -15,6 +16,7 @@ @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_stream_reader_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -58,6 +60,7 @@ def test_stream_source_read_variance( self.assertEqual(b"".join(chunks), original) # Similar to above except we have a constant read() size. + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -99,6 +102,7 @@ def test_stream_source_read_size( self.assertEqual(b"".join(chunks), original) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -141,6 +145,7 @@ def test_buffer_source_read_variance( self.assertEqual(b"".join(chunks), original) # Similar to above except we have a constant read() size. 
+ @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -181,6 +186,7 @@ def test_buffer_source_constant_read_size( self.assertEqual(b"".join(chunks), original) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -210,6 +216,7 @@ def test_stream_source_readall( data = dctx.stream_reader(source, read_size=source_read_size).readall() self.assertEqual(data, original) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -252,6 +259,7 @@ def test_stream_source_read1_variance( self.assertEqual(b"".join(chunks), original) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -296,6 +304,7 @@ def test_stream_source_readinto1_variance( self.assertEqual(b"".join(chunks), original) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.data_too_large, @@ -332,6 +341,7 @@ def test_relative_seeks( self.assertEqual(original[offset : offset + len(chunk)], chunk) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -381,6 +391,7 @@ def test_multiple_frames(self, chunks, level, source_read_size, read_sizes): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_stream_writer_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -419,6 +430,7 @@ def test_write_size_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_copy_stream_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -450,6 +462,7 @@ def test_read_write_size_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_decompressobj_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -481,6 +494,7 @@ def test_random_input_sizes(self, original, level, chunk_sizes): self.assertEqual(b"".join(chunks), original) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -518,6 +532,7 @@ def test_random_output_sizes( self.assertEqual(b"".join(chunks), original) + @pytest.mark.thread_unsafe @hypothesis.given( chunks=strategies.lists( strategies.sampled_from(random_input_data()), @@ -570,6 +585,7 @@ def test_read_across_frames_false( # fails self.assertEqual(decompressed.getvalue(), source_chunks[0]) + @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -624,6 +640,7 @@ def test_read_across_frames_true( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_read_to_iter_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -654,6 +671,7 @@ def test_read_write_size_variance( "multi_decompress_to_buffer not available", ) class TestDecompressor_multi_decompress_to_buffer_fuzzing(unittest.TestCase): + @pytest.mark.thread_unsafe 
@hypothesis.given( original=strategies.lists( strategies.sampled_from(random_input_data()), From 711bd22935ff4c39327c17ec25a44edef994b97e Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Sat, 23 Nov 2024 23:07:56 +0100 Subject: [PATCH 04/18] add test_shared_compressor (segfaults) --- tests/test_compressor_threadsafe.py | 30 +++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 tests/test_compressor_threadsafe.py diff --git a/tests/test_compressor_threadsafe.py b/tests/test_compressor_threadsafe.py new file mode 100644 index 00000000..8b5ecc50 --- /dev/null +++ b/tests/test_compressor_threadsafe.py @@ -0,0 +1,30 @@ +import io +import unittest +from threading import Barrier, Thread + +import pytest + +import zstandard as zstd + + +class TestCompressor_threadsafe(unittest.TestCase): + @pytest.mark.thread_unsafe + def test_shared_compressor(self): + num_parallel_threads = 10 + cctx = zstd.ZstdCompressor() + barrier = Barrier(num_parallel_threads) + + def thread(): + barrier.wait() + with self.assertRaises(zstd.ZstdError): + for _ in range(1_000): + cctx.compress(io.BytesIO(b"t" * 1048576).getvalue()) + + threads = [ + Thread(target=thread) + for _ in range(num_parallel_threads) + ] + for t in threads: + t.start() + for t in threads: + t.join() From ddc5c7ca0e3e915549f180a2245e147b79173c8c Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Wed, 27 Nov 2024 18:29:51 +0100 Subject: [PATCH 05/18] avoid io.BytesIO object --- tests/test_compressor_threadsafe.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_compressor_threadsafe.py b/tests/test_compressor_threadsafe.py index 8b5ecc50..d2323e7b 100644 --- a/tests/test_compressor_threadsafe.py +++ b/tests/test_compressor_threadsafe.py @@ -1,4 +1,3 @@ -import io import unittest from threading import Barrier, Thread @@ -18,7 +17,7 @@ def thread(): barrier.wait() with self.assertRaises(zstd.ZstdError): for _ in range(1_000): - cctx.compress(io.BytesIO(b"t" * 1048576).getvalue()) + cctx.compress(b"t" * 1048576) threads = [ Thread(target=thread) From 3e0ec595b114215281d4bd8dfc53bb517bfce4a3 Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Wed, 27 Nov 2024 19:47:30 +0100 Subject: [PATCH 06/18] only allow one thread at a time to use ZstdCompressor --- c-ext/_pyzstd_atomics.h | 88 ++++++++++++++++++++++++++++++++++++++++ c-ext/compressor.c | 19 +++++++++ c-ext/python-zstandard.h | 2 + 3 files changed, 109 insertions(+) create mode 100644 c-ext/_pyzstd_atomics.h diff --git a/c-ext/_pyzstd_atomics.h b/c-ext/_pyzstd_atomics.h new file mode 100644 index 00000000..0c3e08b3 --- /dev/null +++ b/c-ext/_pyzstd_atomics.h @@ -0,0 +1,88 @@ +/* + * Provides wrappers around C11 standard library atomics and MSVC intrinsics + * to provide basic atomic load and store functionality. 
This is based on
+ * code in CPython's pyatomic.h, pyatomic_std.h, and pyatomic_msc.h
+ *
+ * Adapted from:
+ * - numpy/_core/src/common/npy_atomic.h
+ * - cpython/Include/cpython/pyatomic.h
+ */
+
+#ifndef PYZSTD_ATOMICS_H
+#define PYZSTD_ATOMICS_H
+
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \
+    && !defined(__STDC_NO_ATOMICS__)
+// TODO: support C++ atomics as well if this header is ever needed in C++
+    #include <stdatomic.h>
+    #include <stdint.h>
+    #define STDC_ATOMICS
+#elif _MSC_VER
+    #include <intrin.h>
+    #define MSC_ATOMICS
+    #if !defined(_M_X64) && !defined(_M_IX86) && !defined(_M_ARM64)
+        #error "Unsupported MSVC build configuration, neither x86 nor ARM"
+    #endif
+#elif defined(__GNUC__) && (__GNUC__ > 4)
+    #define GCC_ATOMICS
+#elif defined(__clang__)
+    #if __has_builtin(__atomic_load)
+        #define GCC_ATOMICS
+    #endif
+#else
+    #error "no supported atomic implementation for this platform/compiler"
+#endif
+
+
+static inline int8_t
+pyzstd_atomic_load_int8(const int8_t *obj) {
+#ifdef STDC_ATOMICS
+    return (int8_t)atomic_load((const _Atomic(int8_t)*)obj);
+#elif defined(MSC_ATOMICS)
+#if defined(_M_X64) || defined(_M_IX86)
+    return *(volatile int8_t *)obj;
+#else // defined(_M_ARM64)
+    return (int8_t)__ldar8((unsigned __int8 volatile *)obj);
+#endif
+#elif defined(GCC_ATOMICS)
+    return __atomic_load_n(obj, __ATOMIC_SEQ_CST);
+#endif
+}
+
+static inline void
+pyzstd_atomic_store_int8(int8_t *obj, int8_t value) {
+#ifdef STDC_ATOMICS
+    atomic_store((_Atomic(int8_t)*)obj, value);
+#elif defined(MSC_ATOMICS)
+    _InterlockedExchange8((volatile char *)obj, (char)value);
+#elif defined(GCC_ATOMICS)
+    __atomic_store_n(obj, value, __ATOMIC_SEQ_CST);
+#endif
+}
+
+static inline int
+pyzstd_atomic_compare_exchange_int8(int8_t *obj, int8_t expected, int8_t desired) {
+#ifdef STDC_ATOMICS
+    return atomic_compare_exchange_strong((_Atomic(int8_t)*)obj,
+                                          &expected, desired);
+#elif defined(MSC_ATOMICS)
+    int8_t initial = (int8_t)_InterlockedCompareExchange8(
+        (volatile char *)obj,
+        (char)desired,
+        (char)expected);
+    if (initial == expected) {
+        return 1;
+    }
+    /* initial != expected: the exchange did not happen */
+    return 0;
+#elif defined(GCC_ATOMICS)
+    return __atomic_compare_exchange_n(obj, &expected, desired, 0,
+                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
+#endif
+}
+
+#undef MSC_ATOMICS
+#undef STDC_ATOMICS
+#undef GCC_ATOMICS
+
+#endif // PYZSTD_ATOMICS_H
diff --git a/c-ext/compressor.c b/c-ext/compressor.c
index bbef9fd7..e365c44a 100644
--- a/c-ext/compressor.c
+++ b/c-ext/compressor.c
@@ -7,6 +7,7 @@
  */
 
 #include "python-zstandard.h"
+#include "_pyzstd_atomics.h"
 
 extern PyObject *ZstdError;
 
@@ -238,6 +239,8 @@ static int ZstdCompressor_init(ZstdCompressor *self, PyObject *args,
         Py_INCREF(dict);
     }
 
+    self->in_use = 0;
+
     if (setup_cctx(self)) {
         return -1;
     }
@@ -522,6 +525,14 @@ static PyObject *ZstdCompressor_compress(ZstdCompressor *self, PyObject *args,
         return NULL;
     }
 
+    if (pyzstd_atomic_load_int8(&self->in_use)) {
+        goto concurrent_use;
+    }
+
+    if (!pyzstd_atomic_compare_exchange_int8(&self->in_use, 0, 1)) {
+        goto concurrent_use;
+    }
+
     ZSTD_CCtx_reset(self->cctx, ZSTD_reset_session_only);
 
     destSize = ZSTD_compressBound(source.len);
@@ -569,8 +580,16 @@ static PyObject *ZstdCompressor_compress(ZstdCompressor *self, PyObject *args,
     Py_SET_SIZE(output, outBuffer.pos);
 
 finally:
+    pyzstd_atomic_store_int8(&self->in_use, 0);
     PyBuffer_Release(&source);
     return output;
+
+concurrent_use:
+    PyErr_SetString(ZstdError, "concurrent use is not allowed. 
" + "See https://python-zstandard.readthedocs.io" + "/en/latest/api_usage.html" + "#thread-and-object-reuse-safety"); + return NULL; } static ZstdCompressionObj *ZstdCompressor_compressobj(ZstdCompressor *self, diff --git a/c-ext/python-zstandard.h b/c-ext/python-zstandard.h index 6c62b06f..f10e579e 100644 --- a/c-ext/python-zstandard.h +++ b/c-ext/python-zstandard.h @@ -104,6 +104,8 @@ typedef struct { ZSTD_CCtx *cctx; /* Compression parameters in use. */ ZSTD_CCtx_params *params; + /* Is this compressor being used by a thread? */ + int8_t in_use; } ZstdCompressor; extern PyTypeObject *ZstdCompressorType; From da3aec8095ccf3bb0cbd1e625f89b3c830ff4d62 Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Wed, 27 Nov 2024 19:47:43 +0100 Subject: [PATCH 07/18] fix test_shared_compressor --- tests/test_compressor_threadsafe.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/test_compressor_threadsafe.py b/tests/test_compressor_threadsafe.py index d2323e7b..e77a89ff 100644 --- a/tests/test_compressor_threadsafe.py +++ b/tests/test_compressor_threadsafe.py @@ -1,5 +1,5 @@ import unittest -from threading import Barrier, Thread +from threading import Barrier, Lock, Thread import pytest @@ -12,18 +12,28 @@ def test_shared_compressor(self): num_parallel_threads = 10 cctx = zstd.ZstdCompressor() barrier = Barrier(num_parallel_threads) + raised_exceptions = 0 + raised_exceptions_lock = Lock() def thread(): + nonlocal raised_exceptions + barrier.wait() - with self.assertRaises(zstd.ZstdError): + try: for _ in range(1_000): cctx.compress(b"t" * 1048576) + except zstd.ZstdError: + with raised_exceptions_lock: + raised_exceptions += 1 threads = [ Thread(target=thread) for _ in range(num_parallel_threads) ] + # time.sleep(10) for t in threads: t.start() for t in threads: t.join() + + assert raised_exceptions == num_parallel_threads - 1 From 57f26ea9a439fbf4978b56bac107a0c62c0ac871 Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Mon, 2 Dec 2024 18:32:10 +0100 Subject: [PATCH 08/18] fix memory ordering --- c-ext/_pyzstd_atomics.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/c-ext/_pyzstd_atomics.h b/c-ext/_pyzstd_atomics.h index 0c3e08b3..d1da99f8 100644 --- a/c-ext/_pyzstd_atomics.h +++ b/c-ext/_pyzstd_atomics.h @@ -37,7 +37,7 @@ static inline int8_t pyzstd_atomic_load_int8(const int8_t *obj) { #ifdef STDC_ATOMICS - return (int8_t)atomic_load((const _Atomic(int8_t)*)obj); + return (int8_t)atomic_load_explicit((const _Atomic(int8_t)*)obj, memory_order_relaxed); #elif defined(MSC_ATOMICS) #if defined(_M_X64) || defined(_M_IX86) return *(volatile int8_t *)obj; @@ -45,7 +45,7 @@ pyzstd_atomic_load_int8(const int8_t *obj) { return (int8_t)__ldar8((unsigned __int8 volatile *)obj); #endif #elif defined(GCC_ATOMICS) - return __atomic_load_n(obj, __ATOMIC_SEQ_CST); + return __atomic_load_n(obj, __ATOMIC_RELAXED); #endif } From 910d0047b2dccdb2034d6c2b273e276f35f2378a Mon Sep 17 00:00:00 2001 From: parmeggiani Date: Mon, 10 Feb 2025 18:18:15 +0100 Subject: [PATCH 09/18] revert some changes --- c-ext/_pyzstd_atomics.h | 88 ----------------------------- c-ext/compressor.c | 19 ------- c-ext/python-zstandard.h | 2 - tests/test_compressor_threadsafe.py | 39 ------------- 4 files changed, 148 deletions(-) delete mode 100644 c-ext/_pyzstd_atomics.h delete mode 100644 tests/test_compressor_threadsafe.py diff --git a/c-ext/_pyzstd_atomics.h b/c-ext/_pyzstd_atomics.h deleted file mode 100644 index d1da99f8..00000000 --- a/c-ext/_pyzstd_atomics.h +++ /dev/null @@ 
-1,88 +0,0 @@
-/*
- * Provides wrappers around C11 standard library atomics and MSVC intrinsics
- * to provide basic atomic load and store functionality. This is based on
- * code in CPython's pyatomic.h, pyatomic_std.h, and pyatomic_msc.h
- *
- * Adapted from:
- * - numpy/_core/src/common/npy_atomic.h
- * - cpython/Include/cpython/pyatomic.h
- */
-
-#ifndef PYZSTD_ATOMICS_H
-#define PYZSTD_ATOMICS_H
-
-#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L \
-    && !defined(__STDC_NO_ATOMICS__)
-// TODO: support C++ atomics as well if this header is ever needed in C++
-    #include <stdatomic.h>
-    #include <stdint.h>
-    #define STDC_ATOMICS
-#elif _MSC_VER
-    #include <intrin.h>
-    #define MSC_ATOMICS
-    #if !defined(_M_X64) && !defined(_M_IX86) && !defined(_M_ARM64)
-        #error "Unsupported MSVC build configuration, neither x86 nor ARM"
-    #endif
-#elif defined(__GNUC__) && (__GNUC__ > 4)
-    #define GCC_ATOMICS
-#elif defined(__clang__)
-    #if __has_builtin(__atomic_load)
-        #define GCC_ATOMICS
-    #endif
-#else
-    #error "no supported atomic implementation for this platform/compiler"
-#endif
-
-
-static inline int8_t
-pyzstd_atomic_load_int8(const int8_t *obj) {
-#ifdef STDC_ATOMICS
-    return (int8_t)atomic_load_explicit((const _Atomic(int8_t)*)obj, memory_order_relaxed);
-#elif defined(MSC_ATOMICS)
-#if defined(_M_X64) || defined(_M_IX86)
-    return *(volatile int8_t *)obj;
-#else // defined(_M_ARM64)
-    return (int8_t)__ldar8((unsigned __int8 volatile *)obj);
-#endif
-#elif defined(GCC_ATOMICS)
-    return __atomic_load_n(obj, __ATOMIC_RELAXED);
-#endif
-}
-
-static inline void
-pyzstd_atomic_store_int8(int8_t *obj, int8_t value) {
-#ifdef STDC_ATOMICS
-    atomic_store((_Atomic(int8_t)*)obj, value);
-#elif defined(MSC_ATOMICS)
-    _InterlockedExchange8((volatile char *)obj, (char)value);
-#elif defined(GCC_ATOMICS)
-    __atomic_store_n(obj, value, __ATOMIC_SEQ_CST);
-#endif
-}
-
-static inline int
-pyzstd_atomic_compare_exchange_int8(int8_t *obj, int8_t expected, int8_t desired) {
-#ifdef STDC_ATOMICS
-    return atomic_compare_exchange_strong((_Atomic(int8_t)*)obj,
-                                          &expected, desired);
-#elif defined(MSC_ATOMICS)
-    int8_t initial = (int8_t)_InterlockedCompareExchange8(
-        (volatile char *)obj,
-        (char)desired,
-        (char)expected);
-    if (initial == expected) {
-        return 1;
-    }
-    /* initial != expected: the exchange did not happen */
-    return 0;
-#elif defined(GCC_ATOMICS)
-    return __atomic_compare_exchange_n(obj, &expected, desired, 0,
-                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
-#endif
-}
-
-#undef MSC_ATOMICS
-#undef STDC_ATOMICS
-#undef GCC_ATOMICS
-
-#endif // PYZSTD_ATOMICS_H
diff --git a/c-ext/compressor.c b/c-ext/compressor.c
index e365c44a..bbef9fd7 100644
--- a/c-ext/compressor.c
+++ b/c-ext/compressor.c
@@ -7,7 +7,6 @@
  */
 
 #include "python-zstandard.h"
-#include "_pyzstd_atomics.h"
 
 extern PyObject *ZstdError;
 
@@ -239,8 +238,6 @@ static int ZstdCompressor_init(ZstdCompressor *self, PyObject *args,
         Py_INCREF(dict);
     }
 
-    self->in_use = 0;
-
     if (setup_cctx(self)) {
         return -1;
     }
@@ -525,14 +522,6 @@ static PyObject *ZstdCompressor_compress(ZstdCompressor *self, PyObject *args,
         return NULL;
     }
 
-    if (pyzstd_atomic_load_int8(&self->in_use)) {
-        goto concurrent_use;
-    }
-
-    if (!pyzstd_atomic_compare_exchange_int8(&self->in_use, 0, 1)) {
-        goto concurrent_use;
-    }
-
     ZSTD_CCtx_reset(self->cctx, ZSTD_reset_session_only);
 
     destSize = ZSTD_compressBound(source.len);
@@ -580,16 +569,8 @@ static PyObject *ZstdCompressor_compress(ZstdCompressor *self, PyObject *args,
     Py_SET_SIZE(output, outBuffer.pos);
 
 finally:
-    pyzstd_atomic_store_int8(&self->in_use, 0);
     PyBuffer_Release(&source);
     return output;
-
-concurrent_use:
-    PyErr_SetString(ZstdError, "concurrent use is not allowed. "
-                               "See https://python-zstandard.readthedocs.io"
-                               "/en/latest/api_usage.html"
-                               "#thread-and-object-reuse-safety");
-    return NULL;
 }
 
 static ZstdCompressionObj *ZstdCompressor_compressobj(ZstdCompressor *self,
diff --git a/c-ext/python-zstandard.h b/c-ext/python-zstandard.h
index f10e579e..6c62b06f 100644
--- a/c-ext/python-zstandard.h
+++ b/c-ext/python-zstandard.h
@@ -104,8 +104,6 @@ typedef struct {
     ZSTD_CCtx *cctx;
     /* Compression parameters in use. */
    ZSTD_CCtx_params *params;
-    /* Is this compressor being used by a thread? */
-    int8_t in_use;
 } ZstdCompressor;
 
 extern PyTypeObject *ZstdCompressorType;
diff --git a/tests/test_compressor_threadsafe.py b/tests/test_compressor_threadsafe.py
deleted file mode 100644
index e77a89ff..00000000
--- a/tests/test_compressor_threadsafe.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import unittest
-from threading import Barrier, Lock, Thread
-
-import pytest
-
-import zstandard as zstd
-
-
-class TestCompressor_threadsafe(unittest.TestCase):
-    @pytest.mark.thread_unsafe
-    def test_shared_compressor(self):
-        num_parallel_threads = 10
-        cctx = zstd.ZstdCompressor()
-        barrier = Barrier(num_parallel_threads)
-        raised_exceptions = 0
-        raised_exceptions_lock = Lock()
-
-        def thread():
-            nonlocal raised_exceptions
-
-            barrier.wait()
-            try:
-                for _ in range(1_000):
-                    cctx.compress(b"t" * 1048576)
-            except zstd.ZstdError:
-                with raised_exceptions_lock:
-                    raised_exceptions += 1
-
-        threads = [
-            Thread(target=thread)
-            for _ in range(num_parallel_threads)
-        ]
-        # time.sleep(10)
-        for t in threads:
-            t.start()
-        for t in threads:
-            t.join()
-
-        assert raised_exceptions == num_parallel_threads - 1
From a51bf36ec476d34620f2d76c2f93ea73290e0452 Mon Sep 17 00:00:00 2001
From: parmeggiani
Date: Wed, 19 Feb 2025 17:28:28 +0100
Subject: [PATCH 10/18] use uv

---
 .github/workflows/test.yml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 15ab73c3..469777a7 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -60,11 +60,15 @@ jobs:
       PYTHONDEVMODE: '1'
     steps:
       - name: Set up Python
-        uses: Quansight-Labs/setup-python@v5
+        uses: astral-sh/setup-uv@v5
         with:
           python-version: ${{ matrix.py }}
           architecture: ${{ matrix.arch }}
 
+      - name: install pip
+        run: |
+          uv pip install pip
+
       - name: Install Rust
         if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13')
         uses: dtolnay/rust-toolchain@v1
From 34fbb013ae3cd2a63b9da35694da9db78b0f6c1c Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum
Date: Wed, 19 Feb 2025 14:58:08 -0700
Subject: [PATCH 11/18] disable the cffi backend on the free-threaded build at
 build time

---
 docs/installing.rst |  7 ++++++-
 pyproject.toml      |  2 +-
 setup.py            | 37 +++++++++++++++++++++----------------
 3 files changed, 28 insertions(+), 18 deletions(-)

diff --git a/docs/installing.rst b/docs/installing.rst
index 1f3f464d..ac7b8fe8 100644
--- a/docs/installing.rst
+++ b/docs/installing.rst
@@ -44,6 +44,10 @@ One way to do this is to depend on the ``zstandard[cffi]`` dependency.
 e.g. ``pip install 'zstandard[cffi]'`` or add ``zstandard[cffi]`` to your
 pip requirements file.
 
+CFFI does not yet support the free-threaded build of CPython, so the CFFI
+backend is disabled at build time for free-threaded Python regardless of
+whether or not the ``cffi`` extra is specified.
+
 Legacy Format Support
 =====================
 
@@ -84,7 +88,8 @@ All Install Arguments
   Do not compile the CFFI-based backend.
``--rust-backend`` - Compile the Rust backend (not yet feature complete). + Compile the Rust backend (not yet feature complete and not supported + on the free-threaded build or Python 3.13). If you invoke ``setup.py``, simply pass the aforementioned arguments. e.g. ``python3.9 setup.py --no-cffi-backend``. If using ``pip``, use the diff --git a/pyproject.toml b/pyproject.toml index aad72dc4..9d0e0f01 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ Documentation = "https://python-zstandard.readthedocs.io/en/latest/" [build-system] requires = [ - # "cffi>=1.17.0", # ok for default, nok for free-threading + "cffi>=1.17.0", "setuptools", ] # Need to use legacy backend because setup_zstd.py breaks build isolation. diff --git a/setup.py b/setup.py index 61156754..70954c1f 100755 --- a/setup.py +++ b/setup.py @@ -10,6 +10,7 @@ import os import platform import sys +import sysconfig from setuptools import setup @@ -36,7 +37,6 @@ ext_suffix = os.environ.get("SETUPTOOLS_EXT_SUFFIX") if ext_suffix: - import sysconfig # setuptools._distutils.command.build_ext doesn't use # SETUPTOOLS_EXT_SUFFIX like setuptools.command.build_ext does. # Work around the issue so that cross-compilation can work @@ -55,22 +55,27 @@ if py39compat: py39compat.add_ext_suffix = lambda vars: None -try: - import cffi - - # PyPy (and possibly other distros) have CFFI distributed as part of - # them. - cffi_version = LooseVersion(cffi.__version__) - if cffi_version < LooseVersion(MINIMUM_CFFI_VERSION): - print( - "CFFI %s or newer required (%s found); " - "not building CFFI backend" % (MINIMUM_CFFI_VERSION, cffi_version), - file=sys.stderr, - ) - cffi = None - -except ImportError: +if bool(sysconfig.get_config_var("Py_GIL_DISABLED")): + # cffi does not yet support the free-threaded build so we + # disable the cffi backend cffi = None +else: + try: + import cffi + + # PyPy (and possibly other distros) have CFFI distributed as part of + # them. 
+ cffi_version = LooseVersion(cffi.__version__) + if cffi_version < LooseVersion(MINIMUM_CFFI_VERSION): + print( + "CFFI %s or newer required (%s found); " + "not building CFFI backend" % (MINIMUM_CFFI_VERSION, cffi_version), + file=sys.stderr, + ) + cffi = None + + except ImportError: + cffi = None sys.path.insert(0, ".") From 19dc19d815fc95b6528b804c5f17dbf02af3da1b Mon Sep 17 00:00:00 2001 From: Nathan Goldbaum Date: Wed, 19 Feb 2025 15:02:42 -0700 Subject: [PATCH 12/18] remove pytest-run-parallel --- ci/requirements.freethreading.in | 1 - ci/requirements.freethreading.txt | 5 ----- tests/test_compressor_fuzzing.py | 24 ------------------------ tests/test_data_structures_fuzzing.py | 2 -- tests/test_decompressor_fuzzing.py | 17 ----------------- 5 files changed, 49 deletions(-) diff --git a/ci/requirements.freethreading.in b/ci/requirements.freethreading.in index 2ba31630..51b7e52b 100644 --- a/ci/requirements.freethreading.in +++ b/ci/requirements.freethreading.in @@ -7,6 +7,5 @@ hypothesis mypy pycparser pytest-xdist -pytest-run-parallel pytest wheel diff --git a/ci/requirements.freethreading.txt b/ci/requirements.freethreading.txt index 1b73e1f2..4a2892f2 100644 --- a/ci/requirements.freethreading.txt +++ b/ci/requirements.freethreading.txt @@ -108,12 +108,7 @@ pytest==8.3.3 \ --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 # via # -r ci/requirements.freethreading.in - # pytest-run-parallel # pytest-xdist -pytest-run-parallel==0.1.0 \ - --hash=sha256:13d8579d39d60d5d77695e6bc292daa3352a5974eb446819f52fba4e20bb0d0f \ - --hash=sha256:271854a2919aaff4e2a39bc2094bd2f96aa32fba9e51a995405ead35b74cc062 - # via -r ci/requirements.freethreading.in pytest-xdist==3.6.1 \ --hash=sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7 \ --hash=sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d diff --git a/tests/test_compressor_fuzzing.py b/tests/test_compressor_fuzzing.py index 26ad96de..a51d68e6 100644 --- a/tests/test_compressor_fuzzing.py +++ b/tests/test_compressor_fuzzing.py @@ -18,7 +18,6 @@ @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_stream_reader_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -53,7 +52,6 @@ def test_stream_source_read( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -88,7 +86,6 @@ def test_buffer_source_read( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -122,7 +119,6 @@ def test_stream_source_read_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -156,7 +152,6 @@ def test_buffer_source_read_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -190,7 +185,6 @@ def test_stream_source_readinto( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -224,7 +218,6 @@ def test_buffer_source_readinto( self.assertEqual(b"".join(chunks), ref_frame) - 
@pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -260,7 +253,6 @@ def test_stream_source_readinto_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -296,7 +288,6 @@ def test_buffer_source_readinto_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -331,7 +322,6 @@ def test_stream_source_read1( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -366,7 +356,6 @@ def test_buffer_source_read1( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -400,7 +389,6 @@ def test_stream_source_read1_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -434,7 +422,6 @@ def test_buffer_source_read1_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -471,7 +458,6 @@ def test_stream_source_readinto1( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -508,7 +494,6 @@ def test_buffer_source_readinto1( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -544,7 +529,6 @@ def test_stream_source_readinto1_variance( self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -583,7 +567,6 @@ def test_buffer_source_readinto1_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_stream_writer_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -605,7 +588,6 @@ def test_write_size_variance(self, original, level, write_size): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_copy_stream_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -635,7 +617,6 @@ def test_read_write_size_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_compressobj_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -669,7 +650,6 @@ def test_random_input_sizes(self, original, level, chunk_sizes): self.assertEqual(b"".join(chunks), ref_frame) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -728,7 +708,6 @@ def test_flush_block(self, original, level, chunk_sizes, flushes): @unittest.skipUnless("ZSTD_SLOW_TESTS" in 
os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_read_to_iter_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -762,7 +741,6 @@ def test_read_write_size_variance( "multi_compress_to_buffer not available", ) class TestCompressor_multi_compress_to_buffer_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.lists( strategies.sampled_from(random_input_data()), @@ -797,7 +775,6 @@ def test_data_equivalence(self, original, threads, use_dict): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressor_chunker_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.data_too_large, @@ -837,7 +814,6 @@ def test_random_input_sizes(self, original, level, chunk_size, input_sizes): self.assertTrue(all(len(chunk) == chunk_size for chunk in chunks[:-1])) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, diff --git a/tests/test_data_structures_fuzzing.py b/tests/test_data_structures_fuzzing.py index e533638f..4313aeec 100644 --- a/tests/test_data_structures_fuzzing.py +++ b/tests/test_data_structures_fuzzing.py @@ -47,7 +47,6 @@ @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestCompressionParametersHypothesis(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.given( s_windowlog, s_chainlog, @@ -77,7 +76,6 @@ def test_valid_init( strategy=strategy, ) - @pytest.mark.thread_unsafe @hypothesis.given( s_windowlog, s_chainlog, diff --git a/tests/test_decompressor_fuzzing.py b/tests/test_decompressor_fuzzing.py index 03182465..a148cb97 100644 --- a/tests/test_decompressor_fuzzing.py +++ b/tests/test_decompressor_fuzzing.py @@ -16,7 +16,6 @@ @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_stream_reader_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -60,7 +59,6 @@ def test_stream_source_read_variance( self.assertEqual(b"".join(chunks), original) # Similar to above except we have a constant read() size. - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -102,7 +100,6 @@ def test_stream_source_read_size( self.assertEqual(b"".join(chunks), original) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -145,7 +142,6 @@ def test_buffer_source_read_variance( self.assertEqual(b"".join(chunks), original) # Similar to above except we have a constant read() size. 
- @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -186,7 +182,6 @@ def test_buffer_source_constant_read_size( self.assertEqual(b"".join(chunks), original) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[hypothesis.HealthCheck.large_base_example] ) @@ -216,7 +211,6 @@ def test_stream_source_readall( data = dctx.stream_reader(source, read_size=source_read_size).readall() self.assertEqual(data, original) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -259,7 +253,6 @@ def test_stream_source_read1_variance( self.assertEqual(b"".join(chunks), original) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -304,7 +297,6 @@ def test_stream_source_readinto1_variance( self.assertEqual(b"".join(chunks), original) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.data_too_large, @@ -341,7 +333,6 @@ def test_relative_seeks( self.assertEqual(original[offset : offset + len(chunk)], chunk) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -391,7 +382,6 @@ def test_multiple_frames(self, chunks, level, source_read_size, read_sizes): @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_stream_writer_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -430,7 +420,6 @@ def test_write_size_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_copy_stream_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -462,7 +451,6 @@ def test_read_write_size_variance( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_decompressobj_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -494,7 +482,6 @@ def test_random_input_sizes(self, original, level, chunk_sizes): self.assertEqual(b"".join(chunks), original) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -532,7 +519,6 @@ def test_random_output_sizes( self.assertEqual(b"".join(chunks), original) - @pytest.mark.thread_unsafe @hypothesis.given( chunks=strategies.lists( strategies.sampled_from(random_input_data()), @@ -585,7 +571,6 @@ def test_read_across_frames_false( # fails self.assertEqual(decompressed.getvalue(), source_chunks[0]) - @pytest.mark.thread_unsafe @hypothesis.settings( suppress_health_check=[ hypothesis.HealthCheck.large_base_example, @@ -640,7 +625,6 @@ def test_read_across_frames_true( @unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set") class TestDecompressor_read_to_iter_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe @hypothesis.given( original=strategies.sampled_from(random_input_data()), level=strategies.integers(min_value=1, max_value=5), @@ -671,7 +655,6 @@ def test_read_write_size_variance( "multi_decompress_to_buffer not available", ) class TestDecompressor_multi_decompress_to_buffer_fuzzing(unittest.TestCase): - @pytest.mark.thread_unsafe 
     @hypothesis.given(
         original=strategies.lists(
             strategies.sampled_from(random_input_data()),

From ffa062f6b22f679c111eed73dca14a1a792158ca Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum
Date: Sat, 22 Feb 2025 14:56:26 -0700
Subject: [PATCH 13/18] some fixes for github actions config

---
 .github/workflows/test.yml | 10 +++-------
 Cargo.toml                 |  2 +-
 2 files changed, 4 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 469777a7..467d56b4 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -106,20 +106,16 @@ jobs:
         run: |
           pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} -v tests/

-      - name: Test in Parallel
-        if: "endsWith(matrix.py, 't')"
-        run: |
-          pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} --parallel-threads=10 -v tests/
-
       - name: Test CFFI Backend
-        if: "!startsWith(matrix.py, '3.13')" # see pyproject.toml:4
+        # CFFI doesn't yet support the free-threaded build of CPython
+        if: matrix.py != '3.13t'
         env:
           PYTHON_ZSTANDARD_IMPORT_POLICY: 'cffi'
         run: |
           pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} -v tests/

       - name: Test Rust Backend
-        if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13')
+        if: matrix.arch == 'x64'
         # Rust backend is currently experimental. So ignore failures in it.
         continue-on-error: true
         env:
diff --git a/Cargo.toml b/Cargo.toml
index 445183fc..69907af7 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -26,5 +26,5 @@ version = "2.0.10+zstd.1.5.6"
 features = ["experimental", "legacy", "zstdmt"]

 [dependencies.pyo3]
-version = "0.21.2"
+version = "0.22.2"
 features = ["extension-module"]

From cff6b8ee36b674e216d52008e5f50763615337cc Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum
Date: Wed, 26 Feb 2025 14:47:20 -0700
Subject: [PATCH 14/18] revert more unnecessary changes

---
 .github/workflows/test.yml            |   7 --
 ci/requirements.freethreading.in      |  11 ---
 ci/requirements.freethreading.txt     | 127 --------------------------
 tests/test_compressor_fuzzing.py      |   3 -
 tests/test_data_structures_fuzzing.py |   3 -
 tests/test_decompressor_fuzzing.py    |   3 +-
 6 files changed, 1 insertion(+), 153 deletions(-)
 delete mode 100644 ci/requirements.freethreading.in
 delete mode 100644 ci/requirements.freethreading.txt

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 467d56b4..93a41e16 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -78,17 +78,10 @@ jobs:
       - uses: actions/checkout@v4

       - name: Install Dependencies
-        if: "!endsWith(matrix.py, 't')"
         shell: bash
         run: |
           python -m pip install --require-hashes -r ci/requirements.txt

-      - name: Install Dependencies (free-threading)
-        if: "endsWith(matrix.py, 't')"
-        shell: bash
-        run: |
-          python -m pip install --require-hashes -r ci/requirements.freethreading.txt
-
       # TODO enable once PyO3 supports 3.13.
       - name: Build (Rust)
         if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13')
         run: |
           python -m pip install --config-settings='--build-option=--rust-backend' -e .
diff --git a/ci/requirements.freethreading.in b/ci/requirements.freethreading.in
deleted file mode 100644
index 51b7e52b..00000000
--- a/ci/requirements.freethreading.in
+++ /dev/null
@@ -1,11 +0,0 @@
-# This is a dependency of pytest on Windows but isn't picked up by pip-compile.
-atomicwrites
-cibuildwheel
-#cffi
-colorama
-hypothesis
-mypy
-pycparser
-pytest-xdist
-pytest
-wheel
diff --git a/ci/requirements.freethreading.txt b/ci/requirements.freethreading.txt
deleted file mode 100644
index 4a2892f2..00000000
--- a/ci/requirements.freethreading.txt
+++ /dev/null
@@ -1,127 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.13
-# by the following command:
-#
-# pip-compile --generate-hashes --output-file=ci/requirements.freethreading.txt --pre ci/requirements.freethreading.in
-#
-atomicwrites==1.4.1 \
-    --hash=sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11
-    # via -r ci/requirements.freethreading.in
-attrs==24.2.0 \
-    --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \
-    --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2
-    # via hypothesis
-bashlex==0.18 \
-    --hash=sha256:5bb03a01c6d5676338c36fd1028009c8ad07e7d61d8a1ce3f513b7fff52796ee \
-    --hash=sha256:91d73a23a3e51711919c1c899083890cdecffc91d8c088942725ac13e9dcfffa
-    # via cibuildwheel
-bracex==2.5.post1 \
-    --hash=sha256:12c50952415bfa773d2d9ccb8e79651b8cdb1f31a42f6091b804f6ba2b4a66b6 \
-    --hash=sha256:13e5732fec27828d6af308628285ad358047cec36801598368cb28bc631dbaf6
-    # via cibuildwheel
-certifi==2024.8.30 \
-    --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \
-    --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9
-    # via cibuildwheel
-cibuildwheel==2.21.3 \
-    --hash=sha256:3ce23a9e5406b3eeb80039d7a6fdb218a2450932a8037c0bf76511cd88dfb74e \
-    --hash=sha256:f1d036a13603a6ce4019d8b1bd52c296cf32461a3b3be8441434b60b8b378b80
-    # via -r ci/requirements.freethreading.in
-colorama==0.4.6 \
-    --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
-    --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
-    # via -r ci/requirements.freethreading.in
-execnet==2.1.1 \
-    --hash=sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc \
-    --hash=sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3
-    # via pytest-xdist
-filelock==3.16.1 \
-    --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
-    --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
-    # via cibuildwheel
-hypothesis==6.116.0 \
-    --hash=sha256:9c1ac9a2edb77aacae1950d8ded6b3f40dbf8483097c88336265c348d2132c71 \
-    --hash=sha256:d30271214eae0d4758b72b408e9777405c7c7f687e14e8a42853adea887b2891
-    # via -r ci/requirements.freethreading.in
-iniconfig==2.0.0 \
-    --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
-    --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
-    # via pytest
-mypy==1.13.0 \
-    --hash=sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc \
-    --hash=sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e \
-    --hash=sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f \
-    --hash=sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74 \
-    --hash=sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a \
-    --hash=sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2 \
-    --hash=sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b \
-    --hash=sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73 \
-    --hash=sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e \
-    --hash=sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d \
-    --hash=sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d \
-    --hash=sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6 \
-    --hash=sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca \
-    --hash=sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d \
-    --hash=sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5 \
-    --hash=sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62 \
-    --hash=sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a \
-    --hash=sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc \
-    --hash=sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7 \
-    --hash=sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb \
-    --hash=sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7 \
-    --hash=sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732 \
-    --hash=sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80 \
-    --hash=sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a \
-    --hash=sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc \
-    --hash=sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2 \
-    --hash=sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0 \
-    --hash=sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24 \
-    --hash=sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7 \
-    --hash=sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b \
-    --hash=sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372 \
-    --hash=sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8
-    # via -r ci/requirements.freethreading.in
-mypy-extensions==1.0.0 \
-    --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \
-    --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782
-    # via mypy
-packaging==24.1 \
-    --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
-    --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
-    # via
-    #   cibuildwheel
-    #   pytest
-platformdirs==4.3.6 \
-    --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
-    --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
-    # via cibuildwheel
-pluggy==1.5.0 \
-    --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \
-    --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669
-    # via pytest
-pycparser==2.22 \
-    --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
-    --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
-    # via -r ci/requirements.freethreading.in
-pytest==8.3.3 \
-    --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \
-    --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2
-    # via
-    #   -r ci/requirements.freethreading.in
-    #   pytest-xdist
-pytest-xdist==3.6.1 \
-    --hash=sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7 \
-    --hash=sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d
-    # via -r ci/requirements.freethreading.in
-sortedcontainers==2.4.0 \
-    --hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \
-    --hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0
-    # via hypothesis
-typing-extensions==4.12.2 \
-    --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
-    --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
-    # via mypy
-wheel==0.44.0 \
-    --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \
-    --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49
-    # via -r ci/requirements.freethreading.in
diff --git a/tests/test_compressor_fuzzing.py b/tests/test_compressor_fuzzing.py
index a51d68e6..032b4428 100644
--- a/tests/test_compressor_fuzzing.py
+++ b/tests/test_compressor_fuzzing.py
@@ -2,9 +2,6 @@
 import os
 import unittest

-import pytest
-
-
 try:
     import hypothesis
     import hypothesis.strategies as strategies
diff --git a/tests/test_data_structures_fuzzing.py b/tests/test_data_structures_fuzzing.py
index 4313aeec..87c3b902 100644
--- a/tests/test_data_structures_fuzzing.py
+++ b/tests/test_data_structures_fuzzing.py
@@ -1,9 +1,6 @@
 import os
 import unittest

-import pytest
-
-
 try:
     import hypothesis
     import hypothesis.strategies as strategies
diff --git a/tests/test_decompressor_fuzzing.py b/tests/test_decompressor_fuzzing.py
index a148cb97..53ecf0cc 100644
--- a/tests/test_decompressor_fuzzing.py
+++ b/tests/test_decompressor_fuzzing.py
@@ -1,7 +1,6 @@
 import io
 import os
 import unittest
-import pytest

 try:
     import hypothesis
@@ -532,7 +531,7 @@ def test_random_output_sizes(
         ),
         read_sizes=strategies.data(),
     )
-    def test_read_across_frames_false( # fails
+    def test_read_across_frames_false(
         self, chunks, level, write_size, read_sizes
     ):
         cctx = zstd.ZstdCompressor(level=level)

From 4537fae5df16733966d38da0544a5a7ed29df59f Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum
Date: Wed, 26 Feb 2025 14:53:19 -0700
Subject: [PATCH 15/18] Don't test the rust backend on 3.13

---
 .github/workflows/test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 93a41e16..56ed6e8a 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -108,7 +108,7 @@ jobs:
           pytest --numprocesses=auto --hypothesis-profile=${HYPOTHESIS_PROFILE} -v tests/

       - name: Test Rust Backend
-        if: matrix.arch == 'x64'
+        if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13')
         # Rust backend is currently experimental. So ignore failures in it.
         continue-on-error: true
         env:

From 4f389760b2c3ca971b19d896228cb5e7bc0cf1d6 Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum
Date: Wed, 26 Feb 2025 14:57:25 -0700
Subject: [PATCH 16/18] remove unnecessary architecture option for setup-uv

---
 .github/workflows/test.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 56ed6e8a..2f1e10e8 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -63,7 +63,6 @@ jobs:
         uses: astral-sh/setup-uv@v5
         with:
           python-version: ${{ matrix.py }}
-          architecture: ${{ matrix.arch }}

       - name: install pip
         run: |
           uv pip install pip

From d1ee7b09a4e68bc8f63ecda3a5d7176bebdc00d3 Mon Sep 17 00:00:00 2001
From: Nathan Goldbaum
Date: Wed, 26 Feb 2025 15:03:34 -0700
Subject: [PATCH 17/18] fix issues with setup-uv

---
 .github/workflows/test.yml | 8 ++++----
 Cargo.toml                 | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 2f1e10e8..997915cb 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -59,14 +59,16 @@ jobs:
       # Activate Python development mode so we get warnings.
       PYTHONDEVMODE: '1'
     steps:
+      - uses: actions/checkout@v4
+
       - name: Set up Python
         uses: astral-sh/setup-uv@v5
         with:
           python-version: ${{ matrix.py }}

-      - name: install pip
+      - name: Install pip
         run: |
-          uv pip install pip
+          uv pip install --python=${{ matrix.py }} pip

       - name: Install Rust
         if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13')
         uses: dtolnay/rust-toolchain@master
         with:
           toolchain: stable

-      - uses: actions/checkout@v4
-
       - name: Install Dependencies
         shell: bash
         run: |
diff --git a/Cargo.toml b/Cargo.toml
index 69907af7..445183fc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -26,5 +26,5 @@ version = "2.0.10+zstd.1.5.6"
 features = ["experimental", "legacy", "zstdmt"]

 [dependencies.pyo3]
-version = "0.22.2"
+version = "0.21.2"
 features = ["extension-module"]

From 24b926914d617f061d9d9b5f947412e52167894e Mon Sep 17 00:00:00 2001
From: parmeggiani
Date: Tue, 25 Mar 2025 20:42:19 +0100
Subject: [PATCH 18/18] revert to actions/setup-python

---
 .github/workflows/test.yml | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 997915cb..2a92b6e2 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -62,13 +62,10 @@ jobs:
       - uses: actions/checkout@v4

       - name: Set up Python
-        uses: astral-sh/setup-uv@v5
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.py }}
-
-      - name: Install pip
-        run: |
-          uv pip install --python=${{ matrix.py }} pip
+          architecture: ${{ matrix.arch }}

       - name: Install Rust
         if: matrix.arch == 'x64' && !startsWith(matrix.py, '3.13')