
Commit f7bf632

python312Packages.s3fs: 2024.12.0 -> 2025.2.0 (#382057)
2 parents: 8d753f4 + 232ef93

3 files changed: +182 -79 lines changed

3 files changed

+182
-79
lines changed

pkgs/development/python-modules/ome-zarr/default.nix

Lines changed: 17 additions & 10 deletions
@@ -1,28 +1,32 @@
 {
   lib,
   buildPythonPackage,
-  pythonOlder,
   fetchFromGitHub,
-  pytestCheckHook,
+
+  # build-system
+  setuptools,
+  setuptools-scm,
+
+  # dependencies
   aiohttp,
   dask,
   distributed,
   fsspec,
   numpy,
   requests,
   scikit-image,
-  setuptools,
   toolz,
   zarr,
+
+  # tests
+  pytestCheckHook,
 }:

 buildPythonPackage rec {
   pname = "ome-zarr";
   version = "0.10.3";
   pyproject = true;

-  disabled = pythonOlder "3.9";
-
   src = fetchFromGitHub {
     owner = "ome";
     repo = "ome-zarr-py";
@@ -32,21 +36,24 @@ buildPythonPackage rec {

   build-system = [
     setuptools
+    setuptools-scm
   ];

   dependencies = [
-    numpy
+    aiohttp
     dask
     distributed
-    zarr
     fsspec
-    aiohttp
+    numpy
     requests
     scikit-image
     toolz
+    zarr
   ] ++ fsspec.optional-dependencies.s3;

-  nativeCheckInputs = [ pytestCheckHook ];
+  nativeCheckInputs = [
+    pytestCheckHook
+  ];

   disabledTests = [
     # attempts to access network
@@ -90,7 +97,7 @@ buildPythonPackage rec {
   meta = {
     description = "Implementation of next-generation file format (NGFF) specifications for storing bioimaging data in the cloud";
     homepage = "https://pypi.org/project/ome-zarr";
-    changelog = "https://github.com/ome/ome-zarr-py/blob/${src.tag}/CHANGELOG.md";
+    changelog = "https://github.com/ome/ome-zarr-py/blob/v${version}/CHANGELOG.md";
     license = lib.licenses.bsd2;
     maintainers = [ lib.maintainers.bcdarwin ];
     mainProgram = "ome_zarr";
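
Note: the `dependencies = [ ... ] ++ fsspec.optional-dependencies.s3;` line works because nixpkgs exposes a Python package's extras as lists of derivations under `passthru.optional-dependencies`. A minimal sketch of the idiom, with plain strings standing in for real derivations (the file name and values are hypothetical; evaluate with `nix-instantiate --eval --strict sketch.nix`):

# sketch.nix: splicing an extra's package list into a dependency list.
let
  # stand-in for the real fsspec derivation's passthru attribute
  fsspec = {
    optional-dependencies.s3 = [ "s3fs" "aiobotocore" ];
  };
  base = [ "dask" "numpy" "zarr" ];
in
base ++ fsspec.optional-dependencies.s3
# => [ "dask" "numpy" "zarr" "s3fs" "aiobotocore" ]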

pkgs/development/python-modules/pyiceberg/default.nix

Lines changed: 65 additions & 56 deletions
@@ -1,5 +1,6 @@
 {
   lib,
+  stdenv,
   buildPythonPackage,
   fetchFromGitHub,

@@ -49,7 +50,6 @@
   pytest-mock,
   pytest-timeout,
   requests-mock,
-  pythonOlder,
 }:

 buildPythonPackage rec {
@@ -185,61 +185,70 @@ buildPythonPackage rec {
     "tests/integration"
   ];

-  disabledTests = [
-    # botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL
-    "test_checking_if_a_file_exists"
-    "test_closing_a_file"
-    "test_fsspec_file_tell"
-    "test_fsspec_getting_length_of_file"
-    "test_fsspec_pickle_round_trip_s3"
-    "test_fsspec_raise_on_opening_file_not_found"
-    "test_fsspec_read_specified_bytes_for_file"
-    "test_fsspec_write_and_read_file"
-    "test_writing_avro_file"
-
-    # Require unpackaged gcsfs
-    "test_fsspec_converting_an_outputfile_to_an_inputfile_gcs"
-    "test_fsspec_new_input_file_gcs"
-    "test_fsspec_new_output_file_gcs"
-    "test_fsspec_pickle_roundtrip_gcs"
-
-    # Timeout (network access)
-    "test_fsspec_converting_an_outputfile_to_an_inputfile_adls"
-    "test_fsspec_new_abfss_output_file_adls"
-    "test_fsspec_new_input_file_adls"
-    "test_fsspec_pickle_round_trip_aldfs"
-
-    # TypeError: pyarrow.lib.large_list() takes no keyword argument
-    # From tests/io/test_pyarrow_stats.py:
-    "test_bounds"
-    "test_column_metrics_mode"
-    "test_column_sizes"
-    "test_metrics_mode_counts"
-    "test_metrics_mode_full"
-    "test_metrics_mode_non_default_trunc"
-    "test_metrics_mode_none"
-    "test_null_and_nan_counts"
-    "test_offsets"
-    "test_read_missing_statistics"
-    "test_record_count"
-    "test_value_counts"
-    "test_write_and_read_stats_schema"
-    # From tests/io/test_pyarrow.py:
-    "test_list_type_to_pyarrow"
-    "test_projection_add_column"
-    "test_projection_list_of_structs"
-    "test_read_list"
-    "test_schema_compatible_missing_nullable_field_nested"
-    "test_schema_compatible_nested"
-    "test_schema_mismatch_missing_required_field_nested"
-    "test_schema_to_pyarrow_schema_exclude_field_ids"
-    "test_schema_to_pyarrow_schema_include_field_ids"
-    # From tests/io/test_pyarrow_visitor.py
-    "test_round_schema_conversion_nested"
-
-    # Hangs forever (from tests/io/test_pyarrow.py)
-    "test_getting_length_of_file_gcs"
-  ];
+  disabledTests =
+    [
+      # botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL
+      "test_checking_if_a_file_exists"
+      "test_closing_a_file"
+      "test_fsspec_file_tell"
+      "test_fsspec_getting_length_of_file"
+      "test_fsspec_pickle_round_trip_s3"
+      "test_fsspec_raise_on_opening_file_not_found"
+      "test_fsspec_read_specified_bytes_for_file"
+      "test_fsspec_write_and_read_file"
+      "test_writing_avro_file"
+
+      # Require unpackaged gcsfs
+      "test_fsspec_converting_an_outputfile_to_an_inputfile_gcs"
+      "test_fsspec_new_input_file_gcs"
+      "test_fsspec_new_output_file_gcs"
+      "test_fsspec_pickle_roundtrip_gcs"
+
+      # Timeout (network access)
+      "test_fsspec_converting_an_outputfile_to_an_inputfile_adls"
+      "test_fsspec_new_abfss_output_file_adls"
+      "test_fsspec_new_input_file_adls"
+      "test_fsspec_pickle_round_trip_aldfs"
+
+      # TypeError: pyarrow.lib.large_list() takes no keyword argument
+      # From tests/io/test_pyarrow_stats.py:
+      "test_bounds"
+      "test_column_metrics_mode"
+      "test_column_sizes"
+      "test_metrics_mode_counts"
+      "test_metrics_mode_full"
+      "test_metrics_mode_non_default_trunc"
+      "test_metrics_mode_none"
+      "test_null_and_nan_counts"
+      "test_offsets"
+      "test_read_missing_statistics"
+      "test_record_count"
+      "test_value_counts"
+      "test_write_and_read_stats_schema"
+      # From tests/io/test_pyarrow.py:
+      "test_list_type_to_pyarrow"
+      "test_projection_add_column"
+      "test_projection_list_of_structs"
+      "test_read_list"
+      "test_schema_compatible_missing_nullable_field_nested"
+      "test_schema_compatible_nested"
+      "test_schema_mismatch_missing_required_field_nested"
+      "test_schema_to_pyarrow_schema_exclude_field_ids"
+      "test_schema_to_pyarrow_schema_include_field_ids"
+      # From tests/io/test_pyarrow_visitor.py
+      "test_round_schema_conversion_nested"
+
+      # Hangs forever (from tests/io/test_pyarrow.py)
+      "test_getting_length_of_file_gcs"
+    ]
+    ++ lib.optionals stdenv.hostPlatform.isDarwin [
+      # ImportError: The pyarrow installation is not built with support for 'GcsFileSystem'
+      "test_converting_an_outputfile_to_an_inputfile_gcs"
+      "test_new_input_file_gcs"
+      "test_new_output_file_gc"
+    ];
+
+  __darwinAllowLocalNetworking = true;

   meta = {
     description = "Python library for programmatic access to Apache Iceberg";

pkgs/development/python-modules/s3fs/default.nix

Lines changed: 100 additions & 13 deletions
@@ -1,35 +1,46 @@
 {
   lib,
-  aiobotocore,
-  aiohttp,
   buildPythonPackage,
+  fetchFromGitHub,
+
+  # buildInputs
   docutils,
-  fetchPypi,
+
+  # build-system
+  setuptools,
+
+  # dependencies
+  aiobotocore,
+  aiohttp,
+  fsspec,
+
+  # tests
   flask,
   flask-cors,
-  fsspec,
   moto,
   pytestCheckHook,
-  pythonOlder,
-  setuptools,
 }:

 buildPythonPackage rec {
   pname = "s3fs";
-  version = "2024.12.0";
+  version = "2025.2.0";
   pyproject = true;

-  disabled = pythonOlder "3.9";
-
-  src = fetchPypi {
-    inherit pname version;
-    hash = "sha256-Gw86j1lGzKW6KYcdZ5KrHkUo7XYjJ9iu+vyBtzuZ/VY=";
+  src = fetchFromGitHub {
+    owner = "fsspec";
+    repo = "s3fs";
+    tag = version;
+    hash = "sha256-nnfvccORDspj54sRxL3d0hn4MpzKYGKE2Kl0v/wLaNw=";
   };

   buildInputs = [ docutils ];

   build-system = [ setuptools ];

+  pythonRelaxDeps = [
+    "fsspec"
+  ];
+
   dependencies = [
     aiobotocore
     aiohttp
@@ -48,12 +59,88 @@ buildPythonPackage rec {
   disabledTests = [
     # require network access
     "test_async_close"
+
+    # AssertionError: assert ['x', 'y'] == []
+    "test_with_data"
+
+    # AssertionError: assert ['1', 'x', 'y'] == []
+    "test_clear_empty"
+    "test_no_dircache"
+
+    # KeyError: 'ChecksumAlgorithm'
+    "test_info"
+
+    # KeyError:
+    #   del d[1]
+    "test_complex_keys"
+
+    # TypeError: string indices must be integers, not 'str'
+    "test_bucket_versioning"
+    "test_bulk_delete"
+    "test_copy_with_source_and_destination_as_list"
+    "test_cp_directory_recursive"
+    "test_dynamic_add_rm"
+    "test_get_directory_to_existing_directory"
+    "test_get_directory_to_new_directory"
+    "test_get_directory_without_files_with_same_name_prefix"
+    "test_get_file_info_with_selector"
+    "test_get_file_to_existing_directory"
+    "test_get_file_to_file_in_existing_directory"
+    "test_get_file_to_file_in_new_directory"
+    "test_get_file_to_new_directory"
+    "test_get_glob_edge_cases"
+    "test_get_glob_to_existing_directory"
+    "test_get_glob_to_new_directory"
+    "test_get_list_of_files_to_existing_directory"
+    "test_get_list_of_files_to_new_directory"
+    "test_get_with_source_and_destination_as_list"
+    "test_move[False]"
+    "test_move[True]"
+    "test_new_bucket"
+    "test_new_bucket_auto"
+    "test_pipe_exclusive"
+    "test_put_directory_recursive"
+    "test_put_directory_to_existing_directory"
+    "test_put_directory_to_new_directory"
+    "test_put_directory_without_files_with_same_name_prefix"
+    "test_put_file_to_existing_directory"
+    "test_put_file_to_file_in_existing_directory"
+    "test_put_file_to_file_in_new_directory"
+    "test_put_file_to_new_directory"
+    "test_put_glob_edge_cases"
+    "test_put_glob_to_existing_directory"
+    "test_put_glob_to_new_directory"
+    "test_put_list_of_files_to_existing_directory"
+    "test_put_list_of_files_to_new_directory"
+    "test_rm"
+    "test_rm_invalidates_cache"
+    "test_rm_recursive_folder"
+    "test_s3_big_ls"
+    "test_s3fs_etag_preserving_multipart_copy"
+    "test_tags"
+
+    # ExceptionGroup: errors while tearing down <Function test_copy_two_files_new_directory> (2 sub-exceptions)
+    "test_copy_directory_to_existing_directory"
+    "test_copy_directory_to_new_directory"
+    "test_copy_directory_without_files_with_same_name_prefix"
+    "test_copy_file_to_existing_directory"
+    "test_copy_file_to_file_in_existing_directory"
+    "test_copy_file_to_file_in_new_directory"
+    "test_copy_file_to_new_directory"
+    "test_copy_glob_edge_cases"
+    "test_copy_glob_to_existing_directory"
+    "test_copy_glob_to_new_directory"
+    "test_copy_list_of_files_to_existing_directory"
+    "test_copy_list_of_files_to_new_directory"
+    "test_copy_two_files_new_directory"
   ];

+  __darwinAllowLocalNetworking = true;
+
   meta = {
     description = "Pythonic file interface for S3";
     homepage = "https://github.com/fsspec/s3fs";
-    changelog = "https://github.com/fsspec/s3fs/raw/${version}/docs/source/changelog.rst";
+    changelog = "https://github.com/fsspec/s3fs/blob/${version}/docs/source/changelog.rst";
     license = lib.licenses.bsd3;
     maintainers = with lib.maintainers; [ teh ];
   };
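
Note: switching the source from fetchPypi to fetchFromGitHub means the hash must be re-derived on every version bump, and `pythonRelaxDeps = [ "fsspec" ]` loosens upstream's exact version pin on fsspec so the fsspec currently in nixpkgs can satisfy it. A common hash-update workflow (a sketch, not taken from this commit) is to build once with `lib.fakeHash` and copy the real hash out of the resulting mismatch error:

# Sketch: trust-on-first-use hash update for the new fetchFromGitHub source.
src = fetchFromGitHub {
  owner = "fsspec";
  repo = "s3fs";
  tag = version;       # s3fs release tags are bare versions, e.g. "2025.2.0"
  hash = lib.fakeHash; # build once, then paste the "got: sha256-..." value here
};

From a nixpkgs checkout, the package and its test suite can then be rebuilt with:

$ nix-build -A python312Packages.s3fs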
