
Commit 232ef93

python312Packages.pyiceberg: fix on darwin
1 parent 3b988f7 commit 232ef93

File tree

1 file changed (+65 −56 lines)


pkgs/development/python-modules/pyiceberg/default.nix

Lines changed: 65 additions & 56 deletions
@@ -1,5 +1,6 @@
 {
   lib,
+  stdenv,
   buildPythonPackage,
   fetchFromGitHub,
 
@@ -49,7 +50,6 @@
   pytest-mock,
   pytest-timeout,
   requests-mock,
-  pythonOlder,
 }:
 
 buildPythonPackage rec {
@@ -185,61 +185,70 @@ buildPythonPackage rec {
     "tests/integration"
   ];
 
-  disabledTests = [
-    # botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL
-    "test_checking_if_a_file_exists"
-    "test_closing_a_file"
-    "test_fsspec_file_tell"
-    "test_fsspec_getting_length_of_file"
-    "test_fsspec_pickle_round_trip_s3"
-    "test_fsspec_raise_on_opening_file_not_found"
-    "test_fsspec_read_specified_bytes_for_file"
-    "test_fsspec_write_and_read_file"
-    "test_writing_avro_file"
-
-    # Require unpackaged gcsfs
-    "test_fsspec_converting_an_outputfile_to_an_inputfile_gcs"
-    "test_fsspec_new_input_file_gcs"
-    "test_fsspec_new_output_file_gcs"
-    "test_fsspec_pickle_roundtrip_gcs"
-
-    # Timeout (network access)
-    "test_fsspec_converting_an_outputfile_to_an_inputfile_adls"
-    "test_fsspec_new_abfss_output_file_adls"
-    "test_fsspec_new_input_file_adls"
-    "test_fsspec_pickle_round_trip_aldfs"
-
-    # TypeError: pyarrow.lib.large_list() takes no keyword argument
-    # From tests/io/test_pyarrow_stats.py:
-    "test_bounds"
-    "test_column_metrics_mode"
-    "test_column_sizes"
-    "test_metrics_mode_counts"
-    "test_metrics_mode_full"
-    "test_metrics_mode_non_default_trunc"
-    "test_metrics_mode_none"
-    "test_null_and_nan_counts"
-    "test_offsets"
-    "test_read_missing_statistics"
-    "test_record_count"
-    "test_value_counts"
-    "test_write_and_read_stats_schema"
-    # From tests/io/test_pyarrow.py:
-    "test_list_type_to_pyarrow"
-    "test_projection_add_column"
-    "test_projection_list_of_structs"
-    "test_read_list"
-    "test_schema_compatible_missing_nullable_field_nested"
-    "test_schema_compatible_nested"
-    "test_schema_mismatch_missing_required_field_nested"
-    "test_schema_to_pyarrow_schema_exclude_field_ids"
-    "test_schema_to_pyarrow_schema_include_field_ids"
-    # From tests/io/test_pyarrow_visitor.py
-    "test_round_schema_conversion_nested"
-
-    # Hangs forever (from tests/io/test_pyarrow.py)
-    "test_getting_length_of_file_gcs"
-  ];
+  disabledTests =
+    [
+      # botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL
+      "test_checking_if_a_file_exists"
+      "test_closing_a_file"
+      "test_fsspec_file_tell"
+      "test_fsspec_getting_length_of_file"
+      "test_fsspec_pickle_round_trip_s3"
+      "test_fsspec_raise_on_opening_file_not_found"
+      "test_fsspec_read_specified_bytes_for_file"
+      "test_fsspec_write_and_read_file"
+      "test_writing_avro_file"
+
+      # Require unpackaged gcsfs
+      "test_fsspec_converting_an_outputfile_to_an_inputfile_gcs"
+      "test_fsspec_new_input_file_gcs"
+      "test_fsspec_new_output_file_gcs"
+      "test_fsspec_pickle_roundtrip_gcs"
+
+      # Timeout (network access)
+      "test_fsspec_converting_an_outputfile_to_an_inputfile_adls"
+      "test_fsspec_new_abfss_output_file_adls"
+      "test_fsspec_new_input_file_adls"
+      "test_fsspec_pickle_round_trip_aldfs"
+
+      # TypeError: pyarrow.lib.large_list() takes no keyword argument
+      # From tests/io/test_pyarrow_stats.py:
+      "test_bounds"
+      "test_column_metrics_mode"
+      "test_column_sizes"
+      "test_metrics_mode_counts"
+      "test_metrics_mode_full"
+      "test_metrics_mode_non_default_trunc"
+      "test_metrics_mode_none"
+      "test_null_and_nan_counts"
+      "test_offsets"
+      "test_read_missing_statistics"
+      "test_record_count"
+      "test_value_counts"
+      "test_write_and_read_stats_schema"
+      # From tests/io/test_pyarrow.py:
+      "test_list_type_to_pyarrow"
+      "test_projection_add_column"
+      "test_projection_list_of_structs"
+      "test_read_list"
+      "test_schema_compatible_missing_nullable_field_nested"
+      "test_schema_compatible_nested"
+      "test_schema_mismatch_missing_required_field_nested"
+      "test_schema_to_pyarrow_schema_exclude_field_ids"
+      "test_schema_to_pyarrow_schema_include_field_ids"
+      # From tests/io/test_pyarrow_visitor.py
+      "test_round_schema_conversion_nested"
+
+      # Hangs forever (from tests/io/test_pyarrow.py)
+      "test_getting_length_of_file_gcs"
+    ]
+    ++ lib.optionals stdenv.hostPlatform.isDarwin [
+      # ImportError: The pyarrow installation is not built with support for 'GcsFileSystem'
+      "test_converting_an_outputfile_to_an_inputfile_gcs"
+      "test_new_input_file_gcs"
+      "test_new_output_file_gc"
+    ];
+
+  __darwinAllowLocalNetworking = true;
 
   meta = {
     description = "Python library for programmatic access to Apache Iceberg";
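
Note: the substance of the change is twofold. First, disabledTests becomes a concatenation, so that lib.optionals stdenv.hostPlatform.isDarwin [ ... ] appends three GCS tests only on Darwin, where (per the in-diff comment) the pyarrow build lacks GcsFileSystem support. Second, __darwinAllowLocalNetworking = true; relaxes the Darwin build sandbox so the remaining tests may use local networking. Below is a minimal sketch of the conditional-list pattern, not part of the commit, with hypothetical test names standing in for the real ones:

{ lib, stdenv }:
{
  # lib.optionals cond list yields list when cond is true and [ ] otherwise,
  # so the second group is appended only on Darwin hosts.
  disabledTests =
    [
      "test_disabled_everywhere" # hypothetical test name
    ]
    ++ lib.optionals stdenv.hostPlatform.isDarwin [
      "test_disabled_on_darwin_only" # hypothetical test name
    ];
}

The pattern is easy to verify in nix repl: lib.optionals false [ 1 2 ] evaluates to [ ], while lib.optionals true [ 1 2 ] evaluates to [ 1 2 ].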
