Commit 7f0ff48

Ruff C416: use list() instead of comprehension where possible
1 parent 5ad0a0e commit 7f0ff48
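
Ruff rule C416 ("unnecessary comprehension") flags identity comprehensions such as [x for x in iterable], where each element passes through unchanged; the list() constructor expresses the same copy more directly. Every hunk in this commit is an instance of that rewrite, e.g.:

# Generic before/after for C416 (not a specific hunk from this commit):
xs = range(10)
items_before = [x for x in xs]  # flagged: identity list comprehension
items_after = list(xs)          # suggested fix: same result, clearer intent
assert items_before == items_after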

8 files changed: +17 −17 lines changed


duckdb_packaging/pypi_cleanup.py

Lines changed: 1 addition & 1 deletion
@@ -122,7 +122,7 @@ def session_with_retries() -> Generator[Session, None, None]:
         connect=3,  # try 3 times before giving up on connection errors
         read=3,  # try 3 times before giving up on read errors
         status=3,  # try 3 times before giving up on status errors (see forcelist below)
-        status_forcelist=[429] + [status for status in range(500, 512)],
+        status_forcelist=[429] + list(range(500, 512)),
         other=0,  # whatever else may cause an error should break
         backoff_factor=0.1,  # [0.0s, 0.2s, 0.4s]
         raise_on_redirect=True,  # raise exception when redirect error retries are exhausted
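
For context, a minimal sketch of how a retrying Session built around these keywords might look; the Retry arguments mirror the hunk above, while the imports and the HTTPAdapter/mount wiring are assumptions about code not shown in this commit. (The same Retry block is touched again in tests/slow/test_h2oai_arrow.py at the bottom of this diff.)

# Sketch only: Retry kwargs are from the hunk above; the surrounding
# Session/HTTPAdapter wiring is assumed, not shown in the commit.
from requests import Session
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry = Retry(
    connect=3,
    read=3,
    status=3,
    status_forcelist=[429] + list(range(500, 512)),  # 429 plus 500-511
    other=0,
    backoff_factor=0.1,
    raise_on_redirect=True,
)
session = Session()
session.mount("https://", HTTPAdapter(max_retries=retry))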

tests/fast/arrow/test_arrow_list.py

Lines changed: 1 addition & 1 deletion
@@ -46,7 +46,7 @@ def __init__(self, list, list_view) -> None:
 
 
 def generate_list(child_size) -> ListGenerationResult:
-    input = [i for i in range(child_size)]
+    input = list(range(child_size))
     offsets = []
     sizes = []
     lists = []

tests/fast/arrow/test_arrow_offsets.py

Lines changed: 5 additions & 5 deletions
@@ -140,7 +140,7 @@ def test_struct_of_bools(self, duckdb_cursor, col1_null, col2_null):
     )
     @null_test_parameters()
     def test_struct_of_dates(self, duckdb_cursor, constructor, expected, col1_null, col2_null):
-        tuples = [i for i in range(MAGIC_ARRAY_SIZE)]
+        tuples = list(range(MAGIC_ARRAY_SIZE))
 
         col1 = tuples
         if col1_null:
@@ -230,7 +230,7 @@ def test_struct_of_time(self, duckdb_cursor, constructor, unit, expected, col1_n
         # TODO: We limit the size because we don't support time values > 24 hours  # noqa: TD002, TD003
         size = 86400  # The amount of seconds in a day
 
-        col1 = [i for i in range(size)]
+        col1 = list(range(size))
         if col1_null:
             col1[-1] = None
         # "a" in the struct matches the value for col1
@@ -300,7 +300,7 @@ def test_struct_of_interval(self, duckdb_cursor, constructor, expected, converte
     def test_struct_of_duration(self, duckdb_cursor, constructor, unit, expected, col1_null, col2_null):
         size = MAGIC_ARRAY_SIZE
 
-        col1 = [i for i in range(size)]
+        col1 = list(range(size))
         if col1_null:
             col1[-1] = None
         # "a" in the struct matches the value for col1
@@ -336,7 +336,7 @@ def test_struct_of_timestamp_tz(self, duckdb_cursor, constructor, unit, expected
         size = MAGIC_ARRAY_SIZE
 
         duckdb_cursor.execute("set timezone='UTC'")
-        col1 = [i for i in range(size)]
+        col1 = list(range(size))
         if col1_null:
             col1[-1] = None
         # "a" in the struct matches the value for col1
@@ -549,7 +549,7 @@ def test_struct_of_list_of_blobs(self, duckdb_cursor, col1_null, col2_null):
 
     @null_test_parameters()
     def test_struct_of_list_of_list(self, duckdb_cursor, col1_null, col2_null):
-        col1 = [i for i in range(MAGIC_ARRAY_SIZE)]
+        col1 = list(range(MAGIC_ARRAY_SIZE))
         if col1_null:
             col1[-1] = None
         # "a" in the struct matches the value for col1

tests/fast/arrow/test_arrow_recordbatchreader.py

Lines changed: 4 additions & 4 deletions
@@ -26,7 +26,7 @@ def test_parallel_reader(self, duckdb_cursor):
             format="parquet",
         )
 
-        batches = [r for r in userdata_parquet_dataset.to_batches()]
+        batches = list(userdata_parquet_dataset.to_batches())
         reader = pyarrow.dataset.Scanner.from_batches(batches, schema=userdata_parquet_dataset.schema).to_reader()
 
         rel = duckdb_conn.from_arrow(reader)
@@ -52,7 +52,7 @@ def test_parallel_reader_replacement_scans(self, duckdb_cursor):
             format="parquet",
         )
 
-        batches = [r for r in userdata_parquet_dataset.to_batches()]
+        batches = list(userdata_parquet_dataset.to_batches())
         reader = pyarrow.dataset.Scanner.from_batches(batches, schema=userdata_parquet_dataset.schema).to_reader()  # noqa: F841
 
         assert (
@@ -83,7 +83,7 @@ def test_parallel_reader_register(self, duckdb_cursor):
             format="parquet",
         )
 
-        batches = [r for r in userdata_parquet_dataset.to_batches()]
+        batches = list(userdata_parquet_dataset.to_batches())
         reader = pyarrow.dataset.Scanner.from_batches(batches, schema=userdata_parquet_dataset.schema).to_reader()
 
         duckdb_conn.register("bla", reader)
@@ -109,7 +109,7 @@ def test_parallel_reader_default_conn(self, duckdb_cursor):
             format="parquet",
        )
 
-        batches = [r for r in userdata_parquet_dataset.to_batches()]
+        batches = list(userdata_parquet_dataset.to_batches())
         reader = pyarrow.dataset.Scanner.from_batches(batches, schema=userdata_parquet_dataset.schema).to_reader()
 
         rel = duckdb.from_arrow(reader)
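
The four hunks above are the same rewrite in four near-identical tests. End to end, the pattern they exercise looks roughly like this; the file path and the module-level connection are illustrative assumptions, not taken from the commit:

# Sketch: materialize a dataset's record batches, rebuild a reader from
# them, and hand that reader to DuckDB. A reader is a one-shot stream.
import duckdb
import pyarrow.dataset

dataset = pyarrow.dataset.dataset("userdata.parquet", format="parquet")  # illustrative path
batches = list(dataset.to_batches())  # was: [r for r in dataset.to_batches()]
reader = pyarrow.dataset.Scanner.from_batches(batches, schema=dataset.schema).to_reader()

rel = duckdb.from_arrow(reader)
print(rel.limit(5).fetchall())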

tests/fast/arrow/test_arrow_run_end_encoding.py

Lines changed: 2 additions & 2 deletions
@@ -307,7 +307,7 @@ def test_arrow_ree_struct(self, duckdb_cursor):
         iterables = [x.iterchunks() for x in columns]
         zipped = zip(*iterables)
 
-        structured_chunks = [pa.StructArray.from_arrays([y for y in x], names=names) for x in zipped]
+        structured_chunks = [pa.StructArray.from_arrays(list(x), names=names) for x in zipped]
         structured = pa.chunked_array(structured_chunks)
 
         arrow_tbl = pa.Table.from_arrays([structured], names=["ree"])  # noqa: F841
@@ -453,7 +453,7 @@ def test_arrow_ree_dictionary(self, duckdb_cursor):
         for chunk in columns[0].iterchunks():
             ree = chunk
             chunk_length = len(ree)
-            offsets = [i for i in reversed(range(chunk_length))]
+            offsets = list(reversed(range(chunk_length)))
 
             new_array = pa.DictionaryArray.from_arrays(indices=offsets, dictionary=ree)
             structured_chunks.append(new_array)
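
The first hunk is worth a second look: C416 fires on the nested identity comprehension [y for y in x], not the outer one, turning it into list(x). A standalone sketch of that zip-into-StructArray step, with made-up column data:

# Illustrative data only: zip chunked columns together chunk by chunk
# and combine each group of chunks into one StructArray.
import pyarrow as pa

names = ["a", "b"]
columns = [
    pa.chunked_array([[1, 2], [3]]),
    pa.chunked_array([["x", "y"], ["z"]]),
]

zipped = zip(*[col.iterchunks() for col in columns])
structured_chunks = [pa.StructArray.from_arrays(list(x), names=names) for x in zipped]
structured = pa.chunked_array(structured_chunks)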

tests/fast/pandas/test_df_analyze.py

Lines changed: 1 addition & 1 deletion
@@ -68,7 +68,7 @@ def test_reset_analyze_sample_setting(self, duckdb_cursor):
     @pytest.mark.parametrize("pandas", [NumpyPandas(), ArrowPandas()])
     def test_10750(self, duckdb_cursor, pandas):
         max_row_number = 2000
-        data = {"id": [i for i in range(max_row_number + 1)], "content": [None for _ in range(max_row_number + 1)]}
+        data = {"id": list(range(max_row_number + 1)), "content": [None for _ in range(max_row_number + 1)]}
 
         pdf = pandas.DataFrame(data=data)
         duckdb_cursor.register("content", pdf)
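
Note that only the identity comprehension was rewritten here: [None for _ in range(n)] maps every element to a fresh value rather than passing it through, so C416 leaves it alone. A hypothetical mini-example of the distinction:

# C416 only targets identity comprehensions (hypothetical example):
n = 5
ids = list(range(n))                 # C416 fix for [i for i in range(n)]
content = [None for _ in range(n)]   # not an identity comprehension; C416 is silent
assert content == [None] * n         # an equivalent spelling, not part of this commit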

tests/fast/pandas/test_pandas_na.py

Lines changed: 2 additions & 2 deletions
@@ -41,7 +41,7 @@ def test_pandas_na(self, duckdb_cursor):
         null_index = 3
         df = pd.DataFrame(pd.Series([3, 1, 2, pd.NA, 8, 6]))  # noqa: F841
         res = duckdb_cursor.execute("select * from df").fetchall()
-        items = [x[0] for x in [y for y in res]]
+        items = [x[0] for x in list(res)]
         assert_nullness(items, [null_index])
 
         # Test if pd.NA behaves the same as np.nan once converted
@@ -84,5 +84,5 @@ def test_pandas_na(self, duckdb_cursor):
         na_string_df = pd.DataFrame({"a": [str(pd.NA), str(pd.NA), pd.NA, str(pd.NA), pd.NA, pd.NA, pd.NA, str(pd.NA)]})  # noqa: F841
         null_indices = [2, 4, 5, 6]
         res = duckdb_cursor.execute("select * from na_string_df").fetchall()
-        items = [x[0] for x in [y for y in res]]
+        items = [x[0] for x in list(res)]
         assert_nullness(items, null_indices)
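
One observation on these two hunks: fetchall() already returns a list of row tuples, so the surviving list(res) wrapper is a redundant copy. The mechanical C416 fix preserves behavior, but a manual pass could arguably go one step further:

# Hypothetical further simplification (not part of this commit):
items = [x[0] for x in res]  # res is already a list; no wrapper needed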

tests/slow/test_h2oai_arrow.py

Lines changed: 1 addition & 1 deletion
@@ -206,7 +206,7 @@ def arrow_dataset_register():
         connect=3,  # try 3 times before giving up on connection errors
         read=3,  # try 3 times before giving up on read errors
         status=3,  # try 3 times before giving up on status errors (see forcelist below)
-        status_forcelist=[429] + [status for status in range(500, 512)],
+        status_forcelist=[429] + list(range(500, 512)),
         other=0,  # whatever else may cause an error should break
         backoff_factor=0.1,  # [0.0s, 0.2s, 0.4s]
         raise_on_redirect=True,  # raise exception when redirect error retries are exhausted
