
Commit 36ae44d

lint
1 parent 497e0d1 commit 36ae44d

8 files changed: +43 −31 lines changed


docs/source/developers/python/development.rst

Lines changed: 7 additions & 9 deletions
@@ -42,7 +42,7 @@ Unit Testing
 ============
 
 We are using `pytest <https://docs.pytest.org/en/latest/>`_ to develop our unit
-test suite. After `building the project <build_pyarrow>`_ you can run its unit tests
+test suite. After `building the project <building.html>`_ you can run its unit tests
 like so:
 
 .. code-block::
@@ -111,7 +111,7 @@ included in the distributed wheel packages.
 Running Type Checkers
 ---------------------
 
-We support multiple type checkers. The configuration for each is in
+We support multiple type checkers. Their configurations are in
 ``pyproject.toml``.
 
 **mypy**
@@ -157,19 +157,17 @@ When adding or modifying public APIs:
    to reflect the new or changed function/class signatures.
 
 2. **Include type annotations** where possible. For Cython modules or
-   dynamically, generated APIs such as compute kernels add the corresponding
+   dynamically generated APIs such as compute kernels add the corresponding
    stub in ``pyarrow-stubs/``.
 
 3. **Run type checkers** to ensure the stubs are correct and complete.
 
-The stub files are automatically copied to the built wheel during the build
+The stub files are automatically copied into the built wheel during the build
 process and will be included when users install PyArrow, enabling type checking
-in downstream projects.
+in downstream projects and for users' IDEs.
 
-.. note::
-
-   The ``py.typed`` marker file in the ``pyarrow/`` directory indicates to type
-   checkers that PyArrow supports type checking according to :pep:`561`.
+Note: ``py.typed`` marker file in the ``pyarrow/`` directory indicates to type
+checkers that PyArrow supports type checking according to :pep:`561`.
 
 Doctest
 =======
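For readers unfamiliar with stub files: a minimal sketch of what a hand-written stub for a dynamically generated compute kernel could look like. The stub path and kernel name below are hypothetical, for illustration only, and are not part of this commit:

    # pyarrow-stubs/compute.pyi  (hypothetical path and kernel name)
    from pyarrow import Array

    def ascii_upper(strings: Array) -> Array:
        # Gives type checkers a signature for a kernel that only exists at runtime.
        ...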

python/pyarrow/parquet/core.py

Lines changed: 3 additions & 1 deletion
@@ -1413,7 +1413,9 @@ def __init__(self, path_or_paths, filesystem=None, schema=None, *, filters=None,
         else:
             single_file = path_or_paths
 
-        parquet_format = ds.ParquetFileFormat(**read_options)  # type: ignore[invalid-argument-type]
+        parquet_format = ds.ParquetFileFormat(
+            **read_options  # type: ignore[invalid-argument-type]
+        )
 
         if single_file is not None:
             fragment = parquet_format.make_fragment(single_file, filesystem)
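The reflow above works because a "# type: ignore[...]" comment suppresses errors only on the physical line it sits on, and most checkers report an argument-type error on the offending argument's line. A toy sketch of the same pattern (not PyArrow's API):

    def takes_int(x: int) -> int:
        return x

    result = takes_int(
        "oops"  # type: ignore[arg-type]  # suppression sits on the argument line
    )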

python/pyarrow/tests/parquet/test_pandas.py

Lines changed: 6 additions & 3 deletions
@@ -571,15 +571,18 @@ def test_write_to_dataset_pandas_preserve_extensiondtypes(tempdir):
         table, str(tempdir / "case1"), partition_cols=['part'],
     )
     result = pq.read_table(str(tempdir / "case1")).to_pandas()
-    tm.assert_frame_equal(cast(pd.DataFrame, result[["col"]]), cast(pd.DataFrame, df[["col"]]))
+    tm.assert_frame_equal(
+        cast(pd.DataFrame, result[["col"]]), cast(pd.DataFrame, df[["col"]]))
 
     pq.write_to_dataset(table, str(tempdir / "case2"))
     result = pq.read_table(str(tempdir / "case2")).to_pandas()
-    tm.assert_frame_equal(cast(pd.DataFrame, result[["col"]]), cast(pd.DataFrame, df[["col"]]))
+    tm.assert_frame_equal(
+        cast(pd.DataFrame, result[["col"]]), cast(pd.DataFrame, df[["col"]]))
 
     pq.write_table(table, str(tempdir / "data.parquet"))
     result = pq.read_table(str(tempdir / "data.parquet")).to_pandas()
-    tm.assert_frame_equal(cast(pd.DataFrame, result[["col"]]), cast(pd.DataFrame, df[["col"]]))
+    tm.assert_frame_equal(
+        cast(pd.DataFrame, result[["col"]]), cast(pd.DataFrame, df[["col"]]))
 
 
 @pytest.mark.pandas
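Note that typing.cast, used throughout this test, is purely a static-typing tool; it performs no runtime conversion or check. A small self-contained illustration:

    from typing import cast

    values: object = [1, 2, 3]
    ints = cast(list[int], values)  # no runtime conversion or check happens
    assert ints is values           # cast() returns its argument unchanged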

python/pyarrow/tests/parquet/test_parquet_file.py

Lines changed: 4 additions & 3 deletions
@@ -262,9 +262,10 @@ def get_all_batches(f):
 
         tm.assert_frame_equal(
             batches[batch_no].to_pandas().reset_index(drop=True),
-            file_.read_row_groups([i]).to_pandas().iloc[900:].reset_index(  # type: ignore[arg-type]
-                drop=True
-            )
+            file_
+            .read_row_groups([i])
+            .to_pandas().iloc[900:]
+            .reset_index(drop=True)  # type: ignore[arg-type]
         )
 
         batch_no += 1
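Splitting a fluent chain one call per line, as done above, keeps every line inside the lint limit and pins the trailing suppression to the final call. A generic sketch of the style, unrelated to PyArrow:

    text = "  A,B,C "
    parts = (
        text
        .strip()
        .lower()
        .split(",")
    )
    assert parts == ["a", "b", "c"]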

python/pyarrow/tests/test_acero.py

Lines changed: 6 additions & 3 deletions
@@ -269,20 +269,23 @@ def test_order_by():
     table = pa.table({'a': [1, 2, 3, 4], 'b': [1, 3, None, 2]})
     table_source = Declaration("table_source", TableSourceNodeOptions(table))
 
-    sort_keys: list[tuple[str, Literal["ascending", "descending"]]] = [("b", "ascending")]
+    sort_keys = [("b", "ascending")]
+    sort_keys = cast(list[tuple[str, Literal["ascending", "descending"]]], sort_keys)
     ord_opts = OrderByNodeOptions(sort_keys)
     decl = Declaration.from_sequence([table_source, Declaration("order_by", ord_opts)])
     result = decl.to_table()
     expected = pa.table({"a": [1, 4, 2, 3], "b": [1, 2, 3, None]})
     assert result.equals(expected)
 
-    ord_opts = OrderByNodeOptions([(field("b"), "descending")])  # type: ignore[arg-type]
+    ord_opts = OrderByNodeOptions(
+        [(field("b"), "descending")])  # type: ignore[arg-type]
     decl = Declaration.from_sequence([table_source, Declaration("order_by", ord_opts)])
     result = decl.to_table()
     expected = pa.table({"a": [2, 4, 1, 3], "b": [3, 2, 1, None]})
     assert result.equals(expected)
 
-    ord_opts = OrderByNodeOptions([(1, "descending")], null_placement="at_start")  # type: ignore[arg-type]
+    ord_opts = OrderByNodeOptions(
+        [(1, "descending")], null_placement="at_start")  # type: ignore[arg-type]
     decl = Declaration.from_sequence([table_source, Declaration("order_by", ord_opts)])
     result = decl.to_table()
     expected = pa.table({"a": [3, 2, 4, 1], "b": [None, 3, 2, 1]})
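The first hunk trades an over-long inline annotation for a cast on its own line; both tell the checker the plain strings are the Literal values the API expects. A standalone sketch of the technique:

    from typing import Literal, cast

    SortOrder = Literal["ascending", "descending"]

    keys = [("b", "ascending")]                     # inferred as list[tuple[str, str]]
    keys = cast(list[tuple[str, SortOrder]], keys)  # narrowed for the checker only
    assert keys == [("b", "ascending")]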

python/pyarrow/tests/test_array.py

Lines changed: 5 additions & 5 deletions
@@ -1192,15 +1192,15 @@ def test_map_from_arrays():
     assert result.equals(expected)
 
     # pass in the type explicitly
-    result = pa.MapArray.from_arrays(offsets, keys, items, pa.map_(  # type: ignore[arg-type]
-        keys.type,
-        items.type
-    ))
+    result = pa.MapArray.from_arrays(offsets, keys, items,  # type: ignore[arg-type]
+                                     pa.map_(keys.type, items.type))
     assert result.equals(expected)
 
     # pass in invalid types
     with pytest.raises(pa.ArrowTypeError, match='Expected map type, got string'):
-        pa.MapArray.from_arrays(offsets, keys, items, pa.string())  # type: ignore[arg-type]
+        pa.MapArray.from_arrays(
+            offsets, keys, items, pa.string()  # type: ignore[arg-type]
+        )
 
     with pytest.raises(pa.ArrowTypeError, match='Mismatching map items type'):
         pa.MapArray.from_arrays(offsets, keys, items, pa.map_(  # type: ignore[arg-type]
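As a side note, the pytest.raises(..., match=...) pattern these tests rely on asserts both the exception type and a regex against its message. A minimal standalone example:

    import pytest

    def test_error_message():
        # match= is a regular expression searched against str(the exception)
        with pytest.raises(ValueError, match="invalid literal"):
            int("not a number")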

python/pyarrow/tests/test_fs.py

Lines changed: 8 additions & 5 deletions
@@ -1440,20 +1440,23 @@ def test_s3_proxy_options(monkeypatch, pickle_module):
         S3FileSystem(proxy_options=('http', 'localhost', 9090))
     # Missing scheme
     with pytest.raises(KeyError):
-        S3FileSystem(proxy_options={'host': 'localhost', 'port': 9090})  # type: ignore[missing-typed-dict-key]
+        S3FileSystem(proxy_options={  # type: ignore[missing-typed-dict-key]
+            'host': 'localhost', 'port': 9090})
     # Missing host
     with pytest.raises(KeyError):
-        S3FileSystem(proxy_options={'scheme': 'https', 'port': 9090})  # type: ignore[missing-typed-dict-key]
+        S3FileSystem(proxy_options={  # type: ignore[missing-typed-dict-key]
+            'scheme': 'https', 'port': 9090})
     # Missing port
     with pytest.raises(KeyError):
-        S3FileSystem(proxy_options={'scheme': 'http', 'host': 'localhost'})  # type: ignore[missing-typed-dict-key]
+        S3FileSystem(proxy_options={  # type: ignore[missing-typed-dict-key]
+            'scheme': 'http', 'host': 'localhost'})
     # Invalid proxy URI (invalid scheme httpsB)
     with pytest.raises(pa.ArrowInvalid):
         S3FileSystem(proxy_options='httpsB://localhost:9000')
     # Invalid proxy_options dict (invalid scheme httpA)
     with pytest.raises(pa.ArrowInvalid):
-        S3FileSystem(proxy_options={'scheme': 'httpA', 'host': 'localhost',  # type: ignore[typeddict-item]
-                                    'port': 8999})
+        S3FileSystem(proxy_options={  # type: ignore[typeddict-item]
+            'scheme': 'httpA', 'host': 'localhost', 'port': 8999})
 
 
 @pytest.mark.s3
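The missing-typed-dict-key suppressions follow from how TypedDicts are checked: a dict literal missing a required key is a static error even though, at runtime, it is an ordinary dict. A sketch with a hypothetical ProxyOptions shape (not PyArrow's actual definition):

    from typing import TypedDict

    class ProxyOptions(TypedDict):  # hypothetical shape, for illustration only
        scheme: str
        host: str
        port: int

    def proxy_uri(opts: ProxyOptions) -> str:
        return f"{opts['scheme']}://{opts['host']}:{opts['port']}"

    # A checker flags the next line (no 'scheme' key); at runtime it is just a
    # dict, which is why the test above still expects a KeyError.
    bad = {"host": "localhost", "port": 9090}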

python/pyarrow/tests/test_ipc.py

Lines changed: 4 additions & 2 deletions
@@ -393,7 +393,8 @@ def test_stream_write_table_batches(stream_fixture):
         'one': np.random.randn(20),
     })
 
-    b1 = pa.RecordBatch.from_pandas(df[:10], preserve_index=False)  # type: ignore[arg-type]
+    b1 = pa.RecordBatch.from_pandas(
+        df[:10], preserve_index=False)  # type: ignore[arg-type]
     b2 = pa.RecordBatch.from_pandas(df, preserve_index=False)
 
     table = pa.Table.from_batches([b1, b2, b1])
@@ -976,7 +977,8 @@ def test_batches_with_custom_metadata_roundtrip(ipc_type):
 
     with file_factory(sink, batch.schema) as writer:
         for i in range(batch_count):
-            writer.write_batch(batch, custom_metadata={"batch_id": str(i)})  # type: ignore[arg-type]
+            writer.write_batch(batch, custom_metadata={  # type: ignore[arg-type]
+                "batch_id": str(i)})
         # write a batch without custom metadata
         writer.write_batch(batch)
