Skip to content

Commit ca133a0

Browse files
committed
Linting
1 parent 0e068fe commit ca133a0

File tree

1 file changed

+3
-13
lines changed

1 file changed

+3
-13
lines changed

awswrangler/s3.py

Lines changed: 3 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1688,12 +1688,7 @@ def read_parquet(
16881688
data=data, columns=columns, categories=categories, use_threads=use_threads, validate_schema=validate_schema
16891689
)
16901690
return _read_parquet_chunked(
1691-
data=data,
1692-
columns=columns,
1693-
categories=categories,
1694-
chunked=chunked,
1695-
use_threads=use_threads,
1696-
validate_schema=validate_schema,
1691+
data=data, columns=columns, categories=categories, chunked=chunked, use_threads=use_threads
16971692
)
16981693

16991694

@@ -1728,22 +1723,17 @@ def _read_parquet_chunked(
17281723
data: pyarrow.parquet.ParquetDataset,
17291724
columns: Optional[List[str]] = None,
17301725
categories: List[str] = None,
1731-
validate_schema: bool = True,
17321726
chunked: Union[bool, int] = True,
17331727
use_threads: bool = True,
17341728
) -> Iterator[pd.DataFrame]:
1735-
promote: bool = not validate_schema
17361729
next_slice: Optional[pd.DataFrame] = None
17371730
for piece in data.pieces:
17381731
df: pd.DataFrame = _table2df(
17391732
table=piece.read(
1740-
columns=columns,
1741-
use_threads=use_threads,
1742-
partitions=data.partitions,
1743-
use_pandas_metadata=False
1733+
columns=columns, use_threads=use_threads, partitions=data.partitions, use_pandas_metadata=False
17441734
),
17451735
categories=categories,
1746-
use_threads=use_threads
1736+
use_threads=use_threads,
17471737
)
17481738
if chunked is True:
17491739
yield df

0 commit comments

Comments (0)