Skip to content

Commit e993c7d

Browse files
Add unit test for dtype backend use for test_s3_read_parquet_table (#2307)
1 parent 9029a49 commit e993c7d

File tree

2 files changed: 12 additions, 1 deletion

awswrangler/s3/_read_parquet.py

Lines changed: 3 additions & 1 deletion
@@ -780,7 +780,9 @@ def read_parquet_table(
     )

     partial_cast_function = functools.partial(
-        _data_types.cast_pandas_with_athena_types, dtype=_extract_partitions_dtypes_from_table_details(response=res)
+        _data_types.cast_pandas_with_athena_types,
+        dtype=_extract_partitions_dtypes_from_table_details(response=res),
+        dtype_backend=dtype_backend,
     )
     if _utils.is_pandas_frame(df):
         return partial_cast_function(df)

tests/unit/test_pandas_pyarrow_dtype_backend.py

Lines changed: 9 additions & 0 deletions
@@ -26,6 +26,15 @@ def test_s3_read_parquet(path: str) -> None:
     assert_pandas_equals(df, df2)


+def test_s3_read_parquet_table(path: str, glue_database: str, glue_table: str) -> None:
+    df = get_df_dtype_backend(dtype_backend="pyarrow")
+    wr.s3.to_parquet(df=df, path=path, dataset=True, database=glue_database, table=glue_table)
+
+    df2 = wr.s3.read_parquet_table(database=glue_database, table=glue_table, dtype_backend="pyarrow")
+
+    assert_pandas_equals(df, df2)
+
+
 def test_s3_read_csv(path: str) -> None:
     df = pd.DataFrame({"id": [1, 2, 3], "val": ["foo", "boo", "bar"]})
     wr.s3.to_csv(df=df, path=f"{path}.csv", index=False)

0 commit comments