1 change: 1 addition & 0 deletions doc/source/whatsnew/v2.2.0.rst
@@ -97,6 +97,7 @@ Deprecations
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_json` except ``path_or_buf``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_latex` except ``buf``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_markdown` except ``buf``. (:issue:`54229`)
+- Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_parquet` except ``path``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_pickle` except ``path``. (:issue:`54229`)
 - Deprecated allowing non-keyword arguments in :meth:`DataFrame.to_string` except ``buf``. (:issue:`54229`)
 - Deprecated not passing a tuple to :class:`DataFrameGroupBy.get_group` or :class:`SeriesGroupBy.get_group` when grouping by a length-1 list-like (:issue:`25971`)
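For users, this whatsnew entry means that passing anything other than `path` positionally to `DataFrame.to_parquet` now emits a `FutureWarning` and is slated to become keyword-only in pandas 3.0. A small before/after sketch (file name and engine chosen purely for illustration, based on the warning text added in the test below):

```python
import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3]})

# Deprecated: "pyarrow" is the `engine` argument passed positionally,
# which now triggers a FutureWarning.
df.to_parquet("example.parquet", "pyarrow")

# Preferred: only `path` may stay positional; everything else is keyword-only.
df.to_parquet("example.parquet", engine="pyarrow")
```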
3 changes: 3 additions & 0 deletions pandas/core/frame.py
@@ -2878,6 +2878,9 @@ def to_parquet(
     ) -> None:
         ...

+    @deprecate_nonkeyword_arguments(
+        version="3.0", allowed_args=["self", "path"], name="to_parquet"
+    )
     @doc(storage_options=_shared_docs["storage_options"])
     def to_parquet(
         self,
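The decorator stacked above `@doc` is what produces the warning exercised by the new test below. As a rough idea of the mechanism, here is a minimal sketch of a `deprecate_nonkeyword_arguments`-style decorator; it is not pandas' actual implementation (the real one lives in `pandas.util._decorators` and handles multiple allowed arguments), but it shows how a positional call beyond the allowed ones turns into a `FutureWarning`:

```python
import functools
import warnings


def deprecate_nonkeyword_arguments(version, allowed_args, name):
    """Warn when more positional arguments are passed than `allowed_args` permits.

    Simplified sketch: assumes the only allowed non-self argument is the last
    entry of `allowed_args` (e.g. ["self", "path"]).
    """
    num_allowed = len(allowed_args)

    def decorate(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Too many positional arguments: warn, but still forward the call.
            if len(args) > num_allowed:
                warnings.warn(
                    f"Starting with pandas version {version} all arguments of "
                    f"{name} except for the argument '{allowed_args[-1]}' "
                    "will be keyword-only.",
                    FutureWarning,
                    stacklevel=2,
                )
            return func(*args, **kwargs)

        return wrapper

    return decorate
```

With `allowed_args=["self", "path"]`, a call like `df.to_parquet(buffer, "auto")` passes three positional arguments (counting `self`) and warns, while `df.to_parquet(buffer)` or `df.to_parquet(buffer, engine="auto")` does not.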
18 changes: 15 additions & 3 deletions pandas/tests/io/test_parquet.py
@@ -359,6 +359,18 @@ def test_cross_engine_fp_pa(df_cross_compat, pa, fp):
         tm.assert_frame_equal(result, df[["a", "d"]])


+def test_parquet_pos_args_deprecation():
+    # GH-54229
+    df = pd.DataFrame({"a": [1, 2, 3]})
+    msg = (
+        r"Starting with pandas version 3.0 all arguments of to_parquet except for the "
+        r"argument 'path' will be keyword-only."
+    )
+    with tm.assert_produces_warning(FutureWarning, match=msg, check_stacklevel=False):
+        buffer = BytesIO()
+        df.to_parquet(buffer, "auto")
+
+
 class Base:
     def check_error_on_write(self, df, engine, exc, err_msg):
         # check that we are raising the exception on writing
@@ -998,7 +1010,7 @@ def test_filter_row_groups(self, pa):
         pytest.importorskip("pyarrow")
         df = pd.DataFrame({"a": list(range(0, 3))})
         with tm.ensure_clean() as path:
-            df.to_parquet(path, pa)
+            df.to_parquet(path, engine=pa)
             result = read_parquet(
                 path, pa, filters=[("a", "==", 0)], use_legacy_dataset=False
             )
@@ -1011,7 +1023,7 @@ def test_read_parquet_manager(self, pa, using_array_manager):
         )

         with tm.ensure_clean() as path:
-            df.to_parquet(path, pa)
+            df.to_parquet(path, engine=pa)
             result = read_parquet(path, pa)
             if using_array_manager:
                 assert isinstance(result._mgr, pd.core.internals.ArrayManager)
@@ -1177,7 +1189,7 @@ def test_filter_row_groups(self, fp):
         d = {"a": list(range(0, 3))}
         df = pd.DataFrame(d)
         with tm.ensure_clean() as path:
-            df.to_parquet(path, fp, compression=None, row_group_offsets=1)
+            df.to_parquet(path, engine=fp, compression=None, row_group_offsets=1)
             result = read_parquet(path, fp, filters=[("a", "==", 0)])
             assert len(result) == 1
