
Commit 7d036b1
Fix merge conflicts
1 parent 18392d1

3 files changed (+9, -18 lines)


pyiceberg/table/__init__.py

Lines changed: 1 addition & 5 deletions
@@ -476,7 +476,7 @@ def delete(self, delete_filter: Union[str, BooleanExpression], snapshot_properti
             self.table_metadata.properties.get(TableProperties.DELETE_MODE, TableProperties.DELETE_MODE_COPY_ON_WRITE)
             == TableProperties.DELETE_MODE_MERGE_ON_READ
         ):
-            raise NotImplementedError("Merge on read is not yet supported")
+            warnings.warn("Merge on read is not yet supported, falling back to copy-on-write")
 
         if isinstance(delete_filter, str):
             delete_filter = _parse_row_filter(delete_filter)
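
Note on the resolved behavior above: a delete against a table whose write.delete.mode property is merge-on-read now warns and falls back to copy-on-write instead of raising. A minimal sketch of what callers can expect (assumes `tbl` is a loaded pyiceberg Table with that property set; the filter string is hypothetical):

    import warnings

    # Assumes `tbl` is a loaded pyiceberg Table whose "write.delete.mode"
    # property is set to "merge-on-read".
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        tbl.delete("id = 5")  # previously raised NotImplementedError

    # The delete now completes via the copy-on-write path and only warns.
    assert any("falling back to copy-on-write" in str(w.message) for w in caught)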
@@ -1443,7 +1443,6 @@ def overwrite(
         with self.transaction() as tx:
             tx.overwrite(df=df, overwrite_filter=overwrite_filter, snapshot_properties=snapshot_properties)
 
-<<<<<<< HEAD
     def delete(
         self, delete_filter: Union[BooleanExpression, str] = ALWAYS_TRUE, snapshot_properties: Dict[str, str] = EMPTY_DICT
     ) -> None:
@@ -1457,10 +1456,7 @@ def delete(
         with self.transaction() as tx:
             tx.delete(delete_filter=delete_filter, snapshot_properties=snapshot_properties)
 
-    def add_files(self, file_paths: List[str]) -> None:
-=======
     def add_files(self, file_paths: List[str], snapshot_properties: Dict[str, str] = EMPTY_DICT) -> None:
->>>>>>> aa361d1485f4a914bc0bbc2e574becaec9a773ac
         """
         Shorthand API for adding files as data files to the table.
 
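
The conflict above is resolved in favour of the signature that keeps snapshot_properties. A minimal usage sketch (file paths and property values are hypothetical; `tbl` is assumed to be a loaded pyiceberg Table whose schema matches the files being registered):

    # Registers existing Parquet files as data files and attaches extra
    # key/value pairs to the resulting snapshot's summary.
    tbl.add_files(
        file_paths=["s3://bucket/data/part-0.parquet", "s3://bucket/data/part-1.parquet"],
        snapshot_properties={"commit.source": "backfill-job"},
    )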

tests/integration/test_deletes.py

Lines changed: 1 addition & 1 deletion
@@ -333,7 +333,7 @@ def test_delete_overwrite(session_catalog: RestCatalog) -> None:
     assert [snapshot.summary.operation for snapshot in tbl.snapshots()] == [
         Operation.APPEND,
         Operation.OVERWRITE,
-        Operation.OVERWRITE,
+        Operation.APPEND,
     ]
 
     assert tbl.scan().to_arrow()['ints'] == [[3, 4], [1]]
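
The updated expectation reflects that the trailing snapshot of the overwrite is now recorded as an append. The same history can be checked on any loaded table; a minimal sketch (assumes `tbl` is a pyiceberg Table):

    from pyiceberg.table.snapshots import Operation

    # One entry per snapshot, oldest first.
    ops = [snapshot.summary.operation for snapshot in tbl.snapshots()]
    assert all(isinstance(op, Operation) for op in ops)
    print(ops)  # e.g. [Operation.APPEND, Operation.OVERWRITE, Operation.APPEND]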

tests/integration/test_writes/test_writes.py

Lines changed: 7 additions & 12 deletions
@@ -36,17 +36,12 @@
 from pyiceberg.catalog.hive import HiveCatalog
 from pyiceberg.catalog.sql import SqlCatalog
 from pyiceberg.exceptions import NoSuchTableError
-<<<<<<< HEAD
 from pyiceberg.io.pyarrow import _dataframe_to_data_files
-from pyiceberg.table import TableProperties
-=======
 from pyiceberg.partitioning import PartitionField, PartitionSpec
 from pyiceberg.schema import Schema
-from pyiceberg.table import TableProperties, _dataframe_to_data_files
+from pyiceberg.table import TableProperties
 from pyiceberg.transforms import IdentityTransform
 from pyiceberg.types import IntegerType, NestedField
-from tests.conftest import TEST_DATA_WITH_NULL
->>>>>>> aa361d1485f4a914bc0bbc2e574becaec9a773ac
 from utils import _create_table
 
 
@@ -194,7 +189,7 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi
     ).collect()
 
     operations = [row.operation for row in rows]
-    assert operations == ['append', 'append', 'delete', 'overwrite']
+    assert operations == ['append', 'append', 'delete', 'append']
 
     summaries = [row.summary for row in rows]
 
@@ -269,9 +264,9 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w
         """
     ).collect()
 
-    assert [row.added_data_files_count for row in rows] == [1, 0, 1, 0, 1, 1]
-    assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0, 0]
-    assert [row.deleted_data_files_count for row in rows] == [0, 1, 0, 1, 0, 0]
+    assert [row.added_data_files_count for row in rows] == [1, 0, 1, 1, 1]
+    assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0]
+    assert [row.deleted_data_files_count for row in rows] == [0, 1, 0, 0, 0]
 
 
 @pytest.mark.integration
@@ -522,7 +517,7 @@ def test_summaries_with_only_nulls(
     ).collect()
 
     operations = [row.operation for row in rows]
-    assert operations == ['append', 'append', 'delete', 'overwrite']
+    assert operations == ['append', 'append', 'delete', 'append']
 
     summaries = [row.summary for row in rows]
 
@@ -788,7 +783,7 @@ def test_inspect_snapshots(
     assert df['parent_id'][0].as_py() is None
     assert df['parent_id'][1:].to_pylist() == df['snapshot_id'][:-1].to_pylist()
 
-    assert [operation.as_py() for operation in df['operation']] == ['append', 'delete', 'overwrite', 'append']
+    assert [operation.as_py() for operation in df['operation']] == ['append', 'delete', 'append', 'append']
 
     for manifest_list in df['manifest_list']:
         assert manifest_list.as_py().startswith("s3://")
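
For context, the `df` checked in this test is presumably the snapshots metadata table, which can be fetched directly; a minimal sketch (assumes `tbl` is a loaded pyiceberg Table and that `df` comes from the same inspect call):

    # Assumes the pyiceberg inspect API; returns a PyArrow table with one
    # row per snapshot (columns include snapshot_id, parent_id, operation,
    # manifest_list, summary).
    df = tbl.inspect.snapshots()
    print([op.as_py() for op in df['operation']])  # operation names, oldest first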
