 from pyiceberg.catalog.hive import HiveCatalog
 from pyiceberg.catalog.sql import SqlCatalog
 from pyiceberg.exceptions import NoSuchTableError
-<<<<<<< HEAD
 from pyiceberg.io.pyarrow import _dataframe_to_data_files
-from pyiceberg.table import TableProperties
-=======
 from pyiceberg.partitioning import PartitionField, PartitionSpec
 from pyiceberg.schema import Schema
-from pyiceberg.table import TableProperties, _dataframe_to_data_files
+from pyiceberg.table import TableProperties
 from pyiceberg.transforms import IdentityTransform
 from pyiceberg.types import IntegerType, NestedField
-from tests.conftest import TEST_DATA_WITH_NULL
->>>>>>> aa361d1485f4a914bc0bbc2e574becaec9a773ac
 from utils import _create_table
 
 
@@ -194,7 +189,7 @@ def test_summaries(spark: SparkSession, session_catalog: Catalog, arrow_table_wi
     ).collect()
 
     operations = [row.operation for row in rows]
-    assert operations == ['append', 'append', 'delete', 'overwrite']
+    assert operations == ['append', 'append', 'delete', 'append']
 
     summaries = [row.summary for row in rows]
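
The updated expectation reflects the behavior change this diff encodes: a full overwrite is now committed as a 'delete' snapshot followed by a plain 'append' snapshot rather than a single 'overwrite' snapshot, so the last operation reads 'append'. A minimal sketch of the kind of check this assertion performs, reading Iceberg's snapshots metadata table from Spark; the SparkSession setup and the table identifier "default.my_table" are assumptions, not part of the test:

    # Sketch only: assumes a Spark session wired to an Iceberg catalog named
    # "default" and an existing table "default.my_table".
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.getOrCreate()
    rows = spark.sql(
        "SELECT operation, summary FROM default.my_table.snapshots ORDER BY committed_at"
    ).collect()
    operations = [row.operation for row in rows]
    # Two appends followed by a full overwrite now yield:
    # ['append', 'append', 'delete', 'append']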
@@ -269,9 +264,9 @@ def test_data_files(spark: SparkSession, session_catalog: Catalog, arrow_table_w
     """
     ).collect()
 
-    assert [row.added_data_files_count for row in rows] == [1, 0, 1, 0, 1, 1]
-    assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0, 0]
-    assert [row.deleted_data_files_count for row in rows] == [0, 1, 0, 1, 0, 0]
+    assert [row.added_data_files_count for row in rows] == [1, 0, 1, 1, 1]
+    assert [row.existing_data_files_count for row in rows] == [0, 0, 0, 0, 0]
+    assert [row.deleted_data_files_count for row in rows] == [0, 1, 0, 0, 0]
 
 
 @pytest.mark.integration
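
The expected lists shrink from six manifests to five, consistent with the reworked overwrite path producing one fewer manifest. For reference, these counts are columns of Iceberg's all_manifests metadata table; a hedged sketch of such a query, with the same assumed session and hypothetical table identifier as above:

    # Sketch only: per-manifest file counts via the all_manifests metadata table.
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.getOrCreate()
    rows = spark.sql("SELECT * FROM default.my_table.all_manifests").collect()
    added = [row.added_data_files_count for row in rows]
    existing = [row.existing_data_files_count for row in rows]
    deleted = [row.deleted_data_files_count for row in rows]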
@@ -522,7 +517,7 @@ def test_summaries_with_only_nulls(
     ).collect()
 
     operations = [row.operation for row in rows]
-    assert operations == ['append', 'append', 'delete', 'overwrite']
+    assert operations == ['append', 'append', 'delete', 'append']
 
     summaries = [row.summary for row in rows]
@@ -788,7 +783,7 @@ def test_inspect_snapshots(
     assert df['parent_id'][0].as_py() is None
     assert df['parent_id'][1:].to_pylist() == df['snapshot_id'][:-1].to_pylist()
 
-    assert [operation.as_py() for operation in df['operation']] == ['append', 'delete', 'overwrite', 'append']
+    assert [operation.as_py() for operation in df['operation']] == ['append', 'delete', 'append', 'append']
 
     for manifest_list in df['manifest_list']:
         assert manifest_list.as_py().startswith("s3://")
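
The same behavior can be observed directly through the PyIceberg API. A minimal sketch, assuming a fresh table "default.my_table" in a catalog configured under the name "default" whose schema matches the sample data; none of these names come from the diff:

    # Sketch only: a full overwrite is recorded as a 'delete' snapshot
    # followed by an 'append' snapshot, not a single 'overwrite' snapshot.
    import pyarrow as pa
    from pyiceberg.catalog import load_catalog

    catalog = load_catalog("default")
    tbl = catalog.load_table("default.my_table")

    data = pa.table({"number": pa.array([1, 2, 3], type=pa.int32())})
    tbl.append(data)     # snapshot 1: 'append'
    tbl.overwrite(data)  # snapshots 2 and 3: 'delete', then 'append'

    df = tbl.inspect.snapshots()
    assert [op.as_py() for op in df["operation"]] == ["append", "delete", "append"]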