Commit 509713b

Remove _task_to_table (#1643)
Seems not to be used. Less is more! Noticed this while reviewing #1388
1 parent 4ff1558 commit 509713b

File tree

1 file changed (+0, -31 lines)

pyiceberg/io/pyarrow.py

Lines changed: 0 additions & 31 deletions
@@ -1432,37 +1432,6 @@ def _task_to_record_batches(
             yield result_batch


-def _task_to_table(
-    fs: FileSystem,
-    task: FileScanTask,
-    bound_row_filter: BooleanExpression,
-    projected_schema: Schema,
-    projected_field_ids: Set[int],
-    positional_deletes: Optional[List[ChunkedArray]],
-    case_sensitive: bool,
-    name_mapping: Optional[NameMapping] = None,
-    use_large_types: bool = True,
-) -> Optional[pa.Table]:
-    batches = list(
-        _task_to_record_batches(
-            fs,
-            task,
-            bound_row_filter,
-            projected_schema,
-            projected_field_ids,
-            positional_deletes,
-            case_sensitive,
-            name_mapping,
-            use_large_types,
-        )
-    )
-
-    if len(batches) > 0:
-        return pa.Table.from_batches(batches)
-    else:
-        return None
-
-
 def _read_all_delete_files(io: FileIO, tasks: Iterable[FileScanTask]) -> Dict[str, List[ChunkedArray]]:
     deletes_per_file: Dict[str, List[ChunkedArray]] = {}
     unique_deletes = set(itertools.chain.from_iterable([task.delete_files for task in tasks]))
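For context, the removed helper was only a thin eager wrapper: it drained _task_to_record_batches into a list and built a pa.Table, or returned None when the task yielded no batches. Below is a minimal sketch of the equivalent, assuming _task_to_record_batches keeps the signature shown in the removed call above; the wrapper name and argument order mirror the deleted code and are illustrative, not part of PyIceberg's public API.

import pyarrow as pa

from pyiceberg.io.pyarrow import _task_to_record_batches  # private helper, kept by this commit


def eager_task_to_table(
    fs,
    task,
    bound_row_filter,
    projected_schema,
    projected_field_ids,
    positional_deletes,
    case_sensitive,
    name_mapping=None,
    use_large_types=True,
):
    # Drain the streaming record-batch reader for one FileScanTask into memory.
    batches = list(
        _task_to_record_batches(
            fs,
            task,
            bound_row_filter,
            projected_schema,
            projected_field_ids,
            positional_deletes,
            case_sensitive,
            name_mapping,
            use_large_types,
        )
    )
    # Mirror the removed behaviour: a Table when there is data, None otherwise.
    return pa.Table.from_batches(batches) if batches else None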
