@@ -1776,37 +1776,3 @@ def test_write_optional_list(session_catalog: Catalog) -> None:
     session_catalog.load_table(identifier).append(df_2)
 
     assert len(session_catalog.load_table(identifier).scan().to_arrow()) == 4
-
-
-@pytest.mark.integration
-@pytest.mark.parametrize("format_version", [1, 2])
-def test_evolve_and_write(
-    spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int
-) -> None:
-    identifier = "default.test_evolve_and_write"
-    tbl = _create_table(session_catalog, identifier, properties={"format-version": format_version}, schema=Schema())
-    other_table = session_catalog.load_table(identifier)
-
-    numbers = pa.array([1, 2, 3, 4], type=pa.int32())
-
-    with tbl.update_schema() as upd:
-        # This is not known by other_table
-        upd.add_column("id", IntegerType())
-
-    with other_table.transaction() as tx:
-        # Refreshes the underlying metadata, and the schema
-        other_table.refresh()
-        tx.append(
-            pa.Table.from_arrays(
-                [
-                    numbers,
-                ],
-                schema=pa.schema(
-                    [
-                        pa.field("id", pa.int32(), nullable=True),
-                    ]
-                ),
-            )
-        )
-
-    assert session_catalog.load_table(identifier).scan().to_arrow().column(0).combine_chunks() == numbers
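For context, the deleted test exercised the PyIceberg pattern of evolving a table's schema through one Table handle while a second, stale handle appends data after calling refresh(). A minimal standalone sketch of that pattern follows; it assumes a configured catalog reachable via load_catalog("default") and an existing table identifier (both hypothetical here, standing in for the test suite's session_catalog and _create_table fixtures):

import pyarrow as pa
from pyiceberg.catalog import load_catalog
from pyiceberg.types import IntegerType

# Hypothetical catalog name; the removed test used the session_catalog fixture.
catalog = load_catalog("default")
tbl = catalog.load_table("default.test_evolve_and_write")
other_table = catalog.load_table("default.test_evolve_and_write")

numbers = pa.array([1, 2, 3, 4], type=pa.int32())

# Evolve the schema through the first handle; other_table does not
# know about the new column yet.
with tbl.update_schema() as upd:
    upd.add_column("id", IntegerType())

with other_table.transaction() as tx:
    # refresh() reloads the underlying metadata (and thus the schema),
    # after which the stale handle can append data using the new column.
    other_table.refresh()
    tx.append(
        pa.Table.from_arrays(
            [numbers],
            schema=pa.schema([pa.field("id", pa.int32(), nullable=True)]),
        )
    )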