Skip to content

Commit 09a4266

Browse files
committed
fix test
1 parent 062b835 commit 09a4266

File tree

1 file changed

+7
-7
lines changed
  • tests/integration/test_storage_iceberg/test.py

1 file changed

+7
-7
lines changed

tests/integration/test_storage_iceberg/test.py

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -3945,9 +3945,9 @@ def test_iceberg_write_minmax(started_cluster):
 @pytest.mark.parametrize("storage_type", ["s3", "azure"])
 @pytest.mark.parametrize("cluster_table_function_buckets_batch_size", [0, 100, 1000])
 @pytest.mark.parametrize("input_format_parquet_use_native_reader_v3", [0, 1])
-def test_cluster_table_function_split_by_row_groups(started_cluster_iceberg_with_spark, format_version, storage_type, cluster_table_function_buckets_batch_size,input_format_parquet_use_native_reader_v3):
-    instance = started_cluster_iceberg_with_spark.instances["node1"]
-    spark = started_cluster_iceberg_with_spark.spark_session
+def test_cluster_table_function_split_by_row_groups(started_cluster, format_version, storage_type, cluster_table_function_buckets_batch_size,input_format_parquet_use_native_reader_v3):
+    instance = started_cluster.instances["node1"]
+    spark = started_cluster.spark_session

     TABLE_NAME = (
         "test_iceberg_cluster_"
@@ -3968,7 +3968,7 @@ def add_df(mode):
         )

         files = default_upload_directory(
-            started_cluster_iceberg_with_spark,
+            started_cluster,
             storage_type,
             f"/iceberg_data/default/{TABLE_NAME}/",
             f"/iceberg_data/default/{TABLE_NAME}/",
@@ -3979,15 +3979,15 @@ def add_df(mode):
         return files

     files = add_df(mode="overwrite")
-    for i in range(1, 5 * len(started_cluster_iceberg_with_spark.instances)):
+    for i in range(1, 5 * len(started_cluster.instances)):
         files = add_df(mode="append")

     clusters = instance.query(f"SELECT * FROM system.clusters")
     logging.info(f"Clusters setup: {clusters}")

     # Regular Query only node1
     table_function_expr = get_creation_expression(
-        storage_type, TABLE_NAME, started_cluster_iceberg_with_spark, table_function=True
+        storage_type, TABLE_NAME, started_cluster, table_function=True
     )
     select_regular = (
         instance.query(f"SELECT * FROM {table_function_expr} ORDER BY ALL").strip().split()
@@ -3997,7 +3997,7 @@ def add_df(mode):
     table_function_expr_cluster = get_creation_expression(
         storage_type,
         TABLE_NAME,
-        started_cluster_iceberg_with_spark,
+        started_cluster,
         table_function=True,
         run_on_cluster=True,
     )

0 commit comments

Comments (0)