
Commit 34e9bcf

Adapt test_system_ddl_worker_queue/test.py::test_distributed_ddl_rubbish to 24.8
1 parent 7a49bd3 commit 34e9bcf

File tree

  • tests/integration/test_system_ddl_worker_queue

1 file changed: +14, -8 lines

tests/integration/test_system_ddl_worker_queue/test.py

Lines changed: 14 additions & 8 deletions
@@ -1,5 +1,8 @@
 import pytest
 import time
+from io import StringIO
+import csv
+import logging
 
 from helpers.cluster import ClickHouseCluster
 
@@ -95,10 +98,13 @@ def test_distributed_ddl_rubbish(started_cluster):
         settings={"replication_alter_partitions_sync": "2"},
     )
 
-    zk_content = node1.query(
-        "SELECT name, value, path FROM system.zookeeper WHERE path LIKE '/clickhouse/task_queue/ddl%' SETTINGS allow_unrestricted_reads_from_keeper=true",
-        parse=True,
-    ).to_dict("records")
+    zk_content_raw = node1.query(
+        "SELECT name, value, path FROM system.zookeeper WHERE path LIKE '/clickhouse/task_queue/ddl%' SETTINGS allow_unrestricted_reads_from_keeper=true FORMAT TabSeparatedWithNames",
+        # parse=True,
+    ) # .to_dict("records")
+
+    dict_reader = csv.DictReader(StringIO(zk_content_raw), delimiter='\t')
+    zk_content = [row for row in dict_reader]
 
     original_query = ""
     new_query = "query-artificial-" + str(time.monotonic_ns())
@@ -150,7 +156,7 @@ def test_distributed_ddl_rubbish(started_cluster):
         == 4
     )
 
-    node1.query(
-        f"ALTER TABLE testdb.{test_table} ON CLUSTER test_cluster DROP COLUMN somenewcolumn",
-        settings={"replication_alter_partitions_sync": "2"},
-    )
+    # node1.query(
+    #     f"ALTER TABLE testdb.{test_table} ON CLUSTER test_cluster DROP COLUMN somenewcolumn",
+    #     settings={"replication_alter_partitions_sync": "2"},
+    # )
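For reference, a minimal standalone sketch of the parsing approach this commit introduces: the query is run with FORMAT TabSeparatedWithNames and the raw string is converted into a list of dicts with csv.DictReader, roughly what the old parse=True / to_dict("records") path produced via pandas. The helper name tsv_with_names_to_dicts and the sample string below are illustrative only, not part of the commit.

import csv
from io import StringIO


# Illustrative helper (not from the test itself):
def tsv_with_names_to_dicts(raw: str) -> list[dict]:
    # TabSeparatedWithNames: the first row holds column names, data rows are
    # tab-separated. DictReader with a tab delimiter yields one {column: value}
    # dict per row, mirroring DataFrame.to_dict("records").
    return list(csv.DictReader(StringIO(raw), delimiter="\t"))


# Sample shaped like the system.zookeeper query in the diff:
raw = "name\tvalue\tpath\nddl\t\t/clickhouse/task_queue\n"
print(tsv_with_names_to_dicts(raw))
# [{'name': 'ddl', 'value': '', 'path': '/clickhouse/task_queue'}]

Note that the csv module does not undo ClickHouse's TSV escaping of control characters inside values, which is fine for the simple names and paths this test inspects.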
