@@ -94,7 +94,7 @@ def query_datasource(self, url, sql, expected, iterations):
 Query tasks
 ===========""")
 tasks = druid.get_tasks(
-    url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/tasks",
+    url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default-metrics.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/tasks",
 )
 task_count = len(json.loads(tasks))
 print(f"existing tasks: {task_count}")
@@ -103,7 +103,7 @@ def query_datasource(self, url, sql, expected, iterations):
 Start ingestion task
 ====================""")
 ingestion = druid.post_task(
-    url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/task",
+    url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default-metrics.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/task",
     input="/tmp/druid-quickstartimport.json",
 )
 task_id = json.loads(ingestion)["task"]
@@ -113,11 +113,11 @@ def query_datasource(self, url, sql, expected, iterations):
 Re-query tasks
 ==============""")
 tasks = druid.get_tasks(
-    url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/tasks",
+    url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default-metrics.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/tasks",
 )
 new_task_count = len(json.loads(tasks))
 print(f"new tasks: {new_task_count}")
-print(f"assert {new_task_count} == {task_count + 1}")
+print(f"assert {new_task_count} == {task_count + 1}")
 assert new_task_count == task_count + 1

 print("""
@@ -127,13 +127,13 @@ def query_datasource(self, url, sql, expected, iterations):
 while not job_finished:
     time.sleep(5)
     task = druid.get(
-        url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/task/{url_encoded_taskid}/status",
+        url=f"{protocol}://{druid_cluster_name}-coordinator-default-0.{druid_cluster_name}-coordinator-default-metrics.{namespace}.svc.cluster.local:{coordinator_port}/druid/indexer/v1/task/{url_encoded_taskid}/status",
     )
     task_status = json.loads(task)["status"]["statusCode"]
     print(f"Current task status: [{task_status}]")
-    assert (
-        task_status == "RUNNING" or task_status == "SUCCESS"
-    ), f"Taskstatus not running or succeeeded: {task_status}"
+    assert task_status == "RUNNING" or task_status == "SUCCESS", (
+        f"Taskstatus not running or succeeeded: {task_status}"
+    )
     job_finished = task_status == "SUCCESS"

 print("""
@@ -143,7 +143,7 @@ def query_datasource(self, url, sql, expected, iterations):
 while not broker_ready:
     time.sleep(2)
     broker_ready_rc = druid.check_rc(
-        f"{protocol}://{druid_cluster_name}-broker-default-0.{druid_cluster_name}-broker-default.{namespace}.svc.cluster.local:{broker_port}/druid/broker/v1/readiness"
+        f"{protocol}://{druid_cluster_name}-broker-default-0.{druid_cluster_name}-broker-default-metrics.{namespace}.svc.cluster.local:{broker_port}/druid/broker/v1/readiness"
     )
     broker_ready = broker_ready_rc == 200
     print(f"Broker respondend with [{broker_ready_rc}] to readiness check")
@@ -153,7 +153,7 @@ def query_datasource(self, url, sql, expected, iterations):
 ==============""")
 sample_data_size = 39244
 result = druid.query_datasource(
-    url=f"{protocol}://{druid_cluster_name}-broker-default-0.{druid_cluster_name}-broker-default.{namespace}.svc.cluster.local:{broker_port}/druid/v2/sql",
+    url=f"{protocol}://{druid_cluster_name}-broker-default-0.{druid_cluster_name}-broker-default-metrics.{namespace}.svc.cluster.local:{broker_port}/druid/v2/sql",
     sql={"query": 'select count(*) as c from "wikipedia-2015-09-12"'},
     expected=sample_data_size,
     iterations=12,
0 commit comments