2 files changed: +10 -9

src/query_farm_airport_test_server

@@ -179,25 +179,23 @@ def custom_sorted(values: Sequence[Any]) -> list[Any]:
 # Require statement will ensure this test is run with this extension loaded
 require airport
 
-require-env AIRPORT_TEST_SERVER_AVAILABLE
-
 # Create the initial secret, the token value doesn't matter.
 statement ok
 CREATE SECRET airport_testing (
     type airport,
     auth_token uuid(),
-    scope 'grpc://localhost:50003/');
+    scope 'grpc+tls://airport-ci.query.farm/');
 
 # Reset the test server
 statement ok
-CALL airport_action('grpc://localhost:50003/', 'reset');
+CALL airport_action('grpc+tls://airport-ci.query.farm/', 'reset');
 
 # Create the initial database
 statement ok
-CALL airport_action('grpc://localhost:50003/', 'create_database', 'test1');
+CALL airport_action('grpc+tls://airport-ci.query.farm/', 'create_database', 'test1');
 
 statement ok
-ATTACH 'test1' (TYPE AIRPORT, location 'grpc://localhost:50003/');
+ATTACH 'test1' (TYPE AIRPORT, location 'grpc+tls://airport-ci.query.farm/');
 """,
         file=f,
     )
@@ -251,7 +249,7 @@ def custom_sorted(values: Sequence[Any]) -> list[Any]:
     print(
         """# Reset the test server
 statement ok
-CALL airport_action('grpc://localhost:50003/', 'reset');
+CALL airport_action('grpc+tls://airport-ci.query.farm/', 'reset');
 """,
         file=f,
     )
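In both hunks the generated test preamble now targets the hosted grpc+tls://airport-ci.query.farm/ endpoint instead of grpc://localhost:50003/, and the require-env AIRPORT_TEST_SERVER_AVAILABLE guard is dropped because the tests no longer depend on a locally started server. As a rough sketch only, assuming the airport community extension is installed and loadable and the endpoint is reachable, the same preamble can be exercised from the DuckDB Python client:

import duckdb

SERVER = "grpc+tls://airport-ci.query.farm/"

con = duckdb.connect()
# Assumption: the airport extension has already been installed
# (e.g. INSTALL airport FROM community) so this LOAD succeeds.
con.execute("LOAD airport;")

# Secret scope and action/ATTACH locations use the same URL, mirroring the test file.
con.execute(f"""
    CREATE SECRET airport_testing (
        type airport,
        auth_token uuid(),
        scope '{SERVER}');
""")
con.execute(f"CALL airport_action('{SERVER}', 'reset');")
con.execute(f"CALL airport_action('{SERVER}', 'create_database', 'test1');")
con.execute(f"ATTACH 'test1' (TYPE AIRPORT, location '{SERVER}');")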

@@ -875,10 +875,13 @@ def in_out_handler(
         if input_chunk is None:
             break
 
-        assert parameters.parameters
+        assert parameters.parameters is not None
+        parameter_value = parameters.parameters.column(0).to_pylist()[0]
+
+        # Since input chunks could be different sizes, standardize it.
         result = pa.RecordBatch.from_arrays(
             [
-                parameters.parameters.column(0),
+                [parameter_value] * len(input_chunk),
                 input_chunk.column(0),
             ],
             schema=output_schema,
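The handler change exists because pa.RecordBatch.from_arrays needs every column to have the same length: the parameters batch carries a single row, while each streamed input chunk can have any number of rows, so the scalar parameter is repeated once per input row. A self-contained sketch of that broadcasting step, with hypothetical column names and types standing in for the handler's real output_schema:

import pyarrow as pa

# Hypothetical stand-in for the handler's output_schema.
output_schema = pa.schema([("param", pa.string()), ("value", pa.int64())])

# A one-row parameter batch and an input chunk of arbitrary length.
parameters = pa.record_batch([pa.array(["p1"])], names=["param"])
input_chunk = pa.record_batch([pa.array([10, 20, 30])], names=["value"])

# Pull the scalar out of the one-row parameter batch...
parameter_value = parameters.column(0).to_pylist()[0]

# ...and repeat it so both columns end up with len(input_chunk) rows.
result = pa.RecordBatch.from_arrays(
    [
        pa.array([parameter_value] * len(input_chunk), type=pa.string()),
        input_chunk.column(0),
    ],
    schema=output_schema,
)

assert result.num_rows == len(input_chunk)

Wrapping the repeated values in pa.array keeps the sketch independent of whether from_arrays happens to coerce plain Python lists, which is what the patched handler passes directly.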