@@ -109,7 +109,7 @@ public void testDownsamplingPassthroughDimensions() throws Exception {
         assertDownsampleIndexFieldsAndDimensions(sourceIndex, targetIndex, downsampleConfig);
     }

-    public void testEsqlTSAfterDownsampling() throws Exception {
+    public void testAggMetricInEsqlTSAfterDownsampling() throws Exception {
         String dataStreamName = "metrics-foo";
         Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("host", "cluster")).build();
         putTSDBIndexTemplate("my-template", List.of("metrics-foo"), settings, """
@@ -198,7 +198,8 @@ public void testEsqlTSAfterDownsampling() throws Exception {
         );
         assertAcked(client().execute(TransportDeleteIndexAction.TYPE, new DeleteIndexRequest(sourceIndex)).actionGet());

-        // index to the next backing index
+        // index to the next backing index; random time between 31 and 59 minutes in the future because the default look_ahead_time is 30m
+        // and we don't want to conflict with the previous backing index
         Supplier<XContentBuilder> nextSourceSupplier = () -> {
             String ts = randomDateForRange(now.plusSeconds(60 * 31).toEpochMilli(), now.plusSeconds(60 * 59).toEpochMilli());
             try {
@@ -231,30 +232,29 @@ public void testEsqlTSAfterDownsampling() throws Exception {
                     )
                 )
             );
-
         }
-        // test that implicit casting within time aggregation query works
-        try (
-            var resp = esqlCommand(
-                "TS "
-                    + dataStreamName
-                    + " | STATS min = sum(min_over_time(cpu)), max = sum(max_over_time(cpu)) by cluster, bucket(@timestamp, 1 hour)"
-            )
-        ) {
-            var columns = resp.columns();
-            assertThat(columns, hasSize(4));
-            assertThat(
-                resp.columns(),
-                equalTo(
-                    List.of(
-                        new ColumnInfoImpl("min", "double", null),
-                        new ColumnInfoImpl("max", "double", null),
-                        new ColumnInfoImpl("cluster", "keyword", null),
-                        new ColumnInfoImpl("bucket(@timestamp, 1 hour)", "date", null)
-                    )
-                )
-            );
-            // TODO: verify the numbers are accurate
+
+        // test _over_time commands with implicit casting of aggregate_metric_double
+        for (String innerCommand : List.of("min_over_time", "max_over_time", "avg_over_time", "count_over_time")) {
+            for (String outerCommand : List.of("min", "max", "sum", "count")) {
+                String command = outerCommand + "(" + innerCommand + "(cpu))";
+                String expectedType = innerCommand.equals("count_over_time") || outerCommand.equals("count") ? "long" : "double";
+                try (var resp = esqlCommand("TS " + dataStreamName + " | STATS " + command + " by cluster, bucket(@timestamp, 1 hour)")) {
+                    var columns = resp.columns();
+                    assertThat(columns, hasSize(3));
+                    assertThat(
+                        resp.columns(),
+                        equalTo(
+                            List.of(
+                                new ColumnInfoImpl(command, expectedType, null),
+                                new ColumnInfoImpl("cluster", "keyword", null),
+                                new ColumnInfoImpl("bucket(@timestamp, 1 hour)", "date", null)
+                            )
+                        )
+                    );
+                    // TODO: verify the numbers are accurate
+                }
+            }
         }
     }
