@@ -70,7 +70,7 @@ func (d *Distributor) QueryStream(ctx context.Context, from, to model.Time, matc
 		}
 
 		if s := opentracing.SpanFromContext(ctx); s != nil {
-			s.LogKV("chunk-series", len(result.GetChunkseries()), "time-series", len(result.GetTimeseries()))
+			s.LogKV("chunk-series", len(result.GetChunkseries()))
 		}
 		return nil
 	})
@@ -253,15 +253,11 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, replicationSet ri
 				return nil, validation.LimitError(chunkLimitErr.Error())
 			}
 
-			s := make([][]cortexpb.LabelAdapter, 0, len(resp.Chunkseries)+len(resp.Timeseries))
+			s := make([][]cortexpb.LabelAdapter, 0, len(resp.Chunkseries))
 			for _, series := range resp.Chunkseries {
 				s = append(s, series.Labels)
 			}
 
-			for _, series := range resp.Timeseries {
-				s = append(s, series.Labels)
-			}
-
 			if limitErr := queryLimiter.AddSeries(s...); limitErr != nil {
 				return nil, validation.LimitError(limitErr.Error())
 			}
@@ -275,7 +271,6 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, replicationSet ri
 			}
 
 			result.Chunkseries = append(result.Chunkseries, resp.Chunkseries...)
-			result.Timeseries = append(result.Timeseries, resp.Timeseries...)
 		}
 		return result, nil
 	})
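Note on the retained per-response logic in the two hunks above: the pattern is to collect the label sets of every chunk series in a streamed response, hand them to the query limiter, and only append the series to the partial result once the limit check passes. The following is a minimal, self-contained Go sketch of that pattern; labelAdapter, streamResponse, and seriesLimiter are simplified stand-ins invented for illustration, not the real cortexpb / ingester_client / limiter types used in the diff.

package main

import (
	"errors"
	"fmt"
)

// Simplified stand-ins for the cortexpb / ingester_client types in the diff.
type labelAdapter struct{ Name, Value string }

type chunkSeries struct {
	Labels []labelAdapter
	Chunks [][]byte
}

type streamResponse struct {
	Chunkseries []chunkSeries
}

// seriesLimiter plays the role of queryLimiter in the diff: it tracks how many
// distinct series the query has touched and errors once the limit is exceeded.
type seriesLimiter struct {
	max  int
	seen map[string]struct{}
}

func (l *seriesLimiter) AddSeries(series ...[]labelAdapter) error {
	for _, lbls := range series {
		l.seen[fmt.Sprint(lbls)] = struct{}{}
		if l.max > 0 && len(l.seen) > l.max {
			return errors.New("max series per query limit exceeded")
		}
	}
	return nil
}

// accumulate mirrors the retained per-response handling: gather the label sets
// of the response's chunk series, enforce the series limit, then append the
// series to the partial result built for this ingester.
func accumulate(result, resp *streamResponse, limiter *seriesLimiter) error {
	s := make([][]labelAdapter, 0, len(resp.Chunkseries))
	for _, series := range resp.Chunkseries {
		s = append(s, series.Labels)
	}
	if limitErr := limiter.AddSeries(s...); limitErr != nil {
		return limitErr
	}
	result.Chunkseries = append(result.Chunkseries, resp.Chunkseries...)
	return nil
}

func main() {
	limiter := &seriesLimiter{max: 1, seen: map[string]struct{}{}}
	result := &streamResponse{}
	resp := &streamResponse{Chunkseries: []chunkSeries{
		{Labels: []labelAdapter{{Name: "__name__", Value: "up"}}},
		{Labels: []labelAdapter{{Name: "__name__", Value: "cpu"}}},
	}}
	// Two distinct series against a limit of one: the limiter rejects the query.
	fmt.Println(accumulate(result, resp, limiter))
}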
@@ -286,7 +281,6 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, replicationSet ri
 	span, _ := opentracing.StartSpanFromContext(ctx, "Distributor.MergeIngesterStreams")
 	defer span.Finish()
 	hashToChunkseries := map[string]ingester_client.TimeSeriesChunk{}
-	hashToTimeSeries := map[string]cortexpb.TimeSeries{}
 
 	for _, result := range results {
 		response := result.(*ingester_client.QueryStreamResponse)
@@ -299,40 +293,23 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, replicationSet ri
 			existing.Chunks = append(existing.Chunks, series.Chunks...)
 			hashToChunkseries[key] = existing
 		}
-
-		// Parse any time series
-		for _, series := range response.Timeseries {
-			key := ingester_client.LabelsToKeyString(cortexpb.FromLabelAdaptersToLabels(series.Labels))
-			existing := hashToTimeSeries[key]
-			existing.Labels = series.Labels
-			if existing.Samples == nil {
-				existing.Samples = series.Samples
-			} else {
-				existing.Samples = mergeSamples(existing.Samples, series.Samples)
-			}
-			hashToTimeSeries[key] = existing
-		}
 	}
 
 	resp := &ingester_client.QueryStreamResponse{
 		Chunkseries: make([]ingester_client.TimeSeriesChunk, 0, len(hashToChunkseries)),
-		Timeseries:  make([]cortexpb.TimeSeries, 0, len(hashToTimeSeries)),
 	}
 	for _, series := range hashToChunkseries {
 		resp.Chunkseries = append(resp.Chunkseries, series)
 	}
-	for _, series := range hashToTimeSeries {
-		resp.Timeseries = append(resp.Timeseries, series)
-	}
 
 	respSize := resp.Size()
 	chksSize := resp.ChunksSize()
 	chksCount := resp.ChunksCount()
-	span.SetTag("fetched_series", len(resp.Chunkseries)+len(resp.Timeseries))
+	span.SetTag("fetched_series", len(resp.Chunkseries))
 	span.SetTag("fetched_chunks", chksCount)
 	span.SetTag("fetched_data_bytes", respSize)
 	span.SetTag("fetched_chunks_bytes", chksSize)
-	reqStats.AddFetchedSeries(uint64(len(resp.Chunkseries) + len(resp.Timeseries)))
+	reqStats.AddFetchedSeries(uint64(len(resp.Chunkseries)))
 	reqStats.AddFetchedChunkBytes(uint64(chksSize))
 	reqStats.AddFetchedDataBytes(uint64(respSize))
 	reqStats.AddFetchedChunks(uint64(chksCount))
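For reference, the chunk-series branch that remains in Distributor.MergeIngesterStreams deduplicates series across replicas by keying a map on the label set and concatenating the chunks of entries that share a key, then flattening the map back into a response. The sketch below reproduces that pattern with simplified stand-in types; labelsToKey, timeSeriesChunk, and queryStreamResponse are illustrative placeholders rather than the actual ingester_client API.

package main

import "fmt"

// Simplified stand-ins for the types in the diff; the real code uses
// ingester_client.TimeSeriesChunk and cortexpb.LabelAdapter.
type labelAdapter struct{ Name, Value string }

type timeSeriesChunk struct {
	Labels []labelAdapter
	Chunks []string
}

type queryStreamResponse struct {
	Chunkseries []timeSeriesChunk
}

// labelsToKey stands in for ingester_client.LabelsToKeyString: any stable,
// injective encoding of the label set works as a map key.
func labelsToKey(lbls []labelAdapter) string {
	return fmt.Sprint(lbls)
}

// mergeChunkSeries collapses series returned by multiple replicas: series with
// the same label set become one entry whose chunks are the concatenation of
// the chunks from every replica, mirroring the retained merge branch above.
func mergeChunkSeries(results []*queryStreamResponse) *queryStreamResponse {
	hashToChunkseries := map[string]timeSeriesChunk{}
	for _, response := range results {
		for _, series := range response.Chunkseries {
			key := labelsToKey(series.Labels)
			existing := hashToChunkseries[key]
			existing.Labels = series.Labels
			existing.Chunks = append(existing.Chunks, series.Chunks...)
			hashToChunkseries[key] = existing
		}
	}

	resp := &queryStreamResponse{Chunkseries: make([]timeSeriesChunk, 0, len(hashToChunkseries))}
	for _, series := range hashToChunkseries {
		resp.Chunkseries = append(resp.Chunkseries, series)
	}
	return resp
}

func main() {
	up := []labelAdapter{{Name: "__name__", Value: "up"}}
	merged := mergeChunkSeries([]*queryStreamResponse{
		{Chunkseries: []timeSeriesChunk{{Labels: up, Chunks: []string{"c1"}}}},
		{Chunkseries: []timeSeriesChunk{{Labels: up, Chunks: []string{"c2"}}}},
	})
	// One deduplicated series carrying the chunks from both replicas: 1 2
	fmt.Println(len(merged.Chunkseries), len(merged.Chunkseries[0].Chunks))
}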