Commit b3eb28f

Add /api/v1/info_labels endpoint for PromQL info() autocomplete
Port the /api/v1/info_labels endpoint from mimir-prometheus to enable autocomplete suggestions for the PromQL info() function. This endpoint returns data labels from info metrics (like target_info). The endpoint is routed through the labels middleware for caching, validation, and retry behavior consistent with other label queries.

Changes:
- Update mimir-prometheus dependency to arve/info-autocomplete branch
- Register /api/v1/info_labels route in API and handlers
- Add IsInfoLabelsQuery() detection in query frontend middleware
- Handle info_labels in codec's DecodeLabelsSeriesQueryRequest
- Add comprehensive test coverage

Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
1 parent c8804b6 commit b3eb28f
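
As a quick illustration of the new API (not part of the commit), the sketch below queries the endpoint from a plain Go client. The /prometheus path prefix, the localhost:8080 address, and the X-Scope-OrgID tenant header are assumptions based on common Mimir defaults; the query parameters (metric_match, match[], start, end, limit) and the response shape follow the integration test added further down in this commit.

// Minimal client-side sketch of the new endpoint; addresses and headers are assumptions.
package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
)

type infoLabelsResponse struct {
    Status string              `json:"status"`
    Data   map[string][]string `json:"data"` // data label name -> values
}

func main() {
    params := url.Values{}
    params.Set("metric_match", "target_info")   // which info metric to inspect (default: target_info)
    params.Set("match[]", `{job="prometheus"}`) // optional selector for the base series

    req, err := http.NewRequest(http.MethodGet,
        "http://localhost:8080/prometheus/api/v1/info_labels?"+params.Encode(), nil) // address is an assumption
    if err != nil {
        panic(err)
    }
    req.Header.Set("X-Scope-OrgID", "demo") // tenant ID; depends on your auth setup

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var out infoLabelsResponse
    if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
        panic(err)
    }
    fmt.Println(out.Status, out.Data) // e.g. success map[commit:[abc123 def456] version:[2.44.0 2.45.0]]
}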

File tree

31 files changed, +1605 -148 lines


CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -40,6 +40,7 @@
 * [FEATURE] Distributor, Ingester: Add experimental reactive limiter setting `-distributor.reactive-limiter.max-limit-factor-decay`. #14007
 * [FEATURE] Ingester: Added experimental per-tenant early head compaction. New per-tenant limits `-ingester.early-head-compaction-owned-series-threshold` and `-ingester.early-head-compaction-min-estimated-series-reduction-percentage` trigger compaction based on owned series count. #13980
 * [FEATURE] Ingester: Added experimental support to run ingesters with no tokens in the ring when ingest storage is enabled. You can set `-ingester.ring.num-tokens=0` to enable this feature. #14024
+* [FEATURE] Querier: Add `/api/v1/info_labels` endpoint for PromQL `info()` function autocomplete. Returns data labels from info metrics (like `target_info`) for use in autocomplete suggestions. #XXXXX
 * [ENHANCEMENT] Compactor, Store-gateway: Remove experimental setting `-compactor.upload-sparse-index-headers` and always upload sparse index-headers. This improves lazy loading performance in the store-gateway. #13089 #13882
 * [ENHANCEMENT] Querier: Reduce memory consumption of queries when samples for a single series are retrieved from multiple ingesters or store-gateways. #13806
 * [ENHANCEMENT] Store-gateway: Verify CRC32 checksums for 1 out of every 128 chunks read from object storage and the chunks cache to detect corruption. #13151

go.mod

Lines changed: 1 addition & 1 deletion

@@ -354,7 +354,7 @@ require (
     sigs.k8s.io/yaml v1.6.0 // indirect
 )
 
-replace github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v1.8.2-0.20260121014331-78e801ba42c3
+replace github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v1.8.2-0.20260126071905-b42a5d46a126
 
 // Replace memberlist with our fork which includes some changes that haven't been
 // merged upstream yet for years and we don't expect to change anytime soon.

go.sum

Lines changed: 2 additions & 2 deletions

@@ -599,8 +599,8 @@ github.com/grafana/memberlist v0.3.1-0.20251126142931-6f9f62ab6f86 h1:aTwfQuroOm
 github.com/grafana/memberlist v0.3.1-0.20251126142931-6f9f62ab6f86/go.mod h1:h60o12SZn/ua/j0B6iKAZezA4eDaGsIuPO70eOaJ6WE=
 github.com/grafana/mimir-otlptranslator v0.0.0-20251017074411-ea1e8f863e1d h1:k4NIVPYPP0sLJoGNzGwoQs2MpnWTvTcgbWPCzfdX66c=
 github.com/grafana/mimir-otlptranslator v0.0.0-20251017074411-ea1e8f863e1d/go.mod h1:vRYWnXvI6aWGpsdY/mOT/cbeVRBlPWtBNDb7kGR3uKM=
-github.com/grafana/mimir-prometheus v1.8.2-0.20260121014331-78e801ba42c3 h1:GLFngBDQaEhc3iCv1kqhCta8urYOuZ9/kXOqHHowcHE=
-github.com/grafana/mimir-prometheus v1.8.2-0.20260121014331-78e801ba42c3/go.mod h1:/rHCod81SRvABiU+mIGi4KqNNhGw20WJNAo+Sb76vp8=
+github.com/grafana/mimir-prometheus v1.8.2-0.20260126071905-b42a5d46a126 h1:CVQazQMCXMyF3uHLsAfDMUZUxMsHk52ZueIrrMl7yws=
+github.com/grafana/mimir-prometheus v1.8.2-0.20260126071905-b42a5d46a126/go.mod h1:yO3BMDkb3uGykrypgh5HpRSqKDVq8qnQ/SVwnb7nhWc=
 github.com/grafana/opentracing-contrib-go-stdlib v0.0.0-20230509071955-f410e79da956 h1:em1oddjXL8c1tL0iFdtVtPloq2hRPen2MJQKoAWpxu0=
 github.com/grafana/opentracing-contrib-go-stdlib v0.0.0-20230509071955-f410e79da956/go.mod h1:qtI1ogk+2JhVPIXVc6q+NHziSmy2W5GbdQZFUHADCBU=
 github.com/grafana/otel-profiling-go v0.5.1 h1:stVPKAFZSa7eGiqbYuG25VcqYksR6iWvF3YH66t4qL8=
Lines changed: 235 additions & 0 deletions

@@ -0,0 +1,235 @@
// SPDX-License-Identifier: AGPL-3.0-only
//go:build requires_docker

package integration

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
    "sort"
    "testing"
    "time"

    "github.com/grafana/e2e"
    e2edb "github.com/grafana/e2e/db"
    "github.com/prometheus/prometheus/model/labels"
    "github.com/prometheus/prometheus/prompb"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

    "github.com/grafana/mimir/integration/e2emimir"
)

func TestQuerierInfoLabels(t *testing.T) {
    s, err := e2e.NewScenario(networkName)
    require.NoError(t, err)
    defer s.Close()

    // Start dependencies.
    consul := e2edb.NewConsul()
    require.NoError(t, s.StartAndWaitReady(consul))

    flags := mergeFlags(BlocksStorageFlags(), BlocksStorageS3Flags(), map[string]string{})

    // Start minio.
    minio := e2edb.NewMinio(9000, flags["-blocks-storage.s3.bucket-name"])
    require.NoError(t, s.StartAndWaitReady(minio))

    // Start Mimir components.
    distributor := e2emimir.NewDistributor("distributor", consul.NetworkHTTPEndpoint(), flags)
    ingester := e2emimir.NewIngester("ingester", consul.NetworkHTTPEndpoint(), flags)
    querier := e2emimir.NewQuerier("querier", consul.NetworkHTTPEndpoint(), flags)
    require.NoError(t, s.StartAndWaitReady(distributor, ingester, querier))

    // Wait until distributor and querier have updated the ring.
    require.NoError(t, distributor.WaitSumMetricsWithOptions(e2e.Equals(1), []string{"cortex_ring_members"}, e2e.WithLabelMatchers(
        labels.MustNewMatcher(labels.MatchEqual, "name", "ingester"),
        labels.MustNewMatcher(labels.MatchEqual, "state", "ACTIVE"))))
    require.NoError(t, querier.WaitSumMetricsWithOptions(e2e.Equals(1), []string{"cortex_ring_members"}, e2e.WithLabelMatchers(
        labels.MustNewMatcher(labels.MatchEqual, "name", "ingester"),
        labels.MustNewMatcher(labels.MatchEqual, "state", "ACTIVE"))))

    // Create client for pushing and querying.
    client, err := e2emimir.NewClient(distributor.HTTPEndpoint(), querier.HTTPEndpoint(), "", "", userID)
    require.NoError(t, err)

    now := time.Now()

    // Push target_info metrics with data labels.
    // target_info is the standard info metric that contains identifying labels (job, instance)
    // and data labels (version, commit, etc.) that describe the target.
    targetInfoSeries := []prompb.TimeSeries{
        {
            Labels: []prompb.Label{
                {Name: "__name__", Value: "target_info"},
                {Name: "job", Value: "prometheus"},
                {Name: "instance", Value: "localhost:9090"},
                {Name: "version", Value: "2.45.0"},
                {Name: "commit", Value: "abc123"},
            },
            Samples: []prompb.Sample{{Value: 1, Timestamp: now.UnixMilli()}},
        },
        {
            Labels: []prompb.Label{
                {Name: "__name__", Value: "target_info"},
                {Name: "job", Value: "prometheus"},
                {Name: "instance", Value: "localhost:9091"},
                {Name: "version", Value: "2.44.0"},
                {Name: "commit", Value: "def456"},
            },
            Samples: []prompb.Sample{{Value: 1, Timestamp: now.UnixMilli()}},
        },
        {
            Labels: []prompb.Label{
                {Name: "__name__", Value: "target_info"},
                {Name: "job", Value: "grafana"},
                {Name: "instance", Value: "localhost:3000"},
                {Name: "version", Value: "10.0.0"},
                {Name: "edition", Value: "oss"},
            },
            Samples: []prompb.Sample{{Value: 1, Timestamp: now.UnixMilli()}},
        },
    }

    // Also push a build_info metric to test metric_match parameter.
    buildInfoSeries := []prompb.TimeSeries{
        {
            Labels: []prompb.Label{
                {Name: "__name__", Value: "build_info"},
                {Name: "job", Value: "mimir"},
                {Name: "instance", Value: "localhost:8080"},
                {Name: "branch", Value: "main"},
                {Name: "goversion", Value: "go1.21.0"},
            },
            Samples: []prompb.Sample{{Value: 1, Timestamp: now.UnixMilli()}},
        },
    }

    // Push all series.
    res, err := client.Push(targetInfoSeries)
    require.NoError(t, err)
    require.Equal(t, http.StatusOK, res.StatusCode)

    res, err = client.Push(buildInfoSeries)
    require.NoError(t, err)
    require.Equal(t, http.StatusOK, res.StatusCode)

    t.Run("basic info_labels query returns data labels from target_info", func(t *testing.T) {
        result, err := queryInfoLabels(client, querier.HTTPEndpoint(), nil)
        require.NoError(t, err)

        // Should return data labels from target_info (excludes identifying labels like job, instance, __name__)
        // Data labels are: version, commit, edition
        assert.Contains(t, result, "version")
        assert.Contains(t, result, "commit")
        assert.Contains(t, result, "edition")

        // Verify values are sorted and correct
        sort.Strings(result["version"])
        assert.Equal(t, []string{"10.0.0", "2.44.0", "2.45.0"}, result["version"])
    })

    t.Run("info_labels with metric_match returns data labels from specified info metric", func(t *testing.T) {
        result, err := queryInfoLabels(client, querier.HTTPEndpoint(), map[string]string{
            "metric_match": "build_info",
        })
        require.NoError(t, err)

        // Should return data labels from build_info
        assert.Contains(t, result, "branch")
        assert.Contains(t, result, "goversion")
        assert.Equal(t, []string{"main"}, result["branch"])
        assert.Equal(t, []string{"go1.21.0"}, result["goversion"])

        // Should NOT contain target_info labels
        assert.NotContains(t, result, "version")
        assert.NotContains(t, result, "commit")
    })

    t.Run("info_labels with match[] filters to base metrics", func(t *testing.T) {
        result, err := queryInfoLabels(client, querier.HTTPEndpoint(), map[string]string{
            "match[]": `{job="prometheus"}`,
        })
        require.NoError(t, err)

        // Should only return labels from target_info series where job=prometheus
        assert.Contains(t, result, "version")
        assert.Contains(t, result, "commit")

        // Should have prometheus versions only
        sort.Strings(result["version"])
        assert.Equal(t, []string{"2.44.0", "2.45.0"}, result["version"])

        // Should NOT have grafana-specific labels
        assert.NotContains(t, result, "edition")
    })

    t.Run("info_labels with limit truncates values", func(t *testing.T) {
        result, err := queryInfoLabels(client, querier.HTTPEndpoint(), map[string]string{
            "limit": "1",
        })
        require.NoError(t, err)

        // Each label should have at most 1 value due to limit
        for labelName, values := range result {
            assert.LessOrEqual(t, len(values), 1, "label %s should have at most 1 value", labelName)
        }
    })

    t.Run("info_labels with time range", func(t *testing.T) {
        // Query with a time range that includes our data
        start := now.Add(-1 * time.Hour)
        end := now.Add(1 * time.Hour)
        result, err := queryInfoLabels(client, querier.HTTPEndpoint(), map[string]string{
            "start": fmt.Sprintf("%d", start.Unix()),
            "end":   fmt.Sprintf("%d", end.Unix()),
        })
        require.NoError(t, err)

        // Should return data
        assert.NotEmpty(t, result)
        assert.Contains(t, result, "version")
    })
}

// infoLabelsResponse represents the API response from /api/v1/info_labels.
type infoLabelsResponse struct {
    Status string              `json:"status"`
    Data   map[string][]string `json:"data"`
}

// queryInfoLabels queries the /api/v1/info_labels endpoint with optional parameters.
func queryInfoLabels(client *e2emimir.Client, querierAddress string, params map[string]string) (map[string][]string, error) {
    u, err := url.Parse(fmt.Sprintf("http://%s/prometheus/api/v1/info_labels", querierAddress))
    if err != nil {
        return nil, err
    }

    q := u.Query()
    for k, v := range params {
        q.Set(k, v)
    }
    u.RawQuery = q.Encode()

    resp, body, err := client.DoGetBody(u.String())
    if err != nil {
        return nil, err
    }

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status code: %d, body: %s", resp.StatusCode, string(body))
    }

    var result infoLabelsResponse
    if err := json.Unmarshal(body, &result); err != nil {
        return nil, fmt.Errorf("failed to unmarshal response: %w, body: %s", err, string(body))
    }

    if result.Status != "success" {
        return nil, fmt.Errorf("unexpected status: %s", result.Status)
    }

    return result.Data, nil
}

pkg/api/api.go

Lines changed: 1 addition & 0 deletions

@@ -465,6 +465,7 @@ func (a *API) RegisterQueryAPI(handler http.Handler, buildInfoHandler http.Handl
     a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/query_exemplars"), handler, true, true, "GET", "POST")
     a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/labels"), handler, true, true, "GET", "POST")
     a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/label/{name}/values"), handler, true, true, "GET")
+    a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/info_labels"), handler, true, true, "GET", "POST")
     a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/series"), handler, true, true, "GET", "POST", "DELETE")
     a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/status/buildinfo"), buildInfoHandler, false, true, "GET")
     a.RegisterRoute(path.Join(a.cfg.PrometheusHTTPPrefix, "/api/v1/metadata"), handler, true, true, "GET")

pkg/api/handlers.go

Lines changed: 1 addition & 0 deletions

@@ -311,6 +311,7 @@ func NewQuerierHandler(
     router.Path(path.Join(promPrefix, "/query_exemplars")).Methods("GET", "POST").Handler(exemplarsQueryStats.Wrap(promRouter))
     router.Path(path.Join(promPrefix, "/labels")).Methods("GET", "POST").Handler(labelsQueryStats.Wrap(promRouter))
     router.Path(path.Join(promPrefix, "/label/{name}/values")).Methods("GET").Handler(labelsQueryStats.Wrap(promRouter))
+    router.Path(path.Join(promPrefix, "/info_labels")).Methods("GET", "POST").Handler(labelsQueryStats.Wrap(unlimitedMemoryTrackerMiddleware.Wrap(promRouter)))
     router.Path(path.Join(promPrefix, "/series")).Methods("GET", "POST", "DELETE").Handler(seriesQueryStats.Wrap(unlimitedMemoryTrackerMiddleware.Wrap(promRouter)))
     router.Path(path.Join(promPrefix, "/metadata")).Methods("GET").Handler(metadataQueryStats.Wrap(querier.NewMetadataHandler(metadataSupplier)))
     router.Path(path.Join(promPrefix, "/cardinality/label_names")).Methods("GET", "POST").Handler(cardinalityQueryStats.Wrap(querier.LabelNamesCardinalityHandler(distributor, limits)))

pkg/frontend/querymiddleware/codec.go

Lines changed: 1 addition & 1 deletion

@@ -459,7 +459,7 @@ func (Codec) DecodeLabelsSeriesQueryRequest(_ context.Context, r *http.Request)
             Limit: limit,
         }, nil
     }
-    if IsLabelNamesQuery(r.URL.Path) {
+    if IsLabelNamesQuery(r.URL.Path) || IsInfoLabelsQuery(r.URL.Path) {
         return &PrometheusLabelNamesQueryRequest{
             Path:    r.URL.Path,
             Headers: headers,
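
The commit description mentions a new IsInfoLabelsQuery() helper, but only its call site appears in this diff. As a hedged sketch of what such a detector typically looks like (the actual helper in the query-frontend package may differ), it is likely just a path check alongside the existing IsLabelNamesQuery:

// Illustrative sketch only; not the code from this commit.
package querymiddleware

import "strings"

// IsInfoLabelsQuery reports whether the request path targets the
// /api/v1/info_labels endpoint. The real implementation may normalize
// prefixes or trailing slashes differently.
func IsInfoLabelsQuery(path string) bool {
    return strings.HasSuffix(path, "/api/v1/info_labels")
}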

pkg/frontend/querymiddleware/codec_test.go

Lines changed: 32 additions & 0 deletions

@@ -723,6 +723,38 @@ func TestCodec_DecodeEncodeLabelsQueryRequest(t *testing.T) {
             expectedGetStartOrDefault: 1708502400 * 1e3,
             expectedGetEndOrDefault:   1708588800 * 1e3,
         },
+        {
+            name:        "info_labels with start and end timestamps, no matcher sets",
+            url:         "/api/v1/info_labels?end=1708588800&start=1708502400",
+            expectedURL: "/api/v1/info_labels?end=1708588800&start=1708502400",
+            expectedStruct: &PrometheusLabelNamesQueryRequest{
+                Path:             "/api/v1/info_labels",
+                Start:            1708502400 * 1e3,
+                End:              1708588800 * 1e3,
+                LabelMatcherSets: nil,
+            },
+            expectedGetLabelName:      "",
+            expectedGetStartOrDefault: 1708502400 * 1e3,
+            expectedGetEndOrDefault:   1708588800 * 1e3,
+        },
+        {
+            name:        "info_labels with matcher sets and limit",
+            url:         "/api/v1/info_labels?end=1708588800&limit=10&match%5B%5D=up%7Bjob%3D%22prometheus%22%7D&start=1708502400",
+            expectedURL: "/api/v1/info_labels?end=1708588800&limit=10&match%5B%5D=up%7Bjob%3D%22prometheus%22%7D&start=1708502400",
+            expectedStruct: &PrometheusLabelNamesQueryRequest{
+                Path:  "/api/v1/info_labels",
+                Start: 1708502400 * 1e3,
+                End:   1708588800 * 1e3,
+                Limit: 10,
+                LabelMatcherSets: []string{
+                    `up{job="prometheus"}`,
+                },
+            },
+            expectedGetLabelName:      "",
+            expectedLimit:             10,
+            expectedGetStartOrDefault: 1708502400 * 1e3,
+            expectedGetEndOrDefault:   1708588800 * 1e3,
+        },
     } {
         t.Run(testCase.name, func(t *testing.T) {
             for _, reqMethod := range []string{http.MethodGet, http.MethodPost} {

pkg/frontend/querymiddleware/labels_query_cache.go

Lines changed: 21 additions & 2 deletions

@@ -25,6 +25,7 @@ import (
 const (
     labelNamesQueryCachePrefix  = "ln:"
     labelValuesQueryCachePrefix = "lv:"
+    infoLabelsQueryCachePrefix  = "il:"
 
     stringParamSeparator = rune(0)
 )
@@ -59,9 +60,20 @@ func (g DefaultCacheKeyGenerator) LabelValues(r *http.Request) (*GenericQueryCac
     }
 
     var cacheKeyPrefix string
+    var metricMatch string
     switch labelValuesReq.(type) {
     case *PrometheusLabelNamesQueryRequest:
-        cacheKeyPrefix = labelNamesQueryCachePrefix
+        if IsInfoLabelsQuery(r.URL.Path) {
+            cacheKeyPrefix = infoLabelsQueryCachePrefix
+            // Include metric_match parameter in cache key for info_labels requests.
+            // Default is "target_info" if not specified.
+            metricMatch = r.FormValue("metric_match")
+            if metricMatch == "" {
+                metricMatch = "target_info"
+            }
+        } else {
+            cacheKeyPrefix = labelNamesQueryCachePrefix
+        }
     case *PrometheusLabelValuesQueryRequest:
         cacheKeyPrefix = labelValuesQueryCachePrefix
     }
@@ -80,6 +92,7 @@ func (g DefaultCacheKeyGenerator) LabelValues(r *http.Request) (*GenericQueryCac
         labelValuesReq.GetLabelName(),
         labelMatcherSets,
         labelValuesReq.GetLimit(),
+        metricMatch,
     )
 
     return &GenericQueryCacheKey{
@@ -88,7 +101,7 @@ func (g DefaultCacheKeyGenerator) LabelValues(r *http.Request) (*GenericQueryCac
     }, nil
 }
 
-func generateLabelsQueryRequestCacheKey(startTime, endTime int64, labelName string, matcherSets [][]*labels.Matcher, limit uint64) string {
+func generateLabelsQueryRequestCacheKey(startTime, endTime int64, labelName string, matcherSets [][]*labels.Matcher, limit uint64, metricMatch string) string {
     var (
         twoHoursMillis = (2 * time.Hour).Milliseconds()
         b              = strings.Builder{}
@@ -129,6 +142,12 @@ func generateLabelsQueryRequestCacheKey(startTime, endTime int64, labelName stri
         b.WriteString(strconv.Itoa(int(limit)))
     }
 
+    // Add metric_match for info_labels queries.
+    if metricMatch != "" {
+        b.WriteRune(stringParamSeparator)
+        b.WriteString(metricMatch)
+    }
+
     return b.String()
 }