@@ -5,15 +5,26 @@ package target
 
 import (
 	"encoding/binary"
-	"slices"
 	"strconv"
 	"strings"
+	"sync"
 
 	"github.com/cespare/xxhash/v2"
 	"github.com/prometheus/common/model"
 	"github.com/prometheus/prometheus/model/labels"
 )
 
+// seps is the separator used between label name/value pairs in hash computation.
+// This matches Prometheus's label hashing approach.
+var seps = []byte{'\xff'}
+
+// hasherPool is a pool of xxhash digesters for efficient hash computation.
+var hasherPool = sync.Pool{
+	New: func() any {
+		return xxhash.New()
+	},
+}
+
 // nodeLabels are labels that are used to identify the node on which the given
 // target is residing. To learn more about these labels, please refer to:
 // https://prometheus.io/docs/prometheus/latest/configuration/configuration/#kubernetes_sd_config
@@ -37,40 +48,53 @@ func (h ItemHash) String() string {
 
 // Item represents a target to be scraped.
 type Item struct {
-	JobName         string
-	TargetURL       string
-	Labels          labels.Labels
-	// relabeledLabels contains the final labels after Prometheus relabeling processing.
-	relabeledLabels labels.Labels
-	CollectorName   string
-	hash            ItemHash
+	JobName       string
+	TargetURL     string
+	Labels        labels.Labels
+	CollectorName string
+	hash          ItemHash
 }
 
 type ItemOption func(*Item)
 
-func WithRelabeledLabels(lbs labels.Labels) ItemOption {
+// WithHash sets a precomputed hash on the item.
+// Use this when the hash has been computed during relabeling to avoid recomputation.
+func WithHash(hash ItemHash) ItemOption {
 	return func(i *Item) {
-		// In Prometheus, labels with the MetaLabelPrefix are discarded after relabeling, which means they are not used in hash calculation.
-		// For details, see https://github.com/prometheus/prometheus/blob/e6cfa720fbe6280153fab13090a483dbd40bece3/scrape/target.go#L534.
-		writeIndex := 0
-		relabeledLabels := make(labels.Labels, len(lbs))
-		for _, l := range lbs {
-			if !strings.HasPrefix(l.Name, model.MetaLabelPrefix) {
-				relabeledLabels[writeIndex] = l
-				writeIndex++
-			}
-		}
-		i.relabeledLabels = slices.Clip(relabeledLabels[:writeIndex])
+		i.hash = hash
 	}
 }
 
 func (t *Item) Hash() ItemHash {
 	if t.hash == 0 {
-		t.hash = ItemHash(LabelsHashWithJobName(t.relabeledLabels, t.JobName))
+		t.hash = ItemHash(LabelsHashWithJobName(t.Labels, t.JobName))
 	}
 	return t.hash
 }
 
+// HashFromBuilder computes a hash from a labels.Builder, skipping meta labels.
+// This is used during relabeling to compute the hash efficiently without materializing
+// the filtered labels.
+func HashFromBuilder(builder *labels.Builder, jobName string) ItemHash {
+	hash := hasherPool.Get().(*xxhash.Digest)
+	hash.Reset()
+	builder.Range(func(l labels.Label) {
+		// Skip meta labels - they are discarded after relabeling in Prometheus.
+		// For details, see https://github.com/prometheus/prometheus/blob/e6cfa720fbe6280153fab13090a483dbd40bece3/scrape/target.go#L534
+		if strings.HasPrefix(l.Name, model.MetaLabelPrefix) {
+			return
+		}
+		_, _ = hash.WriteString(l.Name)
+		_, _ = hash.Write(seps)
+		_, _ = hash.WriteString(l.Value)
+		_, _ = hash.Write(seps)
+	})
+	_, _ = hash.WriteString(jobName)
+	result := hash.Sum64()
+	hasherPool.Put(hash)
+	return ItemHash(result)
+}
+
 func (t *Item) GetNodeName() string {
 	relevantLabels := t.Labels.MatchLabels(true, relevantLabelNames...)
 	for _, label := range nodeLabels {
@@ -95,14 +119,12 @@ func (t *Item) GetEndpointSliceName() string {
 // NewItem Creates a new target item.
 // INVARIANTS:
 // * Item fields must not be modified after creation.
-func NewItem(jobName string, targetURL string, labels labels.Labels, collectorName string, opts ...ItemOption) *Item {
+func NewItem(jobName string, targetURL string, itemLabels labels.Labels, collectorName string, opts ...ItemOption) *Item {
 	item := &Item{
-		JobName:         jobName,
-		TargetURL:       targetURL,
-		Labels:          labels,
-		// relabeledLabels defaults to original labels if WithRelabeledLabels is not specified.
-		relabeledLabels: labels,
-		CollectorName:   collectorName,
+		JobName:       jobName,
+		TargetURL:     targetURL,
+		Labels:        itemLabels,
+		CollectorName: collectorName,
 	}
 	for _, opt := range opts {
 		opt(item)
@@ -116,10 +138,13 @@ func NewItem(jobName string, targetURL string, labels labels.Labels, collectorNa
 // The scrape manager adds it later. Address is already included in the labels, so it is not needed here.
 func LabelsHashWithJobName(ls labels.Labels, jobName string) uint64 {
 	labelsHash := ls.Hash()
-	labelsHashBytes := make([]byte, 8)
-	_, _ = binary.Encode(labelsHashBytes, binary.LittleEndian, labelsHash) // nolint: errcheck // this can only fail if the buffer size is wrong
-	hash := xxhash.New()
-	_, _ = hash.Write(labelsHashBytes) // nolint: errcheck // xxhash.Write can't fail
-	_, _ = hash.Write([]byte(jobName)) // nolint: errcheck // xxhash.Write can't fail
-	return hash.Sum64()
+	var labelsHashBytes [8]byte
+	binary.LittleEndian.PutUint64(labelsHashBytes[:], labelsHash)
+	hash := hasherPool.Get().(*xxhash.Digest)
+	hash.Reset()
+	_, _ = hash.Write(labelsHashBytes[:]) // nolint: errcheck // xxhash.Write can't fail
+	_, _ = hash.WriteString(jobName)      // nolint: errcheck // xxhash.Write can't fail
+	result := hash.Sum64()
+	hasherPool.Put(hash)
+	return result
 }
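
The pieces above fit together as follows: during relabeling the hash is computed once from the `labels.Builder` via `HashFromBuilder` (which skips `__meta_*` labels), handed to `NewItem` through `WithHash`, and `Item.Hash()` then returns the precomputed value instead of recomputing it. Below is a minimal sketch of that flow as a hypothetical test in the same package (not part of this diff); the label set, target URL, and collector name are made-up illustration values, and it assumes the newer `labels.Builder` API (no-argument `Labels()`, consistent with the `Range` method used here):

```go
// Hypothetical test sketch showing the intended flow of the new hashing API.
package target

import (
	"testing"

	"github.com/prometheus/prometheus/model/labels"
)

func TestPrecomputedHashIsReused(t *testing.T) {
	// Labels as they might look after service discovery; values are illustrative.
	builder := labels.NewBuilder(labels.FromStrings(
		"__meta_kubernetes_pod_name", "web-0", // meta label: skipped by HashFromBuilder
		"instance", "10.0.0.1:8080",
		"job", "web",
	))

	// Compute the hash once, without materializing the filtered label set.
	hash := HashFromBuilder(builder, "web")

	// Pass the precomputed hash so Item.Hash() does not recompute it.
	item := NewItem("web", "http://10.0.0.1:8080/metrics", builder.Labels(), "collector-0",
		WithHash(hash))

	if item.Hash() != hash {
		t.Fatalf("expected Item.Hash() to return the precomputed hash")
	}
}
```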