@@ -87,12 +87,12 @@ func init() {
 // a listener can not unsubscribe as the lock is hold by `alerts.Lock`.
 func TestAlertsSubscribePutStarvation(t *testing.T) {
 	marker := types.NewMarker(prometheus.NewRegistry())
-	alerts, err := NewAlerts(context.Background(), marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), nil)
+	alerts, err := NewAlerts(context.Background(), marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), prometheus.NewRegistry())
 	if err != nil {
 		t.Fatal(err)
 	}
 
-	iterator := alerts.Subscribe()
+	iterator := alerts.Subscribe("test")
 
 	alertsToInsert := []*types.Alert{}
 	// Exhaust alert channel
@@ -142,7 +142,7 @@ func TestDeadLock(t *testing.T) {
 
 	marker := types.NewMarker(prometheus.NewRegistry())
 	// Run gc every 5 milliseconds to increase the possibility of a deadlock with Subscribe()
-	alerts, err := NewAlerts(context.Background(), marker, 5*time.Millisecond, noopCallback{}, promslog.NewNopLogger(), nil)
+	alerts, err := NewAlerts(context.Background(), marker, 5*time.Millisecond, noopCallback{}, promslog.NewNopLogger(), prometheus.NewRegistry())
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -176,7 +176,7 @@ func TestDeadLock(t *testing.T) {
 		for {
 			select {
 			case <-tick.C:
-				alerts.Subscribe()
+				alerts.Subscribe("test")
 			case <-stopAfter:
 				done <- true
 				break
@@ -195,7 +195,7 @@ func TestDeadLock(t *testing.T) {
 
 func TestAlertsPut(t *testing.T) {
 	marker := types.NewMarker(prometheus.NewRegistry())
-	alerts, err := NewAlerts(context.Background(), marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), nil)
+	alerts, err := NewAlerts(context.Background(), marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), prometheus.NewRegistry())
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -223,7 +223,7 @@ func TestAlertsSubscribe(t *testing.T) {
 
 	ctx, cancel := context.WithCancel(context.Background())
 	defer cancel()
-	alerts, err := NewAlerts(ctx, marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), nil)
+	alerts, err := NewAlerts(ctx, marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), prometheus.NewRegistry())
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -250,7 +250,7 @@ func TestAlertsSubscribe(t *testing.T) {
 		go func(i int) {
 			defer wg.Done()
 
-			it := alerts.Subscribe()
+			it := alerts.Subscribe("test")
 			defer it.Close()
 
 			received := make(map[model.Fingerprint]struct{})
@@ -621,3 +621,92 @@ func TestAlertsConcurrently(t *testing.T) {
 	}, 2*expire, expire)
 	require.Equal(t, int32(0), callback.alerts.Load())
 }
+
+func TestSubscriberChannelMetrics(t *testing.T) {
+	marker := types.NewMarker(prometheus.NewRegistry())
+	reg := prometheus.NewRegistry()
+	alerts, err := NewAlerts(context.Background(), marker, 30*time.Minute, noopCallback{}, promslog.NewNopLogger(), reg)
+	require.NoError(t, err)
+
+	subscriberName := "test_subscriber"
+
+	// Subscribe to alerts
+	iterator := alerts.Subscribe(subscriberName)
+	defer iterator.Close()
+
+	// Consume alerts in the background
+	go func() {
+		for range iterator.Next() {
+			// Just drain the channel
+		}
+	}()
+
+	// Helper function to get counter value
+	getCounterValue := func(name, labelName, labelValue string) float64 {
+		metrics, err := reg.Gather()
+		require.NoError(t, err)
+		for _, mf := range metrics {
+			if mf.GetName() == name {
+				for _, m := range mf.GetMetric() {
+					for _, label := range m.GetLabel() {
+						if label.GetName() == labelName && label.GetValue() == labelValue {
+							return m.GetCounter().GetValue()
+						}
+					}
+				}
+			}
+		}
+		return 0
+	}
+
+	// Initially, the counter should be 0
+	writeCount := getCounterValue("alertmanager_alerts_subscriber_channel_writes_total", "subscriber", subscriberName)
+	require.Equal(t, 0.0, writeCount, "subscriberChannelWrites should start at 0")
+
+	// Put some alerts
+	now := time.Now()
+	alertsToSend := []*types.Alert{
+		{
+			Alert: model.Alert{
+				Labels:       model.LabelSet{"test": "1"},
+				Annotations:  model.LabelSet{"foo": "bar"},
+				StartsAt:     now,
+				EndsAt:       now.Add(1 * time.Hour),
+				GeneratorURL: "http://example.com/prometheus",
+			},
+			UpdatedAt: now,
+			Timeout:   false,
+		},
+		{
+			Alert: model.Alert{
+				Labels:       model.LabelSet{"test": "2"},
+				Annotations:  model.LabelSet{"foo": "bar"},
+				StartsAt:     now,
+				EndsAt:       now.Add(1 * time.Hour),
+				GeneratorURL: "http://example.com/prometheus",
+			},
+			UpdatedAt: now,
+			Timeout:   false,
+		},
+		{
+			Alert: model.Alert{
+				Labels:       model.LabelSet{"test": "3"},
+				Annotations:  model.LabelSet{"foo": "bar"},
+				StartsAt:     now,
+				EndsAt:       now.Add(1 * time.Hour),
+				GeneratorURL: "http://example.com/prometheus",
+			},
+			UpdatedAt: now,
+			Timeout:   false,
+		},
+	}
+
+	err = alerts.Put(alertsToSend...)
+	require.NoError(t, err)
+
+	// Verify the counter incremented for each successful write
+	require.Eventually(t, func() bool {
+		writeCount := getCounterValue("alertmanager_alerts_subscriber_channel_writes_total", "subscriber", subscriberName)
+		return writeCount == float64(len(alertsToSend))
+	}, 1*time.Second, 10*time.Millisecond, "subscriberChannelWrites should equal the number of alerts sent")
+}
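
For context, here is a minimal sketch of how a caller outside the test package might exercise what this diff introduces: the registerer argument to NewAlerts and the named Subscribe that labels the alertmanager_alerts_subscriber_channel_writes_total counter. The package path, the nil-callback fallback, and the "dispatcher" subscriber name are assumptions for illustration, taken from this diff rather than a verified public API.

package main

import (
	"context"
	"log"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/common/promslog"

	"github.com/prometheus/alertmanager/provider/mem"
	"github.com/prometheus/alertmanager/types"
)

func main() {
	reg := prometheus.NewRegistry()
	marker := types.NewMarker(prometheus.NewRegistry())

	// Assumption based on this diff: the final argument is a prometheus.Registerer
	// used for the subscriber channel metrics; a nil callback is assumed to fall
	// back to a no-op store callback.
	alerts, err := mem.NewAlerts(context.Background(), marker, 30*time.Minute, nil, promslog.NewNopLogger(), reg)
	if err != nil {
		log.Fatal(err)
	}

	// Assumption based on this diff: Subscribe takes a subscriber name that becomes
	// the "subscriber" label on alertmanager_alerts_subscriber_channel_writes_total.
	it := alerts.Subscribe("dispatcher")
	defer it.Close()

	// Each alert delivered on this channel should increment the per-subscriber counter.
	for a := range it.Next() {
		log.Printf("received alert %s", a.Name())
	}
}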