Skip to content

Commit 6871e76

Browse files
committed
SD-11577: remove some functionality
1 parent 364711e commit 6871e76

File tree

2 files changed

+4
-50
lines changed

2 files changed

+4
-50
lines changed

cloudflare.go

Lines changed: 1 addition & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import (
99
cfaccounts "github.com/cloudflare/cloudflare-go/v4/accounts"
1010
cfload_balancers "github.com/cloudflare/cloudflare-go/v4/load_balancers"
1111
cfpagination "github.com/cloudflare/cloudflare-go/v4/packages/pagination"
12-
cfqueues "github.com/cloudflare/cloudflare-go/v4/queues"
12+
1313
cfrulesets "github.com/cloudflare/cloudflare-go/v4/rulesets"
1414
cfzero_trust "github.com/cloudflare/cloudflare-go/v4/zero_trust"
1515
cfzones "github.com/cloudflare/cloudflare-go/v4/zones"
@@ -1250,36 +1250,6 @@ func fetchWorkerSubrequests(accountID string) (*cloudflareResponseSubrequests, e
12501250
return &resp, nil
12511251
}
12521252

1253-
func fetchQueueNames(accountID string) (map[string]string, error) {
1254-
ctx, cancel := context.WithTimeout(context.Background(), cftimeout)
1255-
defer cancel()
1256-
page := cfclient.Queues.ListAutoPaging(ctx,
1257-
cfqueues.QueueListParams{
1258-
AccountID: cf.F(accountID),
1259-
})
1260-
if page.Err() != nil {
1261-
return nil, page.Err()
1262-
}
1263-
1264-
queueNames := make(map[string]string)
1265-
seenIDs := make(map[string]struct{})
1266-
for page.Next() {
1267-
if page.Err() != nil {
1268-
log.Errorf("error during paging queues: %v", page.Err())
1269-
break
1270-
}
1271-
q := page.Current()
1272-
if _, exists := seenIDs[q.QueueID]; exists {
1273-
log.Errorf("fetchQueueNames: duplicate queue ID detected (%s), breaking loop", q.QueueID)
1274-
break
1275-
}
1276-
seenIDs[q.QueueID] = struct{}{}
1277-
queueNames[q.QueueID] = q.QueueName
1278-
}
1279-
1280-
return queueNames, nil
1281-
}
1282-
12831253
func fetchQueueMetrics(accountID string) (*cloudflareResponseQueues, error) {
12841254
request := graphql.NewRequest(`
12851255
query ($accountID: String!, $mintime: Time!, $maxtime: Time!, $limit: Int!) {

prometheus.go

Lines changed: 3 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -736,14 +736,6 @@ func fetchQueueAnalytics(account cfaccounts.Account, wg *sync.WaitGroup) {
736736
wg.Add(1)
737737
defer wg.Done()
738738

739-
queueNames, err := fetchQueueNames(account.ID)
740-
if err != nil {
741-
log.Error("failed to fetch queue names for account ", account.ID, ": ", err)
742-
return
743-
}
744-
745-
log.Info("fetched queue names")
746-
747739
r, err := fetchQueueMetrics(account.ID)
748740
if err != nil {
749741
log.Error("failed to fetch queue metrics for account ", account.ID, ": ", err)
@@ -755,19 +747,11 @@ func fetchQueueAnalytics(account cfaccounts.Account, wg *sync.WaitGroup) {
755747

756748
for _, a := range r.Viewer.Accounts {
757749
for _, b := range a.QueueBacklogAdaptiveGroups {
758-
queueName := b.Dimensions.QueueID
759-
if name, ok := queueNames[b.Dimensions.QueueID]; ok {
760-
queueName = name
761-
}
762-
queueBacklogMessages.With(prometheus.Labels{"queue_name": queueName, "account": accountName}).Set(b.Avg.Messages)
763-
queueBacklogBytes.With(prometheus.Labels{"queue_name": queueName, "account": accountName}).Set(b.Avg.Bytes)
750+
queueBacklogMessages.With(prometheus.Labels{"queue_name": b.Dimensions.QueueID, "account": accountName}).Set(b.Avg.Messages)
751+
queueBacklogBytes.With(prometheus.Labels{"queue_name": b.Dimensions.QueueID, "account": accountName}).Set(b.Avg.Bytes)
764752
}
765753
for _, c := range a.QueueConsumerMetricsAdaptiveGroups {
766-
queueName := c.Dimensions.QueueID
767-
if name, ok := queueNames[c.Dimensions.QueueID]; ok {
768-
queueName = name
769-
}
770-
queueConsumerConcurrency.With(prometheus.Labels{"queue_name": queueName, "account": accountName}).Set(c.Avg.Concurrency)
754+
queueConsumerConcurrency.With(prometheus.Labels{"queue_name": c.Dimensions.QueueID, "account": accountName}).Set(c.Avg.Concurrency)
771755
}
772756
}
773757
}

0 commit comments

Comments (0)