Skip to content

Commit a1a4811

Browse files
authored
Merge pull request #71 from sc0rp10/main
#21 implement the result cache
2 parents ba020ba + 8473ed9 commit a1a4811

File tree

7 files changed

+106
-15
lines changed

7 files changed

+106
-15
lines changed

README.md

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -82,16 +82,17 @@ Configuration specification in JSON and plain is supported both in command line
8282

8383
Configuration of targets can be done via docker and via prometheus
8484

85-
| Flag | Variable | Description | Default | Required |
86-
|------------------|----------------------|-----------------------------------------------|--------------------------------------------------|----------|
87-
| -api-key | PAGESPEED_API_KEY | sets the google API key used for pagespeed | | False |
88-
| -targets | PAGESPEED_TARGETS | comma separated list of targets to measure | | False |
89-
| -categories | PAGESPEED_CATEGORIES | comma separated list of categories to check | accessibility,best-practices,performance,pwa,seo | False |
90-
| -t | NONE | multi-value target array (check docker comp) | | False |
91-
| -listener | PAGESPEED_LISTENER | sets the listener address for the exporters | :9271 | False |
92-
| -parallel | PAGESPEED_PARALLEL | sets the execution of targets to be parallel | false | False |
93-
| -pushGatewayUrl | PUSHGATEWAY_URL | sets the pushgateway url to send the metrics | | False |
94-
| -pushGatewayJob | PUSHGATEWAY_JOB | sets the pushgateway job name | pagespeed_exporter | False |
85+
| Flag | Variable | Description | Default | Required |
86+
|------------------|----------------------|-----------------------------------------------|----------------------------------------------------------------------|----------|
87+
| -api-key | PAGESPEED_API_KEY | sets the google API key used for pagespeed | | False |
88+
| -targets | PAGESPEED_TARGETS | comma separated list of targets to measure | | False |
89+
| -categories | PAGESPEED_CATEGORIES | comma separated list of categories to check | accessibility,best-practices,performance,pwa,seo | False |
90+
| -t | NONE | multi-value target array (check docker comp) | | False |
91+
| -listener | PAGESPEED_LISTENER | sets the listener address for the exporters | :9271 | False |
92+
| -parallel | PAGESPEED_PARALLEL | sets the execution of targets to be parallel | false | False |
93+
| -pushGatewayUrl | PUSHGATEWAY_URL | sets the pushgateway url to send the metrics | | False |
94+
| -pushGatewayJob | PUSHGATEWAY_JOB | sets the pushgateway job name | pagespeed_exporter | False |
95+
| -cache-ttl       | CACHE_TTL            | cache TTL for API results (e.g. 60s, 5m); cache is disabled if unset or not a valid duration | | False |
9596

9697
Note: google api key is required only if scraping more than 2 targets/second
9798

collector/cache.go

Lines changed: 63 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
package collector
2+
3+
import (
	"crypto/sha256"
	"encoding/json"
	"sync"
	"time"
)
9+
10+
// cacheEntry is a single cached scrape result together with its expiry
// deadline. Expired entries are evicted lazily when looked up via get.
type cacheEntry struct {
	// Result is the cached scrape result. The pointer stored by set is the
	// same one handed back by get, so callers share it and must not mutate it.
	Result *ScrapeResult
	// ExpiresAt is the instant after which this entry is considered stale.
	ExpiresAt time.Time
}
14+
15+
// scrapeCache is a mutex-guarded, in-memory TTL cache of scrape results,
// keyed by the hash produced by cacheKeyFromRequest. A nil *scrapeCache is
// valid and behaves as a disabled cache: get always misses and set is a
// no-op (see newScrapeCache, which returns nil for a non-positive TTL).
type scrapeCache struct {
	// entries maps cache keys to their results; guarded by mutex.
	entries map[string]cacheEntry
	// mutex guards entries against concurrent access.
	mutex sync.Mutex
	// ttl is the lifetime applied to every entry when it is stored by set.
	ttl time.Duration
}
20+
21+
func newScrapeCache(ttl time.Duration) *scrapeCache {
22+
if ttl <= 0 {
23+
return nil
24+
}
25+
return &scrapeCache{
26+
entries: make(map[string]cacheEntry),
27+
ttl: ttl,
28+
}
29+
}
30+
31+
func (c *scrapeCache) get(key string) (*ScrapeResult, bool) {
32+
if c == nil {
33+
return nil, false
34+
}
35+
c.mutex.Lock()
36+
defer c.mutex.Unlock()
37+
entry, ok := c.entries[key]
38+
if !ok || time.Now().After(entry.ExpiresAt) {
39+
if ok {
40+
delete(c.entries, key)
41+
}
42+
return nil, false
43+
}
44+
return entry.Result, true
45+
}
46+
47+
func (c *scrapeCache) set(key string, result *ScrapeResult) {
48+
if c == nil {
49+
return
50+
}
51+
c.mutex.Lock()
52+
defer c.mutex.Unlock()
53+
c.entries[key] = cacheEntry{
54+
Result: result,
55+
ExpiresAt: time.Now().Add(c.ttl),
56+
}
57+
}
58+
59+
func cacheKeyFromRequest(req ScrapeRequest) string {
60+
b, _ := json.Marshal(req)
61+
h := sha256.Sum256(b)
62+
return string(h[:])
63+
}

collector/collector.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ func newCollector(config Config) (coll prometheus.Collector, err error) {
7070
options = append(options, option.WithCredentialsFile(config.CredentialsFile))
7171
}
7272

73-
svc, err := newPagespeedScrapeService(config.ScrapeTimeout, options...)
73+
svc, err := newPagespeedScrapeService(config.ScrapeTimeout, config.CacheTTL, options...)
7474
if err != nil {
7575
return nil, err
7676
}

collector/model.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ type Config struct {
7878
CredentialsFile string
7979
Parallel bool
8080
ScrapeTimeout time.Duration
81+
CacheTTL time.Duration // cache duration, 0 disables cache
8182
}
8283

8384
func CalculateScrapeRequests(targets, categories []string) []ScrapeRequest {

collector/scrape.go

Lines changed: 15 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ type scrapeService interface {
2323

2424
// newPagespeedScrapeService creates a new HTTP client service for pagespeed.
2525
// If the client timeout is set to 0 there will be no timeout
26-
func newPagespeedScrapeService(clientTimeout time.Duration, options ...option.ClientOption) (scrapeService, error) {
26+
func newPagespeedScrapeService(clientTimeout time.Duration, cacheTTL time.Duration, options ...option.ClientOption) (scrapeService, error) {
2727
transport, err := googlehttp.NewTransport(context.Background(), http.DefaultTransport, options...)
2828
if err != nil {
2929
return nil, err
@@ -40,12 +40,14 @@ func newPagespeedScrapeService(clientTimeout time.Duration, options ...option.Cl
4040
return &pagespeedScrapeService{
4141
scrapeClient: client,
4242
options: options,
43+
cache: newScrapeCache(cacheTTL),
4344
}, nil
4445
}
4546

4647
type pagespeedScrapeService struct {
4748
scrapeClient *http.Client
4849
options []option.ClientOption
50+
cache *scrapeCache
4951
}
5052

5153
func (pss *pagespeedScrapeService) Scrape(parallel bool, requests []ScrapeRequest) (scrapes []*ScrapeResult, err error) {
@@ -99,6 +101,12 @@ func (pss *pagespeedScrapeService) Scrape(parallel bool, requests []ScrapeReques
99101
}
100102

101103
func (pss pagespeedScrapeService) scrape(request ScrapeRequest) (scrape *ScrapeResult, err error) {
104+
cacheKey := cacheKeyFromRequest(request)
105+
if pss.cache != nil {
106+
if cached, ok := pss.cache.get(cacheKey); ok {
107+
return cached, nil
108+
}
109+
}
102110
opts := []option.ClientOption{
103111
option.WithHTTPClient(pss.scrapeClient),
104112
}
@@ -134,8 +142,12 @@ func (pss pagespeedScrapeService) scrape(request ScrapeRequest) (scrape *ScrapeR
134142
return nil, errResult
135143
}
136144

137-
return &ScrapeResult{
145+
scrapeResult := &ScrapeResult{
138146
Request: request,
139147
Result: result,
140-
}, nil
148+
}
149+
if pss.cache != nil {
150+
pss.cache.set(cacheKey, scrapeResult)
151+
}
152+
return scrapeResult, nil
141153
}

collector/scrape_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ func Test_PagespeedScrapeService(t *testing.T) {
3232
t.Skip("skipping testing unless API key or credentials file is set")
3333
}
3434

35-
service, err := newPagespeedScrapeService(30*time.Second, options...)
35+
service, err := newPagespeedScrapeService(30*time.Second, 0, options...) // cache disabled for test
3636
if err != nil {
3737
t.Fatalf("newPagespeedScrapeService should not throw an error: %v", err)
3838
}

pagespeed_exporter.go

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import (
55
"net/http"
66
"os"
77
"strings"
8+
"time"
89

910
"github.com/foomo/pagespeed_exporter/collector"
1011
"github.com/foomo/pagespeed_exporter/handler"
@@ -24,6 +25,7 @@ var (
2425
parallel bool
2526
pushGatewayUrl string
2627
pushGatewayJob string
28+
cacheTTL string // as duration string, e.g. "60s"
2729
)
2830

2931
type arrayFlags []string
@@ -47,11 +49,22 @@ func main() {
4749
if len(targets) > 0 {
4850
requests := collector.CalculateScrapeRequests(targets, categories)
4951

52+
var parsedCacheTTL time.Duration
53+
if cacheTTL != "" {
54+
var err error
55+
parsedCacheTTL, err = time.ParseDuration(cacheTTL)
56+
if err != nil {
57+
log.WithError(err).Warn("invalid CACHE_TTL, disabling cache")
58+
parsedCacheTTL = 0
59+
}
60+
}
61+
5062
psc, errCollector := collectorFactory.Create(collector.Config{
5163
ScrapeRequests: requests,
5264
GoogleAPIKey: googleApiKey,
5365
CredentialsFile: credentialsFile,
5466
Parallel: parallel,
67+
CacheTTL: parsedCacheTTL,
5568
})
5669
if errCollector != nil {
5770
log.WithError(errCollector).Fatal("could not instantiate collector")
@@ -73,6 +86,7 @@ func main() {
7386
}
7487

7588
func parseFlags() {
89+
flag.StringVar(&cacheTTL, "cache-ttl", getenv("CACHE_TTL", ""), "cache TTL for API results, e.g. 60s. If empty, disables cache")
7690
flag.StringVar(&googleApiKey, "api-key", getenv("PAGESPEED_API_KEY", ""), "sets the google API key used for pagespeed")
7791
flag.StringVar(&credentialsFile, "credentials-file", getenv("PAGESPEED_CREDENTIALS_FILE", ""), "sets the location of the credentials file used for pagespeed")
7892
flag.StringVar(&listenerAddress, "listener", getenv("PAGESPEED_LISTENER", ":9271"), "sets the listener address for the exporters")

0 commit comments

Comments
 (0)