Commit e69faa0

new metric for length of time supervisor takes uploading snapshots to s3 (#119)
1 parent ff9a996 commit e69faa0

File tree

1 file changed: +14 -1 lines changed

pkg/supervisor/archived_snapshot.go

Lines changed: 14 additions & 1 deletion
@@ -7,13 +7,15 @@ import (
 	"net/url"
 	"os"
 	"strings"
+	"time"

 	"github.com/aws/aws-sdk-go/aws/session"
 	"github.com/aws/aws-sdk-go/service/s3/s3manager"
 	"github.com/pkg/errors"
-	"github.com/segmentio/ctlstore/pkg/utils"
 	"github.com/segmentio/events/v2"
 	"github.com/segmentio/stats/v4"
+
+	"github.com/segmentio/ctlstore/pkg/utils"
 )

 type archivedSnapshot interface {
@@ -76,9 +78,13 @@ func (c *s3Snapshot) Upload(ctx context.Context, path string) error {
 		reader = gpr
 	}
 	events.Log("Uploading %{file}s (%d bytes) to %{bucket}s/%{key}s", path, size, c.Bucket, key)
+
+	start := time.Now()
 	if err = c.sendToS3(ctx, key, c.Bucket, reader); err != nil {
 		return errors.Wrap(err, "send to s3")
 	}
+	stats.Observe("ldb-upload-time", time.Since(start), stats.T("compressed", isCompressed(gpr)))
+
 	events.Log("Successfully uploaded %{file}s to %{bucket}s/%{key}s", path, c.Bucket, key)
 	if gpr != nil {
 		stats.Set("ldb-size-bytes-compressed", gpr.bytesRead)
@@ -91,6 +97,13 @@ func (c *s3Snapshot) Upload(ctx context.Context, path string) error {
 	return nil
 }

+func isCompressed(gpr *gzipCompressionReader) string {
+	if gpr == nil {
+		return "false"
+	}
+	return "true"
+}
+
 func (c *s3Snapshot) sendToS3(ctx context.Context, key string, bucket string, body io.Reader) error {
 	if c.sendToS3Func != nil {
 		return c.sendToS3Func(ctx, key, bucket, body)
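
For context, below is a minimal, self-contained sketch (not part of this commit) of the timing pattern the diff introduces: record how long an operation takes, then report the elapsed duration through github.com/segmentio/stats/v4 with a tag. The metric name "example-upload-time" and the upload function are hypothetical stand-ins, not names from the ctlstore codebase.

```go
package main

import (
	"time"

	"github.com/segmentio/stats/v4"
)

// upload simulates the work being timed; the real supervisor sends the
// snapshot to S3 instead.
func upload() error {
	time.Sleep(50 * time.Millisecond)
	return nil
}

func main() {
	start := time.Now()
	if err := upload(); err != nil {
		return
	}
	// Observe records the elapsed duration as a measurement; the tag lets
	// dashboards split timings by dimension (here, whether the payload was
	// compressed).
	stats.Observe("example-upload-time", time.Since(start), stats.T("compressed", "false"))
	// Flush any buffered measurements before exiting.
	stats.Flush()
}
```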
