Skip to content

Commit 05247d0

Browse files
authored
cache k0s and metadata files in S3 bucket (#242)
* initial script to upload k0s binaries to bucket * update path * minimal metadata upload * call as part of release process * upload to infra-staging on pr * chmod +x * welp * logic fix * move export * prod export * proper name? * test uploading k0s binary * override logic * reupload override versions * quotes * ? * quotes * wat * strip quotes * remove debug * use static metadata name in dev and staging * ensure reuploading metadata works * remove old comment
1 parent 14d5178 commit 05247d0

File tree

4 files changed

+119
-0
lines changed

4 files changed

+119
-0
lines changed

.github/workflows/pull-request.yaml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,3 +59,14 @@ jobs:
5959
- name: Build Linux AMD64
6060
run: |
6161
make embedded-cluster-linux-amd64 VERSION=dev-$SHORT_SHA
62+
./output/bin/embedded-cluster version metadata > metadata.json
63+
64+
- name: Cache files
65+
env:
66+
S3_BUCKET: "tf-infra-staging-emb-cluster-bin"
67+
AWS_ACCESS_KEY_ID: ${{ secrets.INFRA_EMBEDDED_CLUSTER_UPLOAD_IAM_KEY_ID }}
68+
AWS_SECRET_ACCESS_KEY: ${{ secrets.INFRA_EMBEDDED_CLUSTER_UPLOAD_IAM_SECRET }}
69+
AWS_REGION: "us-east-1"
70+
run: |
71+
export EC_VERSION="dev-metadata"
72+
./scripts/cache-files.sh

.github/workflows/release-dev.yaml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,15 @@ jobs:
2222
make embedded-cluster-linux-amd64
2323
tar -C output/bin -czvf embedded-cluster-linux-amd64.tgz embedded-cluster
2424
./output/bin/embedded-cluster version metadata > metadata.json
25+
- name: Cache files
26+
env:
27+
S3_BUCKET: "tf-staging-embedded-cluster-bin"
28+
AWS_ACCESS_KEY_ID: ${{ secrets.STAGING_EMBEDDED_CLUSTER_UPLOAD_IAM_KEY_ID }}
29+
AWS_SECRET_ACCESS_KEY: ${{ secrets.STAGING_EMBEDDED_CLUSTER_UPLOAD_IAM_SECRET }}
30+
AWS_REGION: "us-east-1"
31+
run: |
32+
export EC_VERSION="staging-metadata"
33+
./scripts/cache-files.sh
2534
- name: Publish development release
2635
uses: marvinpinto/action-automatic-releases@latest
2736
with:

.github/workflows/release-prod.yaml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,16 @@ jobs:
2424
make embedded-cluster-linux-amd64 VERSION=$TAG_NAME
2525
tar -C output/bin -czvf embedded-cluster-linux-amd64.tgz embedded-cluster
2626
./output/bin/embedded-cluster version metadata > metadata.json
27+
- name: Cache files
28+
29+
env:
30+
S3_BUCKET: "tf-embedded-cluster-binaries"
31+
AWS_ACCESS_KEY_ID: ${{ secrets.PROD_EMBEDDED_CLUSTER_UPLOAD_IAM_KEY_ID }}
32+
AWS_SECRET_ACCESS_KEY: ${{ secrets.PROD_EMBEDDED_CLUSTER_UPLOAD_IAM_SECRET }}
33+
AWS_REGION: "us-east-1"
34+
run: |
35+
export EC_VERSION="$TAG_NAME"
36+
./scripts/cache-files.sh
2737
- name: Publish release
2838
uses: marvinpinto/action-automatic-releases@latest
2939
with:

scripts/cache-files.sh

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
#!/bin/bash

set -eo pipefail

# require NAME VALUE -- assert that the environment variable NAME
# (whose expansion is passed as VALUE) is non-empty; exit 1 otherwise.
# The diagnostic goes to stderr so it is not mixed into captured output.
function require() {
  if [ -z "$2" ]; then
    echo "validation failed: $1 unset" >&2
    exit 1
  fi
}
11+
12+
# All four variables below must be provided by the CI environment before
# anything is uploaded; fail fast (in this order) if any is missing.
for var in AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_REGION S3_BUCKET; do
  require "$var" "${!var}"
done
16+
17+
# retry N CMD [ARGS...] -- run CMD up to N times with exponential backoff
# (1s, 2s, 4s, ...). Returns 0 as soon as CMD succeeds, otherwise the
# exit status of the final failed attempt.
function retry() {
  local retries=$1
  shift

  # 'rc' and 'wait' are local so repeated calls don't leak state into the
  # caller (the original used unscoped 'exit' and 'wait', which clobbered
  # any variables of the same name in the calling shell).
  local count=0
  local rc=0
  local wait=0
  until "$@"; do
    rc=$?
    wait=$((2 ** count))
    count=$((count + 1))
    if [ "$count" -lt "$retries" ]; then
      echo "Retry $count/$retries exited $rc, retrying in $wait seconds..." >&2
      sleep "$wait"
    else
      echo "Retry $count/$retries exited $rc, no more retries left." >&2
      return "$rc"
    fi
  done
  return 0
}
36+
37+
# Ensure the k0s binary for the version pinned in the Makefile exists at
# s3://$S3_BUCKET/k0s-binaries/<version>. Skips the upload when the key
# is already present; downloads from K0S_BINARY_SOURCE_OVERRIDE when the
# Makefile sets one, otherwise from the official k0s GitHub release.
function k0sbin() {
  # first, figure out what version of k0s is in the current build
  local k0s_version=
  k0s_version=$(awk '/^K0S_VERSION/{print $3}' Makefile)
  local k0s_override=
  k0s_override=$(awk '/^K0S_BINARY_SOURCE_OVERRIDE/{gsub("\"", "", $3); print $3}' Makefile)

  # check if the binary already exists in the bucket; head-object exits
  # non-zero (and prints nothing) when the key is absent, so '|| true'
  # keeps 'set -e' from killing the script on a missing key
  local k0s_binary_exists=
  k0s_binary_exists=$(aws s3api head-object --bucket "${S3_BUCKET}" --key "k0s-binaries/${k0s_version}" || true)

  # if the binary already exists, we don't need to upload it again
  if [ -n "${k0s_binary_exists}" ]; then
    echo "k0s binary ${k0s_version} already exists in bucket ${S3_BUCKET}, skipping upload"
    return 0
  fi

  # download the binary; --fail makes curl return an error on HTTP 4xx/5xx
  # instead of saving the server's error page, which would otherwise be
  # uploaded to the bucket as if it were the binary
  if [ -n "${k0s_override}" ]; then
    # the override is set, so the binary comes from another source
    echo "K0S_BINARY_SOURCE_OVERRIDE is set to '${k0s_override}', using that source"
    curl --fail -L -o "${k0s_version}" "${k0s_override}"
  else
    # download the k0s binary from official sources
    curl --fail -L -o "${k0s_version}" "https://github.com/k0sproject/k0s/releases/download/${k0s_version}/k0s-${k0s_version}-amd64"
  fi

  # upload the binary to the bucket
  retry 3 aws s3 cp "${k0s_version}" "s3://${S3_BUCKET}/k0s-binaries/${k0s_version}"
}
66+
67+
# Upload the build's metadata.json to s3://$S3_BUCKET/metadata/$EC_VERSION.json.
# A no-op (with a log line) when EC_VERSION is unset or metadata.json is
# missing from the working directory.
function metadata() {
  # without a version we have nowhere sensible to put the file
  if [ -z "${EC_VERSION}" ]; then
    echo "EC_VERSION unset, not uploading metadata.json"
    return 0
  fi

  # nothing to upload if the build didn't produce the file
  if [ ! -f metadata.json ]; then
    echo "metadata.json not found, skipping upload"
    return 0
  fi

  retry 3 aws s3 cp metadata.json "s3://${S3_BUCKET}/metadata/${EC_VERSION}.json"
}
82+
83+
# there are two files to be uploaded for each release - the k0s binary and the metadata file
main() {
  k0sbin
  metadata
}

main "$@"

0 commit comments

Comments
 (0)