Commit f1bed66

S3 cache (#2111)
* s3 cache
1 parent: c4e2700

File tree

4 files changed: +185 -6

action.yml

Lines changed: 63 additions & 6 deletions
@@ -211,6 +211,19 @@ inputs:
     description: "allows overriding of the terraform cache dir which defaults to ${github.workspace}/cache"
     required: false
     default: ''
+  cache-dependencies-s3:
+    description: "Use S3 for caching terraform/terragrunt dependencies"
+    required: false
+    default: 'false'
+  cache-dependencies-s3-bucket:
+    description: "S3 bucket URL for caching (e.g., s3://mybucket/cache)"
+    required: false
+    default: ''
+  cache-dependencies-s3-region:
+    description: "AWS region for S3 cache bucket"
+    required: false
+    default: 'us-east-1'
+
 
   digger-spec:
     description: "(orchestrator only) the spec to pass onto digger cli"
@@ -316,6 +329,24 @@ runs:
             digger-cache
         if: ${{ inputs.cache-dependencies == 'true' }}
 
+      - name: restore-s3-cache
+        shell: bash
+        run: |
+          BUCKET=$(echo "${{ inputs.cache-dependencies-s3-bucket }}" | sed 's|^s3://||')
+          REGION="${{ inputs.cache-dependencies-s3-region }}"
+
+          SCRIPT_PATH="${{ github.action_path }}/scripts/s3-cache-download.bash"
+          if [ ! -f "$SCRIPT_PATH" ]; then
+            echo "::error::S3 cache download script not found at $SCRIPT_PATH"
+            echo "Please make sure the script exists and is properly installed."
+            exit 1
+          fi
+
+          chmod +x "$SCRIPT_PATH"
+          "$SCRIPT_PATH" "$BUCKET" "$REGION" "$TF_PLUGIN_CACHE_DIR"
+        if: ${{ inputs.cache-dependencies-s3 == 'true' }}
+
+      # Then terraform setup happens...
       - name: Setup Terraform
         uses: hashicorp/setup-terraform@v3
         with:
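
One small detail worth noting in the restore step: the `sed` pipeline just normalizes the bucket input so callers can pass either a bare bucket path or a full `s3://` URL:

```bash
# Strips a leading s3:// scheme if present; otherwise leaves the value alone.
echo "s3://mybucket/cache" | sed 's|^s3://||'   # -> mybucket/cache
echo "mybucket/cache" | sed 's|^s3://||'        # -> mybucket/cache
```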
@@ -429,8 +460,11 @@ runs:
 
           NO_BACKEND: ${{ inputs.no-backend }}
           DEBUG: 'true'
-          TG_PROVIDER_CACHE: ${{ inputs.cache-dependencies == 'true' && 1 || 0 }}
-          TERRAGRUNT_PROVIDER_CACHE: ${{ inputs.cache-dependencies == 'true' && 1 || 0 }}
+          TG_PROVIDER_CACHE: ${{ (inputs.cache-dependencies == 'true' || inputs.cache-dependencies-s3 == 'true') && 1 || 0 }}
+          TERRAGRUNT_PROVIDER_CACHE: ${{ (inputs.cache-dependencies == 'true' || inputs.cache-dependencies-s3 == 'true') && 1 || 0 }}
+          TF_PLUGIN_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
+          TG_PROVIDER_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
+          TERRAGRUNT_PROVIDER_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
           DIGGER_RUN_SPEC: ${{inputs.digger-spec}}
         run: |
           if [[ ${{ inputs.ee }} == "true" ]]; then
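
The `&& 1 || 0` construct (here and in the two hunks below) is the standard GitHub-expressions substitute for a ternary, since the expression syntax has no conditional operator; it works because `1` is truthy:

```yaml
# cond && a || b yields a when cond is true (and a is truthy), else b.
# "my-flag" is a hypothetical input used only for illustration.
env:
  FLAG: ${{ inputs.my-flag == 'true' && 1 || 0 }}
```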
@@ -476,8 +510,11 @@ runs:
           INPUT_DRIFT_DETECTION_SLACK_NOTIFICATION_URL: ${{ inputs.drift-detection-slack-notification-url }}
           INPUT_DRIFT_DETECTION_ADVANCED_SLACK_NOTIFICATION_URL: ${{ inputs.drift-detection-advanced-slack-notification-url }}
           NO_BACKEND: ${{ inputs.no-backend }}
-          TG_PROVIDER_CACHE: ${{ inputs.cache-dependencies == 'true' && 1 || 0 }}
-          TERRAGRUNT_PROVIDER_CACHE: ${{ inputs.cache-dependencies == 'true' && 1 || 0 }}
+          TG_PROVIDER_CACHE: ${{ (inputs.cache-dependencies == 'true' || inputs.cache-dependencies-s3 == 'true') && 1 || 0 }}
+          TERRAGRUNT_PROVIDER_CACHE: ${{ (inputs.cache-dependencies == 'true' || inputs.cache-dependencies-s3 == 'true') && 1 || 0 }}
+          TF_PLUGIN_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
+          TG_PROVIDER_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
+          TERRAGRUNT_PROVIDER_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
           DIGGER_RUN_SPEC: ${{inputs.digger-spec}}
         id: digger
         shell: bash
@@ -524,8 +561,11 @@ runs:
           INPUT_DRIFT_DETECTION_SLACK_NOTIFICATION_URL: ${{ inputs.drift-detection-slack-notification-url }}
           INPUT_DRIFT_DETECTION_ADVANCED_SLACK_NOTIFICATION_URL: ${{ inputs.drift-detection-advanced-slack-notification-url }}
           NO_BACKEND: ${{ inputs.no-backend }}
-          TG_PROVIDER_CACHE: ${{ inputs.cache-dependencies == 'true' && 1 || 0 }}
-          TERRAGRUNT_PROVIDER_CACHE: ${{ inputs.cache-dependencies == 'true' && 1 || 0 }}
+          TG_PROVIDER_CACHE: ${{ (inputs.cache-dependencies == 'true' || inputs.cache-dependencies-s3 == 'true') && 1 || 0 }}
+          TERRAGRUNT_PROVIDER_CACHE: ${{ (inputs.cache-dependencies == 'true' || inputs.cache-dependencies-s3 == 'true') && 1 || 0 }}
+          TF_PLUGIN_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
+          TG_PROVIDER_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
+          TERRAGRUNT_PROVIDER_CACHE_DIR: ${{ env.TF_PLUGIN_CACHE_DIR }}
           DIGGER_RUN_SPEC: ${{inputs.digger-spec}}
         id: digger-local-run
         shell: bash
@@ -567,6 +607,23 @@ runs:
           path: ${{ env.TF_PLUGIN_CACHE_DIR }}
           key: digger-cache-${{ hashFiles('**/cache') }}
 
+      - name: save-s3-cache
+        shell: bash
+        run: |
+          BUCKET=$(echo "${{ inputs.cache-dependencies-s3-bucket }}" | sed 's|^s3://||')
+          REGION="${{ inputs.cache-dependencies-s3-region }}"
+
+          SCRIPT_PATH="${{ github.action_path }}/scripts/s3-cache-upload.bash"
+          if [ ! -f "$SCRIPT_PATH" ]; then
+            echo "::error::S3 cache upload script not found at $SCRIPT_PATH"
+            echo "Please make sure the script exists and is properly installed."
+            exit 1
+          fi
+
+          chmod +x "$SCRIPT_PATH"
+          "$SCRIPT_PATH" "$BUCKET" "$REGION" "$TF_PLUGIN_CACHE_DIR"
+        if: ${{ always() && inputs.cache-dependencies-s3 == 'true' }}
+
 branding:
   icon: globe
   color: purple

docs/ce/howto/caching-strategies.mdx

Lines changed: 11 additions & 0 deletions
@@ -21,6 +21,17 @@ The hashing of directories ensures that the cache is only saved if the contents
 cache is immutable so new caches will be new entries in the cache. Digger currently does not prune older caches so you may need
 to clean up older caches using a certain cycle.
 
+## S3 caching
+
+You can cache in AWS S3 rather than GitHub artifacts. This requires a bucket for caching and settings similar to the following:
+
+```
+- uses: diggerhq/digger@vLatest
+  with:
+    cache-dependencies-s3: true
+    cache-dependencies-s3-bucket: s3://terraform-cache-1756322349465/cache
+    cache-dependencies-s3-region: us-east-1
+```
+
 ## Self-hosted runners and volumes caching
 
 If you are using self-hosted runners with github it might be overkill on your bandwidth to load and restore from cache. In this case it might be better to mount
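
One thing the new docs section leaves implicit: because both steps shell out to `aws s3 sync`, the job's AWS identity needs list/read/write access on the cache bucket. A hedged sketch of granting that, assuming a hypothetical role and bucket (names are placeholders, and attaching the policy this way is just one option):

```bash
# Hypothetical: grant the runner's role read/write on the cache bucket.
aws iam put-role-policy \
  --role-name digger-cache \
  --policy-name digger-s3-cache \
  --policy-document '{
    "Version": "2012-10-17",
    "Statement": [
      {"Effect": "Allow", "Action": "s3:ListBucket",
       "Resource": "arn:aws:s3:::terraform-cache-example"},
      {"Effect": "Allow", "Action": ["s3:GetObject", "s3:PutObject"],
       "Resource": "arn:aws:s3:::terraform-cache-example/*"}
    ]
  }'
```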

scripts/s3-cache-download.bash

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+#!/bin/bash
+set -e
+
+# S3 Cache Download Script
+# Downloads terraform/terragrunt provider cache from S3
+
+BUCKET="$1"
+REGION="$2"
+CACHE_DIR="$3"
+
+# Validation
+if [ -z "$BUCKET" ]; then
+  echo "Error: S3 bucket name is required"
+  exit 1
+fi
+
+if [ -z "$REGION" ]; then
+  echo "Error: AWS region is required"
+  exit 1
+fi
+
+if [ -z "$CACHE_DIR" ]; then
+  echo "Error: Cache directory path is required"
+  exit 1
+fi
+
+# Ensure cache directory exists
+mkdir -p "$CACHE_DIR"
+echo "Ensuring cache directory exists: $CACHE_DIR"
+
+# Download cache from S3
+echo "Restoring cache from S3 bucket: $BUCKET (region: $REGION)"
+if aws s3 sync "s3://$BUCKET" "$CACHE_DIR" --region "$REGION" --quiet 2>/dev/null; then
+  CACHED_FILES=$(find "$CACHE_DIR" -type f 2>/dev/null | wc -l)
+  echo "Cache restored successfully ($CACHED_FILES files)"
+
+  if [ "$CACHED_FILES" -gt 0 ]; then
+    echo "Sample cached artifacts:"
+    find "$CACHE_DIR" -type f 2>/dev/null | head -3
+  fi
+else
+  echo "No existing cache found or failed to restore (this is normal for first run)"
+fi
+
+echo "Cache download completed"

scripts/s3-cache-upload.bash

Lines changed: 66 additions & 0 deletions
@@ -0,0 +1,66 @@
+#!/bin/bash
+set -e
+
+# S3 Cache Upload Script
+# Uploads terraform/terragrunt provider cache to S3
+
+BUCKET="$1"
+REGION="$2"
+CACHE_DIR="$3"
+
+# Validation
+if [ -z "$BUCKET" ]; then
+  echo "Error: S3 bucket name is required"
+  exit 1
+fi
+
+if [ -z "$REGION" ]; then
+  echo "Error: AWS region is required"
+  exit 1
+fi
+
+if [ -z "$CACHE_DIR" ]; then
+  echo "Error: Cache directory path is required"
+  exit 1
+fi
+
+# Check if cache directory exists and has files
+if [ ! -d "$CACHE_DIR" ]; then
+  echo "No cache directory found at: $CACHE_DIR"
+  echo "Nothing to upload"
+  exit 0
+fi
+
+ARTIFACT_COUNT=$(find "$CACHE_DIR" -type f 2>/dev/null | wc -l)
+
+if [ "$ARTIFACT_COUNT" -eq 0 ]; then
+  echo "No files to cache - cache directory is empty"
+  echo "Nothing to upload"
+  exit 0
+fi
+
+# Check if AWS CLI is available and credentials are configured
+if ! command -v aws &> /dev/null; then
+  echo "Error: AWS CLI is not installed or not in PATH"
+  exit 1
+fi
+
+# Verify AWS credentials are available
+if ! aws sts get-caller-identity --region "$REGION" &> /dev/null; then
+  echo "Error: AWS credentials are not properly configured or are invalid"
+  echo "Please ensure AWS credentials are set up correctly before running this script"
+  exit 1
+fi
+
+# Upload cache to S3
+echo "Saving cache to S3 bucket: $BUCKET (region: $REGION)"
+echo "Uploading $ARTIFACT_COUNT files"
+
+if aws s3 sync "$CACHE_DIR" "s3://$BUCKET" --region "$REGION" --quiet 2>/dev/null; then
+  echo "Cache saved successfully"
+else
+  echo "Warning: Failed to save cache (this won't fail the build)"
+  exit 0 # Don't fail the build on cache upload failure
+fi
+
+echo "Cache upload completed"
