Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
55bf4ea
feat: workflow for s3 added
serdarozerr Nov 5, 2025
88d7f9f
fix: repo root added
serdarozerr Nov 5, 2025
90d2450
fix: build from current repo
serdarozerr Nov 6, 2025
39bfa69
fix: path changed
serdarozerr Nov 6, 2025
0b76cbb
fix: AWS_ROLE_ARN is used
serdarozerr Nov 6, 2025
6af21a3
feat: each parallel test converted to steps
serdarozerr Nov 6, 2025
d25af46
fix: role_arn added
serdarozerr Nov 6, 2025
4629501
fix: s3 link updated
serdarozerr Nov 6, 2025
5c9b7a9
fix: unnecessary base64 removed
serdarozerr Nov 6, 2025
6ddff76
fix: base64 is needed actually
serdarozerr Nov 6, 2025
ccdf19f
feat: bucket with new name created
serdarozerr Nov 7, 2025
0f21850
feat: testing public read integration
serdarozerr Nov 7, 2025
a89bd49
fix: role arn added
serdarozerr Nov 7, 2025
7375ccb
fix: role arn with empty string
serdarozerr Nov 7, 2025
01c8759
feat: test frankfurt integration
serdarozerr Nov 7, 2025
e48f65a
fix: s3 endpoint changed with region
serdarozerr Nov 7, 2025
d600bed
fix: AWS V4 ONLY REGION tests are removed
serdarozerr Nov 7, 2025
d17d57a
fix: remove unnecessary role_arn
serdarozerr Nov 7, 2025
2349ffa
feat: action for setup, run and teardown added
serdarozerr Nov 10, 2025
f20fffe
feat: run only s3 compatible tests
serdarozerr Nov 13, 2025
3301028
fix: file location fixed
serdarozerr Nov 13, 2025
1cd96e8
fix: unbound var release_dir fixed
serdarozerr Nov 13, 2025
ac988b6
fix: ginkgo used directly
serdarozerr Nov 13, 2025
0349845
fix: https protocol added
serdarozerr Nov 13, 2025
c1214ed
fix: bucket name added as env var
serdarozerr Nov 13, 2025
4bf9ce5
fix: s3-compatible-integration job needs deep dive investigation not…
serdarozerr Nov 13, 2025
7312ee6
feat: path for pr added
serdarozerr Nov 17, 2025
d4969a5
fix: bucket name reverted
serdarozerr Nov 17, 2025
160eb6a
fix: environment names are changed
serdarozerr Nov 18, 2025
cb8ccf6
fix: environment tag in workflow is removed
serdarozerr Nov 20, 2025
9184adb
docs: paths are changed based on new repo
serdarozerr Nov 21, 2025
5c79e0d
feat: push main and manual trigger are added
serdarozerr Nov 21, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 61 additions & 0 deletions .github/actions/s3-integration-run/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
name: Run AWS S3 Integration Tests
description: Runs integration tests against AWS infrastructure.

inputs:
  access_key_id:
    description: 'AWS Access Key ID'
    required: true
  secret_access_key:
    description: 'AWS Secret Access Key'
    required: true
  region_name:
    description: 'AWS Region Name'
    required: true
  stack_name:
    description: 'CloudFormation Stack Name (required for IAM tests)'
    required: true
  test_type:
    description: 'Type of test to run (e.g., aws, aws-iam, aws-assume)'
    required: true
  focus_regex:
    description: 'Ginkgo Focus Regex for tests to run'
    required: false
  s3_endpoint_host:
    description: 'Custom S3 Endpoint Host'
    required: false
  role_arn:
    description: 'AWS Role ARN to test assume role functionality'
    required: false
    default: ''

runs:
  using: 'composite'
  steps:
    - name: Run AWS S3 Integration Tests
      shell: bash
      # Inputs are passed through `env` instead of being interpolated with
      # `${{ }}` directly into the script body, so special characters in an
      # input value cannot inject shell syntax into the step.
      env:
        INPUT_ACCESS_KEY_ID: ${{ inputs.access_key_id }}
        INPUT_SECRET_ACCESS_KEY: ${{ inputs.secret_access_key }}
        INPUT_REGION_NAME: ${{ inputs.region_name }}
        INPUT_STACK_NAME: ${{ inputs.stack_name }}
        INPUT_TEST_TYPE: ${{ inputs.test_type }}
        INPUT_FOCUS_REGEX: ${{ inputs.focus_regex }}
        INPUT_S3_ENDPOINT_HOST: ${{ inputs.s3_endpoint_host }}
        INPUT_ROLE_ARN: ${{ inputs.role_arn }}
      run: |
        set -e
        # Common inputs consumed by every test script.
        export access_key_id="${INPUT_ACCESS_KEY_ID}"
        export secret_access_key="${INPUT_SECRET_ACCESS_KEY}"
        export region_name="${INPUT_REGION_NAME}"
        export stack_name="${INPUT_STACK_NAME}"

        # Dispatch on test_type; each branch exports only the extra
        # variables its script expects before invoking it.
        case "${INPUT_TEST_TYPE}" in
          aws)
            export role_arn="${INPUT_ROLE_ARN}"
            export s3_endpoint_host="${INPUT_S3_ENDPOINT_HOST}"
            export focus_regex="${INPUT_FOCUS_REGEX}"
            echo "Running standard AWS integration tests..."
            ./.github/scripts/s3/run-integration-aws.sh
            ;;
          aws-iam)
            echo "Running AWS IAM role tests..."
            ./.github/scripts/s3/run-integration-aws-iam.sh
            ;;
          aws-assume)
            export assume_role_arn="${INPUT_ROLE_ARN}"
            export focus_regex="${INPUT_FOCUS_REGEX}"
            echo "Running AWS assume role tests..."
            ./.github/scripts/s3/run-integration-aws-assume.sh
            ;;
          *)
            echo "Error: Unknown test_type '${INPUT_TEST_TYPE}'"
            echo "Valid options are: aws, aws-iam, aws-assume"
            exit 1
            ;;
        esac
34 changes: 34 additions & 0 deletions .github/actions/s3-integration-setup/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
name: Set up AWS S3 Integration Infrastructure
description: Sets up AWS S3 Integration Infrastructure for testing purposes.

inputs:
  access_key_id:
    description: 'AWS Access Key ID'
    required: true
  secret_access_key:
    description: 'AWS Secret Access Key'
    required: true
  region_name:
    description: 'AWS Region Name'
    required: true
  stack_name:
    description: 'CloudFormation Stack Name'
    required: true
  role_arn:
    description: 'AWS Role ARN'
    required: false
    default: ''

runs:
  using: 'composite'
  steps:
    - name: Set up AWS Infrastructure
      shell: bash
      # Inputs are passed through `env` instead of being interpolated with
      # `${{ }}` directly into the script body, so special characters in an
      # input value cannot inject shell syntax into the step.
      env:
        INPUT_ACCESS_KEY_ID: ${{ inputs.access_key_id }}
        INPUT_SECRET_ACCESS_KEY: ${{ inputs.secret_access_key }}
        INPUT_ROLE_ARN: ${{ inputs.role_arn }}
        INPUT_REGION_NAME: ${{ inputs.region_name }}
        INPUT_STACK_NAME: ${{ inputs.stack_name }}
      run: |
        set -e
        # Re-export under the lower-case names the setup script expects.
        export access_key_id="${INPUT_ACCESS_KEY_ID}"
        export secret_access_key="${INPUT_SECRET_ACCESS_KEY}"
        export role_arn="${INPUT_ROLE_ARN}"
        export region_name="${INPUT_REGION_NAME}"
        export stack_name="${INPUT_STACK_NAME}"
        ./.github/scripts/s3/setup-aws-infrastructure.sh
28 changes: 28 additions & 0 deletions .github/actions/s3-integration-teardown/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
name: 'Tear down AWS S3 Integration Infrastructure'
description: 'Tears down AWS S3 Integration Infrastructure used for testing purposes.'

inputs:
  access_key_id:
    description: 'AWS Access Key ID'
    required: true
  secret_access_key:
    description: 'AWS Secret Access Key'
    required: true
  region_name:
    description: 'AWS Region Name'
    required: true
  stack_name:
    description: 'CloudFormation Stack Name'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Teardown AWS Infrastructure
      shell: bash
      # Inputs are passed through `env` instead of being interpolated with
      # `${{ }}` directly into the script body, so special characters in an
      # input value cannot inject shell syntax into the step.
      env:
        INPUT_ACCESS_KEY_ID: ${{ inputs.access_key_id }}
        INPUT_SECRET_ACCESS_KEY: ${{ inputs.secret_access_key }}
        INPUT_REGION_NAME: ${{ inputs.region_name }}
        INPUT_STACK_NAME: ${{ inputs.stack_name }}
      run: |
        set -e
        # Re-export under the lower-case names the teardown script expects.
        export access_key_id="${INPUT_ACCESS_KEY_ID}"
        export secret_access_key="${INPUT_SECRET_ACCESS_KEY}"
        export region_name="${INPUT_REGION_NAME}"
        export stack_name="${INPUT_STACK_NAME}"
        ./.github/scripts/s3/teardown-infrastructure.sh
84 changes: 84 additions & 0 deletions .github/scripts/s3/assets/cloudformation-s3cli-iam.template.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
{
  "Resources": {
    "S3Bucket": {
      "Type": "AWS::S3::Bucket",
      "DeletionPolicy": "Delete",
      "Properties": {
        "AccessControl": "Private"
      }
    },
    "Role": {
      "Type": "AWS::IAM::Role",
      "Properties": {
        "AssumeRolePolicyDocument": {
          "Version": "2012-10-17",
          "Statement": [
            {
              "Effect": "Allow",
              "Principal": {
                "Service": "lambda.amazonaws.com"
              },
              "Action": [
                "sts:AssumeRole"
              ]
            }
          ]
        },
        "Path": "/",
        "Policies": [
          {
            "PolicyName": "S3CLIPermissions",
            "PolicyDocument": {
              "Version": "2012-10-17",
              "Statement": [
                {
                  "Action": [
                    "logs:CreateLogGroup",
                    "logs:CreateLogStream",
                    "logs:PutLogEvents"
                  ],
                  "Effect": "Allow",
                  "Resource": "arn:aws:logs:*:*:*"
                },
                {
                  "Action": [
                    "s3:GetObject*",
                    "s3:PutObject*",
                    "s3:List*",
                    "s3:DeleteObject*"
                  ],
                  "Effect": "Allow",
                  "Resource": [
                    { "Fn::Sub": "arn:aws:s3:::${S3Bucket}" },
                    { "Fn::Sub": "arn:aws:s3:::${S3Bucket}/*" }
                  ]
                }
              ]
            }
          }
        ]
      }
    }
  },
  "Outputs": {
    "BucketName": { "Value": { "Ref": "S3Bucket" } },
    "IamRoleArn": { "Value": { "Fn::GetAtt": ["Role", "Arn"] } }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
  "Resources": {
    "S3Bucket": {
      "Type": "AWS::S3::Bucket",
      "DeletionPolicy": "Delete",
      "Properties": { "AccessControl": "Private" }
    }
  },
  "Outputs": {
    "BucketName": {
      "Value": { "Ref": "S3Bucket" }
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
  "Resources": {
    "S3PublicReadBucket": {
      "Type": "AWS::S3::Bucket",
      "DeletionPolicy": "Delete",
      "Properties": {
        "PublicAccessBlockConfiguration": {
          "BlockPublicAcls": false,
          "BlockPublicPolicy": false,
          "IgnorePublicAcls": false,
          "RestrictPublicBuckets": false
        },
        "OwnershipControls": {
          "Rules": [
            { "ObjectOwnership": "ObjectWriter" }
          ]
        }
      }
    }
  },
  "Outputs": {
    "BucketName": { "Value": { "Ref": "S3PublicReadBucket" } }
  }
}
22 changes: 22 additions & 0 deletions .github/scripts/s3/assets/lambda_function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import os
import logging
import subprocess

def test_runner_handler(event, context):
    """Lambda entry point that runs the bundled integration-test binary.

    Copies the invocation parameters (bucket name, region, S3 host) into the
    process environment for the test binary to read, then runs
    ``./integration.test`` focused on the 'AWS STANDARD IAM ROLE' specs.

    Raises:
        subprocess.CalledProcessError: re-raised (after logging its output)
            when the test binary exits non-zero, so the Lambda invocation
            itself is reported as failed.
    """
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    # Expose the event parameters to the test binary via the environment.
    os.environ['S3_CLI_PATH'] = './s3cli'
    os.environ['BUCKET_NAME'] = event['bucket_name']
    os.environ['REGION'] = event['region']
    os.environ['S3_HOST'] = event['s3_host']

    command = ['./integration.test', '-ginkgo.focus', 'AWS STANDARD IAM ROLE']
    try:
        # stderr is merged into stdout so the full test log is captured.
        output = subprocess.check_output(command, env=os.environ,
                                         stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        logger.debug("INTEGRATION TEST EXITED WITH STATUS: " + str(e.returncode))
        logger.debug(e.output)
        raise
    logger.debug("INTEGRATION TEST OUTPUT:")
    logger.debug(output)
36 changes: 36 additions & 0 deletions .github/scripts/s3/run-integration-aws-assume.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
# Runs the S3 assume-role integration tests with ginkgo.
# Expects AWS credentials and test parameters in the environment (set by the
# s3-integration-run composite action).
set -euo pipefail

# Get the directory where this script is located, and the repository root.
script_dir="$( cd "$(dirname "${0}")" && pwd )"
repo_root="$(cd "${script_dir}/../../.." && pwd)"

# Source shared helpers from the same directory.
source "${script_dir}/utils.sh"

# Required parameters: fail fast with a clear message when missing.
: "${access_key_id:?}"
: "${secret_access_key:?}"
: "${focus_regex:?}"

# Optional parameters: default to the literal string "unset" when absent.
: "${region_name:=unset}"
: "${assume_role_arn:=unset}"
: "${s3_endpoint_host:=unset}"

# Credentials/region for the AWS SDK used while resolving stack info.
# Expansions are quoted so values containing spaces or globs stay intact.
export AWS_ACCESS_KEY_ID="${access_key_id}"
export AWS_SECRET_ACCESS_KEY="${secret_access_key}"
export AWS_DEFAULT_REGION="${region_name}"
export ASSUME_ROLE_ARN="${assume_role_arn}"

# Variables read directly by the integration test suite (some optional).
export ACCESS_KEY_ID="${access_key_id}"
export SECRET_ACCESS_KEY="${secret_access_key}"
export REGION="${region_name}"
export S3_HOST="${s3_endpoint_host}"

pushd "${repo_root}" > /dev/null
echo -e "\n running tests with $(go version)..."
ginkgo -r --focus="${focus_regex}" s3/integration/
popd > /dev/null
Loading
Loading