Skip to content

Commit f6d3a47

Browse files
version 1.1.0 (#577)
* Created initial s3 step * Fixing some syntax issues Remove subfolder in S3 bucket for build ID Adding IMAGE NAME and VERSION for easier testing Make script executable Make non public Add parameter for integration name Signed-off-by: Laurent Rochette <[email protected]> * fixed aws export commands, output url, cf api key * updated working Dir and script * updated steps.yaml for public * upated steps.yaml to rquire sourcedir * adding test for directory Signed-off-by: Laurent Rochette <[email protected]> * Reduced image size, update step, updaet script * updated steps.yaml with corrections * updated URL to image * version 1.1.0 Adding SA support Catching error when upload fails so step errors out * if instead of fi --------- Signed-off-by: Laurent Rochette <[email protected]> Co-authored-by: Luke Goodfellow <[email protected]>
1 parent aaef0b0 commit f6d3a47

File tree

3 files changed

+42
-13
lines changed

3 files changed

+42
-13
lines changed

incubating/aws-s3/Dockerfile

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,4 +6,3 @@ RUN yum install -y tar gzip jq && \
66
yum clean all && rm -rf /var/cache/yum
77

88
COPY ./src/start.sh /
9-

incubating/aws-s3/src/start.sh

Lines changed: 22 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,19 @@
#!/bin/bash
#
# Resolve AWS credentials for the S3 upload step.
#
# If a Service Account is attached to the pipeline, AWS_ROLE_ARN is already
# set in the environment and the AWS CLI authenticates through it; otherwise
# fall back to the Codefresh storage integration named in S3_INTEGRATION,
# pulling the key pair out of the integration's decrypted JSON config.
#
# Required env (one of): AWS_ROLE_ARN  -or-  S3_INTEGRATION
# Required env: REGION (any case; lowercased below)

# Test for SA being set ("+x" expansion: distinguishes unset from empty).
if [ -z "${AWS_ROLE_ARN+x}" ]; then
  if [ -z "${S3_INTEGRATION+x}" ]; then
    # NOTE: message previously misspelled the variable as "S3_INEGRATION".
    echo "You must set S3_INTEGRATION or associate a Service Account to your pipeline"
    exit 1
  else
    export AWS_ACCESS_KEY_ID=$(codefresh get context "${S3_INTEGRATION}" --decrypt -o json | jq --raw-output '.spec.data.auth.jsonConfig.accessKeyId')
    export AWS_SECRET_ACCESS_KEY=$(codefresh get context "${S3_INTEGRATION}" --decrypt -o json | jq --raw-output '.spec.data.auth.jsonConfig.secretAccessKey')
  fi
else
  echo "Service Account is set. Ignoring S3_INTEGRATION if defined."
fi

# AWS regions are lowercase; ${REGION,,} (bash 4+) lowercases directly,
# no need to round-trip through echo/command substitution.
export AWS_DEFAULT_REGION=${REGION,,}
# Disable the CLI pager so output streams cleanly in pipeline logs.
export AWS_PAGER=""
519

@@ -13,8 +27,13 @@ else
1327
aws s3 cp $SOURCE s3://$BUCKET/$S3_PREFIX
1428
fi
1529

# Fail the step if the preceding `aws s3 cp` failed, so the pipeline reports
# an error instead of silently succeeding. `$?` MUST be read first here: it
# still holds the exit status of the upload command above.
if [ $? -ne 0 ] ; then
    echo "Error uploading $SOURCE to s3://$BUCKET/$S3_PREFIX"
    exit 1
fi

echo ""
echo "Finished uploading files to S3"
echo ""

# Expose a deep link to the uploaded prefix in the S3 console as the step's
# output variable (CF_OUTPUT_URL convention).
export uploadToS3_CF_OUTPUT_URL="https://s3.console.aws.amazon.com/s3/buckets/$BUCKET?region=$AWS_DEFAULT_REGION&prefix=$S3_PREFIX/"

incubating/aws-s3/step.yaml

Lines changed: 20 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ kind: step-type
22
version: '1.0'
33
metadata:
44
name: aws-s3
5-
version: 1.0.0
5+
version: 1.1.0
66
isPublic: true
77
description: Integration that uploads files to S3
88
sources:
@@ -20,7 +20,7 @@ metadata:
2020
url: https://raw.githubusercontent.com/codefresh-io/steps/master/incubating/aws-s3/image/s3-logo.svg
2121
background: "#f4f4f4"
2222
examples:
23-
- description: aws-s3
23+
- description: aws-s3-with-integration
2424
workflow:
2525
uploadToS3:
2626
title: Uploading to S3
@@ -32,6 +32,17 @@ metadata:
3232
S3_PREFIX: myDirectory/subdirectory
3333
S3_INTEGRATION: amazon
3434
SOURCE: ${{CF_REPO_NAME}}/myDirectory
35+
- description: aws-s3-with-SA
36+
workflow:
37+
uploadToS3:
38+
title: Uploading to S3
39+
type: aws-s3
40+
stage: cleanup
41+
arguments:
42+
REGION: us-east-1
43+
BUCKET: my-s3-bucket
44+
S3_PREFIX: myDirectory/subdirectory
45+
SOURCE: ${{CF_REPO_NAME}}/myDirectory
3546
spec:
3647
arguments: |-
3748
{
@@ -40,21 +51,21 @@ spec:
4051
"type": "object",
4152
"additionalProperties": true,
4253
"patterns": [],
43-
"required": ["BUCKET", "S3_PREFIX", "S3_INTEGRATION", "SOURCE"],
54+
"required": ["BUCKET", "S3_PREFIX", "SOURCE"],
4455
"properties":
4556
{
4657
"S3_INTEGRATION": {
4758
"type": "string",
48-
"description": "The name of the S3 storage integration"
59+
        "description": "The name of the S3 storage integration if you do not use a Service Account. If a SA exists, it will be used instead."
4960
},
5061
"AWS_IMAGE": {
5162
"type": "string",
5263
"default": "quay.io/codefreshplugins/aws-s3",
53-
"description": "The AWS S3 container image registry/image for the step."
64+
"description": "The AWS S3 container registry/image for the step."
5465
},
5566
"AWS_IMAGE_VERSION": {
5667
"type": "string",
57-
"default": "1.0.0",
68+
"default": "1.1.0",
5869
        "description": "Version of the AWS S3 image to use (Docker image tag)."
5970
},
6071
"REGION": {
@@ -69,17 +80,17 @@ spec:
6980
"S3_PREFIX":
7081
{
7182
"type": "string",
72-
"description": "The prefix or path you want the files to be uploaded to. If a single file, this will be the object key unless you add a / at the end"
83+
"description": "The prefix or path you want the files to be uploaded to. If a single file, this will be the object key unless you add a / at the end."
7384
},
7485
"SOURCE":
7586
{
7687
"type": "string",
77-
"description": "The name of the directory or file you want to upload. Path from /codefresh/volume/ "
88+
"description": "The name of the directory or file you want to upload. Path from /codefresh/volume/"
7889
},
7990
"working_directory":
8091
{
8192
"type": "string",
82-
"description": "The directory where you want to be in. The default value is '/codefresh/volume/",
93+
"description": "The directory where you want to be in. The default value is '/codefresh/volume/'",
8394
"default": "/codefresh/volume/"
8495
}
8596
}

0 commit comments

Comments
 (0)