
Commit 68337a0

AWS-S3 step (#572)
* Created initial s3 step
* Fixed some syntax issues: removed the per-build-ID subfolder in the S3 bucket, added IMAGE NAME and VERSION for easier testing, made the script executable, made the step non-public, added a parameter for the integration name
* Fixed the aws export commands, output URL, and CF API key
* Updated working directory and script
* Updated steps.yaml for public release
* Updated steps.yaml to require sourcedir
* Added a test for directory uploads
* Reduced image size, updated step and script
* Updated steps.yaml with corrections
* Updated URL to image

Signed-off-by: Laurent Rochette <[email protected]>
Co-authored-by: Laurent Rochette <[email protected]>
1 parent 9402ff2 commit 68337a0

4 files changed: 153 additions, 0 deletions

incubating/aws-s3/Dockerfile

Lines changed: 8 additions & 0 deletions
```dockerfile
FROM amazon/aws-cli

# Install jq (needed at runtime) plus tar and gzip to unpack the Codefresh CLI,
# then remove the archive tools to keep the image small.
RUN yum install -y tar gzip jq && \
    curl -sSL -o - https://github.com/codefresh-io/cli/releases/download/v0.82.5/codefresh-v0.82.5-linux-x64.tar.gz | tar zx codefresh && \
    mv ./codefresh /usr/local/bin/codefresh && \
    yum remove -y tar gzip && \
    yum clean all && rm -rf /var/cache/yum

COPY ./src/start.sh /
```
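
To iterate on the image locally, a build along these lines should work. This is only a sketch: the quay.io path and 1.0.0 tag simply mirror the AWS_IMAGE and AWS_IMAGE_VERSION defaults declared in step.yaml and can be anything for local testing.

```shell
# Build the step image from the step's directory in the steps repo
# (sketch; the tag mirrors the defaults in step.yaml).
cd incubating/aws-s3
docker build -t quay.io/codefreshplugins/aws-s3:1.0.0 .
```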

incubating/aws-s3/image/s3-logo.svg

Lines changed: 18 additions & 0 deletions

incubating/aws-s3/src/start.sh

Lines changed: 26 additions & 0 deletions
```shell
#!/bin/bash

# Pull the AWS credentials out of the named Codefresh S3 integration (context)
# and expose them to the AWS CLI.
export AWS_ACCESS_KEY_ID=$(codefresh get context ${S3_INTEGRATION} --decrypt -o json | jq --raw-output '.spec.data.auth.jsonConfig.accessKeyId')
export AWS_SECRET_ACCESS_KEY=$(codefresh get context ${S3_INTEGRATION} --decrypt -o json | jq --raw-output '.spec.data.auth.jsonConfig.secretAccessKey')
export AWS_DEFAULT_REGION="${REGION,,}"
export AWS_PAGER=""

cd "$working_directory"
echo "Starting to upload files to S3"
echo ""

# Upload a whole directory recursively, or a single file.
if [ -d "$SOURCE" ]; then
    aws s3 cp "$SOURCE" "s3://$BUCKET/$S3_PREFIX" --recursive
else
    aws s3 cp "$SOURCE" "s3://$BUCKET/$S3_PREFIX"
fi

echo ""
echo "Finished uploading files to S3"
echo ""

# Build a console URL pointing at the uploaded prefix and export it as an
# output variable for later pipeline steps.
export uploadToS3_CF_OUTPUT_URL="https://s3.console.aws.amazon.com/s3/buckets/$BUCKET?region=$AWS_DEFAULT_REGION&prefix=$S3_PREFIX/"

echo "Files have been uploaded to: "
echo "$uploadToS3_CF_OUTPUT_URL"
echo ""

cf_export uploadToS3_CF_OUTPUT_URL
```
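
For a rough local smoke test outside a Codefresh build, something like the following can exercise the script. The values below are placeholders, the Codefresh CLI is assumed to accept the API key via the CF_API_KEY environment variable, the base image's `aws` entrypoint is overridden so the script runs directly, and `cf_export` only exists inside the Codefresh runtime, so the script's final line will report "command not found" when run this way.

```shell
# Sketch of a local smoke test (placeholder values; requires a valid
# Codefresh API key and AWS-side permissions on the target bucket).
docker run --rm \
  --entrypoint bash \
  -v "$(pwd)/dist:/codefresh/volume/dist" \
  -e CF_API_KEY="<codefresh-api-key>" \
  -e S3_INTEGRATION="amazon" \
  -e REGION="us-east-1" \
  -e BUCKET="my-s3-bucket" \
  -e S3_PREFIX="myDirectory/subdirectory" \
  -e SOURCE="dist" \
  -e working_directory="/codefresh/volume/" \
  quay.io/codefreshplugins/aws-s3:1.0.0 /start.sh
```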

incubating/aws-s3/step.yaml

Lines changed: 101 additions & 0 deletions
```yaml
kind: step-type
version: '1.0'
metadata:
  name: aws-s3
  version: 1.0.0
  isPublic: true
  description: Integration that uploads files to S3
  sources:
    - https://github.com/codefresh-io/steps/tree/master/incubating/aws-s3
  stage: incubating
  maintainers:
    - name: Luke Goodfellow
  categories:
    - utilities
  official: true
  tags: []
  icon:
    type: svg
    url: https://raw.githubusercontent.com/codefresh-io/steps/master/incubating/aws-s3/image/s3-logo.svg
    background: "#f4f4f4"
  examples:
    - description: aws-s3
      workflow:
        uploadToS3:
          title: Uploading to S3
          type: aws-s3
          stage: cleanup
          arguments:
            REGION: us-east-1
            BUCKET: my-s3-bucket
            S3_PREFIX: myDirectory/subdirectory
            S3_INTEGRATION: amazon
            SOURCE: ${{CF_REPO_NAME}}/myDirectory
spec:
  arguments: |-
    {
      "definitions": {},
      "$schema": "http://json-schema.org/draft-07/schema#",
      "type": "object",
      "additionalProperties": true,
      "patterns": [],
      "required": ["BUCKET", "S3_PREFIX", "S3_INTEGRATION", "SOURCE"],
      "properties": {
        "S3_INTEGRATION": {
          "type": "string",
          "description": "The name of the S3 storage integration"
        },
        "AWS_IMAGE": {
          "type": "string",
          "default": "quay.io/codefreshplugins/aws-s3",
          "description": "The AWS S3 container image (registry/image) for the step."
        },
        "AWS_IMAGE_VERSION": {
          "type": "string",
          "default": "1.0.0",
          "description": "Version (Docker image tag) of the AWS S3 image to use."
        },
        "REGION": {
          "type": "string",
          "description": "The region the bucket is in. Default is us-east-1",
          "default": "us-east-1"
        },
        "BUCKET": {
          "type": "string",
          "description": "The name of the bucket you are using"
        },
        "S3_PREFIX": {
          "type": "string",
          "description": "The prefix or path you want the files to be uploaded to. If a single file, this will be the object key unless you add a / at the end"
        },
        "SOURCE": {
          "type": "string",
          "description": "The name of the directory or file you want to upload, given as a path from /codefresh/volume/"
        },
        "working_directory": {
          "type": "string",
          "description": "The directory to change into before uploading. The default value is /codefresh/volume/",
          "default": "/codefresh/volume/"
        }
      }
    }
  stepsTemplate: |-
    uploadToS3:
      name: aws-s3
      title: Uploading to S3
      image: '[[.Arguments.AWS_IMAGE]]:[[.Arguments.AWS_IMAGE_VERSION]]'
      environment:
      [[ range $key, $val := .Arguments ]]
        - '[[ $key ]]=[[ $val ]]'
      [[- end ]]
        - CF_API_KEY=${{CF_API_KEY}}
      commands:
        - /start.sh
  delimiters:
    left: '[['
    right: ']]'
```
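
Because spec.arguments is a JSON Schema embedded as a string inside the YAML, it is easy to break when editing. A quick local sanity check along these lines confirms the schema still parses and lists the required arguments; it assumes python3 with PyYAML is available and that the file sits at the path introduced by this commit.

```shell
# Parse step.yaml and verify the embedded arguments schema is valid JSON
# (assumes python3 + PyYAML; path matches this commit's layout).
python3 - <<'EOF'
import json
import yaml

with open("incubating/aws-s3/step.yaml") as f:
    spec = yaml.safe_load(f)

schema = json.loads(spec["spec"]["arguments"])
print("required arguments:", schema["required"])
EOF
```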
