docs/reference/config.md (6 additions, 0 deletions)
@@ -112,6 +112,12 @@ The following settings are available:
`aws.batch.executionRole`
: The AWS Batch [Execution Role](https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html) ARN that needs to be used to execute the Batch Job. It is mandatory when using AWS Fargate.

`aws.batch.forceGlacierTransfer`
: :::{versionadded} 26.04.0
:::
: When `true`, adds the `--force-glacier-transfer` flag to AWS CLI S3 download commands (default: `false`).
: This option is needed when staging directories that have been restored from [S3 Glacier](https://aws.amazon.com/s3/storage-classes/glacier/). It does not restore objects from Glacier.
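
For example, a pipeline whose input directories were restored from Glacier ahead of time could enable the option in its configuration. A minimal sketch (hypothetical `nextflow.config`; the scope and setting name follow the entry documented above):

```groovy
// nextflow.config: opt in to forced Glacier transfers for AWS Batch task staging
aws {
    batch {
        // adds --force-glacier-transfer to the generated `aws s3 cp --recursive` commands
        forceGlacierTransfer = true
    }
}
```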

`aws.batch.jobRole`
: The AWS Batch Job Role ARN that needs to be used to execute the Batch Job.

@@ -164,4 +164,8 @@ class AwsOptions implements CloudTransferOptions {
return awsConfig.batchConfig.terminateUnschedulableJobs
}

Boolean getForceGlacierTransfer() {
return awsConfig.batchConfig.forceGlacierTransfer
}

}
@@ -59,6 +59,12 @@ class AwsBatchConfig implements CloudTransferOptions, ConfigScope {
""")
final String executionRole

@ConfigOption
@Description("""
When `true`, adds the `--force-glacier-transfer` flag to AWS CLI S3 download commands (default: `false`).
""")
final boolean forceGlacierTransfer

@ConfigOption
@Description("""
The AWS Batch Job Role ARN that needs to be used to execute the Batch Job.
@@ -151,6 +157,7 @@ class AwsBatchConfig implements CloudTransferOptions, ConfigScope {
schedulingPriority = opts.schedulingPriority as Integer ?: 0
executionRole = opts.executionRole
terminateUnschedulableJobs = opts.terminateUnschedulableJobs as boolean
forceGlacierTransfer = opts.forceGlacierTransfer as boolean
if( retryMode == 'built-in' )
retryMode = null // this forces falling back on the NF built-in retry mode instead of delegating to the AWS CLI tool
if( retryMode && retryMode !in AwsOptions.VALID_RETRY_MODES )
@@ -38,6 +38,7 @@ class S3BashLib extends BashFunLib<S3BashLib> {
private String s5cmdPath
private String acl = ''
private String requesterPays = ''
private String forceGlacierTransfer = ''

S3BashLib withCliPath(String cliPath) {
if( cliPath )
@@ -90,12 +91,17 @@ class S3BashLib extends BashFunLib<S3BashLib> {
return this
}

S3BashLib withForceGlacierTransfer(Boolean value) {
this.forceGlacierTransfer = value ? '--force-glacier-transfer ' : ''
return this
}
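
Since the parameter above is the boxed `Boolean`, the ternary also covers an unset (`null`) option. A standalone sketch of the pattern (hypothetical closure name, not part of the change):

```groovy
// null-safe flag splicing: only an explicit true emits the CLI flag;
// the trailing space keeps the interpolated command well-formed
def glacierFlag = { Boolean v -> v ? '--force-glacier-transfer ' : '' }
assert glacierFlag(true)  == '--force-glacier-transfer '
assert glacierFlag(false) == ''
assert glacierFlag(null)  == ''
```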

protected String retryEnv() {
if( !retryMode )
return ''
"""
# aws cli retry config
export AWS_RETRY_MODE=${retryMode}
export AWS_MAX_ATTEMPTS=${maxTransferAttempts}
""".stripIndent().rightTrim()
}
@@ -126,8 +132,8 @@ class S3BashLib extends BashFunLib<S3BashLib> {
local file_name=\$(basename \$1)
local is_dir=\$($cli s3 ls \$source | grep -F "PRE \${file_name}/" -c)
if [[ \$is_dir == 1 ]]; then
$cli s3 cp --only-show-errors --recursive ${forceGlacierTransfer}"\$source" "\$target"
else
$cli s3 cp --only-show-errors "\$source" "\$target"
fi
}
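
For context on the flag itself: in a recursive copy the AWS CLI skips objects whose storage class is GLACIER, including ones that have already been restored, unless the transfer is forced (restoring remains a separate step). A sketch with hypothetical bucket paths:

```bash
# Hypothetical prefix; the CLI warns and skips GLACIER-class objects here,
# even if they were previously restored:
aws s3 cp --recursive s3://my-bucket/staged-run/ ./staged-run/

# Forcing the transfer downloads restored objects like any other object:
aws s3 cp --recursive --force-glacier-transfer s3://my-bucket/staged-run/ ./staged-run/
```
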
@@ -165,7 +171,7 @@ class S3BashLib extends BashFunLib<S3BashLib> {
local is_dir=\$($cli ls \$source | grep -F "DIR \${file_name}/" -c)
if [[ \$is_dir == 1 ]]; then
$cli cp "\$source/*" "\$target"
else
$cli cp "\$source" "\$target"
fi
}
@@ -194,6 +200,7 @@ class S3BashLib extends BashFunLib<S3BashLib> {
.withS5cmdPath( opts.s5cmdPath )
.withAcl( opts.s3Acl )
.withRequesterPays( opts.requesterPays )
.withForceGlacierTransfer( opts.forceGlacierTransfer )
}

static String script(AwsOptions opts) {
@@ -205,7 +205,7 @@ class AwsBatchFileCopyStrategyTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -296,7 +296,7 @@ class AwsBatchFileCopyStrategyTest extends Specification {
local is_dir=$(/foo/aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
/foo/aws s3 cp --only-show-errors --recursive "$source" "$target"
else
/foo/aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -139,7 +139,7 @@ class AwsBatchScriptLauncherTest extends Specification {
local is_dir=$(/conda/bin/aws --region eu-west-1 s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
/conda/bin/aws --region eu-west-1 s3 cp --only-show-errors --recursive "$source" "$target"
else
/conda/bin/aws --region eu-west-1 s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -318,7 +318,7 @@ class AwsBatchScriptLauncherTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -490,7 +490,7 @@ class AwsBatchScriptLauncherTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -584,7 +584,7 @@ class AwsBatchScriptLauncherTest extends Specification {
}

# aws cli retry config
export AWS_RETRY_MODE=adaptive
export AWS_MAX_ATTEMPTS=3
# aws helper
nxf_s3_upload() {
@@ -606,7 +606,7 @@ class AwsBatchScriptLauncherTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -47,6 +47,7 @@ class AwsBatchConfigTest extends Specification {
!batch.s5cmdPath
batch.schedulingPriority == 0
!batch.terminateUnschedulableJobs
!batch.forceGlacierTransfer
}

def 'should create config with options' () {
@@ -153,4 +154,18 @@
[terminateUnschedulableJobs: false] | false
[terminateUnschedulableJobs: true] | true
}

def 'should parse forceGlacierTransfer flag' () {
given:
def opts = new AwsBatchConfig(OPTS)

expect:
opts.forceGlacierTransfer == FORCE_GLACIER

where:
OPTS | FORCE_GLACIER
[:] | false
[forceGlacierTransfer: false] | false
[forceGlacierTransfer: true] | true
}
}
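
The empty-map row in the new test above passes because of the `as boolean` coercion in the `AwsBatchConfig` constructor: a missing map entry evaluates to `null`, which Groovy coerces to `false`. A standalone sketch:

```groovy
// Groovy truth coercion, as used by `opts.forceGlacierTransfer as boolean`
Map opts = [:]
assert (opts.forceGlacierTransfer as boolean) == false   // absent key -> null -> false
assert ([forceGlacierTransfer: true].forceGlacierTransfer as boolean)
```
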
@@ -104,7 +104,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -203,7 +203,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(/foo/bin/aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
/foo/bin/aws s3 cp --only-show-errors --recursive "$source" "$target"
else
/foo/bin/aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -223,7 +223,7 @@ class S3BashLibTest extends Specification {
expect:
S3BashLib.script() == '''
# aws cli retry config
export AWS_RETRY_MODE=standard
export AWS_MAX_ATTEMPTS=5
# aws helper
nxf_s3_upload() {
@@ -245,7 +245,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -261,7 +261,7 @@ class S3BashLibTest extends Specification {
expect:
S3BashLib.script() == '''
# aws cli retry config
export AWS_RETRY_MODE=legacy
export AWS_MAX_ATTEMPTS=100
# aws helper
nxf_s3_upload() {
@@ -283,7 +283,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -318,7 +318,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -334,7 +334,7 @@ class S3BashLibTest extends Specification {
expect:
S3BashLib.script() == '''
# aws cli retry config
export AWS_RETRY_MODE=legacy
export AWS_MAX_ATTEMPTS=99
# aws helper
nxf_s3_upload() {
@@ -356,7 +356,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(/some/bin/aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
/some/bin/aws s3 cp --only-show-errors --recursive "$source" "$target"
else
/some/bin/aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -450,7 +450,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -541,7 +541,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -616,7 +616,7 @@ class S3BashLibTest extends Specification {
}

# aws cli retry config
export AWS_RETRY_MODE=standard
export AWS_MAX_ATTEMPTS=5
# aws helper
nxf_s3_upload() {
@@ -638,7 +638,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -714,7 +714,7 @@ class S3BashLibTest extends Specification {
}

# aws cli retry config
export AWS_RETRY_MODE=standard
export AWS_MAX_ATTEMPTS=5
# aws helper
nxf_s3_upload() {
Expand All @@ -736,7 +736,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
@@ -773,7 +773,7 @@ class S3BashLibTest extends Specification {
local is_dir=$(s5cmd ls $source | grep -F "DIR ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
s5cmd cp "$source/*" "$target"
else
s5cmd cp "$source" "$target"
fi
}
@@ -802,20 +802,58 @@ class S3BashLibTest extends Specification {
s5cmd cp --acl public-read --storage-class STANDARD "$name" "$s3path/$name"
fi
}

nxf_s3_download() {
local source=$1
local target=$2
local file_name=$(basename $1)
local is_dir=$(s5cmd ls $source | grep -F "DIR ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
s5cmd cp "$source/*" "$target"
else
s5cmd cp "$source" "$target"
fi
}
'''.stripIndent(true)
}


def 'should create script with force glacier transfer' () {
given:
Global.session = Mock(Session) {
getConfig() >> [aws:[batch:[forceGlacierTransfer: true]]]
}

expect:
S3BashLib.script() == '''
# aws cli retry config
export AWS_RETRY_MODE=standard
export AWS_MAX_ATTEMPTS=5
# aws helper
nxf_s3_upload() {
local name=$1
local s3path=$2
if [[ "$name" == - ]]; then
aws s3 cp --only-show-errors --storage-class STANDARD - "$s3path"
elif [[ -d "$name" ]]; then
aws s3 cp --only-show-errors --recursive --storage-class STANDARD "$name" "$s3path/$name"
else
aws s3 cp --only-show-errors --storage-class STANDARD "$name" "$s3path/$name"
fi
}

nxf_s3_download() {
local source=$1
local target=$2
local file_name=$(basename $1)
local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
if [[ $is_dir == 1 ]]; then
aws s3 cp --only-show-errors --recursive --force-glacier-transfer "$source" "$target"
else
aws s3 cp --only-show-errors "$source" "$target"
fi
}
'''.stripIndent(true)
}


}