Skip to content

Commit 9ea2eb7

Browse files
committed
feat: allow accessControlLevel to be set in s3-batch-upload
1 parent 7433325 commit 9ea2eb7

File tree

4 files changed

+93
-30
lines changed

4 files changed

+93
-30
lines changed

README.md

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,8 @@ Options:
4343
-g, --glob A glob on filename level to filter the files to upload [string] [default: "*.*"]
4444
-a, --cache-control Cache control for uploaded files, can be string for single value or list of glob settings
4545
[string] [default: ""]
46+
-acl, --access-control-level Sets the access control level for uploaded files
47+
[string] [default: "undefined"]
4648
-c, --config The AWS config json path to load S3 credentials with loadFromPath. [string]
4749
-h, --help Show help [boolean]
4850
@@ -75,7 +77,8 @@ await new Uploader({
7577
'**/settings.json': 'max-age=60', // 1 min for settings, specific matches should go first
7678
'**/*.json': 'max-age=300', // 5 mins for other jsons
7779
'**/*.*': 'max-age=3600', // 1 hour for everything else
78-
}
80+
},
81+
accessControlLevel: 'bucket-owner-full-control' // optional; omitted from the upload params if undefined. Available options: "private"|"public-read"|"public-read-write"|"authenticated-read"|"aws-exec-read"|"bucket-owner-read"|"bucket-owner-full-control"
7982
}).upload();
8083
```
8184

@@ -88,8 +91,8 @@ in your repo. Use the following template for the config file as stated in the [A
8891

8992
```json
9093
{
91-
"accessKeyId": "<YOUR_ACCESS_KEY_ID>",
92-
"secretAccessKey": "<YOUR_SECRET_ACCESS_KEY>",
94+
"accessKeyId": "<YOUR_ACCESS_KEY_ID>",
95+
"secretAccessKey": "<YOUR_SECRET_ACCESS_KEY>",
9396
"region": "us-east-1"
9497
}
9598
```

src/lib/Uploader.ts

Lines changed: 43 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,19 @@ export type Options = {
2121
dryRun?: boolean;
2222
cacheControl?: string | { [key: string]: string };
2323
s3Client?: S3;
24+
accessControlLevel?: ObjectACL;
2425
};
2526

27+
export type ObjectACL =
28+
| 'private'
29+
| 'public-read'
30+
| 'public-read-write'
31+
| 'authenticated-read'
32+
| 'aws-exec-read'
33+
| 'bucket-owner-read'
34+
| 'bucket-owner-full-control'
35+
| string;
36+
2637
const defaultOptions = {
2738
dryRun: false,
2839
concurrency: 100,
@@ -90,38 +101,43 @@ export default class Uploader {
90101
gatheringSpinner.start();
91102

92103
return new Promise((resolve, reject) => {
93-
glob(
94-
`**/${globPath}`,
95-
{ cwd: path.resolve(localPath) },
96-
(err, files) => {
97-
if (err) {
98-
gatheringSpinner.fail(err);
99-
reject(err);
100-
}
101-
102-
gatheringSpinner.succeed(
103-
`Found ${chalk.green(files.length)} files at ${chalk.blue(
104-
localPath
105-
)}, starting upload:`,
106-
);
107-
108-
resolve(files);
109-
},
110-
);
104+
glob(`**/${globPath}`, { cwd: path.resolve(localPath) }, (err, files) => {
105+
if (err) {
106+
gatheringSpinner.fail(err);
107+
reject(err);
108+
}
109+
110+
gatheringSpinner.succeed(
111+
`Found ${chalk.green(files.length)} files at ${chalk.blue(localPath)}, starting upload:`,
112+
);
113+
114+
resolve(files);
115+
});
111116
});
112117
}
113118

114119
public uploadFile(localFilePath: string, remotePath: string): Promise<void> {
115120
const body = fs.createReadStream(localFilePath);
116-
const { dryRun, bucket: Bucket } = this.options;
117-
118-
const params = {
119-
Bucket,
120-
Key: remotePath.replace(/\\/g, '/'),
121-
Body: body,
122-
ContentType: mime.getType(localFilePath),
123-
CacheControl: this.getCacheControlValue(localFilePath),
124-
};
121+
const { dryRun, bucket: Bucket, accessControlLevel: ACL } = this.options;
122+
let params;
123+
if (ACL) {
124+
params = {
125+
ACL,
126+
Bucket,
127+
Key: remotePath.replace(/\\/g, '/'),
128+
Body: body,
129+
ContentType: mime.getType(localFilePath),
130+
CacheControl: this.getCacheControlValue(localFilePath),
131+
};
132+
} else {
133+
params = {
134+
Bucket,
135+
Key: remotePath.replace(/\\/g, '/'),
136+
Body: body,
137+
ContentType: mime.getType(localFilePath),
138+
CacheControl: this.getCacheControlValue(localFilePath),
139+
};
140+
}
125141

126142
return new Promise(resolve => {
127143
if (!dryRun) {

src/lib/cli.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,13 @@ yargs
6464
type: 'string',
6565
nargs: 1,
6666
})
67+
.option('acl', {
68+
alias: 'access-control-level',
69+
default: undefined,
70+
describe: 'Sets the bucket access control level for uploaded files',
71+
type: 'string',
72+
nargs: 1,
73+
})
6774
.option('a', {
6875
alias: 'cache-control',
6976
default: '',

test/Uploader.spec.ts

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,43 @@ describe('Uploader', () => {
4646
(<any>s3.upload).restore();
4747
});
4848

49+
it('should upload with access control level options', async function() {
50+
this.timeout(10000);
51+
52+
const s3 = {
53+
upload(_, cb) {
54+
cb(null);
55+
}
56+
};
57+
spy(s3, "upload");
58+
59+
uploader = new Uploader({
60+
localPath: 'test/files',
61+
remotePath: 'fake',
62+
bucket: 'fake',
63+
glob: '**/demo.png',
64+
s3Client: <any>s3,
65+
accessControlLevel: 'bucket-owner-full-control'
66+
});
67+
68+
await uploader.upload();
69+
70+
const { Body, ...args} = (<any>s3.upload).lastCall.args[0];
71+
72+
73+
expect(args).to.deep.equal({
74+
ACL: 'bucket-owner-full-control',
75+
Bucket: 'fake',
76+
Key: 'fake/demo.png',
77+
ContentType: 'image/png',
78+
CacheControl: '',
79+
});
80+
81+
(<any>expect(Body).to.be.a).ReadableStream;
82+
83+
(<any>s3.upload).restore();
84+
});
85+
4986
it('should fix windows paths', async function() {
5087
this.timeout(5000);
5188

0 commit comments

Comments (0)