-'use strict';
-const path = require('path');
+#!/usr/bin/env node
+
 const fs = require('fs');
+const path = require('path');
 const archiver = require('archiver');
-const utils = require('@liwb/cloud-utils');
-const pkg = require('../package.json');
-const outDir = `${path.resolve(__dirname, '../')}/${pkg.name}_${utils.formatDate(new Date(), 'yyyy-MM-dd_HH:mm:ss')}.zip`;
-
-// create a file to stream archive data to.
-const output = fs.createWriteStream(outDir);
-const archive = archiver('zip', {
-  zlib: { level: 9 } // Sets the compression level.
-});
-
-// listen for all archive data to be written
-// 'close' event is fired only when a file descriptor is involved
-output.on('close', () => {
-  console.log(archive.pointer() + ' total bytes');
-  console.log('archiver has been finalized and the output file descriptor has closed.');
-});
-
-// This event is fired when the data source is drained no matter what was the data source.
-// It is not part of this library but rather from the NodeJS Stream API.
-// @see: https://nodejs.org/api/stream.html#stream_event_end
-output.on('end', () => {
-  console.log('Data has been drained');
-});
-
-// good practice to catch warnings (ie stat failures and other non-blocking errors)
-archive.on('warning', (err) => {
-  if (err.code === 'ENOENT') {
-    // log warning
-  } else {
-    // throw error
-    throw err;
+const { formatDate } = require('@liwb/cloud-utils');
+
+const DEST_DIR = path.join(__dirname, '../dist');
+const DEST_ZIP_DIR = path.join(__dirname, '../dist-zip');
+
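+// Read the extension's name and version out of package.json.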
+const extractExtensionData = () => {
+  const extPackageJson = require('../package.json');
+
+  return {
+    name: extPackageJson.name,
+    version: extPackageJson.version,
+  };
+};
+
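+// Create the dist-zip output directory on first run.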
+const makeDestZipDirIfNotExists = () => {
+  if (!fs.existsSync(DEST_ZIP_DIR)) {
+    fs.mkdirSync(DEST_ZIP_DIR);
   }
-});
+};
+
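+// Zip the contents of src into dist/zipFilename. As the old script's
+// comments noted, stream handlers should be registered before finalize()
+// is called, so the Promise wires up 'error' and 'close' first.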
+const buildZip = (src, dist, zipFilename) => {
+  console.info(`Building ${zipFilename}...`);
+
+  const archive = archiver('zip', { zlib: { level: 9 } });
+  const stream = fs.createWriteStream(path.join(dist, zipFilename));
+
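+  // Resolve once the write stream closes; reject on archiver errors.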
+  return new Promise((resolve, reject) => {
+    archive
+      .directory(src, false)
+      .on('error', (err) => reject(err))
+      .pipe(stream);
+
+    stream.on('close', () => resolve());
+    archive.finalize();
+  });
+};

-// good practice to catch this error explicitly
-archive.on('error', (err) => {
-  throw err;
-});
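+// Entry point: build a timestamped zip filename, make sure the output
+// directory exists, then create the archive.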
+const main = () => {
+  const { name, version } = extractExtensionData();
+  const zipFilename = `${name}-v${version}_${formatDate(
+    new Date(),
+    'yyyy-MM-dd_HH-mm-ss'
+  )}.zip`;

-// pipe archive data to the file
-archive.pipe(output);
+  makeDestZipDirIfNotExists();

-archive.directory('./dist/', false);
+  buildZip(DEST_DIR, DEST_ZIP_DIR, zipFilename)
+    .then(() => console.info('OK'))
+    .catch(console.error);
+};

-// finalize the archive (ie we are done appending files but streams have to finish yet)
-// 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
-archive.finalize();
+main();
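
A minimal way to exercise the rewritten script (its path is not shown in this diff, so `scripts/build-zip.js` below is a placeholder):

    node scripts/build-zip.js
    # Building <name>-v<version>_2024-01-01_12-00-00.zip...
    # OK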