
Commit ec4d76d

fix(Package): update formating
1 parent: 6d33fc1


package.js

Lines changed: 118 additions & 104 deletions
@@ -21,139 +21,153 @@
  *
  */
 
-// require modules
-var fs = require("fs");
-var path = require("path");
-var archiver = require("archiver");
-var version = require("./version.json");
+// require modules
+var fs = require('fs');
+var path = require('path');
+var archiver = require('archiver');
+var version = require('./version.json');
 const fetch = require('node-fetch');
 const {Octokit} = require('@octokit/rest');
 const mkdirp = require('mkdirp')
 const extract = require('extract-zip')
-const { exec } = require("child_process");
-const rimraf = require("rimraf").sync;
+const {exec} = require('child_process');
+const rimraf = require('rimraf').sync;
 
 
-const dir = "GeodePackage" + "-" + process.argv[2] + "-" + process.argv[3];
+const dir = 'GeodePackage' +
+'-' + process.argv[2] + '-' + process.argv[3];
 mkdirp.sync(dir);
-const owner = "Geode-solutions"
+const owner = 'Geode-solutions'
 
 var octokit = new Octokit({auth: process.env.TOKEN});
 
 function getRelease(repo, version, isModule) {
-const outputDirectory = isModule ? path.join(dir, "modules") : dir;
+const outputDirectory = isModule ? path.join(dir, 'modules') : dir;
 return new Promise((resolve, reject) => {
-console.log(repo, version);
-const tag = "v" + version;
-octokit.repos.getReleaseByTag({owner, repo, tag}).then(release => {
-const release_id = release.data.id;
-// console.log(release);
-octokit.repos.listReleaseAssets({owner, repo, release_id})
-.then(assets => {
-const asset = assets.data.find(asset => asset.name.includes(process.argv[3]));
-console.log('Asset name:', asset.name);
-let assetUrl = asset.url;
-fetch(assetUrl, {
-headers: {accept: 'application/octet-stream'}
-}).then(response => {
-const outputFile = repo.concat(".zip");
-console.log('Downloading to:', outputFile);
-response.body.pipe(fs.createWriteStream(outputFile))
-.on('finish', function() {
-console.log('Unzipping', outputFile);
-try {
-extract(outputFile, { dir: path.resolve(outputDirectory) }).then(()=>{
-let extractedDirectory = "";
-if( isModule ) {
-const extractedName = asset.name.slice(0, -4);
-extractedDirectory = path.join(outputDirectory, repo);
-rimraf(extractedDirectory);
-fs.renameSync(path.join(outputDirectory, extractedName),extractedDirectory);
+console.log(repo, version);
+const tag = 'v' + version;
+octokit.repos.getReleaseByTag({owner, repo, tag})
+.then(release => {
+const release_id = release.data.id;
+octokit.repos.listReleaseAssets({owner, repo, release_id})
+.then(assets => {
+const asset = assets.data.find(
+asset => asset.name.includes(process.argv[3]));
+console.log('Asset name:', asset.name);
+let assetUrl = asset.url;
+fetch(assetUrl, {
+headers: {accept: 'application/octet-stream'}
+}).then(response => {
+const outputFile = repo.concat('.zip');
+console.log('Downloading to:', outputFile);
+response.body.pipe(fs.createWriteStream(outputFile))
+.on('finish', function() {
+console.log('Unzipping', outputFile);
+try {
+extract(outputFile, {
+dir: path.resolve(outputDirectory)
+}).then(() => {
+let extractedDirectory = '';
+if (isModule) {
+const extractedName = asset.name.slice(0, -4);
+extractedDirectory =
+path.join(outputDirectory, repo);
+rimraf(extractedDirectory);
+fs.renameSync(
+path.join(outputDirectory, extractedName),
+extractedDirectory);
+}
+else {
+extractedDirectory = dir;
+}
+console.log('Unzip to:', repo);
+const pipDestination = path.join(dir, 'server');
+if (!fs.existsSync(pipDestination)) {
+fs.mkdirSync(pipDestination);
+}
+console.log('PIP to:', pipDestination);
+exec(
+'python -m pip install --upgrade -r ' +
+path.join(
+extractedDirectory,
+'server/requirements.txt') +
+' -t ' + pipDestination,
+(err, stdout, stderr) => {
+console.log(`stdout: ${stdout}`);
+console.log(`stderr: ${stderr}`);
+resolve();
+});
+});
+} catch (error) {
+reject(error);
 }
-else{
-extractedDirectory = dir;
-}
-console.log('Unzip to:', repo);
-const pipDestination = path.join(dir,"server");
-if (!fs.existsSync(pipDestination)){
-fs.mkdirSync(pipDestination);
-}
-console.log('PIP to:', pipDestination);
-exec(
-"python -m pip install --upgrade -r " + path.join(extractedDirectory,"server/requirements.txt") + " -t " + pipDestination,
-(err, stdout, stderr) => {
-console.log(`stdout: ${stdout}`);
-console.log(`stderr: ${stderr}`);
-resolve();
-});
 });
-} catch (error) {
-reject(error);
-}
 });
-});
+});
+})
+.catch((error) => {
+console.log(error);
+reject(error);
 });
-}).catch((error)=>{
-console.log(error);
-reject(error);
-});
 });
 }
 
 let promises = [];
-let config = { modules:[]};
-promises.push(getRelease("Geode", version.geode, false));
+let config = {modules: []};
+promises.push(getRelease('Geode', version.geode, false));
 for (let [repo, tag] of Object.entries(version.modules)) {
-const repoGeode = repo.concat(".geode");
-config.modules.push(path.join("modules", repoGeode, "config.json"));
+const repoGeode = repo.concat('.geode');
+config.modules.push(path.join('modules', repoGeode, 'config.json'));
 promises.push(getRelease(repoGeode, tag, true));
 }
-fs.writeFileSync(path.join(dir,'config.json'), JSON.stringify(config));
+fs.writeFileSync(path.join(dir, 'config.json'), JSON.stringify(config));
 
-Promise.all(promises).then(()=>{
-// create a file to stream archive data to.
-const outputName = path.join(__dirname, dir + ".zip");
-console.log("Output: ", outputName);
-var output = fs.createWriteStream(outputName);
-var archive = archiver("zip");
+Promise.all(promises).then(() => {
+// create a file to stream archive data to.
+const outputName = path.join(__dirname, dir + '.zip');
+console.log('Output: ', outputName);
+var output = fs.createWriteStream(outputName);
+var archive = archiver('zip');
 
-// listen for all archive data to be written
-// 'close' event is fired only when a file descriptor is involved
-output.on("close", function() {
-console.log(archive.pointer() + " total bytes");
-console.log(
-"archiver has been finalized and the output file descriptor has closed."
-);
-});
+// listen for all archive data to be written
+// 'close' event is fired only when a file descriptor is involved
+output.on('close', function() {
+console.log(archive.pointer() + ' total bytes');
+console.log(
+'archiver has been finalized and the output file descriptor has closed.');
+});
 
-// This event is fired when the data source is drained no matter what was the data source.
-// It is not part of this library but rather from the NodeJS Stream API.
-// @see: https://nodejs.org/api/stream.html#stream_event_end
-output.on("end", function() {
-console.log("Data has been drained");
-});
+// This event is fired when the data source is drained no matter what was the
+// data source. It is not part of this library but rather from the NodeJS
+// Stream API.
+// @see: https://nodejs.org/api/stream.html#stream_event_end
+output.on('end', function() {
+console.log('Data has been drained');
+});
 
-// good practice to catch warnings (ie stat failures and other non-blocking errors)
-archive.on("warning", function(err) {
-if (err.code === "ENOENT") {
-// log warning
-} else {
-// throw error
-throw err;
-}
-});
+// good practice to catch warnings (ie stat failures and other non-blocking
+// errors)
+archive.on('warning', function(err) {
+if (err.code === 'ENOENT') {
+// log warning
+} else {
+// throw error
+throw err;
+}
+});
 
-// good practice to catch this error explicitly
-archive.on("error", function(err) {
-throw err;
-});
+// good practice to catch this error explicitly
+archive.on('error', function(err) {
+throw err;
+});
 
-// pipe archive data to the file
-archive.pipe(output);
+// pipe archive data to the file
+archive.pipe(output);
 
-archive.directory(dir);
+archive.directory(dir);
 
-// finalize the archive (ie we are done appending files but streams have to finish yet)
-// 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
-archive.finalize();
+// finalize the archive (ie we are done appending files but streams have to
+// finish yet) 'close', 'end' or 'finish' may be fired right after calling
+// this method so register to them beforehand
+archive.finalize();
 });
