-
Notifications
You must be signed in to change notification settings - Fork 203
Expand file tree
/
Copy pathhandler.ts
More file actions
118 lines (112 loc) · 5.42 KB
/
handler.ts
File metadata and controls
118 lines (112 loc) · 5.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
import * as yaml from "js-yaml";
import chalk from "chalk-template";
import path from "path";
import fs from "fs-extra";
import yargs from "yargs";
import {Commander} from "./commander.js";
import {Parser} from "./parser.js";
import * as state from "./state.js";
import prettyHrtime from "pretty-hrtime";
import {WriteStreams} from "./write-streams.js";
import {cleanupJobResources, Job} from "./job.js";
import {Utils} from "./utils.js";
import {Argv} from "./argv.js";
import assert from "assert";
// Seed the state directory with a .gitignore that hides everything in it
// (except the .gitignore itself) from git. Written only on first use so a
// user-modified file is never clobbered.
const generateGitIgnore = (cwd: string, stateDir: string) => {
    const ignoreFile = `${cwd}/${stateDir}/.gitignore`;
    if (fs.existsSync(ignoreFile)) return;
    fs.outputFileSync(ignoreFile, "*\n!.gitignore\n");
};
/**
 * Main entry point for a gitlab-ci-local invocation. Builds the effective
 * argv, then dispatches on it: shell completion, pipeline preview, the
 * various list/validate modes, running individual jobs, a single stage, or
 * the whole pipeline. Finally flushes output, stops the local docker
 * registry if one was started, and releases all job resources.
 *
 * @param args raw CLI arguments (shape defined by the yargs configuration
 *             elsewhere in the project — typed `any` at this boundary)
 * @param writeStreams sink for stdout/stderr program output
 * @param jobs accumulator shared with child pipelines so every spawned job
 *             can be cleaned up at the end
 * @param childPipelineDepth recursion depth when invoked for a child
 *                           pipeline; capped at 2
 * @returns result of cleaning up all job resources in `jobs`
 * @throws AssertionError when the depth cap is exceeded, the ci file is
 *         missing, or --stage is combined with individual jobs
 */
export async function handler (args: any, writeStreams: WriteStreams, jobs: Job[] = [], childPipelineDepth = 0) {
    assert(childPipelineDepth <= 2, "Parent and child pipelines have a maximum depth of two levels of child pipelines.");
    const argv = await Argv.build({...args, childPipelineDepth: childPipelineDepth}, writeStreams);
    const cwd = argv.cwd;
    const stateDir = argv.stateDir;
    const file = argv.file;
    let parser: Parser;
    // Shell-completion mode: print the completion script and bail out early.
    if (argv.completion) {
        yargs(process.argv.slice(2)).showCompletionScript();
        return [];
    }
    assert(fs.existsSync(`${cwd}/${file}`), `${path.resolve(cwd)}/${file} could not be found`);
    if (argv.preview) {
        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs, false);
        const gitlabData = parser.gitlabData;
        for (const jobName of Object.keys(gitlabData)) {
            if (jobName === "stages") {
                continue;
            }
            if (jobName.startsWith(".") || ["include", "after_script", "before_script", "default"].includes(jobName)) {
                // Remove since these are redundant info which are already "extended" in the jobs
                delete gitlabData[jobName];
            }
        }
        writeStreams.stdout(`---\n${yaml.dump(gitlabData, {lineWidth: 160})}`);
    } else if (argv.list || argv.listAll) {
        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        Commander.runList(parser, writeStreams, argv.listAll);
    } else if (argv.validateDependencyChain) {
        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        Commander.validateDependencyChain(parser);
        writeStreams.stdout(chalk`{green ✓ All job dependencies are valid}\n`);
    } else if (argv.listJson) {
        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        Commander.runJson(parser, writeStreams);
    } else if (argv.listCsv || argv.listCsvAll) {
        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        Commander.runCsv(parser, writeStreams, argv.listCsvAll);
    } else if (argv.job.length > 0) {
        // Run one or more explicitly named jobs.
        assert(argv.stage === null, "You cannot use --stage when starting individual jobs");
        if (argv.registry) {
            await Utils.startDockerRegistry(argv);
        }
        generateGitIgnore(cwd, stateDir);
        const time = process.hrtime();
        let pipelineIid: number;
        // --needs/--only-needs starts a fresh pipeline run; otherwise reuse
        // the current pipeline iid.
        if (argv.needs || argv.onlyNeeds) {
            pipelineIid = await state.incrementPipelineIid(cwd, stateDir);
        } else {
            pipelineIid = await state.getPipelineIid(cwd, stateDir);
        }
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        await Utils.rsyncTrackedFiles(cwd, stateDir, ".docker");
        await Commander.runJobs(argv, parser, writeStreams);
        if (argv.needs || argv.onlyNeeds) {
            writeStreams.stderr(chalk`{grey pipeline finished} in {grey ${prettyHrtime(process.hrtime(time))}}\n`);
        }
    } else if (argv.stage) {
        // Run every job in a single stage.
        if (argv.registry) {
            await Utils.startDockerRegistry(argv);
        }
        generateGitIgnore(cwd, stateDir);
        const time = process.hrtime();
        const pipelineIid = await state.getPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        await Utils.rsyncTrackedFiles(cwd, stateDir, ".docker");
        await Commander.runJobsInStage(argv, parser, writeStreams);
        writeStreams.stderr(chalk`{grey pipeline finished} in {grey ${prettyHrtime(process.hrtime(time))}}\n`);
    } else {
        // Default: run the full pipeline under a freshly incremented iid.
        if (argv.registry) {
            await Utils.startDockerRegistry(argv);
        }
        generateGitIgnore(cwd, stateDir);
        const time = process.hrtime();
        const pipelineIid = await state.incrementPipelineIid(cwd, stateDir);
        parser = await Parser.create(argv, writeStreams, pipelineIid, jobs);
        await Utils.rsyncTrackedFiles(cwd, stateDir, ".docker");
        await Commander.runPipeline(argv, parser, writeStreams);
        // Only the top-level pipeline reports total time (fix: === over ==).
        if (childPipelineDepth === 0) writeStreams.stderr(chalk`{grey pipeline finished} in {grey ${prettyHrtime(process.hrtime(time))}}\n`);
    }
    writeStreams.flush();
    if (argv.registry) {
        await Utils.stopDockerRegistry(argv.containerExecutable);
    }
    return cleanupJobResources(jobs);
}