1 | | -import fs, { write } from "fs";
2 | | -import mockFs from "mock-fs";
3 | | -
| 1 | +//////////////////////////////////////////////////////////////////////////////// |
| 2 | +// !!NOTE!! |
| 3 | +// This test suite uses mock-fs to mock out the file system
| 4 | +// console.log CANNOT be reliably called in this suite |
| 5 | +// - https://github.com/facebook/jest/issues/5792 |
| 6 | +// - https://github.com/tschaub/mock-fs/issues/234 |
| 7 | +// |
| 8 | +// Workaround: The global logger object in `src/logger` does work, so use that |
| 9 | +// to debug |
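| | +// e.g. logger.info("some debug output");
| | +// (a sketch; assumes the shared logger exposes a winston-style info())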
| 10 | +//////////////////////////////////////////////////////////////////////////////// |
| 11 | +import fs from "fs"; |
4 | 12 | import yaml from "js-yaml"; |
| 13 | +import mockFs from "mock-fs"; |
| 14 | +import os from "os"; |
| 15 | +import path from "path"; |
| 16 | +import shelljs from "shelljs"; |
| 17 | +import uuid from "uuid/v4"; |
| 18 | +import { disableVerboseLogging, enableVerboseLogging, logger } from "../logger"; |
5 | 19 | import { |
6 | 20 | createTestBedrockYaml, |
7 | 21 | createTestHldAzurePipelinesYaml, |
8 | 22 | createTestMaintainersYaml |
9 | 23 | } from "../test/mockFactory"; |
10 | | - |
11 | | -import path from "path"; |
12 | | - |
13 | | -import { disableVerboseLogging, enableVerboseLogging } from "../logger"; |
14 | | -import { IBedrockFile, IHelmConfig, IMaintainersFile } from "../types"; |
| 24 | +import { |
| 25 | + IAzurePipelinesYaml, |
| 26 | + IBedrockFile, |
| 27 | + IHelmConfig, |
| 28 | + IMaintainersFile |
| 29 | +} from "../types"; |
15 | 30 | import { |
16 | 31 | addNewServiceToBedrockFile, |
17 | 32 | addNewServiceToMaintainersFile, |
18 | 33 | generateDockerfile, |
19 | 34 | generateGitIgnoreFile, |
20 | | - generateHldAzurePipelinesYaml |
| 35 | + generateHldAzurePipelinesYaml, |
| 36 | + generateStarterAzurePipelinesYaml, |
| 37 | + starterAzurePipelines |
21 | 38 | } from "./fileutils"; |
22 | 39 |
23 | 40 | beforeAll(() => { |
@@ -236,3 +253,73 @@ describe("generating service Dockerfile", () => { |
236 | 253 | ); |
237 | 254 | }); |
238 | 255 | }); |
| 256 | + |
| 257 | +describe("starterAzurePipelines", () => { |
| 258 | + // Create a random workspace dir before every test |
| 259 | + let randomDirPath = ""; |
| 260 | + beforeEach(() => { |
| 261 | + randomDirPath = path.join(os.tmpdir(), uuid()); |
| 262 | + shelljs.mkdir("-p", randomDirPath); |
| 263 | + }); |
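| | +
| | + // Cleanup sketch (assumes shelljs rm("-rf") removes dirs recursively):
| | + // delete the per-test workspace so os.tmpdir() does not accumulate dirs
| | + afterEach(() => {
| | + shelljs.rm("-rf", randomDirPath);
| | + });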
| 264 | + |
| 265 | + test("that the value of the file is the same after (de)serialization", async () => { |
| 266 | + const branches = ["qa", "prod"]; |
| 267 | + const variableGroups = ["foo", "bar"]; |
| 268 | + const vmImage = "gentoo"; |
| 269 | + const starter = await starterAzurePipelines({ |
| 270 | + branches, |
| 271 | + relProjectPaths: [path.join("packages", "a"), path.join("packages", "b")], |
| 272 | + variableGroups, |
| 273 | + vmImage |
| 274 | + }); |
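| | + // Round-trip: dump the starter config to YAML, write it to disk, read
| | + // it back, and parse it again. lineWidth is presumably set high so
| | + // js-yaml never folds long lines, which could change the parsed value.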
| 275 | + const serializedYaml = yaml.safeDump(starter, { |
| 276 | + lineWidth: Number.MAX_SAFE_INTEGER |
| 277 | + }); |
| 278 | + const pipelinesPath = path.join(randomDirPath, "azure-pipelines.yaml"); |
| 279 | + fs.writeFileSync(pipelinesPath, serializedYaml); |
| 280 | + const deserializedYaml = yaml.safeLoad( |
| 281 | + fs.readFileSync(pipelinesPath, "utf8") |
| 282 | + ); |
| 283 | + |
| 284 | + // should be equal to the initial value |
| 285 | + expect(deserializedYaml).toStrictEqual(starter); |
| 286 | + |
| 287 | + // trigger.branches.include should include 'qa' and 'prod' |
| 288 | + for (const branch of branches) { |
| 289 | + expect(starter.trigger!.branches!.include!).toContain(branch);
| 290 | + } |
| 291 | + |
| 292 | + // variables should include all groups |
| 293 | + for (const group of variableGroups) { |
| 294 | + expect(starter.variables!).toContainEqual({ group });
| 295 | + } |
| 296 | + |
| 297 | + // pool.vmImage should be 'gentoo' |
| 298 | + expect(starter.pool!.vmImage).toBe(vmImage); |
| 299 | + }); |
| 300 | + |
| 301 | + test("that all services receive an azure-pipelines.yaml and the correct paths have been inserted", async () => { |
| 302 | + // Create service directories |
| 303 | + const servicePaths = ["a", "b", "c"].map(serviceDir => { |
| 304 | + const servicePath = path.join(randomDirPath, "packages", serviceDir); |
| 305 | + shelljs.mkdir("-p", servicePath); |
| 306 | + return servicePath; |
| 307 | + }); |
| 308 | + |
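| | + // Each service should get its own starter azure-pipelines.yaml whose
| | + // trigger paths are scoped to that service's directory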
| 309 | + for (const servicePath of servicePaths) { |
| 310 | + await generateStarterAzurePipelinesYaml(randomDirPath, servicePath); |
| 311 | + |
| 312 | + // azure-pipelines.yaml should exist in the service directory
| 313 | + expect(fs.existsSync(path.join(servicePath, "azure-pipelines.yaml"))).toBe(true);
| 314 | + |
| 315 | + // pipeline triggers should include the relative path to the service |
| 316 | + const azureYaml: IAzurePipelinesYaml = yaml.safeLoad( |
| 317 | + fs.readFileSync(path.join(servicePath, "azure-pipelines.yaml"), "utf8") |
| 318 | + ); |
| 319 | + const hasCorrectIncludes = azureYaml.trigger!.paths!.include!.includes( |
| 320 | + "./" + path.relative(randomDirPath, servicePath) |
| 321 | + ); |
| 322 | + expect(hasCorrectIncludes).toBe(true); |
| 323 | + } |
| 324 | + }); |
| 325 | +}); |