From 56b69c76aa2c216855e4edeaf9ed47b53ba05c7c Mon Sep 17 00:00:00 2001 From: Alex Eyers-Taylor Date: Wed, 8 Oct 2025 16:50:11 +0100 Subject: [PATCH 1/6] Extract diff range computation into utils for reuse --- lib/analyze-action.js | 228 +++++++++++------------ src/analyze.test.ts | 199 -------------------- src/analyze.ts | 192 +------------------ src/diff-informed-analysis-utils.test.ts | 203 ++++++++++++++++++++ src/diff-informed-analysis-utils.ts | 189 ++++++++++++++++++- 5 files changed, 510 insertions(+), 501 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index af3fb58319..be30d1376e 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -90193,6 +90193,9 @@ var path16 = __toESM(require("path")); var import_perf_hooks2 = require("perf_hooks"); var io5 = __toESM(require_io()); +// src/autobuild.ts +var core11 = __toESM(require_core()); + // src/api-client.ts var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); @@ -90362,9 +90365,6 @@ function wrapApiConfigurationError(e) { return e; } -// src/autobuild.ts -var core11 = __toESM(require_core()); - // src/codeql.ts var fs14 = __toESM(require("fs")); var path14 = __toESM(require("path")); @@ -91575,6 +91575,117 @@ ${jsonContents}` ); return JSON.parse(jsonContents); } +async function getPullRequestEditedDiffRanges(branches, logger) { + const fileDiffs = await getFileDiffsWithBasehead(branches, logger); + if (fileDiffs === void 0) { + return void 0; + } + if (fileDiffs.length >= 300) { + logger.warning( + `Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.` + ); + return void 0; + } + const results = []; + for (const filediff of fileDiffs) { + const diffRanges = getDiffRanges(filediff, logger); + if (diffRanges === void 0) { + return void 0; + } + results.push(...diffRanges); + } + return results; +} +async function getFileDiffsWithBasehead(branches, logger) { + const repositoryNwo = getRepositoryNwoFromEnv( + "CODE_SCANNING_REPOSITORY", + "GITHUB_REPOSITORY" + ); + const basehead = `${branches.base}...${branches.head}`; + try { + const response = await getApiClient().rest.repos.compareCommitsWithBasehead( + { + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + basehead, + per_page: 1 + } + ); + logger.debug( + `Response from compareCommitsWithBasehead(${basehead}): +${JSON.stringify(response, null, 2)}` + ); + return response.data.files; + } catch (error2) { + if (error2.status) { + logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`); + logger.debug( + `Error running compareCommitsWithBasehead(${basehead}): +Request: ${JSON.stringify(error2.request, null, 2)} +Error Response: ${JSON.stringify(error2.response, null, 2)}` + ); + return void 0; + } else { + throw error2; + } + } +} +function getDiffRanges(fileDiff, logger) { + const filename = path9.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path9.sep, "/"); + if (fileDiff.patch === void 0) { + if (fileDiff.changes === 0) { + return []; + } + return [ + { + path: filename, + startLine: 0, + endLine: 0 + } + ]; + } + let currentLine = 0; + let additionRangeStartLine = void 0; + const diffRanges = []; + const diffLines = fileDiff.patch.split("\n"); + diffLines.push(" "); + for (const diffLine of diffLines) { + if (diffLine.startsWith("-")) { + continue; + } + if (diffLine.startsWith("+")) { + if (additionRangeStartLine === void 0) { + additionRangeStartLine = currentLine; + } + currentLine++; + continue; + } + if 
(additionRangeStartLine !== void 0) { + diffRanges.push({ + path: filename, + startLine: additionRangeStartLine, + endLine: currentLine - 1 + }); + additionRangeStartLine = void 0; + } + if (diffLine.startsWith("@@ ")) { + const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); + if (match === null) { + logger.warning( + `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}` + ); + return void 0; + } + currentLine = parseInt(match[1], 10); + continue; + } + if (diffLine.startsWith(" ")) { + currentLine++; + continue; + } + } + return diffRanges; +} // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -93658,117 +93769,6 @@ async function setupDiffInformedQueryRun(branches, logger) { } ); } -async function getPullRequestEditedDiffRanges(branches, logger) { - const fileDiffs = await getFileDiffsWithBasehead(branches, logger); - if (fileDiffs === void 0) { - return void 0; - } - if (fileDiffs.length >= 300) { - logger.warning( - `Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.` - ); - return void 0; - } - const results = []; - for (const filediff of fileDiffs) { - const diffRanges = getDiffRanges(filediff, logger); - if (diffRanges === void 0) { - return void 0; - } - results.push(...diffRanges); - } - return results; -} -async function getFileDiffsWithBasehead(branches, logger) { - const repositoryNwo = getRepositoryNwoFromEnv( - "CODE_SCANNING_REPOSITORY", - "GITHUB_REPOSITORY" - ); - const basehead = `${branches.base}...${branches.head}`; - try { - const response = await getApiClient().rest.repos.compareCommitsWithBasehead( - { - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - basehead, - per_page: 1 - } - ); - logger.debug( - `Response from compareCommitsWithBasehead(${basehead}): -${JSON.stringify(response, null, 2)}` - ); - return response.data.files; - } catch (error2) { - if (error2.status) { - logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`); - logger.debug( - `Error running compareCommitsWithBasehead(${basehead}): -Request: ${JSON.stringify(error2.request, null, 2)} -Error Response: ${JSON.stringify(error2.response, null, 2)}` - ); - return void 0; - } else { - throw error2; - } - } -} -function getDiffRanges(fileDiff, logger) { - const filename = path16.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path16.sep, "/"); - if (fileDiff.patch === void 0) { - if (fileDiff.changes === 0) { - return []; - } - return [ - { - path: filename, - startLine: 0, - endLine: 0 - } - ]; - } - let currentLine = 0; - let additionRangeStartLine = void 0; - const diffRanges = []; - const diffLines = fileDiff.patch.split("\n"); - diffLines.push(" "); - for (const diffLine of diffLines) { - if (diffLine.startsWith("-")) { - continue; - } - if (diffLine.startsWith("+")) { - if (additionRangeStartLine === void 0) { - additionRangeStartLine = currentLine; - } - currentLine++; - continue; - } - if (additionRangeStartLine !== void 0) { - diffRanges.push({ - path: filename, - startLine: additionRangeStartLine, - endLine: currentLine - 1 - }); - additionRangeStartLine = void 0; - } - if (diffLine.startsWith("@@ ")) { - const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? 
@@/); - if (match === null) { - logger.warning( - `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}` - ); - return void 0; - } - currentLine = parseInt(match[1], 10); - continue; - } - if (diffLine.startsWith(" ")) { - currentLine++; - continue; - } - } - return diffRanges; -} function writeDiffRangeDataExtensionPack(logger, ranges) { if (ranges === void 0) { return void 0; diff --git a/src/analyze.test.ts b/src/analyze.test.ts index f3d516a78a..afc7368177 100644 --- a/src/analyze.test.ts +++ b/src/analyze.test.ts @@ -4,10 +4,8 @@ import * as path from "path"; import test from "ava"; import * as sinon from "sinon"; -import * as actionsUtil from "./actions-util"; import { CodeQuality, CodeScanning } from "./analyses"; import { - exportedForTesting, runQueries, defaultSuites, resolveQuerySuiteAlias, @@ -131,203 +129,6 @@ test("status report fields", async (t) => { }); }); -function runGetDiffRanges(changes: number, patch: string[] | undefined): any { - sinon - .stub(actionsUtil, "getRequiredInput") - .withArgs("checkout_path") - .returns("/checkout/path"); - return exportedForTesting.getDiffRanges( - { - filename: "test.txt", - changes, - patch: patch?.join("\n"), - }, - getRunnerLogger(true), - ); -} - -test("getDiffRanges: file unchanged", async (t) => { - const diffRanges = runGetDiffRanges(0, undefined); - t.deepEqual(diffRanges, []); -}); - -test("getDiffRanges: file diff too large", async (t) => { - const diffRanges = runGetDiffRanges(1000000, undefined); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 0, - endLine: 0, - }, - ]); -}); - -test("getDiffRanges: diff thunk with single addition range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,6 +50,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 54, - }, - ]); -}); - -test("getDiffRanges: diff thunk with single deletion range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,8 +50,6 @@", - " a", - " b", - " c", - "-1", - "-2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, []); -}); - -test("getDiffRanges: diff thunk with single update range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,7 +50,7 @@", - " a", - " b", - " c", - "-1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 53, - }, - ]); -}); - -test("getDiffRanges: diff thunk with addition ranges", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,7 +50,9 @@", - " a", - " b", - " c", - "+1", - " c", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 53, - }, - { - path: "/checkout/path/test.txt", - startLine: 55, - endLine: 55, - }, - ]); -}); - -test("getDiffRanges: diff thunk with mixed ranges", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,7 +50,7 @@", - " a", - " b", - " c", - "-1", - " d", - "-2", - "+3", - " e", - " f", - "+4", - "+5", - " g", - " h", - " i", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 54, - endLine: 54, - }, - { - path: "/checkout/path/test.txt", - startLine: 57, - endLine: 58, - }, - ]); -}); - -test("getDiffRanges: multiple diff thunks", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,6 +50,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", 
- " e", - " f", - "@@ -130,6 +150,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 54, - }, - { - path: "/checkout/path/test.txt", - startLine: 153, - endLine: 154, - }, - ]); -}); - -test("getDiffRanges: no diff context lines", async (t) => { - const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 50, - endLine: 51, - }, - ]); -}); - -test("getDiffRanges: malformed thunk header", async (t) => { - const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]); - t.deepEqual(diffRanges, undefined); -}); test("resolveQuerySuiteAlias", (t) => { // default query suite names should resolve to something language-specific ending in `.qls`. diff --git a/src/analyze.ts b/src/analyze.ts index b7eec921ac..b3dc31a650 100644 --- a/src/analyze.ts +++ b/src/analyze.ts @@ -6,13 +6,9 @@ import * as io from "@actions/io"; import * as del from "del"; import * as yaml from "js-yaml"; -import { - getRequiredInput, - getTemporaryDirectory, - PullRequestBranches, -} from "./actions-util"; +import { getTemporaryDirectory, PullRequestBranches } from "./actions-util"; import * as analyses from "./analyses"; -import { getApiClient } from "./api-client"; +// (getApiClient import removed; no longer needed after diff refactor) import { setupCppAutobuild } from "./autobuild"; import { type CodeQL } from "./codeql"; import * as configUtils from "./config-utils"; @@ -21,13 +17,14 @@ import { addDiagnostic, makeDiagnostic } from "./diagnostics"; import { DiffThunkRange, writeDiffRangesJsonFile, + getPullRequestEditedDiffRanges, } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { FeatureEnablement, Feature } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; import { OverlayDatabaseMode } from "./overlay-database-utils"; -import { getRepositoryNwoFromEnv } from "./repository"; +// getRepositoryNwoFromEnv no longer needed after extracting diff logic import { DatabaseCreationTimings, EventReport } from "./status-report"; import { endTracingForCluster } from "./tracer-config"; import * as util from "./util"; @@ -313,184 +310,7 @@ export async function setupDiffInformedQueryRun( ); } -/** - * Return the file line ranges that were added or modified in the pull request. - * - * @param branches The base and head branches of the pull request. - * @param logger - * @returns An array of tuples, where each tuple contains the absolute path of a - * file, the start line and the end line (both 1-based and inclusive) of an - * added or modified range in that file. Returns `undefined` if the action was - * not triggered by a pull request or if there was an error. - */ -async function getPullRequestEditedDiffRanges( - branches: PullRequestBranches, - logger: Logger, -): Promise { - const fileDiffs = await getFileDiffsWithBasehead(branches, logger); - if (fileDiffs === undefined) { - return undefined; - } - if (fileDiffs.length >= 300) { - // The "compare two commits" API returns a maximum of 300 changed files. If - // we see that many changed files, it is possible that there could be more, - // with the rest being truncated. In this case, we should not attempt to - // compute the diff ranges, as the result would be incomplete. 
- logger.warning( - `Cannot retrieve the full diff because there are too many ` + - `(${fileDiffs.length}) changed files in the pull request.`, - ); - return undefined; - } - const results: DiffThunkRange[] = []; - for (const filediff of fileDiffs) { - const diffRanges = getDiffRanges(filediff, logger); - if (diffRanges === undefined) { - return undefined; - } - results.push(...diffRanges); - } - return results; -} - -/** - * This interface is an abbreviated version of the file diff object returned by - * the GitHub API. - */ -interface FileDiff { - filename: string; - changes: number; - // A patch may be absent if the file is binary, if the file diff is too large, - // or if the file is unchanged. - patch?: string | undefined; -} - -async function getFileDiffsWithBasehead( - branches: PullRequestBranches, - logger: Logger, -): Promise { - // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back - // to GITHUB_REPOSITORY. - const repositoryNwo = getRepositoryNwoFromEnv( - "CODE_SCANNING_REPOSITORY", - "GITHUB_REPOSITORY", - ); - const basehead = `${branches.base}...${branches.head}`; - try { - const response = await getApiClient().rest.repos.compareCommitsWithBasehead( - { - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - basehead, - per_page: 1, - }, - ); - logger.debug( - `Response from compareCommitsWithBasehead(${basehead}):` + - `\n${JSON.stringify(response, null, 2)}`, - ); - return response.data.files; - } catch (error: any) { - if (error.status) { - logger.warning(`Error retrieving diff ${basehead}: ${error.message}`); - logger.debug( - `Error running compareCommitsWithBasehead(${basehead}):` + - `\nRequest: ${JSON.stringify(error.request, null, 2)}` + - `\nError Response: ${JSON.stringify(error.response, null, 2)}`, - ); - return undefined; - } else { - throw error; - } - } -} - -function getDiffRanges( - fileDiff: FileDiff, - logger: Logger, -): DiffThunkRange[] | undefined { - // Diff-informed queries expect the file path to be absolute. CodeQL always - // uses forward slashes as the path separator, so on Windows we need to - // replace any backslashes with forward slashes. - const filename = path - .join(getRequiredInput("checkout_path"), fileDiff.filename) - .replaceAll(path.sep, "/"); - - if (fileDiff.patch === undefined) { - if (fileDiff.changes === 0) { - // There are situations where a changed file legitimately has no diff. - // For example, the file may be a binary file, or that the file may have - // been renamed with no changes to its contents. In these cases, the - // file would be reported as having 0 changes, and we can return an empty - // array to indicate no diff range in this file. - return []; - } - // If a file is reported to have nonzero changes but no patch, that may be - // due to the file diff being too large. In this case, we should fall back - // to a special diff range that covers the entire file. - return [ - { - path: filename, - startLine: 0, - endLine: 0, - }, - ]; - } - // The 1-based file line number of the current line - let currentLine = 0; - // The 1-based file line number that starts the current range of added lines - let additionRangeStartLine: number | undefined = undefined; - const diffRanges: DiffThunkRange[] = []; - - const diffLines = fileDiff.patch.split("\n"); - // Adding a fake context line at the end ensures that the following loop will - // always terminate the last range of added lines. 
- diffLines.push(" "); - - for (const diffLine of diffLines) { - if (diffLine.startsWith("-")) { - // Ignore deletions completely -- we do not even want to consider them when - // calculating consecutive ranges of added lines. - continue; - } - if (diffLine.startsWith("+")) { - if (additionRangeStartLine === undefined) { - additionRangeStartLine = currentLine; - } - currentLine++; - continue; - } - if (additionRangeStartLine !== undefined) { - // Any line that does not start with a "+" or "-" terminates the current - // range of added lines. - diffRanges.push({ - path: filename, - startLine: additionRangeStartLine, - endLine: currentLine - 1, - }); - additionRangeStartLine = undefined; - } - if (diffLine.startsWith("@@ ")) { - // A new hunk header line resets the current line number. - const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); - if (match === null) { - logger.warning( - `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`, - ); - return undefined; - } - currentLine = parseInt(match[1], 10); - continue; - } - if (diffLine.startsWith(" ")) { - // An unchanged context line advances the current line number. - currentLine++; - continue; - } - } - return diffRanges; -} /** * Create an extension pack in the temporary directory that contains the file @@ -923,6 +743,4 @@ export async function warnIfGoInstalledAfterInit( } } -export const exportedForTesting = { - getDiffRanges, -}; +export const exportedForTesting = {}; diff --git a/src/diff-informed-analysis-utils.test.ts b/src/diff-informed-analysis-utils.test.ts index 1125f18fd0..99115aa311 100644 --- a/src/diff-informed-analysis-utils.test.ts +++ b/src/diff-informed-analysis-utils.test.ts @@ -183,3 +183,206 @@ test( }, false, ); + +// --------------------------------------------------------------------------- +// Tests for getDiffRanges (moved from analyze.test.ts after extraction) +// --------------------------------------------------------------------------- +import { exportedForTesting as diffExportedForTesting } from "./diff-informed-analysis-utils"; + +function runGetDiffRanges(changes: number, patch: string[] | undefined): any { + sinon + .stub(actionsUtil, "getRequiredInput") + .withArgs("checkout_path") + .returns("/checkout/path"); + return diffExportedForTesting.getDiffRanges( + { + filename: "test.txt", + changes, + patch: patch?.join("\n"), + }, + getRunnerLogger(true), + ); +} + +test("getDiffRanges: file unchanged", async (t) => { + const diffRanges = runGetDiffRanges(0, undefined); + t.deepEqual(diffRanges, []); +}); + +test("getDiffRanges: file diff too large", async (t) => { + const diffRanges = runGetDiffRanges(1000000, undefined); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 0, + endLine: 0, + }, + ]); +}); + +test("getDiffRanges: diff thunk with single addition range", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,6 +50,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 54, + }, + ]); +}); + +test("getDiffRanges: diff thunk with single deletion range", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,8 +50,6 @@", + " a", + " b", + " c", + "-1", + "-2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, []); +}); + +test("getDiffRanges: diff thunk with single update range", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,7 +50,7 @@", + " a", + " b", + " c", + 
"-1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 53, + }, + ]); +}); + +test("getDiffRanges: diff thunk with addition ranges", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,7 +50,9 @@", + " a", + " b", + " c", + "+1", + " c", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 53, + }, + { + path: "/checkout/path/test.txt", + startLine: 55, + endLine: 55, + }, + ]); +}); + +test("getDiffRanges: diff thunk with mixed ranges", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,7 +50,7 @@", + " a", + " b", + " c", + "-1", + " d", + "-2", + "+3", + " e", + " f", + "+4", + "+5", + " g", + " h", + " i", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 54, + endLine: 54, + }, + { + path: "/checkout/path/test.txt", + startLine: 57, + endLine: 58, + }, + ]); +}); + +test("getDiffRanges: multiple diff thunks", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,6 +50,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + "@@ -130,6 +150,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 54, + }, + { + path: "/checkout/path/test.txt", + startLine: 153, + endLine: 154, + }, + ]); +}); + +test("getDiffRanges: no diff context lines", async (t) => { + const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 50, + endLine: 51, + }, + ]); +}); + +test("getDiffRanges: malformed thunk header", async (t) => { + const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]); + t.deepEqual(diffRanges, undefined); +}); diff --git a/src/diff-informed-analysis-utils.ts b/src/diff-informed-analysis-utils.ts index 7a23b3a295..f0cf8670e5 100644 --- a/src/diff-informed-analysis-utils.ts +++ b/src/diff-informed-analysis-utils.ts @@ -3,11 +3,25 @@ import * as path from "path"; import * as actionsUtil from "./actions-util"; import type { PullRequestBranches } from "./actions-util"; -import { getGitHubVersion } from "./api-client"; +import { getGitHubVersion, getApiClient } from "./api-client"; import type { CodeQL } from "./codeql"; import { Feature, FeatureEnablement } from "./feature-flags"; import { Logger } from "./logging"; import { GitHubVariant, satisfiesGHESVersion } from "./util"; +import { getRepositoryNwoFromEnv } from "./repository"; +import { getRequiredInput } from "./actions-util"; + +/** + * This interface is an abbreviated version of the file diff object returned by + * the GitHub API. (Kept internal to this module.) + */ +interface FileDiff { + filename: string; + changes: number; + // A patch may be absent if the file is binary, if the file diff is too large, + // or if the file is unchanged. + patch?: string | undefined; +} /** * Check if the action should perform diff-informed analysis. @@ -93,3 +107,176 @@ export function readDiffRangesJsonFile( ); return JSON.parse(jsonContents) as DiffThunkRange[]; } + +/** + * Return the file line ranges that were added or modified in the pull request. + * + * @param branches The base and head branches of the pull request. 
+ * @param logger + * @returns An array of objects, where each object contains the absolute path of a + * file, the start line and the end line (both 1-based and inclusive) of an + * added or modified range in that file. Returns `undefined` if the action was + * not triggered by a pull request or if there was an error (including API + * truncation conditions). + */ +export async function getPullRequestEditedDiffRanges( + branches: PullRequestBranches, + logger: Logger, +): Promise { + const fileDiffs = await getFileDiffsWithBasehead(branches, logger); + if (fileDiffs === undefined) { + return undefined; + } + if (fileDiffs.length >= 300) { + // The "compare two commits" API returns a maximum of 300 changed files. If + // we see that many changed files, it is possible that there could be more, + // with the rest being truncated. In this case, we should not attempt to + // compute the diff ranges, as the result would be incomplete. + logger.warning( + `Cannot retrieve the full diff because there are too many ` + + `(${fileDiffs.length}) changed files in the pull request.`, + ); + return undefined; + } + const results: DiffThunkRange[] = []; + for (const filediff of fileDiffs) { + const diffRanges = getDiffRanges(filediff, logger); + if (diffRanges === undefined) { + return undefined; + } + results.push(...diffRanges); + } + return results; +} + +async function getFileDiffsWithBasehead( + branches: PullRequestBranches, + logger: Logger, +): Promise { + // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back + // to GITHUB_REPOSITORY. + const repositoryNwo = getRepositoryNwoFromEnv( + "CODE_SCANNING_REPOSITORY", + "GITHUB_REPOSITORY", + ); + const basehead = `${branches.base}...${branches.head}`; + try { + const response = await getApiClient().rest.repos.compareCommitsWithBasehead( + { + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + basehead, + per_page: 1, + }, + ); + logger.debug( + `Response from compareCommitsWithBasehead(${basehead}):` + + `\n${JSON.stringify(response, null, 2)}`, + ); + return response.data.files; + } catch (error: any) { + if (error.status) { + logger.warning(`Error retrieving diff ${basehead}: ${error.message}`); + logger.debug( + `Error running compareCommitsWithBasehead(${basehead}):` + + `\nRequest: ${JSON.stringify(error.request, null, 2)}` + + `\nError Response: ${JSON.stringify(error.response, null, 2)}`, + ); + return undefined; + } else { + throw error; + } + } +} + +function getDiffRanges( + fileDiff: FileDiff, + logger: Logger, +): DiffThunkRange[] | undefined { + // Diff-informed queries expect the file path to be absolute. CodeQL always + // uses forward slashes as the path separator, so on Windows we need to + // replace any backslashes with forward slashes. + const filename = path + .join(getRequiredInput("checkout_path"), fileDiff.filename) + .replaceAll(path.sep, "/"); + + if (fileDiff.patch === undefined) { + if (fileDiff.changes === 0) { + // There are situations where a changed file legitimately has no diff. + // For example, the file may be a binary file, or that the file may have + // been renamed with no changes to its contents. In these cases, the + // file would be reported as having 0 changes, and we can return an empty + // array to indicate no diff range in this file. + return []; + } + // If a file is reported to have nonzero changes but no patch, that may be + // due to the file diff being too large. In this case, we should fall back + // to a special diff range that covers the entire file. 
+ return [ + { + path: filename, + startLine: 0, + endLine: 0, + }, + ]; + } + + // The 1-based file line number of the current line + let currentLine = 0; + // The 1-based file line number that starts the current range of added lines + let additionRangeStartLine: number | undefined = undefined; + const diffRanges: DiffThunkRange[] = []; + + const diffLines = fileDiff.patch.split("\n"); + // Adding a fake context line at the end ensures that the following loop will + // always terminate the last range of added lines. + diffLines.push(" "); + + for (const diffLine of diffLines) { + if (diffLine.startsWith("-")) { + // Ignore deletions completely -- we do not even want to consider them when + // calculating consecutive ranges of added lines. + continue; + } + if (diffLine.startsWith("+")) { + if (additionRangeStartLine === undefined) { + additionRangeStartLine = currentLine; + } + currentLine++; + continue; + } + if (additionRangeStartLine !== undefined) { + // Any line that does not start with a "+" or "-" terminates the current + // range of added lines. + diffRanges.push({ + path: filename, + startLine: additionRangeStartLine, + endLine: currentLine - 1, + }); + additionRangeStartLine = undefined; + } + if (diffLine.startsWith("@@ ")) { + // A new hunk header line resets the current line number. + const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); + if (match === null) { + logger.warning( + `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`, + ); + return undefined; + } + currentLine = parseInt(match[1], 10); + continue; + } + if (diffLine.startsWith(" ")) { + // An unchanged context line advances the current line number. + currentLine++; + continue; + } + } + return diffRanges; +} + +// Export internal helpers for unit testing only (kept stable for existing tests) +export const exportedForTesting = { + getDiffRanges, +}; From 9bb8375aed6608f0438fcdb10e645ca309a920aa Mon Sep 17 00:00:00 2001 From: Alex Eyers-Taylor Date: Wed, 8 Oct 2025 17:25:52 +0100 Subject: [PATCH 2/6] Persist PR diff ranges early during init We don't use them yet and will re-save them during analysis. --- lib/init-action.js | 949 ++++++++++++++++++++++++++------------------- src/init-action.ts | 46 ++- 2 files changed, 600 insertions(+), 395 deletions(-) diff --git a/lib/init-action.js b/lib/init-action.js index 5481ab1e8f..24a459a5bc 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto2 = __importStar4(require("crypto")); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var os5 = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs18.existsSync(filePath)) { + if (!fs19.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { + fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? 
url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path20 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path21 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path20 && !path20.startsWith("/")) { - path20 = `/${path20}`; + if (path21 && !path21.startsWith("/")) { + path21 = `/${path21}`; } - url = new URL(origin + path20); + url = new URL(origin + path21); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path20) { - if (typeof path20 !== "string") { + module2.exports = function basename(path21) { + if (typeof path21 !== "string") { return ""; } - for (var i = path20.length - 1; i >= 0; --i) { - switch (path20.charCodeAt(i)) { + for (var i = path21.length - 1; i >= 0; --i) { + switch (path21.charCodeAt(i)) { case 47: // '/' case 92: - path20 = path20.slice(i + 1); - return path20 === ".." || path20 === "." ? "" : path20; + path21 = path21.slice(i + 1); + return path21 === ".." || path21 === "." ? "" : path21; } } - return path20 === ".." || path20 === "." ? "" : path20; + return path21 === ".." || path21 === "." ? "" : path21; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path20, + path: path21, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path20 !== "string") { + if (typeof path21 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path20[0] !== "/" && !(path20.startsWith("http://") || path20.startsWith("https://")) && method !== "CONNECT") { + } else if (path21[0] !== "/" && !(path21.startsWith("http://") || path21.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path20) !== null) { + } else if (invalidPathRegex.exec(path21) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path20, query) : path20; + this.path = query ? util.buildURL(path21, query) : path21; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path20 = search ? `${pathname}${search}` : pathname; + const path21 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path20; + this.opts.path = path21; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path20, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path21, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path20} HTTP/1.1\r + let header = `${method} ${path21} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path20, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path21, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path20; + headers[HTTP2_HEADER_PATH] = path21; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path20) { - if (typeof path20 !== "string") { - return path20; + function safeUrl(path21) { + if (typeof path21 !== "string") { + return path21; } - const pathSegments = path20.split("?"); + const pathSegments = path21.split("?"); if (pathSegments.length !== 2) { - return path20; + return path21; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path20, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path20); + function matchKey(mockDispatch2, { path: path21, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path21); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path20 }) => matchValue(safeUrl(path20), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path21 }) => matchValue(safeUrl(path21), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path20, method, body, headers, query } = opts; + const { path: path21, method, body, headers, query } = opts; return { - path: path20, + path: path21, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path20, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path21, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path20, + Path: path21, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path20) { - for (const char of path20) { + function validateCookiePath(path21) { + for (const char of path21) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path20 = opts.path; + let path21 = opts.path; if (!opts.path.startsWith("/")) { - path20 = `/${path20}`; + path21 = `/${path21}`; } - url = new URL(util.parseOrigin(url).origin + path20); + url = new URL(util.parseOrigin(url).origin + path21); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path20.sep); + return pth.replace(/[/\\]/g, path21.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs18 = __importStar4(require("fs")); - var path20 = __importStar4(require("path")); - _a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs19 = __importStar4(require("fs")); + var path21 = __importStar4(require("path")); + _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs18.constants.O_RDONLY; + exports2.READONLY = fs19.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path20.extname(filePath).toUpperCase(); + const upperExt = path21.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path20.dirname(filePath); - const upperName = path20.basename(filePath).toUpperCase(); + const directory = path21.dirname(filePath); + const upperName = path21.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path20.join(directory, actualName); + filePath = path21.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = 
require("assert"); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path20.join(dest, path20.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path21.join(dest, path21.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path20.relative(source, newDest) === "") { + if (path21.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path20.join(dest, path20.basename(source)); + dest = path21.join(dest, path21.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path20.dirname(dest)); + yield mkdirP(path21.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path20.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path21.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path20.sep)) { + if (tool.includes(path21.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path20.delimiter)) { + for (const p of process.env.PATH.split(path21.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path20.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path21.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os5 = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var io7 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path20.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path21.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return new Promise((resolve9, reject) => __awaiter4(this, void 0, 
void 0, function* () { @@ -19651,7 +19651,7 @@ var require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path20.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path21.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath2; function getInput2(name, options) { @@ -21743,8 +21743,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path20 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path20} does not exist${os_1.EOL}`); + const path21 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path21} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -26505,7 +26505,7 @@ var require_path = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.convertPosixPathToPattern = exports2.convertWindowsPathToPattern = exports2.convertPathToPattern = exports2.escapePosixPath = exports2.escapeWindowsPath = exports2.escape = exports2.removeLeadingDotSegment = exports2.makeAbsolute = exports2.unixify = void 0; var os5 = require("os"); - var path20 = require("path"); + var path21 = require("path"); var IS_WINDOWS_PLATFORM = os5.platform() === "win32"; var LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; var POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; @@ -26517,7 +26517,7 @@ var require_path = __commonJS({ } exports2.unixify = unixify; function makeAbsolute(cwd, filepath) { - return path20.resolve(cwd, filepath); + return path21.resolve(cwd, filepath); } exports2.makeAbsolute = makeAbsolute; function removeLeadingDotSegment(entry) { @@ -27814,7 +27814,7 @@ var require_braces = __commonJS({ var require_constants8 = __commonJS({ "node_modules/picomatch/lib/constants.js"(exports2, module2) { "use strict"; - var path20 = require("path"); + var path21 = require("path"); var WIN_SLASH = "\\\\/"; var WIN_NO_SLASH = `[^${WIN_SLASH}]`; var DOT_LITERAL = "\\."; @@ -27984,7 +27984,7 @@ var require_constants8 = __commonJS({ /* | */ CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ - SEP: path20.sep, + SEP: path21.sep, /** * Create EXTGLOB_CHARS */ @@ -28011,7 +28011,7 @@ var require_constants8 = __commonJS({ var require_utils6 = __commonJS({ "node_modules/picomatch/lib/utils.js"(exports2) { "use strict"; - var path20 = require("path"); + var path21 = require("path"); var win32 = process.platform === "win32"; var { REGEX_BACKSLASH, @@ -28040,7 +28040,7 @@ var require_utils6 = __commonJS({ if (options && typeof options.windows === "boolean") { return options.windows; } - return win32 === true || path20.sep === "\\"; + return win32 === true || path21.sep === "\\"; }; exports2.escapeLast = (input, char, lastIdx) => { const idx = input.lastIndexOf(char, lastIdx); @@ -29175,7 +29175,7 @@ var require_parse4 = __commonJS({ var require_picomatch = __commonJS({ "node_modules/picomatch/lib/picomatch.js"(exports2, module2) { "use strict"; - var path20 = 
require("path"); + var path21 = require("path"); var scan = require_scan(); var parse = require_parse4(); var utils = require_utils6(); @@ -29260,7 +29260,7 @@ var require_picomatch = __commonJS({ }; picomatch.matchBase = (input, glob2, options, posix = utils.isWindows(options)) => { const regex = glob2 instanceof RegExp ? glob2 : picomatch.makeRe(glob2, options); - return regex.test(path20.basename(input)); + return regex.test(path21.basename(input)); }; picomatch.isMatch = (str2, patterns, options) => picomatch(patterns, options)(str2); picomatch.parse = (pattern, options) => { @@ -29487,7 +29487,7 @@ var require_pattern = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.isAbsolute = exports2.partitionAbsoluteAndRelative = exports2.removeDuplicateSlashes = exports2.matchAny = exports2.convertPatternsToRe = exports2.makeRe = exports2.getPatternParts = exports2.expandBraceExpansion = exports2.expandPatternsWithBraceExpansion = exports2.isAffectDepthOfReadingPattern = exports2.endsWithSlashGlobStar = exports2.hasGlobStar = exports2.getBaseDirectory = exports2.isPatternRelatedToParentDirectory = exports2.getPatternsOutsideCurrentDirectory = exports2.getPatternsInsideCurrentDirectory = exports2.getPositivePatterns = exports2.getNegativePatterns = exports2.isPositivePattern = exports2.isNegativePattern = exports2.convertToNegativePattern = exports2.convertToPositivePattern = exports2.isDynamicPattern = exports2.isStaticPattern = void 0; - var path20 = require("path"); + var path21 = require("path"); var globParent = require_glob_parent(); var micromatch = require_micromatch(); var GLOBSTAR = "**"; @@ -29582,7 +29582,7 @@ var require_pattern = __commonJS({ } exports2.endsWithSlashGlobStar = endsWithSlashGlobStar; function isAffectDepthOfReadingPattern(pattern) { - const basename = path20.basename(pattern); + const basename = path21.basename(pattern); return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); } exports2.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; @@ -29640,7 +29640,7 @@ var require_pattern = __commonJS({ } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute3(pattern) { - return path20.isAbsolute(pattern); + return path21.isAbsolute(pattern); } exports2.isAbsolute = isAbsolute3; } @@ -29815,10 +29815,10 @@ var require_utils7 = __commonJS({ exports2.array = array; var errno = require_errno(); exports2.errno = errno; - var fs18 = require_fs(); - exports2.fs = fs18; - var path20 = require_path(); - exports2.path = path20; + var fs19 = require_fs(); + exports2.fs = fs19; + var path21 = require_path(); + exports2.path = path21; var pattern = require_pattern(); exports2.pattern = pattern; var stream2 = require_stream(); @@ -29930,8 +29930,8 @@ var require_async = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.read = void 0; - function read(path20, settings, callback) { - settings.fs.lstat(path20, (lstatError, lstat) => { + function read(path21, settings, callback) { + settings.fs.lstat(path21, (lstatError, lstat) => { if (lstatError !== null) { callFailureCallback(callback, lstatError); return; @@ -29940,7 +29940,7 @@ var require_async = __commonJS({ callSuccessCallback(callback, lstat); return; } - settings.fs.stat(path20, (statError, stat) => { + settings.fs.stat(path21, (statError, stat) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { callFailureCallback(callback, statError); @@ 
-29972,13 +29972,13 @@ var require_sync = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.read = void 0; - function read(path20, settings) { - const lstat = settings.fs.lstatSync(path20); + function read(path21, settings) { + const lstat = settings.fs.lstatSync(path21); if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { return lstat; } try { - const stat = settings.fs.statSync(path20); + const stat = settings.fs.statSync(path21); if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; } @@ -30000,12 +30000,12 @@ var require_fs2 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0; - var fs18 = require("fs"); + var fs19 = require("fs"); exports2.FILE_SYSTEM_ADAPTER = { - lstat: fs18.lstat, - stat: fs18.stat, - lstatSync: fs18.lstatSync, - statSync: fs18.statSync + lstat: fs19.lstat, + stat: fs19.stat, + lstatSync: fs19.lstatSync, + statSync: fs19.statSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === void 0) { @@ -30022,12 +30022,12 @@ var require_settings = __commonJS({ "node_modules/@nodelib/fs.stat/out/settings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var fs18 = require_fs2(); + var fs19 = require_fs2(); var Settings = class { constructor(_options = {}) { this._options = _options; this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); - this.fs = fs18.createFileSystemAdapter(this._options.fs); + this.fs = fs19.createFileSystemAdapter(this._options.fs); this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); } @@ -30049,17 +30049,17 @@ var require_out = __commonJS({ var sync = require_sync(); var settings_1 = require_settings(); exports2.Settings = settings_1.default; - function stat(path20, optionsOrSettingsOrCallback, callback) { + function stat(path21, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === "function") { - async.read(path20, getSettings(), optionsOrSettingsOrCallback); + async.read(path21, getSettings(), optionsOrSettingsOrCallback); return; } - async.read(path20, getSettings(optionsOrSettingsOrCallback), callback); + async.read(path21, getSettings(optionsOrSettingsOrCallback), callback); } exports2.stat = stat; - function statSync2(path20, optionsOrSettings) { + function statSync2(path21, optionsOrSettings) { const settings = getSettings(optionsOrSettings); - return sync.read(path20, settings); + return sync.read(path21, settings); } exports2.statSync = statSync2; function getSettings(settingsOrOptions = {}) { @@ -30182,8 +30182,8 @@ var require_utils8 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.fs = void 0; - var fs18 = require_fs3(); - exports2.fs = fs18; + var fs19 = require_fs3(); + exports2.fs = fs19; } }); @@ -30275,16 +30275,16 @@ var require_async2 = __commonJS({ return; } const tasks = names.map((name) => { - const path20 = common2.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const path21 = common2.joinPathSegments(directory, name, settings.pathSegmentSeparator); return (done) => { - fsStat.stat(path20, settings.fsStatSettings, (error2, stats) => { + fsStat.stat(path21, settings.fsStatSettings, (error2, stats) => { if (error2 !== null) { 
done(error2); return; } const entry = { name, - path: path20, + path: path21, dirent: utils.fs.createDirentFromStats(name, stats) }; if (settings.stats) { @@ -30378,14 +30378,14 @@ var require_fs4 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0; - var fs18 = require("fs"); + var fs19 = require("fs"); exports2.FILE_SYSTEM_ADAPTER = { - lstat: fs18.lstat, - stat: fs18.stat, - lstatSync: fs18.lstatSync, - statSync: fs18.statSync, - readdir: fs18.readdir, - readdirSync: fs18.readdirSync + lstat: fs19.lstat, + stat: fs19.stat, + lstatSync: fs19.lstatSync, + statSync: fs19.statSync, + readdir: fs19.readdir, + readdirSync: fs19.readdirSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === void 0) { @@ -30402,15 +30402,15 @@ var require_settings2 = __commonJS({ "node_modules/@nodelib/fs.scandir/out/settings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var fsStat = require_out(); - var fs18 = require_fs4(); + var fs19 = require_fs4(); var Settings = class { constructor(_options = {}) { this._options = _options; this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); - this.fs = fs18.createFileSystemAdapter(this._options.fs); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path20.sep); + this.fs = fs19.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path21.sep); this.stats = this._getValue(this._options.stats, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); this.fsStatSettings = new fsStat.Settings({ @@ -30437,17 +30437,17 @@ var require_out2 = __commonJS({ var sync = require_sync2(); var settings_1 = require_settings2(); exports2.Settings = settings_1.default; - function scandir(path20, optionsOrSettingsOrCallback, callback) { + function scandir(path21, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === "function") { - async.read(path20, getSettings(), optionsOrSettingsOrCallback); + async.read(path21, getSettings(), optionsOrSettingsOrCallback); return; } - async.read(path20, getSettings(optionsOrSettingsOrCallback), callback); + async.read(path21, getSettings(optionsOrSettingsOrCallback), callback); } exports2.scandir = scandir; - function scandirSync(path20, optionsOrSettings) { + function scandirSync(path21, optionsOrSettings) { const settings = getSettings(optionsOrSettings); - return sync.read(path20, settings); + return sync.read(path21, settings); } exports2.scandirSync = scandirSync; function getSettings(settingsOrOptions = {}) { @@ -30974,7 +30974,7 @@ var require_settings3 = __commonJS({ "node_modules/@nodelib/fs.walk/out/settings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var fsScandir = require_out2(); var Settings = class { constructor(_options = {}) { @@ -30984,7 +30984,7 @@ var require_settings3 = __commonJS({ this.deepFilter = this._getValue(this._options.deepFilter, null); this.entryFilter = this._getValue(this._options.entryFilter, null); this.errorFilter = this._getValue(this._options.errorFilter, null); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, 
path20.sep); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path21.sep); this.fsScandirSettings = new fsScandir.Settings({ followSymbolicLinks: this._options.followSymbolicLinks, fs: this._options.fs, @@ -31046,7 +31046,7 @@ var require_reader2 = __commonJS({ "node_modules/fast-glob/out/readers/reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var fsStat = require_out(); var utils = require_utils7(); var Reader = class { @@ -31059,7 +31059,7 @@ var require_reader2 = __commonJS({ }); } _getFullEntryPath(filepath) { - return path20.resolve(this._settings.cwd, filepath); + return path21.resolve(this._settings.cwd, filepath); } _makeEntry(stats, pattern) { const entry = { @@ -31475,7 +31475,7 @@ var require_provider = __commonJS({ "node_modules/fast-glob/out/providers/provider.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var deep_1 = require_deep(); var entry_1 = require_entry(); var error_1 = require_error(); @@ -31489,7 +31489,7 @@ var require_provider = __commonJS({ this.entryTransformer = new entry_2.default(this._settings); } _getRootDirectory(task) { - return path20.resolve(this._settings.cwd, task.base); + return path21.resolve(this._settings.cwd, task.base); } _getReaderOptions(task) { const basePath = task.base === "." ? "" : task.base; @@ -31670,16 +31670,16 @@ var require_settings4 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; - var fs18 = require("fs"); + var fs19 = require("fs"); var os5 = require("os"); var CPU_COUNT = Math.max(os5.cpus().length, 1); exports2.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs18.lstat, - lstatSync: fs18.lstatSync, - stat: fs18.stat, - statSync: fs18.statSync, - readdir: fs18.readdir, - readdirSync: fs18.readdirSync + lstat: fs19.lstat, + lstatSync: fs19.lstatSync, + stat: fs19.stat, + statSync: fs19.statSync, + readdir: fs19.readdir, + readdirSync: fs19.readdirSync }; var Settings = class { constructor(_options = {}) { @@ -32122,7 +32122,7 @@ var require_ignore = __commonJS({ // path matching. 
// - check `string` either `MODE_IGNORE` or `MODE_CHECK_IGNORE` // @returns {TestResult} true if a file is ignored - test(path20, checkUnignored, mode) { + test(path21, checkUnignored, mode) { let ignored = false; let unignored = false; let matchedRule; @@ -32131,7 +32131,7 @@ var require_ignore = __commonJS({ if (unignored === negative && ignored !== unignored || negative && !ignored && !unignored && !checkUnignored) { return; } - const matched = rule[mode].test(path20); + const matched = rule[mode].test(path21); if (!matched) { return; } @@ -32152,17 +32152,17 @@ var require_ignore = __commonJS({ var throwError2 = (message, Ctor) => { throw new Ctor(message); }; - var checkPath = (path20, originalPath, doThrow) => { - if (!isString(path20)) { + var checkPath = (path21, originalPath, doThrow) => { + if (!isString(path21)) { return doThrow( `path must be a string, but got \`${originalPath}\``, TypeError ); } - if (!path20) { + if (!path21) { return doThrow(`path must not be empty`, TypeError); } - if (checkPath.isNotRelative(path20)) { + if (checkPath.isNotRelative(path21)) { const r = "`path.relative()`d"; return doThrow( `path should be a ${r} string, but got "${originalPath}"`, @@ -32171,7 +32171,7 @@ var require_ignore = __commonJS({ } return true; }; - var isNotRelative = (path20) => REGEX_TEST_INVALID_PATH.test(path20); + var isNotRelative = (path21) => REGEX_TEST_INVALID_PATH.test(path21); checkPath.isNotRelative = isNotRelative; checkPath.convert = (p) => p; var Ignore = class { @@ -32201,19 +32201,19 @@ var require_ignore = __commonJS({ } // @returns {TestResult} _test(originalPath, cache, checkUnignored, slices) { - const path20 = originalPath && checkPath.convert(originalPath); + const path21 = originalPath && checkPath.convert(originalPath); checkPath( - path20, + path21, originalPath, this._strictPathCheck ? throwError2 : RETURN_FALSE ); - return this._t(path20, cache, checkUnignored, slices); + return this._t(path21, cache, checkUnignored, slices); } - checkIgnore(path20) { - if (!REGEX_TEST_TRAILING_SLASH.test(path20)) { - return this.test(path20); + checkIgnore(path21) { + if (!REGEX_TEST_TRAILING_SLASH.test(path21)) { + return this.test(path21); } - const slices = path20.split(SLASH).filter(Boolean); + const slices = path21.split(SLASH).filter(Boolean); slices.pop(); if (slices.length) { const parent = this._t( @@ -32226,18 +32226,18 @@ var require_ignore = __commonJS({ return parent; } } - return this._rules.test(path20, false, MODE_CHECK_IGNORE); + return this._rules.test(path21, false, MODE_CHECK_IGNORE); } - _t(path20, cache, checkUnignored, slices) { - if (path20 in cache) { - return cache[path20]; + _t(path21, cache, checkUnignored, slices) { + if (path21 in cache) { + return cache[path21]; } if (!slices) { - slices = path20.split(SLASH).filter(Boolean); + slices = path21.split(SLASH).filter(Boolean); } slices.pop(); if (!slices.length) { - return cache[path20] = this._rules.test(path20, checkUnignored, MODE_IGNORE); + return cache[path21] = this._rules.test(path21, checkUnignored, MODE_IGNORE); } const parent = this._t( slices.join(SLASH) + SLASH, @@ -32245,29 +32245,29 @@ var require_ignore = __commonJS({ checkUnignored, slices ); - return cache[path20] = parent.ignored ? parent : this._rules.test(path20, checkUnignored, MODE_IGNORE); + return cache[path21] = parent.ignored ? 
parent : this._rules.test(path21, checkUnignored, MODE_IGNORE); } - ignores(path20) { - return this._test(path20, this._ignoreCache, false).ignored; + ignores(path21) { + return this._test(path21, this._ignoreCache, false).ignored; } createFilter() { - return (path20) => !this.ignores(path20); + return (path21) => !this.ignores(path21); } filter(paths) { return makeArray(paths).filter(this.createFilter()); } // @returns {TestResult} - test(path20) { - return this._test(path20, this._testCache, true); + test(path21) { + return this._test(path21, this._testCache, true); } }; var factory = (options) => new Ignore(options); - var isPathValid = (path20) => checkPath(path20 && checkPath.convert(path20), path20, RETURN_FALSE); + var isPathValid = (path21) => checkPath(path21 && checkPath.convert(path21), path21, RETURN_FALSE); var setupWindows = () => { const makePosix = (str2) => /^\\\\\?\\/.test(str2) || /["<>|\u0000-\u001F]+/u.test(str2) ? str2 : str2.replace(/\\/g, "/"); checkPath.convert = makePosix; const REGEX_TEST_WINDOWS_PATH_ABSOLUTE = /^[a-z]:\//i; - checkPath.isNotRelative = (path20) => REGEX_TEST_WINDOWS_PATH_ABSOLUTE.test(path20) || isNotRelative(path20); + checkPath.isNotRelative = (path21) => REGEX_TEST_WINDOWS_PATH_ABSOLUTE.test(path21) || isNotRelative(path21); }; if ( // Detect `process` so that it can run in browsers. @@ -34052,7 +34052,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname3(p) { @@ -34060,7 +34060,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path20.dirname(p); + let result = path21.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -34098,7 +34098,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path20.sep; + root += path21.sep; } return root + itemPath; } @@ -34136,10 +34136,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path20.sep)) { + if (!p.endsWith(path21.sep)) { return p; } - if (p === path20.sep) { + if (p === path21.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -34472,7 +34472,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path20 = (function() { + var path21 = (function() { try { return require("path"); } catch (e) { @@ -34480,7 +34480,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path20.sep; + minimatch.sep = path21.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -34569,8 +34569,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path20.sep !== 
"/") { - pattern = pattern.split(path20.sep).join("/"); + if (!options.allowWindowsEscape && path21.sep !== "/") { + pattern = pattern.split(path21.sep).join("/"); } this.options = options; this.set = []; @@ -34939,8 +34939,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path20.sep !== "/") { - f = f.split(path20.sep).join("/"); + if (path21.sep !== "/") { + f = f.split(path21.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -35072,7 +35072,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -35087,12 +35087,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path20.sep); + this.segments = itemPath.split(path21.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path20.basename(remaining); + const basename = path21.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -35110,7 +35110,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path20.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path21.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -35121,12 +35121,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path20.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path21.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path20.sep; + result += path21.sep; } result += this.segments[i]; } @@ -35170,7 +35170,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -35199,7 +35199,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir2); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path20.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path21.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === 
"")); @@ -35223,8 +35223,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path20.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path20.sep}`; + if (!itemPath.endsWith(path21.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path21.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -35259,9 +35259,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path20.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path21.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path20.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path21.sep}`)) { homedir2 = homedir2 || os5.homedir(); assert_1.default(homedir2, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir2), `Expected HOME directory to be a rooted path. Actual '${homedir2}'`); @@ -35345,8 +35345,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path20, level) { - this.path = path20; + constructor(path21, level) { + this.path = path21; this.level = level; } }; @@ -35466,9 +35466,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -35518,7 +35518,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs18.promises.lstat(searchPath)); + yield __await4(fs19.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -35549,7 +35549,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path20.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path21.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -35584,7 +35584,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield 
fs18.promises.stat(item.path); + stats = yield fs19.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -35596,10 +35596,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs18.promises.lstat(item.path); + stats = yield fs19.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs18.promises.realpath(item.path); + const realPath = yield fs19.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -36933,8 +36933,8 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); - var fs18 = __importStar4(require("fs")); - var path20 = __importStar4(require("path")); + var fs19 = __importStar4(require("fs")); + var path21 = __importStar4(require("path")); var semver9 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants10(); @@ -36954,16 +36954,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path20.join(baseLocation, "actions", "temp"); + tempDirectory = path21.join(baseLocation, "actions", "temp"); } - const dest = path20.join(tempDirectory, crypto2.randomUUID()); + const dest = path21.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs18.statSync(filePath).size; + return fs19.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -36980,7 +36980,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path20.relative(workspace, file).replace(new RegExp(`\\${path20.sep}`, "g"), "/"); + const relativeFile = path21.relative(workspace, file).replace(new RegExp(`\\${path21.sep}`, "g"), "/"); core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -37003,7 +37003,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs18.unlink)(filePath); + return util.promisify(fs19.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -37048,7 +37048,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -44886,15 +44886,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path20 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path20.startsWith("/")) { - path20 = path20.substring(1); + let path21 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path21.startsWith("/")) { + path21 = path21.substring(1); } - if (isAbsoluteUrl(path20)) { - requestUrl = path20; + if (isAbsoluteUrl(path21)) { + requestUrl = path21; isAbsolutePath = true; } 
else { - requestUrl = appendPath(requestUrl, path20); + requestUrl = appendPath(requestUrl, path21); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -44942,9 +44942,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path20 = pathToAppend.substring(0, searchStart); + const path21 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path20; + newPath = newPath + path21; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -48821,7 +48821,7 @@ var require_dist7 = __commonJS({ var stream2 = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs18 = require("fs"); + var fs19 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -48844,7 +48844,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs18); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs19); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -49093,10 +49093,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path20 = urlParsed.pathname; - path20 = path20 || "/"; - path20 = escape(path20); - urlParsed.pathname = path20; + let path21 = urlParsed.pathname; + path21 = path21 || "/"; + path21 = escape(path21); + urlParsed.pathname = path21; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -49181,9 +49181,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path20 = urlParsed.pathname; - path20 = path20 ? path20.endsWith("/") ? `${path20}${name}` : `${path20}/${name}` : name; - urlParsed.pathname = path20; + let path21 = urlParsed.pathname; + path21 = path21 ? path21.endsWith("/") ? 
`${path21}${name}` : `${path21}/${name}` : name; + urlParsed.pathname = path21; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -50264,9 +50264,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path20 = getURLPath(request.url) || "/"; + const path21 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path20}`; + canonicalizedResourceString += `/${this.factory.accountName}${path21}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -50559,9 +50559,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path20 = getURLPath(request.url) || "/"; + const path21 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path20}`; + canonicalizedResourceString += `/${options.accountName}${path21}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -69863,8 +69863,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path20 = getURLPath(subRequest.url); - if (!path20 || path20 === "") { + const path21 = getURLPath(subRequest.url); + if (!path21 || path21 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -69924,8 +69924,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path20 = getURLPath(url2); - if (path20 && path20 !== "/") { + const path21 = getURLPath(url2); + if (path21 && path21 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -72692,7 +72692,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -72803,7 +72803,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs18.createWriteStream(archivePath); + const writeStream = fs19.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -72829,7 +72829,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs18.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs19.promises.open(archivePath, "w"); const httpClient = new 
http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -72946,7 +72946,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs18.openSync(archivePath, "w"); + const fd = fs19.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -72964,12 +72964,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs18.writeFileSync(fd, result); + fs19.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs18.closeSync(fd); + fs19.closeSync(fd); } } }); @@ -73268,7 +73268,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -73406,7 +73406,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs18.openSync(archivePath, "r"); + const fd = fs19.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -73420,7 +73420,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, { fd, start, end, @@ -73431,7 +73431,7 @@ Other caches with similar key:`); } }))); } finally { - fs18.closeSync(fd); + fs19.closeSync(fd); } return; }); @@ -78675,7 +78675,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io7 = __importStar4(require_io()); var fs_1 = require("fs"); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants10(); var IS_WINDOWS = process.platform === "win32"; @@ -78721,13 +78721,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path20.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path21.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -78773,7 +78773,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -78782,7 +78782,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -78797,7 +78797,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -78806,7 +78806,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -78846,7 +78846,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path20.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path21.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -78916,7 +78916,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core14 = __importStar4(require_core()); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -79013,7 +79013,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path21.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core14.isDebug()) { @@ -79082,7 +79082,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path21.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive path: ${archivePath}`); core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -79145,7 +79145,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path21.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -79209,7 +79209,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path21.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -79389,7 +79389,7 @@ var require_internal_path_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); 
exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname3(p) { @@ -79397,7 +79397,7 @@ var require_internal_path_helper2 = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path20.dirname(p); + let result = path21.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -79435,7 +79435,7 @@ var require_internal_path_helper2 = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path20.sep; + root += path21.sep; } return root + itemPath; } @@ -79473,10 +79473,10 @@ var require_internal_path_helper2 = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path20.sep)) { + if (!p.endsWith(path21.sep)) { return p; } - if (p === path20.sep) { + if (p === path21.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -79627,7 +79627,7 @@ var require_internal_path2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -79642,12 +79642,12 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path20.sep); + this.segments = itemPath.split(path21.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path20.basename(remaining); + const basename = path21.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -79665,7 +79665,7 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path20.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path21.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -79676,12 +79676,12 @@ var require_internal_path2 = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path20.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path21.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path20.sep; + result += path21.sep; } result += this.segments[i]; } @@ -79729,7 +79729,7 @@ var require_internal_pattern2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os5 = __importStar4(require("os")); - var path20 
= __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -79758,7 +79758,7 @@ var require_internal_pattern2 = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir2); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path20.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path21.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -79782,8 +79782,8 @@ var require_internal_pattern2 = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path20.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path20.sep}`; + if (!itemPath.endsWith(path21.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path21.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -79818,9 +79818,9 @@ var require_internal_pattern2 = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path20.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path21.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path20.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path21.sep}`)) { homedir2 = homedir2 || os5.homedir(); (0, assert_1.default)(homedir2, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir2), `Expected HOME directory to be a rooted path. 
Actual '${homedir2}'`); @@ -79904,8 +79904,8 @@ var require_internal_search_state2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path20, level) { - this.path = path20; + constructor(path21, level) { + this.path = path21; this.level = level; } }; @@ -80029,9 +80029,9 @@ var require_internal_globber2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper2()); var internal_match_kind_1 = require_internal_match_kind2(); var internal_pattern_1 = require_internal_pattern2(); @@ -80083,7 +80083,7 @@ var require_internal_globber2 = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs18.promises.lstat(searchPath)); + yield __await4(fs19.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -80107,7 +80107,7 @@ var require_internal_globber2 = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path20.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path21.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -80117,7 +80117,7 @@ var require_internal_globber2 = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path20.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path21.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -80152,7 +80152,7 @@ var require_internal_globber2 = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs18.promises.stat(item.path); + stats = yield fs19.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -80164,10 +80164,10 @@ var require_internal_globber2 = __commonJS({ throw err; } } else { - stats = yield fs18.promises.lstat(item.path); + stats = yield fs19.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs18.promises.realpath(item.path); + const realPath = yield fs19.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -80266,10 +80266,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = void 0; var crypto2 = __importStar4(require("crypto")); var core14 = __importStar4(require_core()); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); function hashFiles2(globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; @@ -80285,17 +80285,17 @@ var 
require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path20.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path21.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs18.statSync(file).isDirectory()) { + if (fs19.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs18.createReadStream(file), hash); + yield pipeline(fs19.createReadStream(file), hash); result.write(hash.digest()); count++; if (!hasMatch) { @@ -80444,7 +80444,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os5 = require("os"); var cp = require("child_process"); - var fs18 = require("fs"); + var fs19 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os5.platform(); @@ -80508,10 +80508,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs18.existsSync(lsbReleaseFile)) { - contents = fs18.readFileSync(lsbReleaseFile).toString(); - } else if (fs18.existsSync(osReleaseFile)) { - contents = fs18.readFileSync(osReleaseFile).toString(); + if (fs19.existsSync(lsbReleaseFile)) { + contents = fs19.readFileSync(lsbReleaseFile).toString(); + } else if (fs19.existsSync(osReleaseFile)) { + contents = fs19.readFileSync(osReleaseFile).toString(); } return contents; } @@ -80688,10 +80688,10 @@ var require_tool_cache = __commonJS({ var core14 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver9 = __importStar4(require_semver2()); var stream2 = __importStar4(require("stream")); @@ -80712,8 +80712,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path20.join(_getTempDirectory(), crypto2.randomUUID()); - yield io7.mkdirP(path20.dirname(dest)); + dest = dest || path21.join(_getTempDirectory(), crypto2.randomUUID()); + yield io7.mkdirP(path21.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -80735,7 +80735,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs18.existsSync(dest)) { + if (fs19.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -80759,7 +80759,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs18.createWriteStream(dest)); + yield pipeline(readStream, fs19.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -80800,7 +80800,7 @@ var require_tool_cache = 
__commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path20.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path21.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -80971,12 +80971,12 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os5.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch2}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs18.statSync(sourceDir).isDirectory()) { + if (!fs19.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs18.readdirSync(sourceDir)) { - const s = path20.join(sourceDir, itemName); + for (const itemName of fs19.readdirSync(sourceDir)) { + const s = path21.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch2); @@ -80990,11 +80990,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os5.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch2}`); core14.debug(`source file: ${sourceFile}`); - if (!fs18.statSync(sourceFile).isFile()) { + if (!fs19.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); - const destPath = path20.join(destFolder, targetFile); + const destPath = path21.join(destFolder, targetFile); core14.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); @@ -81018,9 +81018,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path20.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path21.join(_getCacheDirectory(), toolName, versionSpec, arch2); core14.debug(`checking cache: ${cachePath}`); - if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { + if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -81033,13 +81033,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch2) { const versions = []; arch2 = arch2 || os5.arch(); - const toolPath = path20.join(_getCacheDirectory(), toolName); - if (fs18.existsSync(toolPath)) { - const children = fs18.readdirSync(toolPath); + const toolPath = path21.join(_getCacheDirectory(), toolName); + if (fs19.existsSync(toolPath)) { + const children = fs19.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path20.join(toolPath, child, arch2 || ""); - if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) { + const fullPath = path21.join(toolPath, child, arch2 || ""); + if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -81093,7 +81093,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path20.join(_getTempDirectory(), 
crypto2.randomUUID()); + dest = path21.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; @@ -81101,7 +81101,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path21.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); @@ -81111,9 +81111,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch2) { - const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path21.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs18.writeFileSync(markerPath, ""); + fs19.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -81721,8 +81721,8 @@ var require_follow_redirects = __commonJS({ }); // src/init-action.ts -var fs17 = __toESM(require("fs")); -var path19 = __toESM(require("path")); +var fs18 = __toESM(require("fs")); +var path20 = __toESM(require("path")); var core13 = __toESM(require_core()); var io6 = __toESM(require_io()); var semver8 = __toESM(require_semver2()); @@ -82178,12 +82178,12 @@ var import_fast_glob = __toESM(require_out4(), 1); var import_ignore = __toESM(require_ignore(), 1); // node_modules/slash/index.js -function slash(path20) { - const isExtendedLengthPath = path20.startsWith("\\\\?\\"); +function slash(path21) { + const isExtendedLengthPath = path21.startsWith("\\\\?\\"); if (isExtendedLengthPath) { - return path20; + return path21; } - return path20.replace(/\\/g, "/"); + return path21.replace(/\\/g, "/"); } // node_modules/globby/utilities.js @@ -82271,8 +82271,8 @@ var assertPatternsInput = (patterns) => { } }; var normalizePathForDirectoryGlob = (filePath, cwd) => { - const path20 = isNegativePattern(filePath) ? filePath.slice(1) : filePath; - return import_node_path3.default.isAbsolute(path20) ? path20 : import_node_path3.default.join(cwd, path20); + const path21 = isNegativePattern(filePath) ? filePath.slice(1) : filePath; + return import_node_path3.default.isAbsolute(path21) ? path21 : import_node_path3.default.join(cwd, path21); }; var getDirectoryGlob = ({ directoryPath, files, extensions }) => { const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? `{${extensions.join(",")}}` : extensions[0]}` : ""; @@ -82671,21 +82671,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs18 = options.fs || await import("node:fs/promises"); + const fs19 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? 
await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -86228,8 +86228,8 @@ function getDependencyCachingEnabled() { } // src/config-utils.ts -var fs9 = __toESM(require("fs")); -var path11 = __toESM(require("path")); +var fs10 = __toESM(require("fs")); +var path12 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); // src/analyses.ts @@ -86561,6 +86561,10 @@ function generateCodeScanningConfig(logger, originalUserInput, augmentationPrope return augmentedConfig; } +// src/diff-informed-analysis-utils.ts +var fs8 = __toESM(require("fs")); +var path10 = __toESM(require("path")); + // src/feature-flags.ts var fs7 = __toESM(require("fs")); var path9 = __toESM(require("path")); @@ -86677,8 +86681,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path20 = decodeGitFilePath(match[2]); - fileOidMap[path20] = oid; + const path21 = decodeGitFilePath(match[2]); + fileOidMap[path21] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -87439,6 +87443,129 @@ async function getDiffInformedAnalysisBranches(codeql, features, logger) { } return branches; } +function getDiffRangesJsonFilePath() { + return path10.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function writeDiffRangesJsonFile(logger, ranges) { + const jsonContents = JSON.stringify(ranges, null, 2); + const jsonFilePath = getDiffRangesJsonFilePath(); + fs8.writeFileSync(jsonFilePath, jsonContents); + logger.debug( + `Wrote pr-diff-range JSON file to ${jsonFilePath}: +${jsonContents}` + ); +} +async function getPullRequestEditedDiffRanges(branches, logger) { + const fileDiffs = await getFileDiffsWithBasehead(branches, logger); + if (fileDiffs === void 0) { + return void 0; + } + if (fileDiffs.length >= 300) { + logger.warning( + `Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.` + ); + return void 0; + } + const results = []; + for (const filediff of fileDiffs) { + const diffRanges = getDiffRanges(filediff, logger); + if (diffRanges === void 0) { + return void 0; + } + results.push(...diffRanges); + } + return results; +} +async function getFileDiffsWithBasehead(branches, logger) { + const repositoryNwo = getRepositoryNwoFromEnv( + "CODE_SCANNING_REPOSITORY", + "GITHUB_REPOSITORY" + ); + const basehead = `${branches.base}...${branches.head}`; + try { + const response = await getApiClient().rest.repos.compareCommitsWithBasehead( + { + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + basehead, + per_page: 1 + } + ); + logger.debug( + `Response from compareCommitsWithBasehead(${basehead}): +${JSON.stringify(response, null, 2)}` + ); + return response.data.files; + } 
catch (error2) { + if (error2.status) { + logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`); + logger.debug( + `Error running compareCommitsWithBasehead(${basehead}): +Request: ${JSON.stringify(error2.request, null, 2)} +Error Response: ${JSON.stringify(error2.response, null, 2)}` + ); + return void 0; + } else { + throw error2; + } + } +} +function getDiffRanges(fileDiff, logger) { + const filename = path10.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path10.sep, "/"); + if (fileDiff.patch === void 0) { + if (fileDiff.changes === 0) { + return []; + } + return [ + { + path: filename, + startLine: 0, + endLine: 0 + } + ]; + } + let currentLine = 0; + let additionRangeStartLine = void 0; + const diffRanges = []; + const diffLines = fileDiff.patch.split("\n"); + diffLines.push(" "); + for (const diffLine of diffLines) { + if (diffLine.startsWith("-")) { + continue; + } + if (diffLine.startsWith("+")) { + if (additionRangeStartLine === void 0) { + additionRangeStartLine = currentLine; + } + currentLine++; + continue; + } + if (additionRangeStartLine !== void 0) { + diffRanges.push({ + path: filename, + startLine: additionRangeStartLine, + endLine: currentLine - 1 + }); + additionRangeStartLine = void 0; + } + if (diffLine.startsWith("@@ ")) { + const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); + if (match === null) { + logger.warning( + `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}` + ); + return void 0; + } + currentLine = parseInt(match[1], 10); + continue; + } + if (diffLine.startsWith(" ")) { + currentLine++; + continue; + } + } + return diffRanges; +} // src/languages.ts var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { @@ -87456,8 +87583,8 @@ var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { })(KnownLanguage || {}); // src/trap-caching.ts -var fs8 = __toESM(require("fs")); -var path10 = __toESM(require("path")); +var fs9 = __toESM(require("fs")); +var path11 = __toESM(require("path")); var actionsCache2 = __toESM(require_cache3()); var CACHE_VERSION2 = 1; var CODEQL_TRAP_CACHE_PREFIX = "codeql-trap"; @@ -87473,13 +87600,13 @@ async function downloadTrapCaches(codeql, languages, logger) { `Found ${languagesSupportingCaching.length} languages that support TRAP caching` ); if (languagesSupportingCaching.length === 0) return result; - const cachesDir = path10.join( + const cachesDir = path11.join( getTemporaryDirectory(), "trapCaches" ); for (const language of languagesSupportingCaching) { - const cacheDir = path10.join(cachesDir, language); - fs8.mkdirSync(cacheDir, { recursive: true }); + const cacheDir = path11.join(cachesDir, language); + fs9.mkdirSync(cacheDir, { recursive: true }); result[language] = cacheDir; } if (await isAnalyzingDefaultBranch()) { @@ -87491,7 +87618,7 @@ async function downloadTrapCaches(codeql, languages, logger) { let baseSha = "unknown"; const eventPath = process.env.GITHUB_EVENT_PATH; if (getWorkflowEventName() === "pull_request" && eventPath !== void 0) { - const event = JSON.parse(fs8.readFileSync(path10.resolve(eventPath), "utf-8")); + const event = JSON.parse(fs9.readFileSync(path11.resolve(eventPath), "utf-8")); baseSha = event.pull_request?.base?.sha || baseSha; } for (const language of languages) { @@ -87593,9 +87720,9 @@ async function getSupportedLanguageMap(codeql, features, logger) { } var baseWorkflowsPath = ".github/workflows"; function hasActionsWorkflows(sourceRoot) { - const workflowsPath = path11.resolve(sourceRoot, baseWorkflowsPath); - 
const stats = fs9.lstatSync(workflowsPath, { throwIfNoEntry: false }); - return stats !== void 0 && stats.isDirectory() && fs9.readdirSync(workflowsPath).length > 0; + const workflowsPath = path12.resolve(sourceRoot, baseWorkflowsPath); + const stats = fs10.lstatSync(workflowsPath, { throwIfNoEntry: false }); + return stats !== void 0 && stats.isDirectory() && fs10.readdirSync(workflowsPath).length > 0; } async function getRawLanguagesInRepo(repository, sourceRoot, logger) { logger.debug( @@ -87765,8 +87892,8 @@ async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logg async function loadUserConfig(configFile, workspacePath, apiDetails, tempDir) { if (isLocal(configFile)) { if (configFile !== userConfigFromActionPath(tempDir)) { - configFile = path11.resolve(workspacePath, configFile); - if (!(configFile + path11.sep).startsWith(workspacePath + path11.sep)) { + configFile = path12.resolve(workspacePath, configFile); + if (!(configFile + path12.sep).startsWith(workspacePath + path12.sep)) { throw new ConfigurationError( getConfigFileOutsideWorkspaceErrorMessage(configFile) ); @@ -87895,10 +88022,10 @@ async function getOverlayDatabaseMode(codeql, repository, features, languages, s }; } function dbLocationOrDefault(dbLocation, tempDir) { - return dbLocation || path11.resolve(tempDir, "codeql_databases"); + return dbLocation || path12.resolve(tempDir, "codeql_databases"); } function userConfigFromActionPath(tempDir) { - return path11.resolve(tempDir, "user-config-from-action.yml"); + return path12.resolve(tempDir, "user-config-from-action.yml"); } function hasQueryCustomisation(userConfig) { return isDefined(userConfig["disable-default-queries"]) || isDefined(userConfig.queries) || isDefined(userConfig["query-filters"]); @@ -87912,7 +88039,7 @@ async function initConfig(inputs) { ); } inputs.configFile = userConfigFromActionPath(tempDir); - fs9.writeFileSync(inputs.configFile, inputs.configInput); + fs10.writeFileSync(inputs.configFile, inputs.configInput); logger.debug(`Using config from action input: ${inputs.configFile}`); } let userConfig = {}; @@ -87987,12 +88114,12 @@ function isLocal(configPath) { return configPath.indexOf("@") === -1; } function getLocalConfig(configFile) { - if (!fs9.existsSync(configFile)) { + if (!fs10.existsSync(configFile)) { throw new ConfigurationError( getConfigFileDoesNotExistErrorMessage(configFile) ); } - return load(fs9.readFileSync(configFile, "utf8")); + return load(fs10.readFileSync(configFile, "utf8")); } async function getRemoteConfig(configFile, apiDetails) { const format = new RegExp( @@ -88027,13 +88154,13 @@ async function getRemoteConfig(configFile, apiDetails) { ); } function getPathToParsedConfigFile(tempDir) { - return path11.join(tempDir, "config"); + return path12.join(tempDir, "config"); } async function saveConfig(config, logger) { const configString = JSON.stringify(config); const configFile = getPathToParsedConfigFile(config.tempDir); - fs9.mkdirSync(path11.dirname(configFile), { recursive: true }); - fs9.writeFileSync(configFile, configString, "utf8"); + fs10.mkdirSync(path12.dirname(configFile), { recursive: true }); + fs10.writeFileSync(configFile, configString, "utf8"); logger.debug("Saved config:"); logger.debug(configString); } @@ -88043,9 +88170,9 @@ async function generateRegistries(registriesInput, tempDir, logger) { let qlconfigFile; if (registries) { const qlconfig = createRegistriesBlock(registries); - qlconfigFile = path11.join(tempDir, "qlconfig.yml"); + qlconfigFile = path12.join(tempDir, 
"qlconfig.yml"); const qlconfigContents = dump(qlconfig); - fs9.writeFileSync(qlconfigFile, qlconfigContents, "utf8"); + fs10.writeFileSync(qlconfigFile, qlconfigContents, "utf8"); logger.debug("Generated qlconfig.yml:"); logger.debug(qlconfigContents); registriesAuthTokens = registries.map((registry) => `${registry.url}=${registry.token}`).join(","); @@ -88323,14 +88450,14 @@ function flushDiagnostics(config) { } // src/init.ts -var fs15 = __toESM(require("fs")); -var path17 = __toESM(require("path")); +var fs16 = __toESM(require("fs")); +var path18 = __toESM(require("path")); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); // src/codeql.ts -var fs14 = __toESM(require("fs")); -var path16 = __toESM(require("path")); +var fs15 = __toESM(require("fs")); +var path17 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -88611,15 +88738,15 @@ function wrapCliConfigurationError(cliError) { } // src/setup-codeql.ts -var fs12 = __toESM(require("fs")); -var path14 = __toESM(require("path")); +var fs13 = __toESM(require("fs")); +var path15 = __toESM(require("path")); var toolcache3 = __toESM(require_tool_cache()); var import_fast_deep_equal = __toESM(require_fast_deep_equal()); var semver7 = __toESM(require_semver2()); // src/tar.ts var import_child_process = require("child_process"); -var fs10 = __toESM(require("fs")); +var fs11 = __toESM(require("fs")); var stream = __toESM(require("stream")); var import_toolrunner = __toESM(require_toolrunner()); var io4 = __toESM(require_io()); @@ -88692,7 +88819,7 @@ async function isZstdAvailable(logger) { } } async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs10.mkdirSync(dest, { recursive: true }); + fs11.mkdirSync(dest, { recursive: true }); switch (compressionMethod) { case "gzip": return await toolcache.extractTar(tarPath, dest); @@ -88776,9 +88903,9 @@ function inferCompressionMethod(tarPath) { } // src/tools-download.ts -var fs11 = __toESM(require("fs")); +var fs12 = __toESM(require("fs")); var os3 = __toESM(require("os")); -var path13 = __toESM(require("path")); +var path14 = __toESM(require("path")); var import_perf_hooks2 = require("perf_hooks"); var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); @@ -88883,7 +89010,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs11.mkdirSync(dest, { recursive: true }); + fs12.mkdirSync(dest, { recursive: true }); const agent = new import_http_client.HttpClient().getAgent(codeqlURL); headers = Object.assign( { "User-Agent": "CodeQL Action" }, @@ -88911,7 +89038,7 @@ async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorizatio await extractTarZst(response, dest, tarVersion, logger); } function getToolcacheDirectory(version) { - return path13.join( + return path14.join( getRequiredEnvParam("RUNNER_TOOL_CACHE"), TOOLCACHE_TOOL_NAME, semver6.clean(version) || version, @@ -88920,7 +89047,7 @@ function getToolcacheDirectory(version) { } function writeToolcacheMarkerFile(extractedPath, logger) { const markerFilePath = `${extractedPath}.complete`; - fs11.writeFileSync(markerFilePath, ""); + fs12.writeFileSync(markerFilePath, ""); logger.info(`Created toolcache marker file ${markerFilePath}`); } function sanitizeUrlForStatusReport(url) { @@ -89055,7 +89182,7 @@ async function 
findOverridingToolsInCache(humanReadableVersion, logger) { const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ folder: toolcache3.find("CodeQL", version), version - })).filter(({ folder }) => fs12.existsSync(path14.join(folder, "pinned-version"))); + })).filter(({ folder }) => fs13.existsSync(path15.join(folder, "pinned-version"))); if (candidates.length === 1) { const candidate = candidates[0]; logger.debug( @@ -89418,7 +89545,7 @@ async function useZstdBundle(cliVersion2, tarSupportsZstd) { ); } function getTempExtractionDir(tempDir) { - return path14.join(tempDir, v4_default()); + return path15.join(tempDir, v4_default()); } async function getNightlyToolsUrl(logger) { const zstdAvailability = await isZstdAvailable(logger); @@ -89466,8 +89593,8 @@ function isReservedToolsValue(tools) { } // src/tracer-config.ts -var fs13 = __toESM(require("fs")); -var path15 = __toESM(require("path")); +var fs14 = __toESM(require("fs")); +var path16 = __toESM(require("path")); async function shouldEnableIndirectTracing(codeql, config) { if (config.buildMode === "none" /* None */) { return false; @@ -89479,8 +89606,8 @@ async function shouldEnableIndirectTracing(codeql, config) { } async function getTracerConfigForCluster(config) { const tracingEnvVariables = JSON.parse( - fs13.readFileSync( - path15.resolve( + fs14.readFileSync( + path16.resolve( config.dbLocation, "temp/tracingEnvironment/start-tracing.json" ), @@ -89527,7 +89654,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV toolsDownloadStatusReport )}` ); - let codeqlCmd = path16.join(codeqlFolder, "codeql", "codeql"); + let codeqlCmd = path17.join(codeqlFolder, "codeql", "codeql"); if (process.platform === "win32") { codeqlCmd += ".exe"; } else if (process.platform !== "linux" && process.platform !== "darwin") { @@ -89583,12 +89710,12 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path16.join( + const tracingConfigPath = path17.join( extractorPath, "tools", "tracing-config.lua" ); - return fs14.existsSync(tracingConfigPath); + return fs15.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -89659,7 +89786,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path16.join( + const autobuildCmd = path17.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -90049,7 +90176,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs14.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs15.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -90072,7 +90199,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path16.resolve(config.tempDir, "user-config.yaml"); + return path17.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? 
[`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; @@ -90126,7 +90253,7 @@ async function initConfig2(inputs) { }); } async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, logger) { - fs15.mkdirSync(config.dbLocation, { recursive: true }); + fs16.mkdirSync(config.dbLocation, { recursive: true }); await wrapEnvironment( databaseInitEnvironment, async () => await codeql.databaseInitCluster( @@ -90161,25 +90288,25 @@ async function checkPacksForOverlayCompatibility(codeql, config, logger) { } function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) { try { - let qlpackPath = path17.join(packDir, "qlpack.yml"); - if (!fs15.existsSync(qlpackPath)) { - qlpackPath = path17.join(packDir, "codeql-pack.yml"); + let qlpackPath = path18.join(packDir, "qlpack.yml"); + if (!fs16.existsSync(qlpackPath)) { + qlpackPath = path18.join(packDir, "codeql-pack.yml"); } const qlpackContents = load( - fs15.readFileSync(qlpackPath, "utf8") + fs16.readFileSync(qlpackPath, "utf8") ); if (!qlpackContents.buildMetadata) { return true; } - const packInfoPath = path17.join(packDir, ".packinfo"); - if (!fs15.existsSync(packInfoPath)) { + const packInfoPath = path18.join(packDir, ".packinfo"); + if (!fs16.existsSync(packInfoPath)) { logger.warning( `The query pack at ${packDir} does not have a .packinfo file, so it cannot support overlay analysis. Recompiling the query pack with the latest CodeQL CLI should solve this problem.` ); return false; } const packInfoFileContents = JSON.parse( - fs15.readFileSync(packInfoPath, "utf8") + fs16.readFileSync(packInfoPath, "utf8") ); const packOverlayVersion = packInfoFileContents.overlayVersion; if (typeof packOverlayVersion !== "number") { @@ -90204,7 +90331,7 @@ function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) } async function checkInstallPython311(languages, codeql) { if (languages.includes("python" /* python */) && process.platform === "win32" && !(await codeql.getVersion()).features?.supportsPython312) { - const script = path17.resolve( + const script = path18.resolve( __dirname, "../python-setup", "check_python12.ps1" @@ -90214,8 +90341,8 @@ async function checkInstallPython311(languages, codeql) { ]).exec(); } } -function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs15.rmSync) { - if (fs15.existsSync(config.dbLocation) && (fs15.statSync(config.dbLocation).isFile() || fs15.readdirSync(config.dbLocation).length > 0)) { +function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs16.rmSync) { + if (fs16.existsSync(config.dbLocation) && (fs16.statSync(config.dbLocation).isFile() || fs16.readdirSync(config.dbLocation).length > 0)) { if (!options.disableExistingDirectoryWarning) { logger.warning( `The database cluster directory ${config.dbLocation} must be empty. 
Attempting to clean it up.` @@ -90472,8 +90599,8 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config } // src/workflow.ts -var fs16 = __toESM(require("fs")); -var path18 = __toESM(require("path")); +var fs17 = __toESM(require("fs")); +var path19 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); var core12 = __toESM(require_core()); function toCodedErrors(errors) { @@ -90624,15 +90751,15 @@ async function getWorkflow(logger) { ); } const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs16.readFileSync(workflowPath, "utf-8")); + return load(fs17.readFileSync(workflowPath, "utf-8")); } async function getWorkflowAbsolutePath(logger) { const relativePath = await getWorkflowRelativePath(); - const absolutePath = path18.join( + const absolutePath = path19.join( getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath ); - if (fs16.existsSync(absolutePath)) { + if (fs17.existsSync(absolutePath)) { logger.debug( `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` ); @@ -90729,7 +90856,7 @@ async function run() { core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); const configFile = getOptionalInput("config-file"); - const sourceRoot = path19.resolve( + const sourceRoot = path20.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), getOptionalInput("source-root") || "" ); @@ -90827,6 +90954,7 @@ async function run() { logger }); await checkInstallPython311(config.languages, codeql); + await computeAndPersistDiffRangesEarly(codeql, features, logger); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); core13.setFailed(error2.message); @@ -90908,21 +91036,21 @@ async function run() { )) { try { logger.debug(`Applying static binary workaround for Go`); - const tempBinPath = path19.resolve( + const tempBinPath = path20.resolve( getTemporaryDirectory(), "codeql-action-go-tracing", "bin" ); - fs17.mkdirSync(tempBinPath, { recursive: true }); + fs18.mkdirSync(tempBinPath, { recursive: true }); core13.addPath(tempBinPath); - const goWrapperPath = path19.resolve(tempBinPath, "go"); - fs17.writeFileSync( + const goWrapperPath = path20.resolve(tempBinPath, "go"); + fs18.writeFileSync( goWrapperPath, `#!/bin/bash exec ${goBinaryPath} "$@"` ); - fs17.chmodSync(goWrapperPath, "755"); + fs18.chmodSync(goWrapperPath, "755"); core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( @@ -91118,6 +91246,39 @@ exec ${goBinaryPath} "$@"` logger ); } +async function computeAndPersistDiffRangesEarly(codeql, features, logger) { + try { + await withGroupAsync("Compute PR diff ranges", async () => { + const branches = await getDiffInformedAnalysisBranches( + codeql, + features, + logger + ); + if (!branches) { + logger.debug( + "Diff-informed analysis not enabled (feature flag or context); skipping diff range computation." + ); + return; + } + const ranges = await getPullRequestEditedDiffRanges(branches, logger); + if (ranges === void 0) { + logger.info( + "Diff ranges unavailable (API limits, truncation, or error); will fall back to analyze-time computation." 
+ ); + return; + } + writeDiffRangesJsonFile(logger, ranges); + const distinctFiles = new Set(ranges.map((r) => r.path)).size; + logger.info( + `Persisted ${ranges.length} diff range(s) across ${distinctFiles} file(s) for reuse during analyze step.` + ); + }); + } catch (e) { + logger.warning( + `Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}` + ); + } +} function getTrapCachingEnabled() { const trapCaching = getOptionalInput("trap-caching"); if (trapCaching !== void 0) return trapCaching === "true"; diff --git a/src/init-action.ts b/src/init-action.ts index 114ad6cab1..b140b34a01 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -45,7 +45,7 @@ import { runDatabaseInitCluster, } from "./init"; import { KnownLanguage } from "./languages"; -import { getActionsLogger, Logger } from "./logging"; +import { getActionsLogger, Logger, withGroupAsync } from "./logging"; import { downloadOverlayBaseDatabaseFromCache, OverlayBaseDatabaseDownloadStats, @@ -66,6 +66,11 @@ import { ZstdAvailability } from "./tar"; import { ToolsDownloadStatusReport } from "./tools-download"; import { ToolsFeature } from "./tools-features"; import { getCombinedTracerConfig } from "./tracer-config"; +import { + getPullRequestEditedDiffRanges, + writeDiffRangesJsonFile, + getDiffInformedAnalysisBranches, +} from "./diff-informed-analysis-utils"; import { checkDiskUsage, checkForTimeout, @@ -336,6 +341,8 @@ async function run() { }); await checkInstallPython311(config.languages, codeql); + + await computeAndPersistDiffRangesEarly(codeql, features, logger); } catch (unwrappedError) { const error = wrapError(unwrappedError); core.setFailed(error.message); @@ -748,6 +755,43 @@ async function run() { ); } +/** + * Compute and persist diff ranges early during init when diff-informed analysis + * is enabled (feature flag + PR context). This writes the standard pr-diff-range.json + * file for later reuse in the analyze step. Failures are logged but non-fatal. + */ +async function computeAndPersistDiffRangesEarly( + codeql: CodeQL, + features: Features, + logger: Logger, +): Promise { + try { + await withGroupAsync("Compute PR diff ranges", async () => { + const branches = await getDiffInformedAnalysisBranches( + codeql, + features, + logger, + ); + if (!branches) { + return; + } + const ranges = await getPullRequestEditedDiffRanges(branches, logger); + if (ranges === undefined) { + return; + } + writeDiffRangesJsonFile(logger, ranges); + const distinctFiles = new Set(ranges.map((r) => r.path)).size; + logger.info( + `Persisted ${ranges.length} diff range(s) across ${distinctFiles} file(s) for reuse during analyze step.`, + ); + }); + } catch (e) { + logger.warning( + `Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}`, + ); + } +} + function getTrapCachingEnabled(): boolean { // If the workflow specified something always respect that const trapCaching = getOptionalInput("trap-caching"); From 4c0acfe29e5029e945654d707e00c54ec87d0557 Mon Sep 17 00:00:00 2001 From: Alex Eyers-Taylor Date: Wed, 8 Oct 2025 17:57:03 +0100 Subject: [PATCH 3/6] Consume precomputed diff ranges in analyze and avoid getting them from the API. 
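
With this change the analyze step no longer computes diff ranges itself:
setupDiffInformedQueryRun() reads the pr-diff-range.json file that the init
step persisted via computeAndPersistDiffRangesEarly(), and skips the
diff-informed stage when that file is absent. Below is a minimal sketch of the
consuming side, not the code in this patch: it assumes RUNNER_TEMP as a
stand-in for getTemporaryDirectory(), and the real analyze code goes through
the readDiffRangesJsonFile() helper rather than reading the file directly.

    // Sketch only: read the ranges persisted by the init step.
    import * as fs from "fs";
    import * as path from "path";

    interface DiffThunkRange {
      path: string;      // file path under the checkout, "/"-separated
      startLine: number; // 0/0 is used when no patch text is available
      endLine: number;
    }

    // Assumption: RUNNER_TEMP stands in for getTemporaryDirectory().
    const jsonFilePath = path.join(
      process.env.RUNNER_TEMP ?? "/tmp",
      "pr-diff-range.json",
    );

    // When the file is missing, analyze skips the diff-informed stage
    // instead of querying the compare API itself.
    const ranges: DiffThunkRange[] | undefined = fs.existsSync(jsonFilePath)
      ? (JSON.parse(fs.readFileSync(jsonFilePath, "utf8")) as DiffThunkRange[])
      : undefined;
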
--- lib/analyze-action.js | 188 +++++------------------------------------- lib/init-action.js | 6 -- src/analyze-action.ts | 11 +-- src/analyze.ts | 37 ++++++--- 4 files changed, 45 insertions(+), 197 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index be30d1376e..6981690e10 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -90112,29 +90112,6 @@ var persistInputs = function() { ); core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; -function getPullRequestBranches() { - const pullRequest = github.context.payload.pull_request; - if (pullRequest) { - return { - base: pullRequest.base.ref, - // We use the head label instead of the head ref here, because the head - // ref lacks owner information and by itself does not uniquely identify - // the head branch (which may be in a forked repository). - head: pullRequest.head.label - }; - } - const codeScanningRef = process.env.CODE_SCANNING_REF; - const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH; - if (codeScanningRef && codeScanningBaseBranch) { - return { - base: codeScanningBaseBranch, - // PR analysis under Default Setup analyzes the PR head commit instead of - // the merge commit, so we can use the provided ref directly. - head: codeScanningRef - }; - } - return void 0; -} var qualityCategoryMapping = { "c#": "csharp", cpp: "c-cpp", @@ -91534,34 +91511,9 @@ var GitHubFeatureFlags = class { }; // src/diff-informed-analysis-utils.ts -async function getDiffInformedAnalysisBranches(codeql, features, logger) { - if (!await features.getValue("diff_informed_queries" /* DiffInformedQueries */, codeql)) { - return void 0; - } - const gitHubVersion = await getGitHubVersion(); - if (gitHubVersion.type === 1 /* GHES */ && satisfiesGHESVersion(gitHubVersion.version, "<3.19", true)) { - return void 0; - } - const branches = getPullRequestBranches(); - if (!branches) { - logger.info( - "Not performing diff-informed analysis because we are not analyzing a pull request." 
- ); - } - return branches; -} function getDiffRangesJsonFilePath() { return path9.join(getTemporaryDirectory(), "pr-diff-range.json"); } -function writeDiffRangesJsonFile(logger, ranges) { - const jsonContents = JSON.stringify(ranges, null, 2); - const jsonFilePath = getDiffRangesJsonFilePath(); - fs8.writeFileSync(jsonFilePath, jsonContents); - logger.debug( - `Wrote pr-diff-range JSON file to ${jsonFilePath}: -${jsonContents}` - ); -} function readDiffRangesJsonFile(logger) { const jsonFilePath = getDiffRangesJsonFilePath(); if (!fs8.existsSync(jsonFilePath)) { @@ -91575,117 +91527,6 @@ ${jsonContents}` ); return JSON.parse(jsonContents); } -async function getPullRequestEditedDiffRanges(branches, logger) { - const fileDiffs = await getFileDiffsWithBasehead(branches, logger); - if (fileDiffs === void 0) { - return void 0; - } - if (fileDiffs.length >= 300) { - logger.warning( - `Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.` - ); - return void 0; - } - const results = []; - for (const filediff of fileDiffs) { - const diffRanges = getDiffRanges(filediff, logger); - if (diffRanges === void 0) { - return void 0; - } - results.push(...diffRanges); - } - return results; -} -async function getFileDiffsWithBasehead(branches, logger) { - const repositoryNwo = getRepositoryNwoFromEnv( - "CODE_SCANNING_REPOSITORY", - "GITHUB_REPOSITORY" - ); - const basehead = `${branches.base}...${branches.head}`; - try { - const response = await getApiClient().rest.repos.compareCommitsWithBasehead( - { - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - basehead, - per_page: 1 - } - ); - logger.debug( - `Response from compareCommitsWithBasehead(${basehead}): -${JSON.stringify(response, null, 2)}` - ); - return response.data.files; - } catch (error2) { - if (error2.status) { - logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`); - logger.debug( - `Error running compareCommitsWithBasehead(${basehead}): -Request: ${JSON.stringify(error2.request, null, 2)} -Error Response: ${JSON.stringify(error2.response, null, 2)}` - ); - return void 0; - } else { - throw error2; - } - } -} -function getDiffRanges(fileDiff, logger) { - const filename = path9.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path9.sep, "/"); - if (fileDiff.patch === void 0) { - if (fileDiff.changes === 0) { - return []; - } - return [ - { - path: filename, - startLine: 0, - endLine: 0 - } - ]; - } - let currentLine = 0; - let additionRangeStartLine = void 0; - const diffRanges = []; - const diffLines = fileDiff.patch.split("\n"); - diffLines.push(" "); - for (const diffLine of diffLines) { - if (diffLine.startsWith("-")) { - continue; - } - if (diffLine.startsWith("+")) { - if (additionRangeStartLine === void 0) { - additionRangeStartLine = currentLine; - } - currentLine++; - continue; - } - if (additionRangeStartLine !== void 0) { - diffRanges.push({ - path: filename, - startLine: additionRangeStartLine, - endLine: currentLine - 1 - }); - additionRangeStartLine = void 0; - } - if (diffLine.startsWith("@@ ")) { - const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? 
@@/); - if (match === null) { - logger.warning( - `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}` - ); - return void 0; - } - currentLine = parseInt(match[1], 10); - continue; - } - if (diffLine.startsWith(" ")) { - currentLine++; - continue; - } - } - return diffRanges; -} // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -93747,14 +93588,29 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag, trap_import_duration_ms: Math.round(trapImportTime) }; } -async function setupDiffInformedQueryRun(branches, logger) { +async function setupDiffInformedQueryRun(logger) { return await withGroupAsync( "Generating diff range extension pack", async () => { + let diffRanges; + try { + diffRanges = readDiffRangesJsonFile(logger); + } catch (e) { + logger.debug( + `Failed to read precomputed diff ranges: ${getErrorMessage(e)}` + ); + diffRanges = void 0; + } + if (diffRanges === void 0) { + logger.info( + "No precomputed diff ranges found; skipping diff-informed analysis stage." + ); + return void 0; + } + const fileCount = new Set(diffRanges.filter((r) => r.path).map((r) => r.path)).size; logger.info( - `Calculating diff ranges for ${branches.base}...${branches.head}` + `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).` ); - const diffRanges = await getPullRequestEditedDiffRanges(branches, logger); const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); if (packDir === void 0) { logger.warning( @@ -93817,7 +93673,6 @@ extensions: `Wrote pr-diff-range extension pack to ${extensionFilePath}: ${extensionContents}` ); - writeDiffRangesJsonFile(logger, ranges); return diffRangeDir; } var defaultSuites = /* @__PURE__ */ new Set([ @@ -96206,12 +96061,7 @@ async function run() { getOptionalInput("ram") || process.env["CODEQL_RAM"], logger ); - const branches = await getDiffInformedAnalysisBranches( - codeql, - features, - logger - ); - const diffRangePackDir = branches ? await setupDiffInformedQueryRun(branches, logger) : void 0; + const diffRangePackDir = await setupDiffInformedQueryRun(logger); await warnIfGoInstalledAfterInit(config, logger); await runAutobuildIfLegacyGoWorkflow(config, logger); dbCreationTimings = await runFinalize( diff --git a/lib/init-action.js b/lib/init-action.js index 24a459a5bc..445977d382 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -91255,16 +91255,10 @@ async function computeAndPersistDiffRangesEarly(codeql, features, logger) { logger ); if (!branches) { - logger.debug( - "Diff-informed analysis not enabled (feature flag or context); skipping diff range computation." - ); return; } const ranges = await getPullRequestEditedDiffRanges(branches, logger); if (ranges === void 0) { - logger.info( - "Diff ranges unavailable (API limits, truncation, or error); will fall back to analyze-time computation." 
- ); return; } writeDiffRangesJsonFile(logger, ranges); diff --git a/src/analyze-action.ts b/src/analyze-action.ts index 3d0fb1c89e..864063af89 100644 --- a/src/analyze-action.ts +++ b/src/analyze-action.ts @@ -30,7 +30,6 @@ import { DependencyCacheUploadStatusReport, uploadDependencyCaches, } from "./dependency-caching"; -import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { Feature, Features } from "./feature-flags"; import { KnownLanguage } from "./languages"; @@ -299,14 +298,8 @@ async function run() { logger, ); - const branches = await getDiffInformedAnalysisBranches( - codeql, - features, - logger, - ); - const diffRangePackDir = branches - ? await setupDiffInformedQueryRun(branches, logger) - : undefined; + // Setup diff informed analysis if needed (based on whether init created the file) + const diffRangePackDir = await setupDiffInformedQueryRun(logger); await warnIfGoInstalledAfterInit(config, logger); await runAutobuildIfLegacyGoWorkflow(config, logger); diff --git a/src/analyze.ts b/src/analyze.ts index b3dc31a650..a9daa82340 100644 --- a/src/analyze.ts +++ b/src/analyze.ts @@ -6,7 +6,7 @@ import * as io from "@actions/io"; import * as del from "del"; import * as yaml from "js-yaml"; -import { getTemporaryDirectory, PullRequestBranches } from "./actions-util"; +import { getTemporaryDirectory } from "./actions-util"; import * as analyses from "./analyses"; // (getApiClient import removed; no longer needed after diff refactor) import { setupCppAutobuild } from "./autobuild"; @@ -14,11 +14,7 @@ import { type CodeQL } from "./codeql"; import * as configUtils from "./config-utils"; import { getJavaTempDependencyDir } from "./dependency-caching"; import { addDiagnostic, makeDiagnostic } from "./diagnostics"; -import { - DiffThunkRange, - writeDiffRangesJsonFile, - getPullRequestEditedDiffRanges, -} from "./diff-informed-analysis-utils"; +import { DiffThunkRange, readDiffRangesJsonFile } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { FeatureEnablement, Feature } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; @@ -284,17 +280,35 @@ async function finalizeDatabaseCreation( * the diff range information, or `undefined` if the feature is disabled. */ export async function setupDiffInformedQueryRun( - branches: PullRequestBranches, logger: Logger, ): Promise { return await withGroupAsync( "Generating diff range extension pack", async () => { + // Only use precomputed diff ranges; never recompute here. 
+ let diffRanges: DiffThunkRange[] | undefined; + try { + diffRanges = readDiffRangesJsonFile(logger); + } catch (e) { + logger.debug( + `Failed to read precomputed diff ranges: ${util.getErrorMessage(e)}`, + ); + diffRanges = undefined; + } + + if (diffRanges === undefined) { + logger.info( + "No precomputed diff ranges found; skipping diff-informed analysis stage.", + ); + return undefined; + } + + const fileCount = new Set(diffRanges.filter((r) => r.path).map((r) => r.path)).size; logger.info( - `Calculating diff ranges for ${branches.base}...${branches.head}`, + `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).`, ); - const diffRanges = await getPullRequestEditedDiffRanges(branches, logger); - const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); + + const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); if (packDir === undefined) { logger.warning( "Cannot create diff range extension pack for diff-informed queries; " + @@ -392,9 +406,6 @@ extensions: `Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`, ); - // Write the diff ranges to a JSON file, for action-side alert filtering by the - // upload-lib module. - writeDiffRangesJsonFile(logger, ranges); return diffRangeDir; } From 6a32dd9374b20730c7228550f05c5e8f0e29fbd8 Mon Sep 17 00:00:00 2001 From: Alex Eyers-Taylor Date: Wed, 8 Oct 2025 18:17:40 +0100 Subject: [PATCH 4/6] Include diff-edidted files in overlay changes. --- lib/analyze-action-post.js | 1355 +++++++++++++++-------------- lib/analyze-action.js | 59 +- lib/autobuild-action.js | 487 ++++++----- lib/init-action-post.js | 103 ++- lib/init-action.js | 72 +- lib/resolve-environment-action.js | 477 +++++----- lib/upload-lib.js | 59 +- lib/upload-sarif-action.js | 125 ++- src/overlay-database-utils.ts | 54 +- 9 files changed, 1649 insertions(+), 1142 deletions(-) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 19c987905e..6a58c47db5 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto = __importStar4(require("crypto")); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var os = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs7.existsSync(filePath)) { + if (!fs8.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs7.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { + fs8.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path6 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path7 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path6 && !path6.startsWith("/")) { - path6 = `/${path6}`; + if (path7 && !path7.startsWith("/")) { + path7 = `/${path7}`; } - url = new URL(origin + path6); + url = new URL(origin + path7); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path6) { - if (typeof path6 !== "string") { + module2.exports = function basename(path7) { + if (typeof path7 !== "string") { return ""; } - for (var i = path6.length - 1; i >= 0; --i) { - switch (path6.charCodeAt(i)) { + for (var i = path7.length - 1; i >= 0; --i) { + switch (path7.charCodeAt(i)) { case 47: // '/' case 92: - path6 = path6.slice(i + 1); - return path6 === ".." || path6 === "." ? "" : path6; + path7 = path7.slice(i + 1); + return path7 === ".." || path7 === "." ? "" : path7; } } - return path6 === ".." || path6 === "." ? "" : path6; + return path7 === ".." || path7 === "." ? "" : path7; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path6, + path: path7, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path6 !== "string") { + if (typeof path7 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path6[0] !== "/" && !(path6.startsWith("http://") || path6.startsWith("https://")) && method !== "CONNECT") { + } else if (path7[0] !== "/" && !(path7.startsWith("http://") || path7.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path6) !== null) { + } else if (invalidPathRegex.exec(path7) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path6, query) : path6; + this.path = query ? util.buildURL(path7, query) : path7; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path6 = search ? `${pathname}${search}` : pathname; + const path7 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path6; + this.opts.path = path7; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path6, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path7, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path6} HTTP/1.1\r + let header = `${method} ${path7} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path6, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path7, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path6; + headers[HTTP2_HEADER_PATH] = path7; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path6) { - if (typeof path6 !== "string") { - return path6; + function safeUrl(path7) { + if (typeof path7 !== "string") { + return path7; } - const pathSegments = path6.split("?"); + const pathSegments = path7.split("?"); if (pathSegments.length !== 2) { - return path6; + return path7; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path6, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path6); + function matchKey(mockDispatch2, { path: path7, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path7); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path6 }) => matchValue(safeUrl(path6), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path7 }) => matchValue(safeUrl(path7), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path6, method, body, headers, query } = opts; + const { path: path7, method, body, headers, query } = opts; return { - path: path6, + path: path7, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path6, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path7, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path6, + Path: path7, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path6) { - for (const char of path6) { + function validateCookiePath(path7) { + for (const char of path7) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path6 = opts.path; + let path7 = opts.path; if (!opts.path.startsWith("/")) { - path6 = `/${path6}`; + path7 = `/${path7}`; } - url = new URL(util.parseOrigin(url).origin + path6); + url = new URL(util.parseOrigin(url).origin + path7); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path6.sep); + return pth.replace(/[/\\]/g, path7.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs7 = __importStar4(require("fs")); - var path6 = __importStar4(require("path")); - _a = fs7.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs8 = __importStar4(require("fs")); + var path7 = __importStar4(require("path")); + _a = fs8.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs7.constants.O_RDONLY; + exports2.READONLY = fs8.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path6.extname(filePath).toUpperCase(); + const upperExt = path7.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path6.dirname(filePath); - const upperName = path6.basename(filePath).toUpperCase(); + const directory = path7.dirname(filePath); + const upperName = path7.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path6.join(directory, actualName); + filePath = path7.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var 
path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path6.join(dest, path6.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path7.join(dest, path7.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path6.relative(source, newDest) === "") { + if (path7.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path6.join(dest, path6.basename(source)); + dest = path7.join(dest, path7.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path6.dirname(dest)); + yield mkdirP(path7.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path6.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path7.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path6.sep)) { + if (tool.includes(path7.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path6.delimiter)) { + for (const p of process.env.PATH.split(path7.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path6.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path7.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var io6 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path6.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path7.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io6.which(this.toolPath, true); return new Promise((resolve5, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var 
require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path6.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path7.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -19835,8 +19835,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path6 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path6} does not exist${os_1.EOL}`); + const path7 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path7} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -28203,7 +28203,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -28211,7 +28211,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path6.dirname(p); + let result = path7.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -28249,7 +28249,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path6.sep; + root += path7.sep; } return root + itemPath; } @@ -28287,10 +28287,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path6.sep)) { + if (!p.endsWith(path7.sep)) { return p; } - if (p === path6.sep) { + if (p === path7.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -28623,7 +28623,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path6 = (function() { + var path7 = (function() { try { return require("path"); } catch (e) { @@ -28631,7 +28631,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path6.sep; + minimatch.sep = path7.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -28720,8 +28720,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path6.sep !== "/") { - pattern = pattern.split(path6.sep).join("/"); + if (!options.allowWindowsEscape && path7.sep !== "/") { + pattern = pattern.split(path7.sep).join("/"); } 
this.options = options; this.set = []; @@ -29090,8 +29090,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path6.sep !== "/") { - f = f.split(path6.sep).join("/"); + if (path7.sep !== "/") { + f = f.split(path7.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -29223,7 +29223,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -29238,12 +29238,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path6.sep); + this.segments = itemPath.split(path7.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path6.basename(remaining); + const basename = path7.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -29261,7 +29261,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path6.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path7.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -29272,12 +29272,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path6.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path7.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path6.sep; + result += path7.sep; } result += this.segments[i]; } @@ -29321,7 +29321,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -29350,7 +29350,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path6.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path7.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -29374,8 +29374,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = 
pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path6.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path6.sep}`; + if (!itemPath.endsWith(path7.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path7.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -29410,9 +29410,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path6.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path7.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path6.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path7.sep}`)) { homedir = homedir || os.homedir(); assert_1.default(homedir, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -29496,8 +29496,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path6, level) { - this.path = path6; + constructor(path7, level) { + this.path = path7; this.level = level; } }; @@ -29617,9 +29617,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -29669,7 +29669,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs7.promises.lstat(searchPath)); + yield __await4(fs8.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -29700,7 +29700,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs7.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path6.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs8.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path7.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -29735,7 +29735,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs7.promises.stat(item.path); + stats = yield fs8.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -29747,10 +29747,10 @@ var 
require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs7.promises.lstat(item.path); + stats = yield fs8.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs7.promises.realpath(item.path); + const realPath = yield fs8.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -31084,8 +31084,8 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar4(require_glob()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs7 = __importStar4(require("fs")); - var path6 = __importStar4(require("path")); + var fs8 = __importStar4(require("fs")); + var path7 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants7(); @@ -31105,16 +31105,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path6.join(baseLocation, "actions", "temp"); + tempDirectory = path7.join(baseLocation, "actions", "temp"); } - const dest = path6.join(tempDirectory, crypto.randomUUID()); + const dest = path7.join(tempDirectory, crypto.randomUUID()); yield io6.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs7.statSync(filePath).size; + return fs8.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -31131,7 +31131,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path6.relative(workspace, file).replace(new RegExp(`\\${path6.sep}`, "g"), "/"); + const relativeFile = path7.relative(workspace, file).replace(new RegExp(`\\${path7.sep}`, "g"), "/"); core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -31154,7 +31154,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs7.unlink)(filePath); + return util.promisify(fs8.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -31199,7 +31199,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs7.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs8.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -39037,15 +39037,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path6 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path6.startsWith("/")) { - path6 = path6.substring(1); + let path7 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path7.startsWith("/")) { + path7 = path7.substring(1); } - if (isAbsoluteUrl(path6)) { - requestUrl = path6; + if (isAbsoluteUrl(path7)) { + requestUrl = path7; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path6); + requestUrl = appendPath(requestUrl, path7); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -39093,9 
+39093,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path6 = pathToAppend.substring(0, searchStart); + const path7 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path6; + newPath = newPath + path7; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -42972,7 +42972,7 @@ var require_dist7 = __commonJS({ var stream = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs7 = require("fs"); + var fs8 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -42995,7 +42995,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs7); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs8); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -43244,10 +43244,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path6 = urlParsed.pathname; - path6 = path6 || "/"; - path6 = escape(path6); - urlParsed.pathname = path6; + let path7 = urlParsed.pathname; + path7 = path7 || "/"; + path7 = escape(path7); + urlParsed.pathname = path7; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -43332,9 +43332,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path6 = urlParsed.pathname; - path6 = path6 ? path6.endsWith("/") ? `${path6}${name}` : `${path6}/${name}` : name; - urlParsed.pathname = path6; + let path7 = urlParsed.pathname; + path7 = path7 ? path7.endsWith("/") ? 
`${path7}${name}` : `${path7}/${name}` : name; + urlParsed.pathname = path7; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -44415,9 +44415,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path6 = getURLPath(request.url) || "/"; + const path7 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path6}`; + canonicalizedResourceString += `/${this.factory.accountName}${path7}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -44710,9 +44710,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path6 = getURLPath(request.url) || "/"; + const path7 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path6}`; + canonicalizedResourceString += `/${options.accountName}${path7}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -64014,8 +64014,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path6 = getURLPath(subRequest.url); - if (!path6 || path6 === "") { + const path7 = getURLPath(subRequest.url); + if (!path7 || path7 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -64075,8 +64075,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path6 = getURLPath(url2); - if (path6 && path6 !== "/") { + const path7 = getURLPath(url2); + if (path7 && path7 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -66843,7 +66843,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -66954,7 +66954,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs7.createWriteStream(archivePath); + const writeStream = fs8.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -66980,7 +66980,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs7.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs8.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { 
socketTimeout: options.timeoutInMs, keepAlive: true @@ -67097,7 +67097,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs7.openSync(archivePath, "w"); + const fd = fs8.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -67115,12 +67115,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs7.writeFileSync(fd, result); + fs8.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs7.closeSync(fd); + fs8.closeSync(fd); } } }); @@ -67419,7 +67419,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -67557,7 +67557,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs7.openSync(archivePath, "r"); + const fd = fs8.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -67571,7 +67571,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs7.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs8.createReadStream(archivePath, { fd, start, end, @@ -67582,7 +67582,7 @@ Other caches with similar key:`); } }))); } finally { - fs7.closeSync(fd); + fs8.closeSync(fd); } return; }); @@ -72826,7 +72826,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io6 = __importStar4(require_io()); var fs_1 = require("fs"); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants7(); var IS_WINDOWS = process.platform === "win32"; @@ -72872,13 +72872,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -72924,7 +72924,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -72933,7 +72933,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -72948,7 +72948,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -72957,7 +72957,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -72997,7 +72997,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path6.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path7.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -73067,7 +73067,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core14 = __importStar4(require_core()); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -73164,7 +73164,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core14.isDebug()) { @@ -73233,7 +73233,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive path: ${archivePath}`); core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -73296,7 +73296,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73360,7 +73360,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73498,7 +73498,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os = require("os"); var cp = require("child_process"); - var fs7 = require("fs"); 
+ var fs8 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os.platform(); @@ -73562,10 +73562,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs7.existsSync(lsbReleaseFile)) { - contents = fs7.readFileSync(lsbReleaseFile).toString(); - } else if (fs7.existsSync(osReleaseFile)) { - contents = fs7.readFileSync(osReleaseFile).toString(); + if (fs8.existsSync(lsbReleaseFile)) { + contents = fs8.readFileSync(lsbReleaseFile).toString(); + } else if (fs8.existsSync(osReleaseFile)) { + contents = fs8.readFileSync(osReleaseFile).toString(); } return contents; } @@ -73742,10 +73742,10 @@ var require_tool_cache = __commonJS({ var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver8 = __importStar4(require_semver2()); var stream = __importStar4(require("stream")); @@ -73766,8 +73766,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path6.join(_getTempDirectory(), crypto.randomUUID()); - yield io6.mkdirP(path6.dirname(dest)); + dest = dest || path7.join(_getTempDirectory(), crypto.randomUUID()); + yield io6.mkdirP(path7.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -73789,7 +73789,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs7.existsSync(dest)) { + if (fs8.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -73813,7 +73813,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs7.createWriteStream(dest)); + yield pipeline(readStream, fs8.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -73854,7 +73854,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path6.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path7.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -74025,12 +74025,12 @@ var require_tool_cache = __commonJS({ arch = arch || os.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs7.statSync(sourceDir).isDirectory()) { + if (!fs8.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield 
_createToolPath(tool, version, arch); - for (const itemName of fs7.readdirSync(sourceDir)) { - const s = path6.join(sourceDir, itemName); + for (const itemName of fs8.readdirSync(sourceDir)) { + const s = path7.join(sourceDir, itemName); yield io6.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch); @@ -74044,11 +74044,11 @@ var require_tool_cache = __commonJS({ arch = arch || os.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source file: ${sourceFile}`); - if (!fs7.statSync(sourceFile).isFile()) { + if (!fs8.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path6.join(destFolder, targetFile); + const destPath = path7.join(destFolder, targetFile); core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); @@ -74072,9 +74072,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; - const cachePath = path6.join(_getCacheDirectory(), toolName, versionSpec, arch); + const cachePath = path7.join(_getCacheDirectory(), toolName, versionSpec, arch); core14.debug(`checking cache: ${cachePath}`); - if (fs7.existsSync(cachePath) && fs7.existsSync(`${cachePath}.complete`)) { + if (fs8.existsSync(cachePath) && fs8.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { @@ -74087,13 +74087,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch) { const versions = []; arch = arch || os.arch(); - const toolPath = path6.join(_getCacheDirectory(), toolName); - if (fs7.existsSync(toolPath)) { - const children = fs7.readdirSync(toolPath); + const toolPath = path7.join(_getCacheDirectory(), toolName); + if (fs8.existsSync(toolPath)) { + const children = fs8.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path6.join(toolPath, child, arch || ""); - if (fs7.existsSync(fullPath) && fs7.existsSync(`${fullPath}.complete`)) { + const fullPath = path7.join(toolPath, child, arch || ""); + if (fs8.existsSync(fullPath) && fs8.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -74147,7 +74147,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path6.join(_getTempDirectory(), crypto.randomUUID()); + dest = path7.join(_getTempDirectory(), crypto.randomUUID()); } yield io6.mkdirP(dest); return dest; @@ -74155,7 +74155,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); @@ -74165,9 +74165,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch) { - const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath 
= `${folderPath}.complete`; - fs7.writeFileSync(markerPath, ""); + fs8.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -74687,8 +74687,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative, base) { - return useNativeURL ? new URL2(relative, base) : parseUrl(url.resolve(base, relative)); + function resolveUrl(relative2, base) { + return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -76940,13 +76940,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.validateArtifactName = validateArtifactName; - function validateFilePath(path6) { - if (!path6) { + function validateFilePath(path7) { + if (!path7) { throw new Error(`Provided file path input during validation is empty`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path6.includes(invalidCharacterKey)) { - throw new Error(`The path for one of the files in artifact is not valid: ${path6}. Contains the following character: ${errorMessageForCharacter} + if (path7.includes(invalidCharacterKey)) { + throw new Error(`The path for one of the files in artifact is not valid: ${path7}. Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -77326,15 +77326,15 @@ var require_upload_zip_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadZipSpecification = exports2.validateRootDirectory = void 0; - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var core_1 = require_core(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); function validateRootDirectory(rootDirectory) { - if (!fs7.existsSync(rootDirectory)) { + if (!fs8.existsSync(rootDirectory)) { throw new Error(`The provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs7.statSync(rootDirectory).isDirectory()) { + if (!fs8.statSync(rootDirectory).isDirectory()) { throw new Error(`The provided rootDirectory ${rootDirectory} is not a valid directory`); } (0, core_1.info)(`Root directory input is valid!`); @@ -77345,7 +77345,7 @@ var require_upload_zip_specification = __commonJS({ rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of filesToZip) { - const stats = fs7.lstatSync(file, { throwIfNoEntry: false }); + const stats = fs8.lstatSync(file, { throwIfNoEntry: false }); if (!stats) { throw new Error(`File ${file} does not exist`); } @@ -77811,8 +77811,8 @@ var require_minimatch2 = __commonJS({ return new Minimatch(pattern, options).match(p); }; module2.exports = minimatch; - var path6 = require_path(); - minimatch.sep = path6.sep; + var path7 = require_path(); + minimatch.sep = path7.sep; var GLOBSTAR = Symbol("globstar **"); minimatch.GLOBSTAR = GLOBSTAR; var expand = require_brace_expansion2(); @@ -78321,8 +78321,8 @@ var require_minimatch2 = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; const options = this.options; - if (path6.sep !== "/") { - f = f.split(path6.sep).join("/"); + if (path7.sep !== "/") { + 
f = f.split(path7.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -78360,13 +78360,13 @@ var require_minimatch2 = __commonJS({ var require_readdir_glob = __commonJS({ "node_modules/readdir-glob/index.js"(exports2, module2) { module2.exports = readdirGlob; - var fs7 = require("fs"); + var fs8 = require("fs"); var { EventEmitter } = require("events"); var { Minimatch } = require_minimatch2(); var { resolve: resolve5 } = require("path"); function readdir(dir, strict) { return new Promise((resolve6, reject) => { - fs7.readdir(dir, { withFileTypes: true }, (err, files) => { + fs8.readdir(dir, { withFileTypes: true }, (err, files) => { if (err) { switch (err.code) { case "ENOTDIR": @@ -78399,7 +78399,7 @@ var require_readdir_glob = __commonJS({ } function stat(file, followSymlinks) { return new Promise((resolve6, reject) => { - const statFunc = followSymlinks ? fs7.stat : fs7.lstat; + const statFunc = followSymlinks ? fs8.stat : fs8.lstat; statFunc(file, (err, stats) => { if (err) { switch (err.code) { @@ -78420,8 +78420,8 @@ var require_readdir_glob = __commonJS({ }); }); } - async function* exploreWalkAsync(dir, path6, followSymlinks, useStat, shouldSkip, strict) { - let files = await readdir(path6 + dir, strict); + async function* exploreWalkAsync(dir, path7, followSymlinks, useStat, shouldSkip, strict) { + let files = await readdir(path7 + dir, strict); for (const file of files) { let name = file.name; if (name === void 0) { @@ -78429,8 +78429,8 @@ var require_readdir_glob = __commonJS({ useStat = true; } const filename = dir + "/" + name; - const relative = filename.slice(1); - const absolute = path6 + "/" + relative; + const relative2 = filename.slice(1); + const absolute = path7 + "/" + relative2; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -78442,17 +78442,17 @@ var require_readdir_glob = __commonJS({ stats = { isDirectory: () => false }; } if (stats.isDirectory()) { - if (!shouldSkip(relative)) { - yield { relative, absolute, stats }; - yield* exploreWalkAsync(filename, path6, followSymlinks, useStat, shouldSkip, false); + if (!shouldSkip(relative2)) { + yield { relative: relative2, absolute, stats }; + yield* exploreWalkAsync(filename, path7, followSymlinks, useStat, shouldSkip, false); } } else { - yield { relative, absolute, stats }; + yield { relative: relative2, absolute, stats }; } } } - async function* explore(path6, followSymlinks, useStat, shouldSkip) { - yield* exploreWalkAsync("", path6, followSymlinks, useStat, shouldSkip, true); + async function* explore(path7, followSymlinks, useStat, shouldSkip) { + yield* exploreWalkAsync("", path7, followSymlinks, useStat, shouldSkip, true); } function readOptions(options) { return { @@ -78517,11 +78517,11 @@ var require_readdir_glob = __commonJS({ } setTimeout(() => this._next(), 0); } - _shouldSkipDirectory(relative) { - return this.skipMatchers.some((m) => m.match(relative)); + _shouldSkipDirectory(relative2) { + return this.skipMatchers.some((m) => m.match(relative2)); } - _fileMatches(relative, isDirectory) { - const file = relative + (isDirectory ? "/" : ""); + _fileMatches(relative2, isDirectory) { + const file = relative2 + (isDirectory ? 
"/" : ""); return (this.matchers.length === 0 || this.matchers.some((m) => m.match(file))) && !this.ignoreMatchers.some((m) => m.match(file)) && (!this.options.nodir || !isDirectory); } _next() { @@ -78530,16 +78530,16 @@ var require_readdir_glob = __commonJS({ if (!obj.done) { const isDirectory = obj.value.stats.isDirectory(); if (this._fileMatches(obj.value.relative, isDirectory)) { - let relative = obj.value.relative; + let relative2 = obj.value.relative; let absolute = obj.value.absolute; if (this.options.mark && isDirectory) { - relative += "/"; + relative2 += "/"; absolute += "/"; } if (this.options.stat) { - this.emit("match", { relative, absolute, stat: obj.value.stats }); + this.emit("match", { relative: relative2, absolute, stat: obj.value.stats }); } else { - this.emit("match", { relative, absolute }); + this.emit("match", { relative: relative2, absolute }); } } this._next(this.iterator); @@ -80464,54 +80464,54 @@ var require_polyfills = __commonJS({ } var chdir; module2.exports = patch; - function patch(fs7) { + function patch(fs8) { if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs7); - } - if (!fs7.lutimes) { - patchLutimes(fs7); - } - fs7.chown = chownFix(fs7.chown); - fs7.fchown = chownFix(fs7.fchown); - fs7.lchown = chownFix(fs7.lchown); - fs7.chmod = chmodFix(fs7.chmod); - fs7.fchmod = chmodFix(fs7.fchmod); - fs7.lchmod = chmodFix(fs7.lchmod); - fs7.chownSync = chownFixSync(fs7.chownSync); - fs7.fchownSync = chownFixSync(fs7.fchownSync); - fs7.lchownSync = chownFixSync(fs7.lchownSync); - fs7.chmodSync = chmodFixSync(fs7.chmodSync); - fs7.fchmodSync = chmodFixSync(fs7.fchmodSync); - fs7.lchmodSync = chmodFixSync(fs7.lchmodSync); - fs7.stat = statFix(fs7.stat); - fs7.fstat = statFix(fs7.fstat); - fs7.lstat = statFix(fs7.lstat); - fs7.statSync = statFixSync(fs7.statSync); - fs7.fstatSync = statFixSync(fs7.fstatSync); - fs7.lstatSync = statFixSync(fs7.lstatSync); - if (fs7.chmod && !fs7.lchmod) { - fs7.lchmod = function(path6, mode, cb) { + patchLchmod(fs8); + } + if (!fs8.lutimes) { + patchLutimes(fs8); + } + fs8.chown = chownFix(fs8.chown); + fs8.fchown = chownFix(fs8.fchown); + fs8.lchown = chownFix(fs8.lchown); + fs8.chmod = chmodFix(fs8.chmod); + fs8.fchmod = chmodFix(fs8.fchmod); + fs8.lchmod = chmodFix(fs8.lchmod); + fs8.chownSync = chownFixSync(fs8.chownSync); + fs8.fchownSync = chownFixSync(fs8.fchownSync); + fs8.lchownSync = chownFixSync(fs8.lchownSync); + fs8.chmodSync = chmodFixSync(fs8.chmodSync); + fs8.fchmodSync = chmodFixSync(fs8.fchmodSync); + fs8.lchmodSync = chmodFixSync(fs8.lchmodSync); + fs8.stat = statFix(fs8.stat); + fs8.fstat = statFix(fs8.fstat); + fs8.lstat = statFix(fs8.lstat); + fs8.statSync = statFixSync(fs8.statSync); + fs8.fstatSync = statFixSync(fs8.fstatSync); + fs8.lstatSync = statFixSync(fs8.lstatSync); + if (fs8.chmod && !fs8.lchmod) { + fs8.lchmod = function(path7, mode, cb) { if (cb) process.nextTick(cb); }; - fs7.lchmodSync = function() { + fs8.lchmodSync = function() { }; } - if (fs7.chown && !fs7.lchown) { - fs7.lchown = function(path6, uid, gid, cb) { + if (fs8.chown && !fs8.lchown) { + fs8.lchown = function(path7, uid, gid, cb) { if (cb) process.nextTick(cb); }; - fs7.lchownSync = function() { + fs8.lchownSync = function() { }; } if (platform === "win32") { - fs7.rename = typeof fs7.rename !== "function" ? fs7.rename : (function(fs$rename) { + fs8.rename = typeof fs8.rename !== "function" ? 
fs8.rename : (function(fs$rename) { function rename(from, to, cb) { var start = Date.now(); var backoff = 0; fs$rename(from, to, function CB(er) { if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < 6e4) { setTimeout(function() { - fs7.stat(to, function(stater, st) { + fs8.stat(to, function(stater, st) { if (stater && stater.code === "ENOENT") fs$rename(from, to, CB); else @@ -80527,9 +80527,9 @@ var require_polyfills = __commonJS({ } if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename); return rename; - })(fs7.rename); + })(fs8.rename); } - fs7.read = typeof fs7.read !== "function" ? fs7.read : (function(fs$read) { + fs8.read = typeof fs8.read !== "function" ? fs8.read : (function(fs$read) { function read(fd, buffer, offset, length, position, callback_) { var callback; if (callback_ && typeof callback_ === "function") { @@ -80537,22 +80537,22 @@ var require_polyfills = __commonJS({ callback = function(er, _2, __) { if (er && er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; - return fs$read.call(fs7, fd, buffer, offset, length, position, callback); + return fs$read.call(fs8, fd, buffer, offset, length, position, callback); } callback_.apply(this, arguments); }; } - return fs$read.call(fs7, fd, buffer, offset, length, position, callback); + return fs$read.call(fs8, fd, buffer, offset, length, position, callback); } if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read); return read; - })(fs7.read); - fs7.readSync = typeof fs7.readSync !== "function" ? fs7.readSync : /* @__PURE__ */ (function(fs$readSync) { + })(fs8.read); + fs8.readSync = typeof fs8.readSync !== "function" ? fs8.readSync : /* @__PURE__ */ (function(fs$readSync) { return function(fd, buffer, offset, length, position) { var eagCounter = 0; while (true) { try { - return fs$readSync.call(fs7, fd, buffer, offset, length, position); + return fs$readSync.call(fs8, fd, buffer, offset, length, position); } catch (er) { if (er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; @@ -80562,11 +80562,11 @@ var require_polyfills = __commonJS({ } } }; - })(fs7.readSync); - function patchLchmod(fs8) { - fs8.lchmod = function(path6, mode, callback) { - fs8.open( - path6, + })(fs8.readSync); + function patchLchmod(fs9) { + fs9.lchmod = function(path7, mode, callback) { + fs9.open( + path7, constants.O_WRONLY | constants.O_SYMLINK, mode, function(err, fd) { @@ -80574,80 +80574,80 @@ var require_polyfills = __commonJS({ if (callback) callback(err); return; } - fs8.fchmod(fd, mode, function(err2) { - fs8.close(fd, function(err22) { + fs9.fchmod(fd, mode, function(err2) { + fs9.close(fd, function(err22) { if (callback) callback(err2 || err22); }); }); } ); }; - fs8.lchmodSync = function(path6, mode) { - var fd = fs8.openSync(path6, constants.O_WRONLY | constants.O_SYMLINK, mode); + fs9.lchmodSync = function(path7, mode) { + var fd = fs9.openSync(path7, constants.O_WRONLY | constants.O_SYMLINK, mode); var threw = true; var ret; try { - ret = fs8.fchmodSync(fd, mode); + ret = fs9.fchmodSync(fd, mode); threw = false; } finally { if (threw) { try { - fs8.closeSync(fd); + fs9.closeSync(fd); } catch (er) { } } else { - fs8.closeSync(fd); + fs9.closeSync(fd); } } return ret; }; } - function patchLutimes(fs8) { - if (constants.hasOwnProperty("O_SYMLINK") && fs8.futimes) { - fs8.lutimes = function(path6, at, mt, cb) { - fs8.open(path6, constants.O_SYMLINK, function(er, fd) { + function patchLutimes(fs9) { + if (constants.hasOwnProperty("O_SYMLINK") && fs9.futimes) { + fs9.lutimes = 
function(path7, at, mt, cb) { + fs9.open(path7, constants.O_SYMLINK, function(er, fd) { if (er) { if (cb) cb(er); return; } - fs8.futimes(fd, at, mt, function(er2) { - fs8.close(fd, function(er22) { + fs9.futimes(fd, at, mt, function(er2) { + fs9.close(fd, function(er22) { if (cb) cb(er2 || er22); }); }); }); }; - fs8.lutimesSync = function(path6, at, mt) { - var fd = fs8.openSync(path6, constants.O_SYMLINK); + fs9.lutimesSync = function(path7, at, mt) { + var fd = fs9.openSync(path7, constants.O_SYMLINK); var ret; var threw = true; try { - ret = fs8.futimesSync(fd, at, mt); + ret = fs9.futimesSync(fd, at, mt); threw = false; } finally { if (threw) { try { - fs8.closeSync(fd); + fs9.closeSync(fd); } catch (er) { } } else { - fs8.closeSync(fd); + fs9.closeSync(fd); } } return ret; }; - } else if (fs8.futimes) { - fs8.lutimes = function(_a, _b, _c, cb) { + } else if (fs9.futimes) { + fs9.lutimes = function(_a, _b, _c, cb) { if (cb) process.nextTick(cb); }; - fs8.lutimesSync = function() { + fs9.lutimesSync = function() { }; } } function chmodFix(orig) { if (!orig) return orig; return function(target, mode, cb) { - return orig.call(fs7, target, mode, function(er) { + return orig.call(fs8, target, mode, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -80657,7 +80657,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, mode) { try { - return orig.call(fs7, target, mode); + return orig.call(fs8, target, mode); } catch (er) { if (!chownErOk(er)) throw er; } @@ -80666,7 +80666,7 @@ var require_polyfills = __commonJS({ function chownFix(orig) { if (!orig) return orig; return function(target, uid, gid, cb) { - return orig.call(fs7, target, uid, gid, function(er) { + return orig.call(fs8, target, uid, gid, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -80676,7 +80676,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, uid, gid) { try { - return orig.call(fs7, target, uid, gid); + return orig.call(fs8, target, uid, gid); } catch (er) { if (!chownErOk(er)) throw er; } @@ -80696,13 +80696,13 @@ var require_polyfills = __commonJS({ } if (cb) cb.apply(this, arguments); } - return options ? orig.call(fs7, target, options, callback) : orig.call(fs7, target, callback); + return options ? orig.call(fs8, target, options, callback) : orig.call(fs8, target, callback); }; } function statFixSync(orig) { if (!orig) return orig; return function(target, options) { - var stats = options ? orig.call(fs7, target, options) : orig.call(fs7, target); + var stats = options ? 
orig.call(fs8, target, options) : orig.call(fs8, target); if (stats) { if (stats.uid < 0) stats.uid += 4294967296; if (stats.gid < 0) stats.gid += 4294967296; @@ -80731,16 +80731,16 @@ var require_legacy_streams = __commonJS({ "node_modules/graceful-fs/legacy-streams.js"(exports2, module2) { var Stream = require("stream").Stream; module2.exports = legacy; - function legacy(fs7) { + function legacy(fs8) { return { ReadStream, WriteStream }; - function ReadStream(path6, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path6, options); + function ReadStream(path7, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path7, options); Stream.call(this); var self2 = this; - this.path = path6; + this.path = path7; this.fd = null; this.readable = true; this.paused = false; @@ -80774,7 +80774,7 @@ var require_legacy_streams = __commonJS({ }); return; } - fs7.open(this.path, this.flags, this.mode, function(err, fd) { + fs8.open(this.path, this.flags, this.mode, function(err, fd) { if (err) { self2.emit("error", err); self2.readable = false; @@ -80785,10 +80785,10 @@ var require_legacy_streams = __commonJS({ self2._read(); }); } - function WriteStream(path6, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path6, options); + function WriteStream(path7, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path7, options); Stream.call(this); - this.path = path6; + this.path = path7; this.fd = null; this.writable = true; this.flags = "w"; @@ -80813,7 +80813,7 @@ var require_legacy_streams = __commonJS({ this.busy = false; this._queue = []; if (this.fd === null) { - this._open = fs7.open; + this._open = fs8.open; this._queue.push([this._open, this.path, this.flags, this.mode, void 0]); this.flush(); } @@ -80848,7 +80848,7 @@ var require_clone = __commonJS({ // node_modules/graceful-fs/graceful-fs.js var require_graceful_fs = __commonJS({ "node_modules/graceful-fs/graceful-fs.js"(exports2, module2) { - var fs7 = require("fs"); + var fs8 = require("fs"); var polyfills = require_polyfills(); var legacy = require_legacy_streams(); var clone = require_clone(); @@ -80880,12 +80880,12 @@ var require_graceful_fs = __commonJS({ m = "GFS4: " + m.split(/\n/).join("\nGFS4: "); console.error(m); }; - if (!fs7[gracefulQueue]) { + if (!fs8[gracefulQueue]) { queue = global[gracefulQueue] || []; - publishQueue(fs7, queue); - fs7.close = (function(fs$close) { + publishQueue(fs8, queue); + fs8.close = (function(fs$close) { function close(fd, cb) { - return fs$close.call(fs7, fd, function(err) { + return fs$close.call(fs8, fd, function(err) { if (!err) { resetQueue(); } @@ -80897,48 +80897,48 @@ var require_graceful_fs = __commonJS({ value: fs$close }); return close; - })(fs7.close); - fs7.closeSync = (function(fs$closeSync) { + })(fs8.close); + fs8.closeSync = (function(fs$closeSync) { function closeSync(fd) { - fs$closeSync.apply(fs7, arguments); + fs$closeSync.apply(fs8, arguments); resetQueue(); } Object.defineProperty(closeSync, previousSymbol, { value: fs$closeSync }); return closeSync; - })(fs7.closeSync); + })(fs8.closeSync); if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) { process.on("exit", function() { - debug2(fs7[gracefulQueue]); - require("assert").equal(fs7[gracefulQueue].length, 0); + debug2(fs8[gracefulQueue]); + require("assert").equal(fs8[gracefulQueue].length, 0); }); } } var queue; if (!global[gracefulQueue]) { - publishQueue(global, fs7[gracefulQueue]); - } - module2.exports = patch(clone(fs7)); - if 
(process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs7.__patched) { - module2.exports = patch(fs7); - fs7.__patched = true; - } - function patch(fs8) { - polyfills(fs8); - fs8.gracefulify = patch; - fs8.createReadStream = createReadStream; - fs8.createWriteStream = createWriteStream2; - var fs$readFile = fs8.readFile; - fs8.readFile = readFile; - function readFile(path6, options, cb) { + publishQueue(global, fs8[gracefulQueue]); + } + module2.exports = patch(clone(fs8)); + if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs8.__patched) { + module2.exports = patch(fs8); + fs8.__patched = true; + } + function patch(fs9) { + polyfills(fs9); + fs9.gracefulify = patch; + fs9.createReadStream = createReadStream; + fs9.createWriteStream = createWriteStream2; + var fs$readFile = fs9.readFile; + fs9.readFile = readFile; + function readFile(path7, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$readFile(path6, options, cb); - function go$readFile(path7, options2, cb2, startTime) { - return fs$readFile(path7, options2, function(err) { + return go$readFile(path7, options, cb); + function go$readFile(path8, options2, cb2, startTime) { + return fs$readFile(path8, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$readFile, [path7, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$readFile, [path8, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -80946,16 +80946,16 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$writeFile = fs8.writeFile; - fs8.writeFile = writeFile; - function writeFile(path6, data, options, cb) { + var fs$writeFile = fs9.writeFile; + fs9.writeFile = writeFile; + function writeFile(path7, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$writeFile(path6, data, options, cb); - function go$writeFile(path7, data2, options2, cb2, startTime) { - return fs$writeFile(path7, data2, options2, function(err) { + return go$writeFile(path7, data, options, cb); + function go$writeFile(path8, data2, options2, cb2, startTime) { + return fs$writeFile(path8, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$writeFile, [path7, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$writeFile, [path8, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -80963,17 +80963,17 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$appendFile = fs8.appendFile; + var fs$appendFile = fs9.appendFile; if (fs$appendFile) - fs8.appendFile = appendFile; - function appendFile(path6, data, options, cb) { + fs9.appendFile = appendFile; + function appendFile(path7, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$appendFile(path6, data, options, cb); - function go$appendFile(path7, data2, options2, cb2, startTime) { - return fs$appendFile(path7, data2, options2, function(err) { + return go$appendFile(path7, data, options, cb); + function go$appendFile(path8, data2, options2, cb2, startTime) { + return fs$appendFile(path8, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$appendFile, [path7, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$appendFile, [path8, data2, 
options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -80981,9 +80981,9 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$copyFile = fs8.copyFile; + var fs$copyFile = fs9.copyFile; if (fs$copyFile) - fs8.copyFile = copyFile; + fs9.copyFile = copyFile; function copyFile(src, dest, flags, cb) { if (typeof flags === "function") { cb = flags; @@ -81001,34 +81001,34 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$readdir = fs8.readdir; - fs8.readdir = readdir; + var fs$readdir = fs9.readdir; + fs9.readdir = readdir; var noReaddirOptionVersions = /^v[0-5]\./; - function readdir(path6, options, cb) { + function readdir(path7, options, cb) { if (typeof options === "function") cb = options, options = null; - var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path7, options2, cb2, startTime) { - return fs$readdir(path7, fs$readdirCallback( - path7, + var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path8, options2, cb2, startTime) { + return fs$readdir(path8, fs$readdirCallback( + path8, options2, cb2, startTime )); - } : function go$readdir2(path7, options2, cb2, startTime) { - return fs$readdir(path7, options2, fs$readdirCallback( - path7, + } : function go$readdir2(path8, options2, cb2, startTime) { + return fs$readdir(path8, options2, fs$readdirCallback( + path8, options2, cb2, startTime )); }; - return go$readdir(path6, options, cb); - function fs$readdirCallback(path7, options2, cb2, startTime) { + return go$readdir(path7, options, cb); + function fs$readdirCallback(path8, options2, cb2, startTime) { return function(err, files) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) enqueue([ go$readdir, - [path7, options2, cb2], + [path8, options2, cb2], err, startTime || Date.now(), Date.now() @@ -81043,21 +81043,21 @@ var require_graceful_fs = __commonJS({ } } if (process.version.substr(0, 4) === "v0.8") { - var legStreams = legacy(fs8); + var legStreams = legacy(fs9); ReadStream = legStreams.ReadStream; WriteStream = legStreams.WriteStream; } - var fs$ReadStream = fs8.ReadStream; + var fs$ReadStream = fs9.ReadStream; if (fs$ReadStream) { ReadStream.prototype = Object.create(fs$ReadStream.prototype); ReadStream.prototype.open = ReadStream$open; } - var fs$WriteStream = fs8.WriteStream; + var fs$WriteStream = fs9.WriteStream; if (fs$WriteStream) { WriteStream.prototype = Object.create(fs$WriteStream.prototype); WriteStream.prototype.open = WriteStream$open; } - Object.defineProperty(fs8, "ReadStream", { + Object.defineProperty(fs9, "ReadStream", { get: function() { return ReadStream; }, @@ -81067,7 +81067,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(fs8, "WriteStream", { + Object.defineProperty(fs9, "WriteStream", { get: function() { return WriteStream; }, @@ -81078,7 +81078,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileReadStream = ReadStream; - Object.defineProperty(fs8, "FileReadStream", { + Object.defineProperty(fs9, "FileReadStream", { get: function() { return FileReadStream; }, @@ -81089,7 +81089,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileWriteStream = WriteStream; - Object.defineProperty(fs8, "FileWriteStream", { + Object.defineProperty(fs9, "FileWriteStream", { get: function() { return FileWriteStream; }, @@ -81099,7 +81099,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, 
configurable: true }); - function ReadStream(path6, options) { + function ReadStream(path7, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this; else @@ -81119,7 +81119,7 @@ var require_graceful_fs = __commonJS({ } }); } - function WriteStream(path6, options) { + function WriteStream(path7, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this; else @@ -81137,22 +81137,22 @@ var require_graceful_fs = __commonJS({ } }); } - function createReadStream(path6, options) { - return new fs8.ReadStream(path6, options); + function createReadStream(path7, options) { + return new fs9.ReadStream(path7, options); } - function createWriteStream2(path6, options) { - return new fs8.WriteStream(path6, options); + function createWriteStream2(path7, options) { + return new fs9.WriteStream(path7, options); } - var fs$open = fs8.open; - fs8.open = open; - function open(path6, flags, mode, cb) { + var fs$open = fs9.open; + fs9.open = open; + function open(path7, flags, mode, cb) { if (typeof mode === "function") cb = mode, mode = null; - return go$open(path6, flags, mode, cb); - function go$open(path7, flags2, mode2, cb2, startTime) { - return fs$open(path7, flags2, mode2, function(err, fd) { + return go$open(path7, flags, mode, cb); + function go$open(path8, flags2, mode2, cb2, startTime) { + return fs$open(path8, flags2, mode2, function(err, fd) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$open, [path7, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$open, [path8, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -81160,20 +81160,20 @@ var require_graceful_fs = __commonJS({ }); } } - return fs8; + return fs9; } function enqueue(elem) { debug2("ENQUEUE", elem[0].name, elem[1]); - fs7[gracefulQueue].push(elem); + fs8[gracefulQueue].push(elem); retry3(); } var retryTimer; function resetQueue() { var now = Date.now(); - for (var i = 0; i < fs7[gracefulQueue].length; ++i) { - if (fs7[gracefulQueue][i].length > 2) { - fs7[gracefulQueue][i][3] = now; - fs7[gracefulQueue][i][4] = now; + for (var i = 0; i < fs8[gracefulQueue].length; ++i) { + if (fs8[gracefulQueue][i].length > 2) { + fs8[gracefulQueue][i][3] = now; + fs8[gracefulQueue][i][4] = now; } } retry3(); @@ -81181,9 +81181,9 @@ var require_graceful_fs = __commonJS({ function retry3() { clearTimeout(retryTimer); retryTimer = void 0; - if (fs7[gracefulQueue].length === 0) + if (fs8[gracefulQueue].length === 0) return; - var elem = fs7[gracefulQueue].shift(); + var elem = fs8[gracefulQueue].shift(); var fn = elem[0]; var args = elem[1]; var err = elem[2]; @@ -81205,7 +81205,7 @@ var require_graceful_fs = __commonJS({ debug2("RETRY", fn.name, args); fn.apply(null, args.concat([startTime])); } else { - fs7[gracefulQueue].push(elem); + fs8[gracefulQueue].push(elem); } } if (retryTimer === void 0) { @@ -81505,7 +81505,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join7(s) { + BufferList.prototype.join = function join8(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -83253,22 +83253,22 @@ var require_lazystream = __commonJS({ // node_modules/normalize-path/index.js var require_normalize_path = __commonJS({ "node_modules/normalize-path/index.js"(exports2, module2) { - module2.exports = function(path6, stripTrailing) { - if 
(typeof path6 !== "string") { + module2.exports = function(path7, stripTrailing) { + if (typeof path7 !== "string") { throw new TypeError("expected path to be a string"); } - if (path6 === "\\" || path6 === "/") return "/"; - var len = path6.length; - if (len <= 1) return path6; + if (path7 === "\\" || path7 === "/") return "/"; + var len = path7.length; + if (len <= 1) return path7; var prefix = ""; - if (len > 4 && path6[3] === "\\") { - var ch = path6[2]; - if ((ch === "?" || ch === ".") && path6.slice(0, 2) === "\\\\") { - path6 = path6.slice(2); + if (len > 4 && path7[3] === "\\") { + var ch = path7[2]; + if ((ch === "?" || ch === ".") && path7.slice(0, 2) === "\\\\") { + path7 = path7.slice(2); prefix = "//"; } } - var segs = path6.split(/[/\\]+/); + var segs = path7.split(/[/\\]+/); if (stripTrailing !== false && segs[segs.length - 1] === "") { segs.pop(); } @@ -84024,8 +84024,8 @@ var require_primordials = __commonJS({ ArrayPrototypeIndexOf(self2, el) { return self2.indexOf(el); }, - ArrayPrototypeJoin(self2, sep2) { - return self2.join(sep2); + ArrayPrototypeJoin(self2, sep4) { + return self2.join(sep4); }, ArrayPrototypeMap(self2, fn) { return self2.map(fn); @@ -91803,11 +91803,11 @@ var require_commonjs13 = __commonJS({ return (f) => f.length === len && f !== "." && f !== ".."; }; var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix"; - var path6 = { + var path7 = { win32: { sep: "\\" }, posix: { sep: "/" } }; - exports2.sep = defaultPlatform === "win32" ? path6.win32.sep : path6.posix.sep; + exports2.sep = defaultPlatform === "win32" ? path7.win32.sep : path7.posix.sep; exports2.minimatch.sep = exports2.sep; exports2.GLOBSTAR = Symbol("globstar **"); exports2.minimatch.GLOBSTAR = exports2.GLOBSTAR; @@ -95055,12 +95055,12 @@ var require_commonjs16 = __commonJS({ /** * Get the Path object referenced by the string path, resolved from this Path */ - resolve(path6) { - if (!path6) { + resolve(path7) { + if (!path7) { return this; } - const rootPath = this.getRootString(path6); - const dir = path6.substring(rootPath.length); + const rootPath = this.getRootString(path7); + const dir = path7.substring(rootPath.length); const dirParts = dir.split(this.splitSep); const result = rootPath ? this.getRoot(rootPath).#resolveParts(dirParts) : this.#resolveParts(dirParts); return result; @@ -95813,8 +95813,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - getRootString(path6) { - return node_path_1.win32.parse(path6).root; + getRootString(path7) { + return node_path_1.win32.parse(path7).root; } /** * @internal @@ -95861,8 +95861,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - getRootString(path6) { - return path6.startsWith("/") ? "/" : ""; + getRootString(path7) { + return path7.startsWith("/") ? 
"/" : ""; } /** * @internal @@ -95912,8 +95912,8 @@ var require_commonjs16 = __commonJS({ * * @internal */ - constructor(cwd = process.cwd(), pathImpl, sep2, { nocase, childrenCacheSize = 16 * 1024, fs: fs7 = defaultFS } = {}) { - this.#fs = fsFromOption(fs7); + constructor(cwd = process.cwd(), pathImpl, sep4, { nocase, childrenCacheSize = 16 * 1024, fs: fs8 = defaultFS } = {}) { + this.#fs = fsFromOption(fs8); if (cwd instanceof URL || cwd.startsWith("file://")) { cwd = (0, node_url_1.fileURLToPath)(cwd); } @@ -95923,7 +95923,7 @@ var require_commonjs16 = __commonJS({ this.#resolveCache = new ResolveCache(); this.#resolvePosixCache = new ResolveCache(); this.#children = new ChildrenCache(childrenCacheSize); - const split = cwdPath.substring(this.rootPath.length).split(sep2); + const split = cwdPath.substring(this.rootPath.length).split(sep4); if (split.length === 1 && !split[0]) { split.pop(); } @@ -95952,11 +95952,11 @@ var require_commonjs16 = __commonJS({ /** * Get the depth of a provided path, string, or the cwd */ - depth(path6 = this.cwd) { - if (typeof path6 === "string") { - path6 = this.cwd.resolve(path6); + depth(path7 = this.cwd) { + if (typeof path7 === "string") { + path7 = this.cwd.resolve(path7); } - return path6.depth(); + return path7.depth(); } /** * Return the cache of child entries. Exposed so subclasses can create @@ -96443,9 +96443,9 @@ var require_commonjs16 = __commonJS({ process2(); return results; } - chdir(path6 = this.cwd) { + chdir(path7 = this.cwd) { const oldCwd = this.cwd; - this.cwd = typeof path6 === "string" ? this.cwd.resolve(path6) : path6; + this.cwd = typeof path7 === "string" ? this.cwd.resolve(path7) : path7; this.cwd[setAsCwd](oldCwd); } }; @@ -96472,8 +96472,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - newRoot(fs7) { - return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs7 }); + newRoot(fs8) { + return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs8 }); } /** * Return true if the provided path string is an absolute path @@ -96502,8 +96502,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - newRoot(fs7) { - return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs7 }); + newRoot(fs8) { + return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs8 }); } /** * Return true if the provided path string is an absolute path @@ -96766,10 +96766,10 @@ var require_ignore = __commonJS({ ignored(p) { const fullpath = p.fullpath(); const fullpaths = `${fullpath}/`; - const relative = p.relative() || "."; - const relatives = `${relative}/`; + const relative2 = p.relative() || "."; + const relatives = `${relative2}/`; for (const m of this.relative) { - if (m.match(relative) || m.match(relatives)) + if (m.match(relative2) || m.match(relatives)) return true; } for (const m of this.absolute) { @@ -96780,9 +96780,9 @@ var require_ignore = __commonJS({ } childrenIgnored(p) { const fullpath = p.fullpath() + "/"; - const relative = (p.relative() || ".") + "/"; + const relative2 = (p.relative() || ".") + "/"; for (const m of this.relativeChildren) { - if (m.match(relative)) + if (m.match(relative2)) return true; } for (const m of this.absoluteChildren) { @@ -96833,8 +96833,8 @@ var require_processor = __commonJS({ } // match, absolute, ifdir entries() { - return [...this.store.entries()].map(([path6, n]) => [ - path6, + return 
[...this.store.entries()].map(([path7, n]) => [ + path7, !!(n & 2), !!(n & 1) ]); @@ -97052,9 +97052,9 @@ var require_walker = __commonJS({ signal; maxDepth; includeChildMatches; - constructor(patterns, path6, opts) { + constructor(patterns, path7, opts) { this.patterns = patterns; - this.path = path6; + this.path = path7; this.opts = opts; this.#sep = !opts.posix && opts.platform === "win32" ? "\\" : "/"; this.includeChildMatches = opts.includeChildMatches !== false; @@ -97073,11 +97073,11 @@ var require_walker = __commonJS({ }); } } - #ignored(path6) { - return this.seen.has(path6) || !!this.#ignore?.ignored?.(path6); + #ignored(path7) { + return this.seen.has(path7) || !!this.#ignore?.ignored?.(path7); } - #childrenIgnored(path6) { - return !!this.#ignore?.childrenIgnored?.(path6); + #childrenIgnored(path7) { + return !!this.#ignore?.childrenIgnored?.(path7); } // backpressure mechanism pause() { @@ -97293,8 +97293,8 @@ var require_walker = __commonJS({ exports2.GlobUtil = GlobUtil; var GlobWalker = class extends GlobUtil { matches = /* @__PURE__ */ new Set(); - constructor(patterns, path6, opts) { - super(patterns, path6, opts); + constructor(patterns, path7, opts) { + super(patterns, path7, opts); } matchEmit(e) { this.matches.add(e); @@ -97332,8 +97332,8 @@ var require_walker = __commonJS({ exports2.GlobWalker = GlobWalker; var GlobStream = class extends GlobUtil { results; - constructor(patterns, path6, opts) { - super(patterns, path6, opts); + constructor(patterns, path7, opts) { + super(patterns, path7, opts); this.results = new minipass_1.Minipass({ signal: this.signal, objectMode: true @@ -97688,8 +97688,8 @@ var require_commonjs17 = __commonJS({ // node_modules/archiver-utils/file.js var require_file3 = __commonJS({ "node_modules/archiver-utils/file.js"(exports2, module2) { - var fs7 = require_graceful_fs(); - var path6 = require("path"); + var fs8 = require_graceful_fs(); + var path7 = require("path"); var flatten = require_flatten(); var difference = require_difference(); var union = require_union(); @@ -97714,8 +97714,8 @@ var require_file3 = __commonJS({ return result; }; file.exists = function() { - var filepath = path6.join.apply(path6, arguments); - return fs7.existsSync(filepath); + var filepath = path7.join.apply(path7, arguments); + return fs8.existsSync(filepath); }; file.expand = function(...args) { var options = isPlainObject(args[0]) ? 
args.shift() : {}; @@ -97728,12 +97728,12 @@ var require_file3 = __commonJS({ }); if (options.filter) { matches = matches.filter(function(filepath) { - filepath = path6.join(options.cwd || "", filepath); + filepath = path7.join(options.cwd || "", filepath); try { if (typeof options.filter === "function") { return options.filter(filepath); } else { - return fs7.statSync(filepath)[options.filter](); + return fs8.statSync(filepath)[options.filter](); } } catch (e) { return false; @@ -97745,7 +97745,7 @@ var require_file3 = __commonJS({ file.expandMapping = function(patterns, destBase, options) { options = Object.assign({ rename: function(destBase2, destPath) { - return path6.join(destBase2 || "", destPath); + return path7.join(destBase2 || "", destPath); } }, options); var files = []; @@ -97753,14 +97753,14 @@ var require_file3 = __commonJS({ file.expand(options, patterns).forEach(function(src) { var destPath = src; if (options.flatten) { - destPath = path6.basename(destPath); + destPath = path7.basename(destPath); } if (options.ext) { destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); } var dest = options.rename(destBase, destPath, options); if (options.cwd) { - src = path6.join(options.cwd, src); + src = path7.join(options.cwd, src); } dest = dest.replace(pathSeparatorRe, "/"); src = src.replace(pathSeparatorRe, "/"); @@ -97841,8 +97841,8 @@ var require_file3 = __commonJS({ // node_modules/archiver-utils/index.js var require_archiver_utils = __commonJS({ "node_modules/archiver-utils/index.js"(exports2, module2) { - var fs7 = require_graceful_fs(); - var path6 = require("path"); + var fs8 = require_graceful_fs(); + var path7 = require("path"); var isStream = require_is_stream(); var lazystream = require_lazystream(); var normalizePath = require_normalize_path(); @@ -97890,7 +97890,7 @@ var require_archiver_utils = __commonJS({ }; utils.lazyReadStream = function(filepath) { return new lazystream.Readable(function() { - return fs7.createReadStream(filepath); + return fs8.createReadStream(filepath); }); }; utils.normalizeInputSource = function(source) { @@ -97918,7 +97918,7 @@ var require_archiver_utils = __commonJS({ callback = base; base = dirpath; } - fs7.readdir(dirpath, function(err, list) { + fs8.readdir(dirpath, function(err, list) { var i = 0; var file; var filepath; @@ -97930,11 +97930,11 @@ var require_archiver_utils = __commonJS({ if (!file) { return callback(null, results); } - filepath = path6.join(dirpath, file); - fs7.stat(filepath, function(err2, stats) { + filepath = path7.join(dirpath, file); + fs8.stat(filepath, function(err2, stats) { results.push({ path: filepath, - relative: path6.relative(base, filepath).replace(/\\/g, "/"), + relative: path7.relative(base, filepath).replace(/\\/g, "/"), stats }); if (stats && stats.isDirectory()) { @@ -97993,10 +97993,10 @@ var require_error2 = __commonJS({ // node_modules/archiver/lib/core.js var require_core2 = __commonJS({ "node_modules/archiver/lib/core.js"(exports2, module2) { - var fs7 = require("fs"); + var fs8 = require("fs"); var glob2 = require_readdir_glob(); var async = require_async(); - var path6 = require("path"); + var path7 = require("path"); var util = require_archiver_utils(); var inherits = require("util").inherits; var ArchiverError = require_error2(); @@ -98057,7 +98057,7 @@ var require_core2 = __commonJS({ data.sourcePath = filepath; task.data = data; this._entriesCount++; - if (data.stats && data.stats instanceof fs7.Stats) { + if (data.stats && data.stats instanceof fs8.Stats) { task = 
this._updateQueueTaskWithStats(task, data.stats); if (task) { if (data.stats.size) { @@ -98228,7 +98228,7 @@ var require_core2 = __commonJS({ callback(); return; } - fs7.lstat(task.filepath, function(err, stats) { + fs8.lstat(task.filepath, function(err, stats) { if (this._state.aborted) { setImmediate(callback); return; @@ -98271,10 +98271,10 @@ var require_core2 = __commonJS({ task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else if (stats.isSymbolicLink() && this._moduleSupports("symlink")) { - var linkPath = fs7.readlinkSync(task.filepath); - var dirName = path6.dirname(task.filepath); + var linkPath = fs8.readlinkSync(task.filepath); + var dirName = path7.dirname(task.filepath); task.data.type = "symlink"; - task.data.linkname = path6.relative(dirName, path6.resolve(dirName, linkPath)); + task.data.linkname = path7.relative(dirName, path7.resolve(dirName, linkPath)); task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else { @@ -102724,8 +102724,8 @@ var require_context2 = __commonJS({ if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path6 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path6} does not exist${os_1.EOL}`); + const path7 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path7} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -103867,14 +103867,14 @@ var require_url_state_machine = __commonJS({ return url.replace(/\u0009|\u000A|\u000D/g, ""); } function shortenPath(url) { - const path6 = url.path; - if (path6.length === 0) { + const path7 = url.path; + if (path7.length === 0) { return; } - if (url.scheme === "file" && path6.length === 1 && isNormalizedWindowsDriveLetter(path6[0])) { + if (url.scheme === "file" && path7.length === 1 && isNormalizedWindowsDriveLetter(path7[0])) { return; } - path6.pop(); + path7.pop(); } function includesCredentials(url) { return url.username !== "" || url.password !== ""; @@ -108045,7 +108045,7 @@ var require_traverse = __commonJS({ })(this.value); }; function walk(root, cb, immutable) { - var path6 = []; + var path7 = []; var parents = []; var alive = true; return (function walker(node_) { @@ -108054,11 +108054,11 @@ var require_traverse = __commonJS({ var state = { node, node_, - path: [].concat(path6), + path: [].concat(path7), parent: parents.slice(-1)[0], - key: path6.slice(-1)[0], - isRoot: path6.length === 0, - level: path6.length, + key: path7.slice(-1)[0], + isRoot: path7.length === 0, + level: path7.length, circular: null, update: function(x) { if (!state.isRoot) { @@ -108113,7 +108113,7 @@ var require_traverse = __commonJS({ parents.push(state); var keys = Object.keys(state.node); keys.forEach(function(key, i2) { - path6.push(key); + path7.push(key); if (modifiers.pre) modifiers.pre.call(state, state.node[key], key); var child = walker(state.node[key]); if (immutable && Object.hasOwnProperty.call(state.node, key)) { @@ -108122,7 +108122,7 @@ var require_traverse = __commonJS({ child.isLast = i2 == keys.length - 1; child.isFirst = i2 == 0; if (modifiers.post) modifiers.post.call(state, child); - path6.pop(); + path7.pop(); }); parents.pop(); } @@ -109143,11 +109143,11 @@ var require_unzip_stream = __commonJS({ return requiredLength; case states.CENTRAL_DIRECTORY_FILE_HEADER_SUFFIX: var isUtf8 = (this.parsedEntity.flags & 2048) !== 0; - var path6 = this._decodeString(chunk.slice(0, 
this.parsedEntity.fileNameLength), isUtf8); + var path7 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); var extraDataBuffer = chunk.slice(this.parsedEntity.fileNameLength, this.parsedEntity.fileNameLength + this.parsedEntity.extraFieldLength); var extra = this._readExtraFields(extraDataBuffer); if (extra && extra.parsed && extra.parsed.path && !isUtf8) { - path6 = extra.parsed.path; + path7 = extra.parsed.path; } this.parsedEntity.extra = extra.parsed; var isUnix = (this.parsedEntity.versionMadeBy & 65280) >> 8 === 3; @@ -109159,7 +109159,7 @@ var require_unzip_stream = __commonJS({ } if (this.options.debug) { const debugObj = Object.assign({}, this.parsedEntity, { - path: path6, + path: path7, flags: "0x" + this.parsedEntity.flags.toString(16), unixAttrs: unixAttrs && "0" + unixAttrs.toString(8), isSymlink, @@ -109596,8 +109596,8 @@ var require_parser_stream = __commonJS({ // node_modules/mkdirp/index.js var require_mkdirp = __commonJS({ "node_modules/mkdirp/index.js"(exports2, module2) { - var path6 = require("path"); - var fs7 = require("fs"); + var path7 = require("path"); + var fs8 = require("fs"); var _0777 = parseInt("0777", 8); module2.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; function mkdirP(p, opts, f, made) { @@ -109608,7 +109608,7 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs7; + var xfs = opts.fs || fs8; if (mode === void 0) { mode = _0777; } @@ -109616,7 +109616,7 @@ var require_mkdirp = __commonJS({ var cb = f || /* istanbul ignore next */ function() { }; - p = path6.resolve(p); + p = path7.resolve(p); xfs.mkdir(p, mode, function(er) { if (!er) { made = made || p; @@ -109624,8 +109624,8 @@ var require_mkdirp = __commonJS({ } switch (er.code) { case "ENOENT": - if (path6.dirname(p) === p) return cb(er); - mkdirP(path6.dirname(p), opts, function(er2, made2) { + if (path7.dirname(p) === p) return cb(er); + mkdirP(path7.dirname(p), opts, function(er2, made2) { if (er2) cb(er2, made2); else mkdirP(p, opts, cb, made2); }); @@ -109647,19 +109647,19 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs7; + var xfs = opts.fs || fs8; if (mode === void 0) { mode = _0777; } if (!made) made = null; - p = path6.resolve(p); + p = path7.resolve(p); try { xfs.mkdirSync(p, mode); made = made || p; } catch (err0) { switch (err0.code) { case "ENOENT": - made = sync(path6.dirname(p), opts, made); + made = sync(path7.dirname(p), opts, made); sync(p, opts, made); break; // In the case of any other error, just see if there's a dir @@ -109684,8 +109684,8 @@ var require_mkdirp = __commonJS({ // node_modules/unzip-stream/lib/extract.js var require_extract2 = __commonJS({ "node_modules/unzip-stream/lib/extract.js"(exports2, module2) { - var fs7 = require("fs"); - var path6 = require("path"); + var fs8 = require("fs"); + var path7 = require("path"); var util = require("util"); var mkdirp = require_mkdirp(); var Transform = require("stream").Transform; @@ -109727,11 +109727,11 @@ var require_extract2 = __commonJS({ }; Extract.prototype._processEntry = function(entry) { var self2 = this; - var destPath = path6.join(this.opts.path, entry.path); - var directory = entry.isDirectory ? destPath : path6.dirname(destPath); + var destPath = path7.join(this.opts.path, entry.path); + var directory = entry.isDirectory ? 
destPath : path7.dirname(destPath); this.unfinishedEntries++; var writeFileFn = function() { - var pipedStream = fs7.createWriteStream(destPath); + var pipedStream = fs8.createWriteStream(destPath); pipedStream.on("close", function() { self2.unfinishedEntries--; self2._notifyAwaiter(); @@ -109855,10 +109855,10 @@ var require_download_artifact = __commonJS({ parsed.search = ""; return parsed.toString(); }; - function exists(path6) { + function exists(path7) { return __awaiter4(this, void 0, void 0, function* () { try { - yield promises_1.default.access(path6); + yield promises_1.default.access(path7); return true; } catch (error2) { if (error2.code === "ENOENT") { @@ -110089,12 +110089,12 @@ var require_dist_node24 = __commonJS({ octokit.log.debug("request", options); const start = Date.now(); const requestOptions = octokit.request.endpoint.parse(options); - const path6 = requestOptions.url.replace(options.baseUrl, ""); + const path7 = requestOptions.url.replace(options.baseUrl, ""); return request(options).then((response) => { - octokit.log.info(`${requestOptions.method} ${path6} - ${response.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path7} - ${response.status} in ${Date.now() - start}ms`); return response; }).catch((error2) => { - octokit.log.info(`${requestOptions.method} ${path6} - ${error2.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path7} - ${error2.status} in ${Date.now() - start}ms`); throw error2; }); }); @@ -110814,13 +110814,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.checkArtifactName = checkArtifactName; - function checkArtifactFilePath(path6) { - if (!path6) { - throw new Error(`Artifact path: ${path6}, is incorrectly provided`); + function checkArtifactFilePath(path7) { + if (!path7) { + throw new Error(`Artifact path: ${path7}, is incorrectly provided`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path6.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path6}. Contains the following character: ${errorMessageForCharacter} + if (path7.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path7}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -110866,25 +110866,25 @@ var require_upload_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadSpecification = void 0; - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var core_1 = require_core(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation2(); function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { const specifications = []; - if (!fs7.existsSync(rootDirectory)) { + if (!fs8.existsSync(rootDirectory)) { throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs7.statSync(rootDirectory).isDirectory()) { + if (!fs8.statSync(rootDirectory).isDirectory()) { throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); } rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of artifactFiles) { - if (!fs7.existsSync(file)) { + if (!fs8.existsSync(file)) { throw new Error(`File ${file} does not exist`); } - if (!fs7.statSync(file).isDirectory()) { + if (!fs8.statSync(file).isDirectory()) { file = (0, path_1.normalize)(file); file = (0, path_1.resolve)(file); if (!file.startsWith(rootDirectory)) { @@ -110909,11 +110909,11 @@ var require_upload_specification = __commonJS({ // node_modules/tmp/lib/tmp.js var require_tmp = __commonJS({ "node_modules/tmp/lib/tmp.js"(exports2, module2) { - var fs7 = require("fs"); + var fs8 = require("fs"); var os = require("os"); - var path6 = require("path"); + var path7 = require("path"); var crypto = require("crypto"); - var _c = { fs: fs7.constants, os: os.constants }; + var _c = { fs: fs8.constants, os: os.constants }; var RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; var TEMPLATE_PATTERN = /XXXXXX/; var DEFAULT_TRIES = 3; @@ -110925,13 +110925,13 @@ var require_tmp = __commonJS({ var FILE_MODE = 384; var EXIT = "exit"; var _removeObjects = []; - var FN_RMDIR_SYNC = fs7.rmdirSync.bind(fs7); + var FN_RMDIR_SYNC = fs8.rmdirSync.bind(fs8); var _gracefulCleanup = false; function rimraf(dirPath, callback) { - return fs7.rm(dirPath, { recursive: true }, callback); + return fs8.rm(dirPath, { recursive: true }, callback); } function FN_RIMRAF_SYNC(dirPath) { - return fs7.rmSync(dirPath, { recursive: true }); + return fs8.rmSync(dirPath, { recursive: true }); } function tmpName(options, callback) { const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; @@ -110941,7 +110941,7 @@ var require_tmp = __commonJS({ (function _getUniqueName() { try { const name = _generateTmpName(sanitizedOptions); - fs7.stat(name, function(err2) { + fs8.stat(name, function(err2) { if (!err2) { if (tries-- > 0) return _getUniqueName(); return cb(new Error("Could not get a unique tmp filename, max tries reached " + name)); @@ -110961,7 +110961,7 @@ var require_tmp = __commonJS({ do { const name = _generateTmpName(sanitizedOptions); try { - fs7.statSync(name); + fs8.statSync(name); } catch (e) { return name; } @@ -110972,10 +110972,10 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs7.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, 
function _fileCreated(err2, fd) { + fs8.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err2, fd) { if (err2) return cb(err2); if (opts.discardDescriptor) { - return fs7.close(fd, function _discardCallback(possibleErr) { + return fs8.close(fd, function _discardCallback(possibleErr) { return cb(possibleErr, name, void 0, _prepareTmpFileRemoveCallback(name, -1, opts, false)); }); } else { @@ -110989,9 +110989,9 @@ var require_tmp = __commonJS({ const args = _parseArguments(options), opts = args[0]; const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; const name = tmpNameSync(opts); - let fd = fs7.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + let fd = fs8.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); if (opts.discardDescriptor) { - fs7.closeSync(fd); + fs8.closeSync(fd); fd = void 0; } return { @@ -111004,7 +111004,7 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs7.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { + fs8.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { if (err2) return cb(err2); cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); }); @@ -111013,7 +111013,7 @@ var require_tmp = __commonJS({ function dirSync(options) { const args = _parseArguments(options), opts = args[0]; const name = tmpNameSync(opts); - fs7.mkdirSync(name, opts.mode || DIR_MODE); + fs8.mkdirSync(name, opts.mode || DIR_MODE); return { name, removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) @@ -111027,20 +111027,20 @@ var require_tmp = __commonJS({ next(); }; if (0 <= fdPath[0]) - fs7.close(fdPath[0], function() { - fs7.unlink(fdPath[1], _handler); + fs8.close(fdPath[0], function() { + fs8.unlink(fdPath[1], _handler); }); - else fs7.unlink(fdPath[1], _handler); + else fs8.unlink(fdPath[1], _handler); } function _removeFileSync(fdPath) { let rethrownException = null; try { - if (0 <= fdPath[0]) fs7.closeSync(fdPath[0]); + if (0 <= fdPath[0]) fs8.closeSync(fdPath[0]); } catch (e) { if (!_isEBADF(e) && !_isENOENT(e)) throw e; } finally { try { - fs7.unlinkSync(fdPath[1]); + fs8.unlinkSync(fdPath[1]); } catch (e) { if (!_isENOENT(e)) rethrownException = e; } @@ -111056,7 +111056,7 @@ var require_tmp = __commonJS({ return sync ? removeCallbackSync : removeCallback; } function _prepareTmpDirRemoveCallback(name, opts, sync) { - const removeFunction = opts.unsafeCleanup ? rimraf : fs7.rmdir.bind(fs7); + const removeFunction = opts.unsafeCleanup ? rimraf : fs8.rmdir.bind(fs8); const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); @@ -111118,35 +111118,35 @@ var require_tmp = __commonJS({ return [actualOptions, callback]; } function _resolvePath(name, tmpDir, cb) { - const pathToResolve = path6.isAbsolute(name) ? name : path6.join(tmpDir, name); - fs7.stat(pathToResolve, function(err) { + const pathToResolve = path7.isAbsolute(name) ? 
name : path7.join(tmpDir, name); + fs8.stat(pathToResolve, function(err) { if (err) { - fs7.realpath(path6.dirname(pathToResolve), function(err2, parentDir) { + fs8.realpath(path7.dirname(pathToResolve), function(err2, parentDir) { if (err2) return cb(err2); - cb(null, path6.join(parentDir, path6.basename(pathToResolve))); + cb(null, path7.join(parentDir, path7.basename(pathToResolve))); }); } else { - fs7.realpath(path6, cb); + fs8.realpath(path7, cb); } }); } function _resolvePathSync(name, tmpDir) { - const pathToResolve = path6.isAbsolute(name) ? name : path6.join(tmpDir, name); + const pathToResolve = path7.isAbsolute(name) ? name : path7.join(tmpDir, name); try { - fs7.statSync(pathToResolve); - return fs7.realpathSync(pathToResolve); + fs8.statSync(pathToResolve); + return fs8.realpathSync(pathToResolve); } catch (_err) { - const parentDir = fs7.realpathSync(path6.dirname(pathToResolve)); - return path6.join(parentDir, path6.basename(pathToResolve)); + const parentDir = fs8.realpathSync(path7.dirname(pathToResolve)); + return path7.join(parentDir, path7.basename(pathToResolve)); } } function _generateTmpName(opts) { const tmpDir = opts.tmpdir; if (!_isUndefined(opts.name)) { - return path6.join(tmpDir, opts.dir, opts.name); + return path7.join(tmpDir, opts.dir, opts.name); } if (!_isUndefined(opts.template)) { - return path6.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + return path7.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); } const name = [ opts.prefix ? opts.prefix : "tmp", @@ -111156,13 +111156,13 @@ var require_tmp = __commonJS({ _randomChars(12), opts.postfix ? "-" + opts.postfix : "" ].join(""); - return path6.join(tmpDir, opts.dir, name); + return path7.join(tmpDir, opts.dir, name); } function _assertOptionsBase(options) { if (!_isUndefined(options.name)) { const name = options.name; - if (path6.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); - const basename = path6.basename(name); + if (path7.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); + const basename = path7.basename(name); if (basename === ".." || basename === "." 
|| basename !== name) throw new Error(`name option must not contain a path, found "${name}".`); } @@ -111184,7 +111184,7 @@ var require_tmp = __commonJS({ if (_isUndefined(name)) return cb(null); _resolvePath(name, tmpDir, function(err, resolvedPath) { if (err) return cb(err); - const relativePath = path6.relative(tmpDir, resolvedPath); + const relativePath = path7.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { return cb(new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`)); } @@ -111194,7 +111194,7 @@ var require_tmp = __commonJS({ function _getRelativePathSync(option, name, tmpDir) { if (_isUndefined(name)) return; const resolvedPath = _resolvePathSync(name, tmpDir); - const relativePath = path6.relative(tmpDir, resolvedPath); + const relativePath = path7.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { throw new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`); } @@ -111241,10 +111241,10 @@ var require_tmp = __commonJS({ _gracefulCleanup = true; } function _getTmpDir(options, cb) { - return fs7.realpath(options && options.tmpdir || os.tmpdir(), cb); + return fs8.realpath(options && options.tmpdir || os.tmpdir(), cb); } function _getTmpDirSync(options) { - return fs7.realpathSync(options && options.tmpdir || os.tmpdir()); + return fs8.realpathSync(options && options.tmpdir || os.tmpdir()); } process.addListener(EXIT, _garbageCollector); Object.defineProperty(module2.exports, "tmpdir", { @@ -111274,14 +111274,14 @@ var require_tmp_promise = __commonJS({ var fileWithOptions = promisify( (options, cb) => tmp.file( options, - (err, path6, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path6, fd, cleanup: promisify(cleanup) }) + (err, path7, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path7, fd, cleanup: promisify(cleanup) }) ) ); module2.exports.file = async (options) => fileWithOptions(options); module2.exports.withFile = async function withFile(fn, options) { - const { path: path6, fd, cleanup } = await module2.exports.file(options); + const { path: path7, fd, cleanup } = await module2.exports.file(options); try { - return await fn({ path: path6, fd }); + return await fn({ path: path7, fd }); } finally { await cleanup(); } @@ -111290,14 +111290,14 @@ var require_tmp_promise = __commonJS({ var dirWithOptions = promisify( (options, cb) => tmp.dir( options, - (err, path6, cleanup) => err ? cb(err) : cb(void 0, { path: path6, cleanup: promisify(cleanup) }) + (err, path7, cleanup) => err ? 
cb(err) : cb(void 0, { path: path7, cleanup: promisify(cleanup) }) ) ); module2.exports.dir = async (options) => dirWithOptions(options); module2.exports.withDir = async function withDir(fn, options) { - const { path: path6, cleanup } = await module2.exports.dir(options); + const { path: path7, cleanup } = await module2.exports.dir(options); try { - return await fn({ path: path6 }); + return await fn({ path: path7 }); } finally { await cleanup(); } @@ -112098,10 +112098,10 @@ var require_upload_gzip = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createGZipFileInBuffer = exports2.createGZipFileOnDisk = void 0; - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var zlib = __importStar4(require("zlib")); var util_1 = require("util"); - var stat = (0, util_1.promisify)(fs7.stat); + var stat = (0, util_1.promisify)(fs8.stat); var gzipExemptFileExtensions = [ ".gz", ".gzip", @@ -112134,9 +112134,9 @@ var require_upload_gzip = __commonJS({ } } return new Promise((resolve5, reject) => { - const inputStream = fs7.createReadStream(originalFilePath); + const inputStream = fs8.createReadStream(originalFilePath); const gzip = zlib.createGzip(); - const outputStream = fs7.createWriteStream(tempFilePath); + const outputStream = fs8.createWriteStream(tempFilePath); inputStream.pipe(gzip).pipe(outputStream); outputStream.on("finish", () => __awaiter4(this, void 0, void 0, function* () { const size = (yield stat(tempFilePath)).size; @@ -112154,7 +112154,7 @@ var require_upload_gzip = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { return new Promise((resolve5) => __awaiter4(this, void 0, void 0, function* () { var _a, e_1, _b, _c; - const inputStream = fs7.createReadStream(originalFilePath); + const inputStream = fs8.createReadStream(originalFilePath); const gzip = zlib.createGzip(); inputStream.pipe(gzip); const chunks = []; @@ -112363,7 +112363,7 @@ var require_upload_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var core14 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream = __importStar4(require("stream")); @@ -112377,7 +112377,7 @@ var require_upload_http_client = __commonJS({ var http_manager_1 = require_http_manager(); var upload_gzip_1 = require_upload_gzip(); var requestUtils_1 = require_requestUtils2(); - var stat = (0, util_1.promisify)(fs7.stat); + var stat = (0, util_1.promisify)(fs8.stat); var UploadHttpClient = class { constructor() { this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), "@actions/artifact-upload"); @@ -112514,7 +112514,7 @@ var require_upload_http_client = __commonJS({ let openUploadStream; if (totalFileSize < buffer.byteLength) { core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. 
The original file will be uploaded as-is`); - openUploadStream = () => fs7.createReadStream(parameters.file); + openUploadStream = () => fs8.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { @@ -112560,7 +112560,7 @@ var require_upload_http_client = __commonJS({ failedChunkSizes += chunkSize; continue; } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs7.createReadStream(uploadFilePath, { + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs8.createReadStream(uploadFilePath, { start: startChunkIndex, end: endChunkIndex, autoClose: false @@ -112755,7 +112755,7 @@ var require_download_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var core14 = __importStar4(require_core()); var zlib = __importStar4(require("zlib")); var utils_1 = require_utils10(); @@ -112846,7 +112846,7 @@ var require_download_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let retryCount = 0; const retryLimit = (0, config_variables_1.getRetryLimit)(); - let destinationStream = fs7.createWriteStream(downloadPath); + let destinationStream = fs8.createWriteStream(downloadPath); const headers = (0, utils_1.getDownloadHeaders)("application/json", true, true); const makeDownloadRequest = () => __awaiter4(this, void 0, void 0, function* () { const client = this.downloadHttpManager.getClient(httpClientIndex); @@ -112888,7 +112888,7 @@ var require_download_http_client = __commonJS({ } }); yield (0, utils_1.rmFile)(fileDownloadPath); - destinationStream = fs7.createWriteStream(fileDownloadPath); + destinationStream = fs8.createWriteStream(fileDownloadPath); }); while (retryCount <= retryLimit) { let response; @@ -113005,21 +113005,21 @@ var require_download_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadSpecification = void 0; - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { const directories = /* @__PURE__ */ new Set(); const specifications = { - rootDownloadLocation: includeRootDirectory ? path6.join(downloadPath, artifactName) : downloadPath, + rootDownloadLocation: includeRootDirectory ? path7.join(downloadPath, artifactName) : downloadPath, directoryStructure: [], emptyFilesToCreate: [], filesToDownload: [] }; for (const entry of artifactEntries) { if (entry.path.startsWith(`${artifactName}/`) || entry.path.startsWith(`${artifactName}\\`)) { - const normalizedPathEntry = path6.normalize(entry.path); - const filePath = path6.join(downloadPath, includeRootDirectory ? normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); + const normalizedPathEntry = path7.normalize(entry.path); + const filePath = path7.join(downloadPath, includeRootDirectory ? 
normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); if (entry.itemType === "file") { - directories.add(path6.dirname(filePath)); + directories.add(path7.dirname(filePath)); if (entry.fileLength === 0) { specifications.emptyFilesToCreate.push(filePath); } else { @@ -113161,7 +113161,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz return uploadResponse; }); } - downloadArtifact(name, path6, options) { + downloadArtifact(name, path7, options) { return __awaiter4(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const artifacts = yield downloadHttpClient.listArtifacts(); @@ -113175,12 +113175,12 @@ Note: The size of downloaded zips can differ significantly from the reported siz throw new Error(`Unable to find an artifact with the name: ${name}`); } const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path6) { - path6 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path7) { + path7 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path6 = (0, path_1.normalize)(path6); - path6 = (0, path_1.resolve)(path6); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path6, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + path7 = (0, path_1.normalize)(path7); + path7 = (0, path_1.resolve)(path7); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path7, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { core14.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { @@ -113195,7 +113195,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz }; }); } - downloadAllArtifacts(path6) { + downloadAllArtifacts(path7) { return __awaiter4(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const response = []; @@ -113204,18 +113204,18 @@ Note: The size of downloaded zips can differ significantly from the reported siz core14.info("Unable to find any artifacts for the associated workflow"); return response; } - if (!path6) { - path6 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path7) { + path7 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path6 = (0, path_1.normalize)(path6); - path6 = (0, path_1.resolve)(path6); + path7 = (0, path_1.normalize)(path7); + path7 = (0, path_1.resolve)(path7); let downloadedArtifacts = 0; while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; core14.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path6, true); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path7, true); if (downloadSpecification.filesToDownload.length === 0) { core14.info(`No downloadable files were 
found for any artifact ${currentArtifactToDownload.name}`); } else { @@ -113356,7 +113356,7 @@ var require_internal_path_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -113364,7 +113364,7 @@ var require_internal_path_helper2 = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path6.dirname(p); + let result = path7.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -113402,7 +113402,7 @@ var require_internal_path_helper2 = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path6.sep; + root += path7.sep; } return root + itemPath; } @@ -113440,10 +113440,10 @@ var require_internal_path_helper2 = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path6.sep)) { + if (!p.endsWith(path7.sep)) { return p; } - if (p === path6.sep) { + if (p === path7.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -113594,7 +113594,7 @@ var require_internal_path2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -113609,12 +113609,12 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path6.sep); + this.segments = itemPath.split(path7.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path6.basename(remaining); + const basename = path7.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -113632,7 +113632,7 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path6.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path7.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -113643,12 +113643,12 @@ var require_internal_path2 = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path6.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path7.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path6.sep; + result += path7.sep; } result += this.segments[i]; } @@ 
-113696,7 +113696,7 @@ var require_internal_pattern2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -113725,7 +113725,7 @@ var require_internal_pattern2 = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path6.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path7.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -113749,8 +113749,8 @@ var require_internal_pattern2 = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path6.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path6.sep}`; + if (!itemPath.endsWith(path7.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path7.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -113785,9 +113785,9 @@ var require_internal_pattern2 = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path6.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path7.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path6.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path7.sep}`)) { homedir = homedir || os.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. 
Actual '${homedir}'`); @@ -113871,8 +113871,8 @@ var require_internal_search_state2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path6, level) { - this.path = path6; + constructor(path7, level) { + this.path = path7; this.level = level; } }; @@ -113996,9 +113996,9 @@ var require_internal_globber2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper2()); var internal_match_kind_1 = require_internal_match_kind2(); var internal_pattern_1 = require_internal_pattern2(); @@ -114050,7 +114050,7 @@ var require_internal_globber2 = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs7.promises.lstat(searchPath)); + yield __await4(fs8.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -114074,7 +114074,7 @@ var require_internal_globber2 = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path6.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path7.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -114084,7 +114084,7 @@ var require_internal_globber2 = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs7.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path6.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs8.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path7.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -114119,7 +114119,7 @@ var require_internal_globber2 = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs7.promises.stat(item.path); + stats = yield fs8.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -114131,10 +114131,10 @@ var require_internal_globber2 = __commonJS({ throw err; } } else { - stats = yield fs7.promises.lstat(item.path); + stats = yield fs8.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs7.promises.realpath(item.path); + const realPath = yield fs8.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -114233,10 +114233,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); var core14 = __importStar4(require_core()); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); - var path6 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); function hashFiles2(globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; @@ -114252,17 +114252,17 @@ var 
require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path6.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path7.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs7.statSync(file).isDirectory()) { + if (fs8.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash = crypto.createHash("sha256"); const pipeline = util.promisify(stream.pipeline); - yield pipeline(fs7.createReadStream(file), hash); + yield pipeline(fs8.createReadStream(file), hash); result.write(hash.digest()); count++; if (!hasMatch) { @@ -114348,7 +114348,7 @@ var require_glob3 = __commonJS({ }); // src/analyze-action-post.ts -var fs6 = __toESM(require("fs")); +var fs7 = __toESM(require("fs")); var core13 = __toESM(require_core()); // src/actions-util.ts @@ -114372,21 +114372,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs7 = options.fs || await import("node:fs/promises"); + const fs8 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs7.lstat(itemPath, { bigint: true }) : await fs7.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs8.lstat(itemPath, { bigint: true }) : await fs8.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs7.readdir(itemPath) : await fs7.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs8.readdir(itemPath) : await fs8.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -117282,8 +117282,8 @@ async function getGitHubVersion() { } // src/codeql.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var fs5 = __toESM(require("fs")); +var path5 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -117525,8 +117525,8 @@ function wrapCliConfigurationError(cliError) { } // src/config-utils.ts -var fs3 = __toESM(require("fs")); -var path3 = __toESM(require("path")); +var fs4 = __toESM(require("fs")); +var path4 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -117548,6 +117548,10 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); +// src/diff-informed-analysis-utils.ts +var fs3 = __toESM(require("fs")); +var path3 = __toESM(require("path")); + // src/feature-flags.ts var semver4 = __toESM(require_semver2()); @@ -117645,8 +117649,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path6 = decodeGitFilePath(match[2]); - fileOidMap[path6] = oid; + const path7 = decodeGitFilePath(match[2]); + fileOidMap[path7] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -117765,8 +117769,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -117793,6 +117827,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path2.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path2.sep).join("/"); + forced.add(rel); + } + return forced; +} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -117980,6 +118031,24 @@ var featureConfig = { } }; +// src/diff-informed-analysis-utils.ts +function getDiffRangesJsonFilePath() { + return path3.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs3.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs3.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` + ); + return JSON.parse(jsonContents); +} + // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -118009,14 +118078,14 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path3.join(tempDir, "config"); + return path4.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs3.existsSync(configFile)) { + if (!fs4.existsSync(configFile)) { return void 0; } - const configString = fs3.readFileSync(configFile, "utf8"); + const configString = fs4.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -118125,12 +118194,12 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path4.join( + const tracingConfigPath = path5.join( extractorPath, "tools", "tracing-config.lua" ); - return fs4.existsSync(tracingConfigPath); + return fs5.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -118201,7 +118270,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path4.join( + const autobuildCmd = path5.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? 
"autobuild.cmd" : "autobuild.sh" @@ -118591,7 +118660,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs4.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs5.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -118614,7 +118683,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path4.resolve(config.tempDir, "user-config.yaml"); + return path5.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; @@ -118635,8 +118704,8 @@ async function getJobRunUuidSarifOptions(codeql) { } // src/debug-artifacts.ts -var fs5 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var fs6 = __toESM(require("fs")); +var path6 = __toESM(require("path")); var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); var core12 = __toESM(require_core()); @@ -118667,14 +118736,14 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion logger.info( "Uploading available combined SARIF files as Actions debugging artifact..." ); - const baseTempDir = path5.resolve(tempDir, "combined-sarif"); + const baseTempDir = path6.resolve(tempDir, "combined-sarif"); const toUpload = []; - if (fs5.existsSync(baseTempDir)) { - const outputDirs = fs5.readdirSync(baseTempDir); + if (fs6.existsSync(baseTempDir)) { + const outputDirs = fs6.readdirSync(baseTempDir); for (const outputDir of outputDirs) { - const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => path5.extname(f) === ".sarif"); + const sarifFiles = fs6.readdirSync(path6.resolve(baseTempDir, outputDir)).filter((f) => path6.extname(f) === ".sarif"); for (const sarifFile of sarifFiles) { - toUpload.push(path5.resolve(baseTempDir, outputDir, sarifFile)); + toUpload.push(path6.resolve(baseTempDir, outputDir, sarifFile)); } } } @@ -118726,8 +118795,8 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV try { await artifactUploader.uploadArtifact( sanitizeArtifactName(`${artifactName}${suffix}`), - toUpload.map((file) => path5.normalize(file)), - path5.normalize(rootDir), + toUpload.map((file) => path6.normalize(file)), + path6.normalize(rootDir), { // ensure we don't keep the debug artifacts around for too long since they can be large. 
retentionDays: 7 @@ -118776,9 +118845,9 @@ async function runWrapper() { } } const javaTempDependencyDir = getJavaTempDependencyDir(); - if (fs6.existsSync(javaTempDependencyDir)) { + if (fs7.existsSync(javaTempDependencyDir)) { try { - fs6.rmSync(javaTempDependencyDir, { recursive: true }); + fs7.rmSync(javaTempDependencyDir, { recursive: true }); } catch (error2) { logger.info( `Failed to remove temporary Java dependencies directory: ${getErrorMessage(error2)}` diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 6981690e10..28b4706d9a 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -27720,15 +27720,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative2 = []; + const relative3 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative2.push(pattern); + relative3.push(pattern); } } - return [absolute, relative2]; + return [absolute, relative3]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -80536,8 +80536,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); + function resolveUrl(relative3, base) { + return useNativeURL ? new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -90922,8 +90922,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -90950,6 +90980,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path7.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path7.sep).join("/"); + forced.add(rel); + } + return forced; +} var CACHE_VERSION = 1; var CACHE_PREFIX = "codeql-overlay-base-database"; var MAX_CACHE_OPERATION_MS = 6e5; diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index adf440738a..f02a8cd480 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto = __importStar4(require("crypto")); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var os2 = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs7.existsSync(filePath)) { + if (!fs8.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs7.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs8.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path7 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path8 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path7 && !path7.startsWith("/")) { - path7 = `/${path7}`; + if (path8 && !path8.startsWith("/")) { + path8 = `/${path8}`; } - url = new URL(origin + path7); + url = new URL(origin + path8); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path7) { - if (typeof path7 !== "string") { + module2.exports = function basename(path8) { + if (typeof path8 !== "string") { return ""; } - for (var i = path7.length - 1; i >= 0; --i) { - switch (path7.charCodeAt(i)) { + for (var i = path8.length - 1; i >= 0; --i) { + switch (path8.charCodeAt(i)) { case 47: // '/' case 92: - path7 = path7.slice(i + 1); - return path7 === ".." || path7 === "." ? "" : path7; + path8 = path8.slice(i + 1); + return path8 === ".." || path8 === "." ? "" : path8; } } - return path7 === ".." || path7 === "." ? "" : path7; + return path8 === ".." || path8 === "." ? "" : path8; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path7, + path: path8, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path7 !== "string") { + if (typeof path8 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path7[0] !== "/" && !(path7.startsWith("http://") || path7.startsWith("https://")) && method !== "CONNECT") { + } else if (path8[0] !== "/" && !(path8.startsWith("http://") || path8.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path7) !== null) { + } else if (invalidPathRegex.exec(path8) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path7, query) : path7; + this.path = query ? util.buildURL(path8, query) : path8; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path7 = search ? `${pathname}${search}` : pathname; + const path8 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path7; + this.opts.path = path8; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path7, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path8, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path7} HTTP/1.1\r + let header = `${method} ${path8} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path7, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path8, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path7; + headers[HTTP2_HEADER_PATH] = path8; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path7) { - if (typeof path7 !== "string") { - return path7; + function safeUrl(path8) { + if (typeof path8 !== "string") { + return path8; } - const pathSegments = path7.split("?"); + const pathSegments = path8.split("?"); if (pathSegments.length !== 2) { - return path7; + return path8; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path7, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path7); + function matchKey(mockDispatch2, { path: path8, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path8); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path7 }) => matchValue(safeUrl(path7), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path8 }) => matchValue(safeUrl(path8), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path7, method, body, headers, query } = opts; + const { path: path8, method, body, headers, query } = opts; return { - path: path7, + path: path8, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path7, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path8, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path7, + Path: path8, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path7) { - for (const char of path7) { + function validateCookiePath(path8) { + for (const char of path8) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path7 = opts.path; + let path8 = opts.path; if (!opts.path.startsWith("/")) { - path7 = `/${path7}`; + path8 = `/${path8}`; } - url = new URL(util.parseOrigin(url).origin + path7); + url = new URL(util.parseOrigin(url).origin + path8); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path7.sep); + return pth.replace(/[/\\]/g, path8.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs7 = __importStar4(require("fs")); - var path7 = __importStar4(require("path")); - _a = fs7.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs8 = __importStar4(require("fs")); + var path8 = __importStar4(require("path")); + _a = fs8.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs7.constants.O_RDONLY; + exports2.READONLY = fs8.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path7.extname(filePath).toUpperCase(); + const upperExt = path8.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path7.dirname(filePath); - const upperName = path7.basename(filePath).toUpperCase(); + const directory = path8.dirname(filePath); + const upperName = path8.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path7.join(directory, actualName); + filePath = path8.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var 
path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path7.join(dest, path7.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path8.join(dest, path8.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path7.relative(source, newDest) === "") { + if (path8.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path7.join(dest, path7.basename(source)); + dest = path8.join(dest, path8.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path7.dirname(dest)); + yield mkdirP(path8.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path7.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path8.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path7.sep)) { + if (tool.includes(path8.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path7.delimiter)) { + for (const p of process.env.PATH.split(path8.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path7.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path8.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os2 = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var io5 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path7.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path8.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io5.which(this.toolPath, true); return new Promise((resolve5, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var 
require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os2 = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path7.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path8.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -19835,8 +19835,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path7 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path7} does not exist${os_1.EOL}`); + const path8 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path8} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -28203,7 +28203,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -28211,7 +28211,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path7.dirname(p); + let result = path8.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -28249,7 +28249,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path7.sep; + root += path8.sep; } return root + itemPath; } @@ -28287,10 +28287,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path7.sep)) { + if (!p.endsWith(path8.sep)) { return p; } - if (p === path7.sep) { + if (p === path8.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -28623,7 +28623,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path7 = (function() { + var path8 = (function() { try { return require("path"); } catch (e) { @@ -28631,7 +28631,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path7.sep; + minimatch.sep = path8.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -28720,8 +28720,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path7.sep !== "/") { - pattern = pattern.split(path7.sep).join("/"); + if (!options.allowWindowsEscape && path8.sep !== "/") { + pattern = pattern.split(path8.sep).join("/"); } 
this.options = options; this.set = []; @@ -29090,8 +29090,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path7.sep !== "/") { - f = f.split(path7.sep).join("/"); + if (path8.sep !== "/") { + f = f.split(path8.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -29223,7 +29223,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -29238,12 +29238,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path7.sep); + this.segments = itemPath.split(path8.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path7.basename(remaining); + const basename = path8.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -29261,7 +29261,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path7.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path8.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -29272,12 +29272,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path7.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path8.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path7.sep; + result += path8.sep; } result += this.segments[i]; } @@ -29321,7 +29321,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os2 = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -29350,7 +29350,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path7.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path8.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -29374,8 +29374,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = 
pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path7.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path7.sep}`; + if (!itemPath.endsWith(path8.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path8.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -29410,9 +29410,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path7.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path8.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path7.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path8.sep}`)) { homedir = homedir || os2.homedir(); assert_1.default(homedir, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -29496,8 +29496,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path7, level) { - this.path = path7; + constructor(path8, level) { + this.path = path8; this.level = level; } }; @@ -29617,9 +29617,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -29669,7 +29669,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs7.promises.lstat(searchPath)); + yield __await4(fs8.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -29700,7 +29700,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs7.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path7.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs8.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path8.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -29735,7 +29735,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs7.promises.stat(item.path); + stats = yield fs8.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -29747,10 +29747,10 @@ var 
require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs7.promises.lstat(item.path); + stats = yield fs8.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs7.promises.realpath(item.path); + const realPath = yield fs8.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -31084,8 +31084,8 @@ var require_cacheUtils = __commonJS({ var glob = __importStar4(require_glob()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs7 = __importStar4(require("fs")); - var path7 = __importStar4(require("path")); + var fs8 = __importStar4(require("fs")); + var path8 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants7(); @@ -31105,16 +31105,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path7.join(baseLocation, "actions", "temp"); + tempDirectory = path8.join(baseLocation, "actions", "temp"); } - const dest = path7.join(tempDirectory, crypto.randomUUID()); + const dest = path8.join(tempDirectory, crypto.randomUUID()); yield io5.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs7.statSync(filePath).size; + return fs8.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -31131,7 +31131,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path7.relative(workspace, file).replace(new RegExp(`\\${path7.sep}`, "g"), "/"); + const relativeFile = path8.relative(workspace, file).replace(new RegExp(`\\${path8.sep}`, "g"), "/"); core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -31154,7 +31154,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs7.unlink)(filePath); + return util.promisify(fs8.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -31199,7 +31199,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs7.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs8.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -39037,15 +39037,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path7 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path7.startsWith("/")) { - path7 = path7.substring(1); + let path8 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path8.startsWith("/")) { + path8 = path8.substring(1); } - if (isAbsoluteUrl(path7)) { - requestUrl = path7; + if (isAbsoluteUrl(path8)) { + requestUrl = path8; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path7); + requestUrl = appendPath(requestUrl, path8); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -39093,9 
+39093,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path7 = pathToAppend.substring(0, searchStart); + const path8 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path7; + newPath = newPath + path8; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -42972,7 +42972,7 @@ var require_dist7 = __commonJS({ var stream = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs7 = require("fs"); + var fs8 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -42995,7 +42995,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs7); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs8); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -43244,10 +43244,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path7 = urlParsed.pathname; - path7 = path7 || "/"; - path7 = escape(path7); - urlParsed.pathname = path7; + let path8 = urlParsed.pathname; + path8 = path8 || "/"; + path8 = escape(path8); + urlParsed.pathname = path8; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -43332,9 +43332,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path7 = urlParsed.pathname; - path7 = path7 ? path7.endsWith("/") ? `${path7}${name}` : `${path7}/${name}` : name; - urlParsed.pathname = path7; + let path8 = urlParsed.pathname; + path8 = path8 ? path8.endsWith("/") ? 
`${path8}${name}` : `${path8}/${name}` : name; + urlParsed.pathname = path8; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -44415,9 +44415,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path7 = getURLPath(request.url) || "/"; + const path8 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path7}`; + canonicalizedResourceString += `/${this.factory.accountName}${path8}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -44710,9 +44710,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path7 = getURLPath(request.url) || "/"; + const path8 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path7}`; + canonicalizedResourceString += `/${options.accountName}${path8}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -64014,8 +64014,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path7 = getURLPath(subRequest.url); - if (!path7 || path7 === "") { + const path8 = getURLPath(subRequest.url); + if (!path8 || path8 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -64075,8 +64075,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path7 = getURLPath(url2); - if (path7 && path7 !== "/") { + const path8 = getURLPath(url2); + if (path8 && path8 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -66843,7 +66843,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -66954,7 +66954,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs7.createWriteStream(archivePath); + const writeStream = fs8.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -66980,7 +66980,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs7.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs8.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { 
socketTimeout: options.timeoutInMs, keepAlive: true @@ -67097,7 +67097,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs7.openSync(archivePath, "w"); + const fd = fs8.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -67115,12 +67115,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs7.writeFileSync(fd, result); + fs8.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs7.closeSync(fd); + fs8.closeSync(fd); } } }); @@ -67419,7 +67419,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -67557,7 +67557,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs7.openSync(archivePath, "r"); + const fd = fs8.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -67571,7 +67571,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs7.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs8.createReadStream(archivePath, { fd, start, end, @@ -67582,7 +67582,7 @@ Other caches with similar key:`); } }))); } finally { - fs7.closeSync(fd); + fs8.closeSync(fd); } return; }); @@ -72826,7 +72826,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io5 = __importStar4(require_io()); var fs_1 = require("fs"); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants7(); var IS_WINDOWS = process.platform === "win32"; @@ -72872,13 +72872,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path8.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -72924,7 +72924,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -72933,7 +72933,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -72948,7 +72948,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -72957,7 +72957,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -72997,7 +72997,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path7.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path8.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -73067,7 +73067,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core14 = __importStar4(require_core()); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -73164,7 +73164,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path8.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core14.isDebug()) { @@ -73233,7 +73233,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path8.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive path: ${archivePath}`); core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -73296,7 +73296,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path8.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73360,7 +73360,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path8.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73498,7 +73498,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os2 = require("os"); var cp = require("child_process"); - var fs7 = require("fs"); 
+ var fs8 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os2.platform(); @@ -73562,10 +73562,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs7.existsSync(lsbReleaseFile)) { - contents = fs7.readFileSync(lsbReleaseFile).toString(); - } else if (fs7.existsSync(osReleaseFile)) { - contents = fs7.readFileSync(osReleaseFile).toString(); + if (fs8.existsSync(lsbReleaseFile)) { + contents = fs8.readFileSync(lsbReleaseFile).toString(); + } else if (fs8.existsSync(osReleaseFile)) { + contents = fs8.readFileSync(osReleaseFile).toString(); } return contents; } @@ -73742,10 +73742,10 @@ var require_tool_cache = __commonJS({ var core14 = __importStar4(require_core()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs7 = __importStar4(require("fs")); + var fs8 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os2 = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path8 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver8 = __importStar4(require_semver2()); var stream = __importStar4(require("stream")); @@ -73766,8 +73766,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path7.join(_getTempDirectory(), crypto.randomUUID()); - yield io5.mkdirP(path7.dirname(dest)); + dest = dest || path8.join(_getTempDirectory(), crypto.randomUUID()); + yield io5.mkdirP(path8.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -73789,7 +73789,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs7.existsSync(dest)) { + if (fs8.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -73813,7 +73813,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs7.createWriteStream(dest)); + yield pipeline(readStream, fs8.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -73854,7 +73854,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path7.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path8.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -74025,12 +74025,12 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs7.statSync(sourceDir).isDirectory()) { + if (!fs8.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield 
_createToolPath(tool, version, arch); - for (const itemName of fs7.readdirSync(sourceDir)) { - const s = path7.join(sourceDir, itemName); + for (const itemName of fs8.readdirSync(sourceDir)) { + const s = path8.join(sourceDir, itemName); yield io5.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch); @@ -74044,11 +74044,11 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source file: ${sourceFile}`); - if (!fs7.statSync(sourceFile).isFile()) { + if (!fs8.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path7.join(destFolder, targetFile); + const destPath = path8.join(destFolder, targetFile); core14.debug(`destination file ${destPath}`); yield io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); @@ -74072,9 +74072,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; - const cachePath = path7.join(_getCacheDirectory(), toolName, versionSpec, arch); + const cachePath = path8.join(_getCacheDirectory(), toolName, versionSpec, arch); core14.debug(`checking cache: ${cachePath}`); - if (fs7.existsSync(cachePath) && fs7.existsSync(`${cachePath}.complete`)) { + if (fs8.existsSync(cachePath) && fs8.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { @@ -74087,13 +74087,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch) { const versions = []; arch = arch || os2.arch(); - const toolPath = path7.join(_getCacheDirectory(), toolName); - if (fs7.existsSync(toolPath)) { - const children = fs7.readdirSync(toolPath); + const toolPath = path8.join(_getCacheDirectory(), toolName); + if (fs8.existsSync(toolPath)) { + const children = fs8.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path7.join(toolPath, child, arch || ""); - if (fs7.existsSync(fullPath) && fs7.existsSync(`${fullPath}.complete`)) { + const fullPath = path8.join(toolPath, child, arch || ""); + if (fs8.existsSync(fullPath) && fs8.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -74147,7 +74147,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path7.join(_getTempDirectory(), crypto.randomUUID()); + dest = path8.join(_getTempDirectory(), crypto.randomUUID()); } yield io5.mkdirP(dest); return dest; @@ -74155,7 +74155,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path8.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); @@ -74165,9 +74165,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch) { - const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path8.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const 
markerPath = `${folderPath}.complete`; - fs7.writeFileSync(markerPath, ""); + fs8.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -74687,8 +74687,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative, base) { - return useNativeURL ? new URL2(relative, base) : parseUrl(url.resolve(base, relative)); + function resolveUrl(relative2, base) { + return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -74933,21 +74933,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs7 = options.fs || await import("node:fs/promises"); + const fs8 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs7.lstat(itemPath, { bigint: true }) : await fs7.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs8.lstat(itemPath, { bigint: true }) : await fs8.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs7.readdir(itemPath) : await fs7.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs8.readdir(itemPath) : await fs8.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -78024,8 +78024,8 @@ async function getAnalysisKey() { var core11 = __toESM(require_core()); // src/codeql.ts -var fs6 = __toESM(require("fs")); -var path6 = __toESM(require("path")); +var fs7 = __toESM(require("fs")); +var path7 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -78267,8 +78267,8 @@ function wrapCliConfigurationError(cliError) { } // src/config-utils.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var fs5 = __toESM(require("fs")); +var path5 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -78290,6 +78290,10 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); +// src/diff-informed-analysis-utils.ts +var fs4 = __toESM(require("fs")); +var path4 = __toESM(require("path")); + // src/feature-flags.ts var fs3 = __toESM(require("fs")); var path3 = __toESM(require("path")); @@ -78393,8 +78397,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path7 = decodeGitFilePath(match[2]); - fileOidMap[path7] = oid; + const path8 = decodeGitFilePath(match[2]); + fileOidMap[path8] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -78505,8 +78509,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -78533,6 +78567,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path2.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path2.sep).join("/"); + forced.add(rel); + } + return forced; +} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -78982,6 +79033,24 @@ var GitHubFeatureFlags = class { } }; +// src/diff-informed-analysis-utils.ts +function getDiffRangesJsonFilePath() { + return path4.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs4.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs4.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` + ); + return JSON.parse(jsonContents); +} + // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -79011,14 +79080,14 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path4.join(tempDir, "config"); + return path5.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs4.existsSync(configFile)) { + if (!fs5.existsSync(configFile)) { return void 0; } - const configString = fs4.readFileSync(configFile, "utf8"); + const configString = fs5.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -79073,8 +79142,8 @@ var semver6 = __toESM(require_semver2()); var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; // src/tracer-config.ts -var fs5 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var fs6 = __toESM(require("fs")); +var path6 = __toESM(require("path")); async function shouldEnableIndirectTracing(codeql, config) { if (config.buildMode === "none" /* None */) { return false; @@ -79089,18 +79158,18 @@ async function endTracingForCluster(codeql, config, logger) { logger.info( "Unsetting build tracing environment variables. Subsequent steps of this job will not be traced." 
); - const envVariablesFile = path5.resolve( + const envVariablesFile = path6.resolve( config.dbLocation, "temp/tracingEnvironment/end-tracing.json" ); - if (!fs5.existsSync(envVariablesFile)) { + if (!fs6.existsSync(envVariablesFile)) { throw new Error( `Environment file for ending tracing not found: ${envVariablesFile}` ); } try { const endTracingEnvVariables = JSON.parse( - fs5.readFileSync(envVariablesFile, "utf8") + fs6.readFileSync(envVariablesFile, "utf8") ); for (const [key, value] of Object.entries(endTracingEnvVariables)) { if (value !== null) { @@ -79160,12 +79229,12 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path6.join( + const tracingConfigPath = path7.join( extractorPath, "tools", "tracing-config.lua" ); - return fs6.existsSync(tracingConfigPath); + return fs7.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -79236,7 +79305,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path6.join( + const autobuildCmd = path7.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -79626,7 +79695,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs6.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs7.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -79649,7 +79718,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path6.resolve(config.tempDir, "user-config.yaml"); + return path7.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 08c8449012..7fe5ff187e 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -27720,15 +27720,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative2 = []; + const relative3 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative2.push(pattern); + relative3.push(pattern); } } - return [absolute, relative2]; + return [absolute, relative3]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -80536,8 +80536,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); + function resolveUrl(relative3, base) { + return useNativeURL ? 
new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -84278,8 +84278,8 @@ var require_readdir_glob = __commonJS({ useStat = true; } const filename = dir + "/" + name; - const relative2 = filename.slice(1); - const absolute = path19 + "/" + relative2; + const relative3 = filename.slice(1); + const absolute = path19 + "/" + relative3; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -84291,12 +84291,12 @@ var require_readdir_glob = __commonJS({ stats = { isDirectory: () => false }; } if (stats.isDirectory()) { - if (!shouldSkip(relative2)) { - yield { relative: relative2, absolute, stats }; + if (!shouldSkip(relative3)) { + yield { relative: relative3, absolute, stats }; yield* exploreWalkAsync(filename, path19, followSymlinks, useStat, shouldSkip, false); } } else { - yield { relative: relative2, absolute, stats }; + yield { relative: relative3, absolute, stats }; } } } @@ -84366,11 +84366,11 @@ var require_readdir_glob = __commonJS({ } setTimeout(() => this._next(), 0); } - _shouldSkipDirectory(relative2) { - return this.skipMatchers.some((m) => m.match(relative2)); + _shouldSkipDirectory(relative3) { + return this.skipMatchers.some((m) => m.match(relative3)); } - _fileMatches(relative2, isDirectory2) { - const file = relative2 + (isDirectory2 ? "/" : ""); + _fileMatches(relative3, isDirectory2) { + const file = relative3 + (isDirectory2 ? "/" : ""); return (this.matchers.length === 0 || this.matchers.some((m) => m.match(file))) && !this.ignoreMatchers.some((m) => m.match(file)) && (!this.options.nodir || !isDirectory2); } _next() { @@ -84379,16 +84379,16 @@ var require_readdir_glob = __commonJS({ if (!obj.done) { const isDirectory2 = obj.value.stats.isDirectory(); if (this._fileMatches(obj.value.relative, isDirectory2)) { - let relative2 = obj.value.relative; + let relative3 = obj.value.relative; let absolute = obj.value.absolute; if (this.options.mark && isDirectory2) { - relative2 += "/"; + relative3 += "/"; absolute += "/"; } if (this.options.stat) { - this.emit("match", { relative: relative2, absolute, stat: obj.value.stats }); + this.emit("match", { relative: relative3, absolute, stat: obj.value.stats }); } else { - this.emit("match", { relative: relative2, absolute }); + this.emit("match", { relative: relative3, absolute }); } } this._next(this.iterator); @@ -89873,8 +89873,8 @@ var require_primordials = __commonJS({ ArrayPrototypeIndexOf(self2, el) { return self2.indexOf(el); }, - ArrayPrototypeJoin(self2, sep5) { - return self2.join(sep5); + ArrayPrototypeJoin(self2, sep6) { + return self2.join(sep6); }, ArrayPrototypeMap(self2, fn) { return self2.map(fn); @@ -101761,7 +101761,7 @@ var require_commonjs16 = __commonJS({ * * @internal */ - constructor(cwd = process.cwd(), pathImpl, sep5, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) { + constructor(cwd = process.cwd(), pathImpl, sep6, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) { this.#fs = fsFromOption(fs20); if (cwd instanceof URL || cwd.startsWith("file://")) { cwd = (0, node_url_1.fileURLToPath)(cwd); @@ -101772,7 +101772,7 @@ var require_commonjs16 = __commonJS({ this.#resolveCache = new ResolveCache(); this.#resolvePosixCache = new ResolveCache(); this.#children = new ChildrenCache(childrenCacheSize); - const split = cwdPath.substring(this.rootPath.length).split(sep5); + const split = 
cwdPath.substring(this.rootPath.length).split(sep6); if (split.length === 1 && !split[0]) { split.pop(); } @@ -102615,10 +102615,10 @@ var require_ignore2 = __commonJS({ ignored(p) { const fullpath = p.fullpath(); const fullpaths = `${fullpath}/`; - const relative2 = p.relative() || "."; - const relatives = `${relative2}/`; + const relative3 = p.relative() || "."; + const relatives = `${relative3}/`; for (const m of this.relative) { - if (m.match(relative2) || m.match(relatives)) + if (m.match(relative3) || m.match(relatives)) return true; } for (const m of this.absolute) { @@ -102629,9 +102629,9 @@ var require_ignore2 = __commonJS({ } childrenIgnored(p) { const fullpath = p.fullpath() + "/"; - const relative2 = (p.relative() || ".") + "/"; + const relative3 = (p.relative() || ".") + "/"; for (const m of this.relativeChildren) { - if (m.match(relative2)) + if (m.match(relative3)) return true; } for (const m of this.absoluteChildren) { @@ -129189,8 +129189,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -129217,6 +129247,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path7.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path7.sep).join("/"); + forced.add(rel); + } + return forced; +} // src/tools-features.ts var semver3 = __toESM(require_semver2()); diff --git a/lib/init-action.js b/lib/init-action.js index 445977d382..2448b3ff60 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -29628,15 +29628,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative2 = []; + const relative3 = []; for (const pattern of patterns) { if (isAbsolute3(pattern)) { absolute.push(pattern); } else { - relative2.push(pattern); + relative3.push(pattern); } } - return [absolute, relative2]; + return [absolute, relative3]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute3(pattern) { @@ -81633,8 +81633,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); + function resolveUrl(relative3, base) { + return useNativeURL ? new URL2(relative3, base) : parseUrl(url.resolve(base, relative3)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -86812,8 +86812,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path8.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path8.join( @@ -86840,6 +86870,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path8.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path8.sep).join("/"); + forced.add(rel); + } + return forced; +} var CACHE_VERSION = 1; var CACHE_PREFIX = "codeql-overlay-base-database"; var MAX_CACHE_OPERATION_MS = 6e5; @@ -87455,6 +87502,19 @@ function writeDiffRangesJsonFile(logger, ranges) { ${jsonContents}` ); } +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs8.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs8.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` + ); + return JSON.parse(jsonContents); +} async function getPullRequestEditedDiffRanges(branches, logger) { const fileDiffs = await getFileDiffsWithBasehead(branches, logger); if (fileDiffs === void 0) { diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index 4d1cdf81af..26f781f2a1 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto = __importStar4(require("crypto")); - var fs5 = __importStar4(require("fs")); + var fs6 = __importStar4(require("fs")); var os2 = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs5.existsSync(filePath)) { + if (!fs6.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs5.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs6.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path5 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path6 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path5 && !path5.startsWith("/")) { - path5 = `/${path5}`; + if (path6 && !path6.startsWith("/")) { + path6 = `/${path6}`; } - url = new URL(origin + path5); + url = new URL(origin + path6); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path5) { - if (typeof path5 !== "string") { + module2.exports = function basename(path6) { + if (typeof path6 !== "string") { return ""; } - for (var i = path5.length - 1; i >= 0; --i) { - switch (path5.charCodeAt(i)) { + for (var i = path6.length - 1; i >= 0; --i) { + switch (path6.charCodeAt(i)) { case 47: // '/' case 92: - path5 = path5.slice(i + 1); - return path5 === ".." || path5 === "." ? "" : path5; + path6 = path6.slice(i + 1); + return path6 === ".." || path6 === "." ? "" : path6; } } - return path5 === ".." || path5 === "." ? "" : path5; + return path6 === ".." || path6 === "." ? "" : path6; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path5, + path: path6, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path5 !== "string") { + if (typeof path6 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path5[0] !== "/" && !(path5.startsWith("http://") || path5.startsWith("https://")) && method !== "CONNECT") { + } else if (path6[0] !== "/" && !(path6.startsWith("http://") || path6.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path5) !== null) { + } else if (invalidPathRegex.exec(path6) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path5, query) : path5; + this.path = query ? util.buildURL(path6, query) : path6; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path5 = search ? `${pathname}${search}` : pathname; + const path6 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path5; + this.opts.path = path6; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path5, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path6, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path5} HTTP/1.1\r + let header = `${method} ${path6} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path5, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path6, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path5; + headers[HTTP2_HEADER_PATH] = path6; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path5) { - if (typeof path5 !== "string") { - return path5; + function safeUrl(path6) { + if (typeof path6 !== "string") { + return path6; } - const pathSegments = path5.split("?"); + const pathSegments = path6.split("?"); if (pathSegments.length !== 2) { - return path5; + return path6; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path5, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path5); + function matchKey(mockDispatch2, { path: path6, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path6); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path5 }) => matchValue(safeUrl(path5), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path6 }) => matchValue(safeUrl(path6), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path5, method, body, headers, query } = opts; + const { path: path6, method, body, headers, query } = opts; return { - path: path5, + path: path6, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path5, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path6, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path5, + Path: path6, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path5) { - for (const char of path5) { + function validateCookiePath(path6) { + for (const char of path6) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path5 = opts.path; + let path6 = opts.path; if (!opts.path.startsWith("/")) { - path5 = `/${path5}`; + path6 = `/${path6}`; } - url = new URL(util.parseOrigin(url).origin + path5); + url = new URL(util.parseOrigin(url).origin + path6); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path5.sep); + return pth.replace(/[/\\]/g, path6.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs5 = __importStar4(require("fs")); - var path5 = __importStar4(require("path")); - _a = fs5.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs6 = __importStar4(require("fs")); + var path6 = __importStar4(require("path")); + _a = fs6.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs5.constants.O_RDONLY; + exports2.READONLY = fs6.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path5.extname(filePath).toUpperCase(); + const upperExt = path6.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path5.dirname(filePath); - const upperName = path5.basename(filePath).toUpperCase(); + const directory = path6.dirname(filePath); + const upperName = path6.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path5.join(directory, actualName); + filePath = path6.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var 
path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path5.join(dest, path5.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path6.join(dest, path6.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path5.relative(source, newDest) === "") { + if (path6.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path5.join(dest, path5.basename(source)); + dest = path6.join(dest, path6.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path5.dirname(dest)); + yield mkdirP(path6.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path5.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path6.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path5.sep)) { + if (tool.includes(path6.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path5.delimiter)) { + for (const p of process.env.PATH.split(path6.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path5.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path6.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os2 = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var io5 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path5.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path6.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io5.which(this.toolPath, true); return new Promise((resolve4, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var 
require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os2 = __importStar4(require("os")); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path5.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path6.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -19835,8 +19835,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path5 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path5} does not exist${os_1.EOL}`); + const path6 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path6} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -28203,7 +28203,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -28211,7 +28211,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path5.dirname(p); + let result = path6.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -28249,7 +28249,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path5.sep; + root += path6.sep; } return root + itemPath; } @@ -28287,10 +28287,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path5.sep)) { + if (!p.endsWith(path6.sep)) { return p; } - if (p === path5.sep) { + if (p === path6.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -28623,7 +28623,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path5 = (function() { + var path6 = (function() { try { return require("path"); } catch (e) { @@ -28631,7 +28631,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path5.sep; + minimatch.sep = path6.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -28720,8 +28720,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path5.sep !== "/") { - pattern = pattern.split(path5.sep).join("/"); + if (!options.allowWindowsEscape && path6.sep !== "/") { + pattern = pattern.split(path6.sep).join("/"); } 
this.options = options; this.set = []; @@ -29090,8 +29090,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path5.sep !== "/") { - f = f.split(path5.sep).join("/"); + if (path6.sep !== "/") { + f = f.split(path6.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -29223,7 +29223,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -29238,12 +29238,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path5.sep); + this.segments = itemPath.split(path6.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path5.basename(remaining); + const basename = path6.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -29261,7 +29261,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path5.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path6.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -29272,12 +29272,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path5.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path6.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path5.sep; + result += path6.sep; } result += this.segments[i]; } @@ -29321,7 +29321,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os2 = __importStar4(require("os")); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -29350,7 +29350,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path5.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path6.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -29374,8 +29374,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = 
pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path5.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path5.sep}`; + if (!itemPath.endsWith(path6.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path6.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -29410,9 +29410,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path5.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path6.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path5.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path6.sep}`)) { homedir = homedir || os2.homedir(); assert_1.default(homedir, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -29496,8 +29496,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path5, level) { - this.path = path5; + constructor(path6, level) { + this.path = path6; this.level = level; } }; @@ -29617,9 +29617,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core13 = __importStar4(require_core()); - var fs5 = __importStar4(require("fs")); + var fs6 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -29669,7 +29669,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core13.debug(`Search path '${searchPath}'`); try { - yield __await4(fs5.promises.lstat(searchPath)); + yield __await4(fs6.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -29700,7 +29700,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs5.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path5.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs6.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path6.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -29735,7 +29735,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs5.promises.stat(item.path); + stats = yield fs6.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -29747,10 +29747,10 @@ var 
require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs5.promises.lstat(item.path); + stats = yield fs6.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs5.promises.realpath(item.path); + const realPath = yield fs6.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -31084,8 +31084,8 @@ var require_cacheUtils = __commonJS({ var glob = __importStar4(require_glob()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs5 = __importStar4(require("fs")); - var path5 = __importStar4(require("path")); + var fs6 = __importStar4(require("fs")); + var path6 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants7(); @@ -31105,16 +31105,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path5.join(baseLocation, "actions", "temp"); + tempDirectory = path6.join(baseLocation, "actions", "temp"); } - const dest = path5.join(tempDirectory, crypto.randomUUID()); + const dest = path6.join(tempDirectory, crypto.randomUUID()); yield io5.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs5.statSync(filePath).size; + return fs6.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -31131,7 +31131,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path5.relative(workspace, file).replace(new RegExp(`\\${path5.sep}`, "g"), "/"); + const relativeFile = path6.relative(workspace, file).replace(new RegExp(`\\${path6.sep}`, "g"), "/"); core13.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -31154,7 +31154,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs5.unlink)(filePath); + return util.promisify(fs6.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -31199,7 +31199,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs5.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs6.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -39037,15 +39037,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path5 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path5.startsWith("/")) { - path5 = path5.substring(1); + let path6 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path6.startsWith("/")) { + path6 = path6.substring(1); } - if (isAbsoluteUrl(path5)) { - requestUrl = path5; + if (isAbsoluteUrl(path6)) { + requestUrl = path6; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path5); + requestUrl = appendPath(requestUrl, path6); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -39093,9 
+39093,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path5 = pathToAppend.substring(0, searchStart); + const path6 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path5; + newPath = newPath + path6; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -42972,7 +42972,7 @@ var require_dist7 = __commonJS({ var stream = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs5 = require("fs"); + var fs6 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -42995,7 +42995,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs5); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs6); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -43244,10 +43244,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path5 = urlParsed.pathname; - path5 = path5 || "/"; - path5 = escape(path5); - urlParsed.pathname = path5; + let path6 = urlParsed.pathname; + path6 = path6 || "/"; + path6 = escape(path6); + urlParsed.pathname = path6; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -43332,9 +43332,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path5 = urlParsed.pathname; - path5 = path5 ? path5.endsWith("/") ? `${path5}${name}` : `${path5}/${name}` : name; - urlParsed.pathname = path5; + let path6 = urlParsed.pathname; + path6 = path6 ? path6.endsWith("/") ? 
`${path6}${name}` : `${path6}/${name}` : name; + urlParsed.pathname = path6; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -44415,9 +44415,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path5 = getURLPath(request.url) || "/"; + const path6 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path5}`; + canonicalizedResourceString += `/${this.factory.accountName}${path6}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -44710,9 +44710,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path5 = getURLPath(request.url) || "/"; + const path6 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path5}`; + canonicalizedResourceString += `/${options.accountName}${path6}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -64014,8 +64014,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path5 = getURLPath(subRequest.url); - if (!path5 || path5 === "") { + const path6 = getURLPath(subRequest.url); + if (!path6 || path6 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -64075,8 +64075,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path5 = getURLPath(url2); - if (path5 && path5 !== "/") { + const path6 = getURLPath(url2); + if (path6 && path6 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -66843,7 +66843,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs5 = __importStar4(require("fs")); + var fs6 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -66954,7 +66954,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs5.createWriteStream(archivePath); + const writeStream = fs6.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -66980,7 +66980,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs5.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs6.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { 
socketTimeout: options.timeoutInMs, keepAlive: true @@ -67097,7 +67097,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs5.openSync(archivePath, "w"); + const fd = fs6.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -67115,12 +67115,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs5.writeFileSync(fd, result); + fs6.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs5.closeSync(fd); + fs6.closeSync(fd); } } }); @@ -67419,7 +67419,7 @@ var require_cacheHttpClient = __commonJS({ var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs5 = __importStar4(require("fs")); + var fs6 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -67557,7 +67557,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs5.openSync(archivePath, "r"); + const fd = fs6.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -67571,7 +67571,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs5.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs6.createReadStream(archivePath, { fd, start, end, @@ -67582,7 +67582,7 @@ Other caches with similar key:`); } }))); } finally { - fs5.closeSync(fd); + fs6.closeSync(fd); } return; }); @@ -72826,7 +72826,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io5 = __importStar4(require_io()); var fs_1 = require("fs"); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants7(); var IS_WINDOWS = process.platform === "win32"; @@ -72872,13 +72872,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path5.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -72924,7 +72924,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -72933,7 +72933,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -72948,7 +72948,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -72957,7 +72957,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -72997,7 +72997,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path5.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path6.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -73067,7 +73067,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core13 = __importStar4(require_core()); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -73164,7 +73164,7 @@ var require_cache3 = __commonJS({ core13.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core13.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core13.isDebug()) { @@ -73233,7 +73233,7 @@ var require_cache3 = __commonJS({ core13.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core13.debug(`Archive path: ${archivePath}`); core13.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -73296,7 +73296,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73360,7 +73360,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73498,7 +73498,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os2 = require("os"); var cp = require("child_process"); - var fs5 = require("fs"); 
+ var fs6 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os2.platform(); @@ -73562,10 +73562,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs5.existsSync(lsbReleaseFile)) { - contents = fs5.readFileSync(lsbReleaseFile).toString(); - } else if (fs5.existsSync(osReleaseFile)) { - contents = fs5.readFileSync(osReleaseFile).toString(); + if (fs6.existsSync(lsbReleaseFile)) { + contents = fs6.readFileSync(lsbReleaseFile).toString(); + } else if (fs6.existsSync(osReleaseFile)) { + contents = fs6.readFileSync(osReleaseFile).toString(); } return contents; } @@ -73742,10 +73742,10 @@ var require_tool_cache = __commonJS({ var core13 = __importStar4(require_core()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs5 = __importStar4(require("fs")); + var fs6 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os2 = __importStar4(require("os")); - var path5 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver8 = __importStar4(require_semver2()); var stream = __importStar4(require("stream")); @@ -73766,8 +73766,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path5.join(_getTempDirectory(), crypto.randomUUID()); - yield io5.mkdirP(path5.dirname(dest)); + dest = dest || path6.join(_getTempDirectory(), crypto.randomUUID()); + yield io5.mkdirP(path6.dirname(dest)); core13.debug(`Downloading ${url}`); core13.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -73789,7 +73789,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs5.existsSync(dest)) { + if (fs6.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -73813,7 +73813,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs5.createWriteStream(dest)); + yield pipeline(readStream, fs6.createWriteStream(dest)); core13.debug("download complete"); succeeded = true; return dest; @@ -73854,7 +73854,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path5.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path6.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -74025,12 +74025,12 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core13.debug(`Caching tool ${tool} ${version} ${arch}`); core13.debug(`source dir: ${sourceDir}`); - if (!fs5.statSync(sourceDir).isDirectory()) { + if (!fs6.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield 
_createToolPath(tool, version, arch); - for (const itemName of fs5.readdirSync(sourceDir)) { - const s = path5.join(sourceDir, itemName); + for (const itemName of fs6.readdirSync(sourceDir)) { + const s = path6.join(sourceDir, itemName); yield io5.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch); @@ -74044,11 +74044,11 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core13.debug(`Caching tool ${tool} ${version} ${arch}`); core13.debug(`source file: ${sourceFile}`); - if (!fs5.statSync(sourceFile).isFile()) { + if (!fs6.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path5.join(destFolder, targetFile); + const destPath = path6.join(destFolder, targetFile); core13.debug(`destination file ${destPath}`); yield io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); @@ -74072,9 +74072,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; - const cachePath = path5.join(_getCacheDirectory(), toolName, versionSpec, arch); + const cachePath = path6.join(_getCacheDirectory(), toolName, versionSpec, arch); core13.debug(`checking cache: ${cachePath}`); - if (fs5.existsSync(cachePath) && fs5.existsSync(`${cachePath}.complete`)) { + if (fs6.existsSync(cachePath) && fs6.existsSync(`${cachePath}.complete`)) { core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { @@ -74087,13 +74087,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch) { const versions = []; arch = arch || os2.arch(); - const toolPath = path5.join(_getCacheDirectory(), toolName); - if (fs5.existsSync(toolPath)) { - const children = fs5.readdirSync(toolPath); + const toolPath = path6.join(_getCacheDirectory(), toolName); + if (fs6.existsSync(toolPath)) { + const children = fs6.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path5.join(toolPath, child, arch || ""); - if (fs5.existsSync(fullPath) && fs5.existsSync(`${fullPath}.complete`)) { + const fullPath = path6.join(toolPath, child, arch || ""); + if (fs6.existsSync(fullPath) && fs6.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -74147,7 +74147,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path5.join(_getTempDirectory(), crypto.randomUUID()); + dest = path6.join(_getTempDirectory(), crypto.randomUUID()); } yield io5.mkdirP(dest); return dest; @@ -74155,7 +74155,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path5.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); core13.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); @@ -74165,9 +74165,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch) { - const folderPath = path5.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const 
markerPath = `${folderPath}.complete`; - fs5.writeFileSync(markerPath, ""); + fs6.writeFileSync(markerPath, ""); core13.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -74687,8 +74687,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative, base) { - return useNativeURL ? new URL2(relative, base) : parseUrl(url.resolve(base, relative)); + function resolveUrl(relative2, base) { + return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -74933,21 +74933,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs5 = options.fs || await import("node:fs/promises"); + const fs6 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs5.lstat(itemPath, { bigint: true }) : await fs5.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs6.lstat(itemPath, { bigint: true }) : await fs6.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs5.readdir(itemPath) : await fs5.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs6.readdir(itemPath) : await fs6.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -78266,8 +78266,8 @@ function wrapCliConfigurationError(cliError) { } // src/config-utils.ts -var fs3 = __toESM(require("fs")); -var path3 = __toESM(require("path")); +var fs4 = __toESM(require("fs")); +var path4 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -78289,6 +78289,10 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); +// src/diff-informed-analysis-utils.ts +var fs3 = __toESM(require("fs")); +var path3 = __toESM(require("path")); + // src/feature-flags.ts var semver4 = __toESM(require_semver2()); @@ -78386,8 +78390,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path5 = decodeGitFilePath(match[2]); - fileOidMap[path5] = oid; + const path6 = decodeGitFilePath(match[2]); + fileOidMap[path6] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -78498,8 +78502,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -78526,6 +78560,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path2.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path2.sep).join("/"); + forced.add(rel); + } + return forced; +} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -78709,6 +78760,24 @@ var featureConfig = { } }; +// src/diff-informed-analysis-utils.ts +function getDiffRangesJsonFilePath() { + return path3.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs3.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs3.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` + ); + return JSON.parse(jsonContents); +} + // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -78738,14 +78807,14 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path3.join(tempDir, "config"); + return path4.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs3.existsSync(configFile)) { + if (!fs4.existsSync(configFile)) { return void 0; } - const configString = fs3.readFileSync(configFile, "utf8"); + const configString = fs4.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -78781,8 +78850,8 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { } // src/codeql.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var fs5 = __toESM(require("fs")); +var path5 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -78860,12 +78929,12 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path4.join( + const tracingConfigPath = path5.join( extractorPath, "tools", "tracing-config.lua" ); - return fs4.existsSync(tracingConfigPath); + return fs5.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -78936,7 +79005,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { 
applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path4.join( + const autobuildCmd = path5.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -79326,7 +79395,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs4.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs5.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -79349,7 +79418,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path4.resolve(config.tempDir, "user-config.yaml"); + return path5.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; diff --git a/lib/upload-lib.js b/lib/upload-lib.js index e7c0bb5ecd..e29c14fb7f 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -29017,15 +29017,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative2 = []; + const relative3 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative2.push(pattern); + relative3.push(pattern); } } - return [absolute, relative2]; + return [absolute, relative3]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -81833,8 +81833,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); + function resolveUrl(relative3, base) { + return useNativeURL ? new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -89293,8 +89293,38 @@ async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in natural overlay changes (or none applicable)." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -89321,6 +89351,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path7.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path7.sep).join("/"); + forced.add(rel); + } + return forced; +} // src/tools-features.ts var semver3 = __toESM(require_semver2()); diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index ecdb515e3f..dab1fc0514 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -27720,15 +27720,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative2 = []; + const relative3 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative2.push(pattern); + relative3.push(pattern); } } - return [absolute, relative2]; + return [absolute, relative3]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -81833,8 +81833,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); + function resolveUrl(relative3, base) { + return useNativeURL ? 
new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -88949,8 +88949,8 @@ function wrapApiConfigurationError(e) { } // src/feature-flags.ts -var fs6 = __toESM(require("fs")); -var path8 = __toESM(require("path")); +var fs7 = __toESM(require("fs")); +var path9 = __toESM(require("path")); var semver3 = __toESM(require_semver2()); // src/defaults.json @@ -88958,8 +88958,8 @@ var bundleVersion = "codeql-bundle-v2.23.2"; var cliVersion = "2.23.2"; // src/overlay-database-utils.ts -var fs5 = __toESM(require("fs")); -var path7 = __toESM(require("path")); +var fs6 = __toESM(require("fs")); +var path8 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts @@ -89159,6 +89159,26 @@ async function isAnalyzingDefaultBranch() { return currentRef === defaultBranch; } +// src/diff-informed-analysis-utils.ts +var fs5 = __toESM(require("fs")); +var path7 = __toESM(require("path")); +function getDiffRangesJsonFilePath() { + return path7.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs5.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs5.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` + ); + return JSON.parse(jsonContents); +} + // src/logging.ts var core7 = __toESM(require_core()); function getActionsLogger() { @@ -89184,12 +89204,12 @@ async function writeBaseDatabaseOidsFile(config, sourceRoot) { const gitFileOids = await getFileOidsUnderPath(sourceRoot); const gitFileOidsJson = JSON.stringify(gitFileOids); const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); - await fs5.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson); + await fs6.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson); } async function readBaseDatabaseOidsFile(config, logger) { const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); try { - const contents = await fs5.promises.readFile( + const contents = await fs6.promises.readFile( baseDatabaseOidsFilePath, "utf-8" ); @@ -89202,24 +89222,54 @@ async function readBaseDatabaseOidsFile(config, logger) { } } function getBaseDatabaseOidsFilePath(config) { - return path7.join(config.dbLocation, "base-database-oids.json"); + return path8.join(config.dbLocation, "base-database-oids.json"); } async function writeOverlayChangesFile(config, sourceRoot, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let forcedAddedCount = 0; + try { + const forced = getForcedOverlayFilesFromDiff(logger); + if (forced.size > 0) { + const existing = new Set(changedFiles); + for (const f of forced) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path8.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + forcedAddedCount++; + } + } + } + if (forcedAddedCount > 0) { + logger.debug( + `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range 
files were already present in natural overlay changes (or none applicable)." + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); - const overlayChangesFile = path7.join( + const overlayChangesFile = path8.join( getTemporaryDirectory(), "overlay-changes.json" ); logger.debug( `Writing overlay changed files to ${overlayChangesFile}: ${changedFilesJson}` ); - await fs5.promises.writeFile(overlayChangesFile, changedFilesJson); + await fs6.promises.writeFile(overlayChangesFile, changedFilesJson); return overlayChangesFile; } function computeChangedFiles(baseFileOids, overlayFileOids) { @@ -89236,6 +89286,23 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } +function getForcedOverlayFilesFromDiff(logger) { + const forced = /* @__PURE__ */ new Set(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + const checkoutPath = getRequiredInput("checkout_path"); + for (const r of diffRanges) { + const absPath = r.path; + if (!absPath) continue; + let rel = path8.relative(checkoutPath, absPath); + if (!rel || rel.startsWith("..")) continue; + rel = rel.split(path8.sep).join("/"); + forced.add(rel); + } + return forced; +} // src/tools-features.ts var semver2 = __toESM(require_semver2()); @@ -89428,7 +89495,7 @@ var Features = class { this.gitHubFeatureFlags = new GitHubFeatureFlags( gitHubVersion, repositoryNwo, - path8.join(tempDir, FEATURE_FLAGS_FILE_NAME), + path9.join(tempDir, FEATURE_FLAGS_FILE_NAME), logger ); } @@ -89607,12 +89674,12 @@ var GitHubFeatureFlags = class { } async readLocalFlags() { try { - if (fs6.existsSync(this.featureFlagsFile)) { + if (fs7.existsSync(this.featureFlagsFile)) { this.logger.debug( `Loading feature flags from ${this.featureFlagsFile}` ); return JSON.parse( - fs6.readFileSync(this.featureFlagsFile, "utf8") + fs7.readFileSync(this.featureFlagsFile, "utf8") ); } } catch (e) { @@ -89625,7 +89692,7 @@ var GitHubFeatureFlags = class { async writeLocalFlags(flags) { try { this.logger.debug(`Writing feature flags to ${this.featureFlagsFile}`); - fs6.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); + fs7.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); } catch (e) { this.logger.warning( `Error writing cached feature flags file ${this.featureFlagsFile}: ${e}.` @@ -89706,26 +89773,6 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); -// src/diff-informed-analysis-utils.ts -var fs7 = __toESM(require("fs")); -var path9 = __toESM(require("path")); -function getDiffRangesJsonFilePath() { - return path9.join(getTemporaryDirectory(), "pr-diff-range.json"); -} -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs7.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs7.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} - // src/trap-caching.ts var actionsCache2 = 
__toESM(require_cache3()); diff --git a/src/overlay-database-utils.ts b/src/overlay-database-utils.ts index 1de76fef77..32d52dba27 100644 --- a/src/overlay-database-utils.ts +++ b/src/overlay-database-utils.ts @@ -9,6 +9,7 @@ import { getAutomationID } from "./api-client"; import { type CodeQL } from "./codeql"; import { type Config } from "./config-utils"; import { getCommitOid, getFileOidsUnderPath } from "./git-utils"; +import { readDiffRangesJsonFile } from "./diff-informed-analysis-utils"; import { Logger, withGroupAsync } from "./logging"; import { isInTestMode, @@ -121,8 +122,43 @@ export async function writeOverlayChangesFile( const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + + // Augment changed files with any files that appear in the precomputed PR diff ranges. + // This ensures overlay analysis always includes every file with at least one edited range. + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + const diffChangedFiles = getFilesFromDiff(logger); + if (diffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of diffChangedFiles) { + if (!existing.has(f)) { + // Only include if file still exists (added/modified) — skip deleted files that might appear in diff. + if (overlayFileOids[f] !== undefined || fs.existsSync(path.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`, + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database.", + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${(e as any).message || e}`, + ); + } + + logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`, + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`, ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); @@ -155,6 +191,22 @@ function computeChangedFiles( return changes; } +/** + * Derive the set of repository-relative file paths that have at least one edited range + * in the precomputed diff ranges JSON. Returns an empty set if no JSON exists. + */ +function getFilesFromDiff(logger: Logger): Set<string> { + const forced = new Set<string>(); + const diffRanges = readDiffRangesJsonFile(logger); + if (!diffRanges || diffRanges.length === 0) { + return forced; + } + for (const r of diffRanges) { + forced.add(r.path); + } + return forced; +} + // Constants for database caching const CACHE_VERSION = 1; const CACHE_PREFIX = "codeql-overlay-base-database"; From f030ca35d0c646f1e34bc73b2a60b1cd6e7f8fe3 Mon Sep 17 00:00:00 2001 From: Alex Eyers-Taylor Date: Thu, 9 Oct 2025 21:00:58 +0100 Subject: [PATCH 5/6] Pass changed files in code rather than via the file system.
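
The src/overlay-database-utils.ts hunk above unions the git-derived changed files with every file that has at least one edited range in pr-diff-range.json. Below is a minimal TypeScript sketch of that merge step, assuming diff-range paths are absolute and must be relativized against the checkout root; the helper name mergeDiffRangeFiles and the DiffRange shape are illustrative only, not the action's API.

    import * as fs from "fs";
    import * as path from "path";

    // Assumed shape of one entry in pr-diff-range.json.
    interface DiffRange {
      path: string; // absolute path of the edited file
      startLine: number;
      endLine: number;
    }

    // Union git-derived changed files with every file that has an edited range,
    // returning repo-relative, forward-slash paths suitable for overlay-changes.json.
    function mergeDiffRangeFiles(
      changedFiles: string[],
      diffRanges: DiffRange[],
      checkoutPath: string,
    ): string[] {
      const merged = new Set<string>(changedFiles);
      for (const range of diffRanges) {
        const rel = path.relative(checkoutPath, range.path).split(path.sep).join("/");
        if (!rel || rel.startsWith("..")) continue; // outside the checkout
        if (!fs.existsSync(path.join(checkoutPath, rel))) continue; // deleted in the PR
        merged.add(rel);
      }
      return [...merged];
    }

This commit then passes the merged list along in code rather than round-tripping it through a file on disk.
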
--- lib/analyze-action-post.js | 1347 ++++++++++++++-------------- lib/analyze-action.js | 51 +- lib/autobuild-action.js | 479 +++++----- lib/init-action-post.js | 95 +- lib/init-action.js | 86 +- lib/resolve-environment-action.js | 469 +++++----- lib/upload-lib.js | 51 +- lib/upload-sarif-action.js | 119 ++- src/codeql.test.ts | 4 + src/codeql.ts | 3 + src/init-action.ts | 21 +- src/init.ts | 2 + src/overlay-database-utils.test.ts | 1 + src/overlay-database-utils.ts | 23 +- 14 files changed, 1270 insertions(+), 1481 deletions(-) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 6a58c47db5..4eaab310d2 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto = __importStar4(require("crypto")); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var os = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs8.existsSync(filePath)) { + if (!fs7.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs8.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { + fs7.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path7 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path6 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path7 && !path7.startsWith("/")) { - path7 = `/${path7}`; + if (path6 && !path6.startsWith("/")) { + path6 = `/${path6}`; } - url = new URL(origin + path7); + url = new URL(origin + path6); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path7) { - if (typeof path7 !== "string") { + module2.exports = function basename(path6) { + if (typeof path6 !== "string") { return ""; } - for (var i = path7.length - 1; i >= 0; --i) { - switch (path7.charCodeAt(i)) { + for (var i = path6.length - 1; i >= 0; --i) { + switch (path6.charCodeAt(i)) { case 47: // '/' case 92: - path7 = path7.slice(i + 1); - return path7 === ".." || path7 === "." ? "" : path7; + path6 = path6.slice(i + 1); + return path6 === ".." || path6 === "." ? "" : path6; } } - return path7 === ".." || path7 === "." ? "" : path7; + return path6 === ".." || path6 === "." ? 
"" : path6; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path7, + path: path6, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path7 !== "string") { + if (typeof path6 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path7[0] !== "/" && !(path7.startsWith("http://") || path7.startsWith("https://")) && method !== "CONNECT") { + } else if (path6[0] !== "/" && !(path6.startsWith("http://") || path6.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path7) !== null) { + } else if (invalidPathRegex.exec(path6) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path7, query) : path7; + this.path = query ? util.buildURL(path6, query) : path6; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path7 = search ? `${pathname}${search}` : pathname; + const path6 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path7; + this.opts.path = path6; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path7, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path6, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path7} HTTP/1.1\r + let header = `${method} ${path6} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path7, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path6, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path7; + headers[HTTP2_HEADER_PATH] = path6; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - 
function safeUrl(path7) { - if (typeof path7 !== "string") { - return path7; + function safeUrl(path6) { + if (typeof path6 !== "string") { + return path6; } - const pathSegments = path7.split("?"); + const pathSegments = path6.split("?"); if (pathSegments.length !== 2) { - return path7; + return path6; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path7, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path7); + function matchKey(mockDispatch2, { path: path6, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path6); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path7 }) => matchValue(safeUrl(path7), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path6 }) => matchValue(safeUrl(path6), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path7, method, body, headers, query } = opts; + const { path: path6, method, body, headers, query } = opts; return { - path: path7, + path: path6, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path7, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path6, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path7, + Path: path6, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path7) { - for (const char of path7) { + function validateCookiePath(path6) { + for (const char of path6) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path7 = opts.path; + let path6 = opts.path; if (!opts.path.startsWith("/")) { - path7 = `/${path7}`; + path6 = `/${path6}`; } - url = new URL(util.parseOrigin(url).origin + path7); + url = new URL(util.parseOrigin(url).origin + path6); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path7.sep); + return pth.replace(/[/\\]/g, path6.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs8 = __importStar4(require("fs")); - var path7 = __importStar4(require("path")); - _a = fs8.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs7 = __importStar4(require("fs")); + var path6 = __importStar4(require("path")); + _a = fs7.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs8.constants.O_RDONLY; + exports2.READONLY = fs7.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path7.extname(filePath).toUpperCase(); + const upperExt = path6.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path7.dirname(filePath); - const upperName = path7.basename(filePath).toUpperCase(); + const directory = path6.dirname(filePath); + const upperName = path6.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path7.join(directory, actualName); + filePath = path6.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var 
path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path7.join(dest, path7.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path6.join(dest, path6.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path7.relative(source, newDest) === "") { + if (path6.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path7.join(dest, path7.basename(source)); + dest = path6.join(dest, path6.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path7.dirname(dest)); + yield mkdirP(path6.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path7.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path6.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path7.sep)) { + if (tool.includes(path6.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path7.delimiter)) { + for (const p of process.env.PATH.split(path6.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path7.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path6.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var io6 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path7.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path6.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io6.which(this.toolPath, true); return new Promise((resolve5, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var 
require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path7.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path6.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -19835,8 +19835,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path7 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path7} does not exist${os_1.EOL}`); + const path6 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path6} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -28203,7 +28203,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -28211,7 +28211,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path7.dirname(p); + let result = path6.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -28249,7 +28249,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path7.sep; + root += path6.sep; } return root + itemPath; } @@ -28287,10 +28287,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path7.sep)) { + if (!p.endsWith(path6.sep)) { return p; } - if (p === path7.sep) { + if (p === path6.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -28623,7 +28623,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path7 = (function() { + var path6 = (function() { try { return require("path"); } catch (e) { @@ -28631,7 +28631,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path7.sep; + minimatch.sep = path6.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -28720,8 +28720,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path7.sep !== "/") { - pattern = pattern.split(path7.sep).join("/"); + if (!options.allowWindowsEscape && path6.sep !== "/") { + pattern = pattern.split(path6.sep).join("/"); } 
this.options = options; this.set = []; @@ -29090,8 +29090,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path7.sep !== "/") { - f = f.split(path7.sep).join("/"); + if (path6.sep !== "/") { + f = f.split(path6.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -29223,7 +29223,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -29238,12 +29238,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path7.sep); + this.segments = itemPath.split(path6.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path7.basename(remaining); + const basename = path6.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -29261,7 +29261,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path7.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path6.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -29272,12 +29272,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path7.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path6.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path7.sep; + result += path6.sep; } result += this.segments[i]; } @@ -29321,7 +29321,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -29350,7 +29350,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path7.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path6.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -29374,8 +29374,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = 
pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path7.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path7.sep}`; + if (!itemPath.endsWith(path6.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path6.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -29410,9 +29410,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path7.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path6.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path7.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path6.sep}`)) { homedir = homedir || os.homedir(); assert_1.default(homedir, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -29496,8 +29496,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path7, level) { - this.path = path7; + constructor(path6, level) { + this.path = path6; this.level = level; } }; @@ -29617,9 +29617,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -29669,7 +29669,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs8.promises.lstat(searchPath)); + yield __await4(fs7.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -29700,7 +29700,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs8.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path7.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs7.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path6.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -29735,7 +29735,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs8.promises.stat(item.path); + stats = yield fs7.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -29747,10 +29747,10 @@ var 
require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs8.promises.lstat(item.path); + stats = yield fs7.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs8.promises.realpath(item.path); + const realPath = yield fs7.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -31084,8 +31084,8 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar4(require_glob()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs8 = __importStar4(require("fs")); - var path7 = __importStar4(require("path")); + var fs7 = __importStar4(require("fs")); + var path6 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants7(); @@ -31105,16 +31105,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path7.join(baseLocation, "actions", "temp"); + tempDirectory = path6.join(baseLocation, "actions", "temp"); } - const dest = path7.join(tempDirectory, crypto.randomUUID()); + const dest = path6.join(tempDirectory, crypto.randomUUID()); yield io6.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs8.statSync(filePath).size; + return fs7.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -31131,7 +31131,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path7.relative(workspace, file).replace(new RegExp(`\\${path7.sep}`, "g"), "/"); + const relativeFile = path6.relative(workspace, file).replace(new RegExp(`\\${path6.sep}`, "g"), "/"); core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -31154,7 +31154,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs8.unlink)(filePath); + return util.promisify(fs7.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -31199,7 +31199,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs8.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs7.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -39037,15 +39037,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path7 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path7.startsWith("/")) { - path7 = path7.substring(1); + let path6 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path6.startsWith("/")) { + path6 = path6.substring(1); } - if (isAbsoluteUrl(path7)) { - requestUrl = path7; + if (isAbsoluteUrl(path6)) { + requestUrl = path6; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path7); + requestUrl = appendPath(requestUrl, path6); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -39093,9 
+39093,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path7 = pathToAppend.substring(0, searchStart); + const path6 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path7; + newPath = newPath + path6; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -42972,7 +42972,7 @@ var require_dist7 = __commonJS({ var stream = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs8 = require("fs"); + var fs7 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -42995,7 +42995,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs8); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs7); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -43244,10 +43244,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path7 = urlParsed.pathname; - path7 = path7 || "/"; - path7 = escape(path7); - urlParsed.pathname = path7; + let path6 = urlParsed.pathname; + path6 = path6 || "/"; + path6 = escape(path6); + urlParsed.pathname = path6; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -43332,9 +43332,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path7 = urlParsed.pathname; - path7 = path7 ? path7.endsWith("/") ? `${path7}${name}` : `${path7}/${name}` : name; - urlParsed.pathname = path7; + let path6 = urlParsed.pathname; + path6 = path6 ? path6.endsWith("/") ? 
`${path6}${name}` : `${path6}/${name}` : name; + urlParsed.pathname = path6; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -44415,9 +44415,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path7 = getURLPath(request.url) || "/"; + const path6 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path7}`; + canonicalizedResourceString += `/${this.factory.accountName}${path6}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -44710,9 +44710,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path7 = getURLPath(request.url) || "/"; + const path6 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path7}`; + canonicalizedResourceString += `/${options.accountName}${path6}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -64014,8 +64014,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path7 = getURLPath(subRequest.url); - if (!path7 || path7 === "") { + const path6 = getURLPath(subRequest.url); + if (!path6 || path6 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -64075,8 +64075,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path7 = getURLPath(url2); - if (path7 && path7 !== "/") { + const path6 = getURLPath(url2); + if (path6 && path6 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -66843,7 +66843,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -66954,7 +66954,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs8.createWriteStream(archivePath); + const writeStream = fs7.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -66980,7 +66980,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs8.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs7.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { 
socketTimeout: options.timeoutInMs, keepAlive: true @@ -67097,7 +67097,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs8.openSync(archivePath, "w"); + const fd = fs7.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -67115,12 +67115,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs8.writeFileSync(fd, result); + fs7.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs8.closeSync(fd); + fs7.closeSync(fd); } } }); @@ -67419,7 +67419,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -67557,7 +67557,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs8.openSync(archivePath, "r"); + const fd = fs7.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -67571,7 +67571,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs8.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs7.createReadStream(archivePath, { fd, start, end, @@ -67582,7 +67582,7 @@ Other caches with similar key:`); } }))); } finally { - fs8.closeSync(fd); + fs7.closeSync(fd); } return; }); @@ -72826,7 +72826,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io6 = __importStar4(require_io()); var fs_1 = require("fs"); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants7(); var IS_WINDOWS = process.platform === "win32"; @@ -72872,13 +72872,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -72924,7 +72924,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -72933,7 +72933,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -72948,7 +72948,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -72957,7 +72957,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -72997,7 +72997,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path7.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path6.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -73067,7 +73067,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core14 = __importStar4(require_core()); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -73164,7 +73164,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core14.isDebug()) { @@ -73233,7 +73233,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive path: ${archivePath}`); core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -73296,7 +73296,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73360,7 +73360,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73498,7 +73498,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os = require("os"); var cp = require("child_process"); - var fs8 = require("fs"); 
+ var fs7 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os.platform(); @@ -73562,10 +73562,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs8.existsSync(lsbReleaseFile)) { - contents = fs8.readFileSync(lsbReleaseFile).toString(); - } else if (fs8.existsSync(osReleaseFile)) { - contents = fs8.readFileSync(osReleaseFile).toString(); + if (fs7.existsSync(lsbReleaseFile)) { + contents = fs7.readFileSync(lsbReleaseFile).toString(); + } else if (fs7.existsSync(osReleaseFile)) { + contents = fs7.readFileSync(osReleaseFile).toString(); } return contents; } @@ -73742,10 +73742,10 @@ var require_tool_cache = __commonJS({ var core14 = __importStar4(require_core()); var io6 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver8 = __importStar4(require_semver2()); var stream = __importStar4(require("stream")); @@ -73766,8 +73766,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path7.join(_getTempDirectory(), crypto.randomUUID()); - yield io6.mkdirP(path7.dirname(dest)); + dest = dest || path6.join(_getTempDirectory(), crypto.randomUUID()); + yield io6.mkdirP(path6.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -73789,7 +73789,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs8.existsSync(dest)) { + if (fs7.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -73813,7 +73813,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs8.createWriteStream(dest)); + yield pipeline(readStream, fs7.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -73854,7 +73854,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path7.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path6.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -74025,12 +74025,12 @@ var require_tool_cache = __commonJS({ arch = arch || os.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs8.statSync(sourceDir).isDirectory()) { + if (!fs7.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield 
_createToolPath(tool, version, arch); - for (const itemName of fs8.readdirSync(sourceDir)) { - const s = path7.join(sourceDir, itemName); + for (const itemName of fs7.readdirSync(sourceDir)) { + const s = path6.join(sourceDir, itemName); yield io6.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch); @@ -74044,11 +74044,11 @@ var require_tool_cache = __commonJS({ arch = arch || os.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source file: ${sourceFile}`); - if (!fs8.statSync(sourceFile).isFile()) { + if (!fs7.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path7.join(destFolder, targetFile); + const destPath = path6.join(destFolder, targetFile); core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); @@ -74072,9 +74072,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; - const cachePath = path7.join(_getCacheDirectory(), toolName, versionSpec, arch); + const cachePath = path6.join(_getCacheDirectory(), toolName, versionSpec, arch); core14.debug(`checking cache: ${cachePath}`); - if (fs8.existsSync(cachePath) && fs8.existsSync(`${cachePath}.complete`)) { + if (fs7.existsSync(cachePath) && fs7.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { @@ -74087,13 +74087,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch) { const versions = []; arch = arch || os.arch(); - const toolPath = path7.join(_getCacheDirectory(), toolName); - if (fs8.existsSync(toolPath)) { - const children = fs8.readdirSync(toolPath); + const toolPath = path6.join(_getCacheDirectory(), toolName); + if (fs7.existsSync(toolPath)) { + const children = fs7.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path7.join(toolPath, child, arch || ""); - if (fs8.existsSync(fullPath) && fs8.existsSync(`${fullPath}.complete`)) { + const fullPath = path6.join(toolPath, child, arch || ""); + if (fs7.existsSync(fullPath) && fs7.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -74147,7 +74147,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path7.join(_getTempDirectory(), crypto.randomUUID()); + dest = path6.join(_getTempDirectory(), crypto.randomUUID()); } yield io6.mkdirP(dest); return dest; @@ -74155,7 +74155,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); @@ -74165,9 +74165,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch) { - const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const markerPath 
= `${folderPath}.complete`; - fs8.writeFileSync(markerPath, ""); + fs7.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -74687,8 +74687,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); + function resolveUrl(relative, base) { + return useNativeURL ? new URL2(relative, base) : parseUrl(url.resolve(base, relative)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -76940,13 +76940,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.validateArtifactName = validateArtifactName; - function validateFilePath(path7) { - if (!path7) { + function validateFilePath(path6) { + if (!path6) { throw new Error(`Provided file path input during validation is empty`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path7.includes(invalidCharacterKey)) { - throw new Error(`The path for one of the files in artifact is not valid: ${path7}. Contains the following character: ${errorMessageForCharacter} + if (path6.includes(invalidCharacterKey)) { + throw new Error(`The path for one of the files in artifact is not valid: ${path6}. Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -77326,15 +77326,15 @@ var require_upload_zip_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadZipSpecification = exports2.validateRootDirectory = void 0; - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var core_1 = require_core(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); function validateRootDirectory(rootDirectory) { - if (!fs8.existsSync(rootDirectory)) { + if (!fs7.existsSync(rootDirectory)) { throw new Error(`The provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs8.statSync(rootDirectory).isDirectory()) { + if (!fs7.statSync(rootDirectory).isDirectory()) { throw new Error(`The provided rootDirectory ${rootDirectory} is not a valid directory`); } (0, core_1.info)(`Root directory input is valid!`); @@ -77345,7 +77345,7 @@ var require_upload_zip_specification = __commonJS({ rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of filesToZip) { - const stats = fs8.lstatSync(file, { throwIfNoEntry: false }); + const stats = fs7.lstatSync(file, { throwIfNoEntry: false }); if (!stats) { throw new Error(`File ${file} does not exist`); } @@ -77811,8 +77811,8 @@ var require_minimatch2 = __commonJS({ return new Minimatch(pattern, options).match(p); }; module2.exports = minimatch; - var path7 = require_path(); - minimatch.sep = path7.sep; + var path6 = require_path(); + minimatch.sep = path6.sep; var GLOBSTAR = Symbol("globstar **"); minimatch.GLOBSTAR = GLOBSTAR; var expand = require_brace_expansion2(); @@ -78321,8 +78321,8 @@ var require_minimatch2 = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; const options = this.options; - if (path7.sep !== "/") { - f = f.split(path7.sep).join("/"); + if (path6.sep !== "/") { + 
f = f.split(path6.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -78360,13 +78360,13 @@ var require_minimatch2 = __commonJS({ var require_readdir_glob = __commonJS({ "node_modules/readdir-glob/index.js"(exports2, module2) { module2.exports = readdirGlob; - var fs8 = require("fs"); + var fs7 = require("fs"); var { EventEmitter } = require("events"); var { Minimatch } = require_minimatch2(); var { resolve: resolve5 } = require("path"); function readdir(dir, strict) { return new Promise((resolve6, reject) => { - fs8.readdir(dir, { withFileTypes: true }, (err, files) => { + fs7.readdir(dir, { withFileTypes: true }, (err, files) => { if (err) { switch (err.code) { case "ENOTDIR": @@ -78399,7 +78399,7 @@ var require_readdir_glob = __commonJS({ } function stat(file, followSymlinks) { return new Promise((resolve6, reject) => { - const statFunc = followSymlinks ? fs8.stat : fs8.lstat; + const statFunc = followSymlinks ? fs7.stat : fs7.lstat; statFunc(file, (err, stats) => { if (err) { switch (err.code) { @@ -78420,8 +78420,8 @@ var require_readdir_glob = __commonJS({ }); }); } - async function* exploreWalkAsync(dir, path7, followSymlinks, useStat, shouldSkip, strict) { - let files = await readdir(path7 + dir, strict); + async function* exploreWalkAsync(dir, path6, followSymlinks, useStat, shouldSkip, strict) { + let files = await readdir(path6 + dir, strict); for (const file of files) { let name = file.name; if (name === void 0) { @@ -78429,8 +78429,8 @@ var require_readdir_glob = __commonJS({ useStat = true; } const filename = dir + "/" + name; - const relative2 = filename.slice(1); - const absolute = path7 + "/" + relative2; + const relative = filename.slice(1); + const absolute = path6 + "/" + relative; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -78442,17 +78442,17 @@ var require_readdir_glob = __commonJS({ stats = { isDirectory: () => false }; } if (stats.isDirectory()) { - if (!shouldSkip(relative2)) { - yield { relative: relative2, absolute, stats }; - yield* exploreWalkAsync(filename, path7, followSymlinks, useStat, shouldSkip, false); + if (!shouldSkip(relative)) { + yield { relative, absolute, stats }; + yield* exploreWalkAsync(filename, path6, followSymlinks, useStat, shouldSkip, false); } } else { - yield { relative: relative2, absolute, stats }; + yield { relative, absolute, stats }; } } } - async function* explore(path7, followSymlinks, useStat, shouldSkip) { - yield* exploreWalkAsync("", path7, followSymlinks, useStat, shouldSkip, true); + async function* explore(path6, followSymlinks, useStat, shouldSkip) { + yield* exploreWalkAsync("", path6, followSymlinks, useStat, shouldSkip, true); } function readOptions(options) { return { @@ -78517,11 +78517,11 @@ var require_readdir_glob = __commonJS({ } setTimeout(() => this._next(), 0); } - _shouldSkipDirectory(relative2) { - return this.skipMatchers.some((m) => m.match(relative2)); + _shouldSkipDirectory(relative) { + return this.skipMatchers.some((m) => m.match(relative)); } - _fileMatches(relative2, isDirectory) { - const file = relative2 + (isDirectory ? "/" : ""); + _fileMatches(relative, isDirectory) { + const file = relative + (isDirectory ? 
"/" : ""); return (this.matchers.length === 0 || this.matchers.some((m) => m.match(file))) && !this.ignoreMatchers.some((m) => m.match(file)) && (!this.options.nodir || !isDirectory); } _next() { @@ -78530,16 +78530,16 @@ var require_readdir_glob = __commonJS({ if (!obj.done) { const isDirectory = obj.value.stats.isDirectory(); if (this._fileMatches(obj.value.relative, isDirectory)) { - let relative2 = obj.value.relative; + let relative = obj.value.relative; let absolute = obj.value.absolute; if (this.options.mark && isDirectory) { - relative2 += "/"; + relative += "/"; absolute += "/"; } if (this.options.stat) { - this.emit("match", { relative: relative2, absolute, stat: obj.value.stats }); + this.emit("match", { relative, absolute, stat: obj.value.stats }); } else { - this.emit("match", { relative: relative2, absolute }); + this.emit("match", { relative, absolute }); } } this._next(this.iterator); @@ -80464,54 +80464,54 @@ var require_polyfills = __commonJS({ } var chdir; module2.exports = patch; - function patch(fs8) { + function patch(fs7) { if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs8); - } - if (!fs8.lutimes) { - patchLutimes(fs8); - } - fs8.chown = chownFix(fs8.chown); - fs8.fchown = chownFix(fs8.fchown); - fs8.lchown = chownFix(fs8.lchown); - fs8.chmod = chmodFix(fs8.chmod); - fs8.fchmod = chmodFix(fs8.fchmod); - fs8.lchmod = chmodFix(fs8.lchmod); - fs8.chownSync = chownFixSync(fs8.chownSync); - fs8.fchownSync = chownFixSync(fs8.fchownSync); - fs8.lchownSync = chownFixSync(fs8.lchownSync); - fs8.chmodSync = chmodFixSync(fs8.chmodSync); - fs8.fchmodSync = chmodFixSync(fs8.fchmodSync); - fs8.lchmodSync = chmodFixSync(fs8.lchmodSync); - fs8.stat = statFix(fs8.stat); - fs8.fstat = statFix(fs8.fstat); - fs8.lstat = statFix(fs8.lstat); - fs8.statSync = statFixSync(fs8.statSync); - fs8.fstatSync = statFixSync(fs8.fstatSync); - fs8.lstatSync = statFixSync(fs8.lstatSync); - if (fs8.chmod && !fs8.lchmod) { - fs8.lchmod = function(path7, mode, cb) { + patchLchmod(fs7); + } + if (!fs7.lutimes) { + patchLutimes(fs7); + } + fs7.chown = chownFix(fs7.chown); + fs7.fchown = chownFix(fs7.fchown); + fs7.lchown = chownFix(fs7.lchown); + fs7.chmod = chmodFix(fs7.chmod); + fs7.fchmod = chmodFix(fs7.fchmod); + fs7.lchmod = chmodFix(fs7.lchmod); + fs7.chownSync = chownFixSync(fs7.chownSync); + fs7.fchownSync = chownFixSync(fs7.fchownSync); + fs7.lchownSync = chownFixSync(fs7.lchownSync); + fs7.chmodSync = chmodFixSync(fs7.chmodSync); + fs7.fchmodSync = chmodFixSync(fs7.fchmodSync); + fs7.lchmodSync = chmodFixSync(fs7.lchmodSync); + fs7.stat = statFix(fs7.stat); + fs7.fstat = statFix(fs7.fstat); + fs7.lstat = statFix(fs7.lstat); + fs7.statSync = statFixSync(fs7.statSync); + fs7.fstatSync = statFixSync(fs7.fstatSync); + fs7.lstatSync = statFixSync(fs7.lstatSync); + if (fs7.chmod && !fs7.lchmod) { + fs7.lchmod = function(path6, mode, cb) { if (cb) process.nextTick(cb); }; - fs8.lchmodSync = function() { + fs7.lchmodSync = function() { }; } - if (fs8.chown && !fs8.lchown) { - fs8.lchown = function(path7, uid, gid, cb) { + if (fs7.chown && !fs7.lchown) { + fs7.lchown = function(path6, uid, gid, cb) { if (cb) process.nextTick(cb); }; - fs8.lchownSync = function() { + fs7.lchownSync = function() { }; } if (platform === "win32") { - fs8.rename = typeof fs8.rename !== "function" ? fs8.rename : (function(fs$rename) { + fs7.rename = typeof fs7.rename !== "function" ? 
fs7.rename : (function(fs$rename) { function rename(from, to, cb) { var start = Date.now(); var backoff = 0; fs$rename(from, to, function CB(er) { if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < 6e4) { setTimeout(function() { - fs8.stat(to, function(stater, st) { + fs7.stat(to, function(stater, st) { if (stater && stater.code === "ENOENT") fs$rename(from, to, CB); else @@ -80527,9 +80527,9 @@ var require_polyfills = __commonJS({ } if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename); return rename; - })(fs8.rename); + })(fs7.rename); } - fs8.read = typeof fs8.read !== "function" ? fs8.read : (function(fs$read) { + fs7.read = typeof fs7.read !== "function" ? fs7.read : (function(fs$read) { function read(fd, buffer, offset, length, position, callback_) { var callback; if (callback_ && typeof callback_ === "function") { @@ -80537,22 +80537,22 @@ var require_polyfills = __commonJS({ callback = function(er, _2, __) { if (er && er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; - return fs$read.call(fs8, fd, buffer, offset, length, position, callback); + return fs$read.call(fs7, fd, buffer, offset, length, position, callback); } callback_.apply(this, arguments); }; } - return fs$read.call(fs8, fd, buffer, offset, length, position, callback); + return fs$read.call(fs7, fd, buffer, offset, length, position, callback); } if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read); return read; - })(fs8.read); - fs8.readSync = typeof fs8.readSync !== "function" ? fs8.readSync : /* @__PURE__ */ (function(fs$readSync) { + })(fs7.read); + fs7.readSync = typeof fs7.readSync !== "function" ? fs7.readSync : /* @__PURE__ */ (function(fs$readSync) { return function(fd, buffer, offset, length, position) { var eagCounter = 0; while (true) { try { - return fs$readSync.call(fs8, fd, buffer, offset, length, position); + return fs$readSync.call(fs7, fd, buffer, offset, length, position); } catch (er) { if (er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; @@ -80562,11 +80562,11 @@ var require_polyfills = __commonJS({ } } }; - })(fs8.readSync); - function patchLchmod(fs9) { - fs9.lchmod = function(path7, mode, callback) { - fs9.open( - path7, + })(fs7.readSync); + function patchLchmod(fs8) { + fs8.lchmod = function(path6, mode, callback) { + fs8.open( + path6, constants.O_WRONLY | constants.O_SYMLINK, mode, function(err, fd) { @@ -80574,80 +80574,80 @@ var require_polyfills = __commonJS({ if (callback) callback(err); return; } - fs9.fchmod(fd, mode, function(err2) { - fs9.close(fd, function(err22) { + fs8.fchmod(fd, mode, function(err2) { + fs8.close(fd, function(err22) { if (callback) callback(err2 || err22); }); }); } ); }; - fs9.lchmodSync = function(path7, mode) { - var fd = fs9.openSync(path7, constants.O_WRONLY | constants.O_SYMLINK, mode); + fs8.lchmodSync = function(path6, mode) { + var fd = fs8.openSync(path6, constants.O_WRONLY | constants.O_SYMLINK, mode); var threw = true; var ret; try { - ret = fs9.fchmodSync(fd, mode); + ret = fs8.fchmodSync(fd, mode); threw = false; } finally { if (threw) { try { - fs9.closeSync(fd); + fs8.closeSync(fd); } catch (er) { } } else { - fs9.closeSync(fd); + fs8.closeSync(fd); } } return ret; }; } - function patchLutimes(fs9) { - if (constants.hasOwnProperty("O_SYMLINK") && fs9.futimes) { - fs9.lutimes = function(path7, at, mt, cb) { - fs9.open(path7, constants.O_SYMLINK, function(er, fd) { + function patchLutimes(fs8) { + if (constants.hasOwnProperty("O_SYMLINK") && fs8.futimes) { + fs8.lutimes = 
function(path6, at, mt, cb) { + fs8.open(path6, constants.O_SYMLINK, function(er, fd) { if (er) { if (cb) cb(er); return; } - fs9.futimes(fd, at, mt, function(er2) { - fs9.close(fd, function(er22) { + fs8.futimes(fd, at, mt, function(er2) { + fs8.close(fd, function(er22) { if (cb) cb(er2 || er22); }); }); }); }; - fs9.lutimesSync = function(path7, at, mt) { - var fd = fs9.openSync(path7, constants.O_SYMLINK); + fs8.lutimesSync = function(path6, at, mt) { + var fd = fs8.openSync(path6, constants.O_SYMLINK); var ret; var threw = true; try { - ret = fs9.futimesSync(fd, at, mt); + ret = fs8.futimesSync(fd, at, mt); threw = false; } finally { if (threw) { try { - fs9.closeSync(fd); + fs8.closeSync(fd); } catch (er) { } } else { - fs9.closeSync(fd); + fs8.closeSync(fd); } } return ret; }; - } else if (fs9.futimes) { - fs9.lutimes = function(_a, _b, _c, cb) { + } else if (fs8.futimes) { + fs8.lutimes = function(_a, _b, _c, cb) { if (cb) process.nextTick(cb); }; - fs9.lutimesSync = function() { + fs8.lutimesSync = function() { }; } } function chmodFix(orig) { if (!orig) return orig; return function(target, mode, cb) { - return orig.call(fs8, target, mode, function(er) { + return orig.call(fs7, target, mode, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -80657,7 +80657,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, mode) { try { - return orig.call(fs8, target, mode); + return orig.call(fs7, target, mode); } catch (er) { if (!chownErOk(er)) throw er; } @@ -80666,7 +80666,7 @@ var require_polyfills = __commonJS({ function chownFix(orig) { if (!orig) return orig; return function(target, uid, gid, cb) { - return orig.call(fs8, target, uid, gid, function(er) { + return orig.call(fs7, target, uid, gid, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -80676,7 +80676,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, uid, gid) { try { - return orig.call(fs8, target, uid, gid); + return orig.call(fs7, target, uid, gid); } catch (er) { if (!chownErOk(er)) throw er; } @@ -80696,13 +80696,13 @@ var require_polyfills = __commonJS({ } if (cb) cb.apply(this, arguments); } - return options ? orig.call(fs8, target, options, callback) : orig.call(fs8, target, callback); + return options ? orig.call(fs7, target, options, callback) : orig.call(fs7, target, callback); }; } function statFixSync(orig) { if (!orig) return orig; return function(target, options) { - var stats = options ? orig.call(fs8, target, options) : orig.call(fs8, target); + var stats = options ? 
orig.call(fs7, target, options) : orig.call(fs7, target); if (stats) { if (stats.uid < 0) stats.uid += 4294967296; if (stats.gid < 0) stats.gid += 4294967296; @@ -80731,16 +80731,16 @@ var require_legacy_streams = __commonJS({ "node_modules/graceful-fs/legacy-streams.js"(exports2, module2) { var Stream = require("stream").Stream; module2.exports = legacy; - function legacy(fs8) { + function legacy(fs7) { return { ReadStream, WriteStream }; - function ReadStream(path7, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path7, options); + function ReadStream(path6, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path6, options); Stream.call(this); var self2 = this; - this.path = path7; + this.path = path6; this.fd = null; this.readable = true; this.paused = false; @@ -80774,7 +80774,7 @@ var require_legacy_streams = __commonJS({ }); return; } - fs8.open(this.path, this.flags, this.mode, function(err, fd) { + fs7.open(this.path, this.flags, this.mode, function(err, fd) { if (err) { self2.emit("error", err); self2.readable = false; @@ -80785,10 +80785,10 @@ var require_legacy_streams = __commonJS({ self2._read(); }); } - function WriteStream(path7, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path7, options); + function WriteStream(path6, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path6, options); Stream.call(this); - this.path = path7; + this.path = path6; this.fd = null; this.writable = true; this.flags = "w"; @@ -80813,7 +80813,7 @@ var require_legacy_streams = __commonJS({ this.busy = false; this._queue = []; if (this.fd === null) { - this._open = fs8.open; + this._open = fs7.open; this._queue.push([this._open, this.path, this.flags, this.mode, void 0]); this.flush(); } @@ -80848,7 +80848,7 @@ var require_clone = __commonJS({ // node_modules/graceful-fs/graceful-fs.js var require_graceful_fs = __commonJS({ "node_modules/graceful-fs/graceful-fs.js"(exports2, module2) { - var fs8 = require("fs"); + var fs7 = require("fs"); var polyfills = require_polyfills(); var legacy = require_legacy_streams(); var clone = require_clone(); @@ -80880,12 +80880,12 @@ var require_graceful_fs = __commonJS({ m = "GFS4: " + m.split(/\n/).join("\nGFS4: "); console.error(m); }; - if (!fs8[gracefulQueue]) { + if (!fs7[gracefulQueue]) { queue = global[gracefulQueue] || []; - publishQueue(fs8, queue); - fs8.close = (function(fs$close) { + publishQueue(fs7, queue); + fs7.close = (function(fs$close) { function close(fd, cb) { - return fs$close.call(fs8, fd, function(err) { + return fs$close.call(fs7, fd, function(err) { if (!err) { resetQueue(); } @@ -80897,48 +80897,48 @@ var require_graceful_fs = __commonJS({ value: fs$close }); return close; - })(fs8.close); - fs8.closeSync = (function(fs$closeSync) { + })(fs7.close); + fs7.closeSync = (function(fs$closeSync) { function closeSync(fd) { - fs$closeSync.apply(fs8, arguments); + fs$closeSync.apply(fs7, arguments); resetQueue(); } Object.defineProperty(closeSync, previousSymbol, { value: fs$closeSync }); return closeSync; - })(fs8.closeSync); + })(fs7.closeSync); if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) { process.on("exit", function() { - debug2(fs8[gracefulQueue]); - require("assert").equal(fs8[gracefulQueue].length, 0); + debug2(fs7[gracefulQueue]); + require("assert").equal(fs7[gracefulQueue].length, 0); }); } } var queue; if (!global[gracefulQueue]) { - publishQueue(global, fs8[gracefulQueue]); - } - module2.exports = patch(clone(fs8)); - if 
(process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs8.__patched) { - module2.exports = patch(fs8); - fs8.__patched = true; - } - function patch(fs9) { - polyfills(fs9); - fs9.gracefulify = patch; - fs9.createReadStream = createReadStream; - fs9.createWriteStream = createWriteStream2; - var fs$readFile = fs9.readFile; - fs9.readFile = readFile; - function readFile(path7, options, cb) { + publishQueue(global, fs7[gracefulQueue]); + } + module2.exports = patch(clone(fs7)); + if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs7.__patched) { + module2.exports = patch(fs7); + fs7.__patched = true; + } + function patch(fs8) { + polyfills(fs8); + fs8.gracefulify = patch; + fs8.createReadStream = createReadStream; + fs8.createWriteStream = createWriteStream2; + var fs$readFile = fs8.readFile; + fs8.readFile = readFile; + function readFile(path6, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$readFile(path7, options, cb); - function go$readFile(path8, options2, cb2, startTime) { - return fs$readFile(path8, options2, function(err) { + return go$readFile(path6, options, cb); + function go$readFile(path7, options2, cb2, startTime) { + return fs$readFile(path7, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$readFile, [path8, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$readFile, [path7, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -80946,16 +80946,16 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$writeFile = fs9.writeFile; - fs9.writeFile = writeFile; - function writeFile(path7, data, options, cb) { + var fs$writeFile = fs8.writeFile; + fs8.writeFile = writeFile; + function writeFile(path6, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$writeFile(path7, data, options, cb); - function go$writeFile(path8, data2, options2, cb2, startTime) { - return fs$writeFile(path8, data2, options2, function(err) { + return go$writeFile(path6, data, options, cb); + function go$writeFile(path7, data2, options2, cb2, startTime) { + return fs$writeFile(path7, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$writeFile, [path8, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$writeFile, [path7, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -80963,17 +80963,17 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$appendFile = fs9.appendFile; + var fs$appendFile = fs8.appendFile; if (fs$appendFile) - fs9.appendFile = appendFile; - function appendFile(path7, data, options, cb) { + fs8.appendFile = appendFile; + function appendFile(path6, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$appendFile(path7, data, options, cb); - function go$appendFile(path8, data2, options2, cb2, startTime) { - return fs$appendFile(path8, data2, options2, function(err) { + return go$appendFile(path6, data, options, cb); + function go$appendFile(path7, data2, options2, cb2, startTime) { + return fs$appendFile(path7, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$appendFile, [path8, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$appendFile, [path7, data2, 
options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -80981,9 +80981,9 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$copyFile = fs9.copyFile; + var fs$copyFile = fs8.copyFile; if (fs$copyFile) - fs9.copyFile = copyFile; + fs8.copyFile = copyFile; function copyFile(src, dest, flags, cb) { if (typeof flags === "function") { cb = flags; @@ -81001,34 +81001,34 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$readdir = fs9.readdir; - fs9.readdir = readdir; + var fs$readdir = fs8.readdir; + fs8.readdir = readdir; var noReaddirOptionVersions = /^v[0-5]\./; - function readdir(path7, options, cb) { + function readdir(path6, options, cb) { if (typeof options === "function") cb = options, options = null; - var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path8, options2, cb2, startTime) { - return fs$readdir(path8, fs$readdirCallback( - path8, + var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path7, options2, cb2, startTime) { + return fs$readdir(path7, fs$readdirCallback( + path7, options2, cb2, startTime )); - } : function go$readdir2(path8, options2, cb2, startTime) { - return fs$readdir(path8, options2, fs$readdirCallback( - path8, + } : function go$readdir2(path7, options2, cb2, startTime) { + return fs$readdir(path7, options2, fs$readdirCallback( + path7, options2, cb2, startTime )); }; - return go$readdir(path7, options, cb); - function fs$readdirCallback(path8, options2, cb2, startTime) { + return go$readdir(path6, options, cb); + function fs$readdirCallback(path7, options2, cb2, startTime) { return function(err, files) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) enqueue([ go$readdir, - [path8, options2, cb2], + [path7, options2, cb2], err, startTime || Date.now(), Date.now() @@ -81043,21 +81043,21 @@ var require_graceful_fs = __commonJS({ } } if (process.version.substr(0, 4) === "v0.8") { - var legStreams = legacy(fs9); + var legStreams = legacy(fs8); ReadStream = legStreams.ReadStream; WriteStream = legStreams.WriteStream; } - var fs$ReadStream = fs9.ReadStream; + var fs$ReadStream = fs8.ReadStream; if (fs$ReadStream) { ReadStream.prototype = Object.create(fs$ReadStream.prototype); ReadStream.prototype.open = ReadStream$open; } - var fs$WriteStream = fs9.WriteStream; + var fs$WriteStream = fs8.WriteStream; if (fs$WriteStream) { WriteStream.prototype = Object.create(fs$WriteStream.prototype); WriteStream.prototype.open = WriteStream$open; } - Object.defineProperty(fs9, "ReadStream", { + Object.defineProperty(fs8, "ReadStream", { get: function() { return ReadStream; }, @@ -81067,7 +81067,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(fs9, "WriteStream", { + Object.defineProperty(fs8, "WriteStream", { get: function() { return WriteStream; }, @@ -81078,7 +81078,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileReadStream = ReadStream; - Object.defineProperty(fs9, "FileReadStream", { + Object.defineProperty(fs8, "FileReadStream", { get: function() { return FileReadStream; }, @@ -81089,7 +81089,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileWriteStream = WriteStream; - Object.defineProperty(fs9, "FileWriteStream", { + Object.defineProperty(fs8, "FileWriteStream", { get: function() { return FileWriteStream; }, @@ -81099,7 +81099,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, 
configurable: true }); - function ReadStream(path7, options) { + function ReadStream(path6, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this; else @@ -81119,7 +81119,7 @@ var require_graceful_fs = __commonJS({ } }); } - function WriteStream(path7, options) { + function WriteStream(path6, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this; else @@ -81137,22 +81137,22 @@ var require_graceful_fs = __commonJS({ } }); } - function createReadStream(path7, options) { - return new fs9.ReadStream(path7, options); + function createReadStream(path6, options) { + return new fs8.ReadStream(path6, options); } - function createWriteStream2(path7, options) { - return new fs9.WriteStream(path7, options); + function createWriteStream2(path6, options) { + return new fs8.WriteStream(path6, options); } - var fs$open = fs9.open; - fs9.open = open; - function open(path7, flags, mode, cb) { + var fs$open = fs8.open; + fs8.open = open; + function open(path6, flags, mode, cb) { if (typeof mode === "function") cb = mode, mode = null; - return go$open(path7, flags, mode, cb); - function go$open(path8, flags2, mode2, cb2, startTime) { - return fs$open(path8, flags2, mode2, function(err, fd) { + return go$open(path6, flags, mode, cb); + function go$open(path7, flags2, mode2, cb2, startTime) { + return fs$open(path7, flags2, mode2, function(err, fd) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$open, [path8, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$open, [path7, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -81160,20 +81160,20 @@ var require_graceful_fs = __commonJS({ }); } } - return fs9; + return fs8; } function enqueue(elem) { debug2("ENQUEUE", elem[0].name, elem[1]); - fs8[gracefulQueue].push(elem); + fs7[gracefulQueue].push(elem); retry3(); } var retryTimer; function resetQueue() { var now = Date.now(); - for (var i = 0; i < fs8[gracefulQueue].length; ++i) { - if (fs8[gracefulQueue][i].length > 2) { - fs8[gracefulQueue][i][3] = now; - fs8[gracefulQueue][i][4] = now; + for (var i = 0; i < fs7[gracefulQueue].length; ++i) { + if (fs7[gracefulQueue][i].length > 2) { + fs7[gracefulQueue][i][3] = now; + fs7[gracefulQueue][i][4] = now; } } retry3(); @@ -81181,9 +81181,9 @@ var require_graceful_fs = __commonJS({ function retry3() { clearTimeout(retryTimer); retryTimer = void 0; - if (fs8[gracefulQueue].length === 0) + if (fs7[gracefulQueue].length === 0) return; - var elem = fs8[gracefulQueue].shift(); + var elem = fs7[gracefulQueue].shift(); var fn = elem[0]; var args = elem[1]; var err = elem[2]; @@ -81205,7 +81205,7 @@ var require_graceful_fs = __commonJS({ debug2("RETRY", fn.name, args); fn.apply(null, args.concat([startTime])); } else { - fs8[gracefulQueue].push(elem); + fs7[gracefulQueue].push(elem); } } if (retryTimer === void 0) { @@ -81505,7 +81505,7 @@ var require_BufferList = __commonJS({ this.head = this.tail = null; this.length = 0; }; - BufferList.prototype.join = function join8(s) { + BufferList.prototype.join = function join7(s) { if (this.length === 0) return ""; var p = this.head; var ret = "" + p.data; @@ -83253,22 +83253,22 @@ var require_lazystream = __commonJS({ // node_modules/normalize-path/index.js var require_normalize_path = __commonJS({ "node_modules/normalize-path/index.js"(exports2, module2) { - module2.exports = function(path7, stripTrailing) { - if 
(typeof path7 !== "string") { + module2.exports = function(path6, stripTrailing) { + if (typeof path6 !== "string") { throw new TypeError("expected path to be a string"); } - if (path7 === "\\" || path7 === "/") return "/"; - var len = path7.length; - if (len <= 1) return path7; + if (path6 === "\\" || path6 === "/") return "/"; + var len = path6.length; + if (len <= 1) return path6; var prefix = ""; - if (len > 4 && path7[3] === "\\") { - var ch = path7[2]; - if ((ch === "?" || ch === ".") && path7.slice(0, 2) === "\\\\") { - path7 = path7.slice(2); + if (len > 4 && path6[3] === "\\") { + var ch = path6[2]; + if ((ch === "?" || ch === ".") && path6.slice(0, 2) === "\\\\") { + path6 = path6.slice(2); prefix = "//"; } } - var segs = path7.split(/[/\\]+/); + var segs = path6.split(/[/\\]+/); if (stripTrailing !== false && segs[segs.length - 1] === "") { segs.pop(); } @@ -84024,8 +84024,8 @@ var require_primordials = __commonJS({ ArrayPrototypeIndexOf(self2, el) { return self2.indexOf(el); }, - ArrayPrototypeJoin(self2, sep4) { - return self2.join(sep4); + ArrayPrototypeJoin(self2, sep2) { + return self2.join(sep2); }, ArrayPrototypeMap(self2, fn) { return self2.map(fn); @@ -91803,11 +91803,11 @@ var require_commonjs13 = __commonJS({ return (f) => f.length === len && f !== "." && f !== ".."; }; var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix"; - var path7 = { + var path6 = { win32: { sep: "\\" }, posix: { sep: "/" } }; - exports2.sep = defaultPlatform === "win32" ? path7.win32.sep : path7.posix.sep; + exports2.sep = defaultPlatform === "win32" ? path6.win32.sep : path6.posix.sep; exports2.minimatch.sep = exports2.sep; exports2.GLOBSTAR = Symbol("globstar **"); exports2.minimatch.GLOBSTAR = exports2.GLOBSTAR; @@ -95055,12 +95055,12 @@ var require_commonjs16 = __commonJS({ /** * Get the Path object referenced by the string path, resolved from this Path */ - resolve(path7) { - if (!path7) { + resolve(path6) { + if (!path6) { return this; } - const rootPath = this.getRootString(path7); - const dir = path7.substring(rootPath.length); + const rootPath = this.getRootString(path6); + const dir = path6.substring(rootPath.length); const dirParts = dir.split(this.splitSep); const result = rootPath ? this.getRoot(rootPath).#resolveParts(dirParts) : this.#resolveParts(dirParts); return result; @@ -95813,8 +95813,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - getRootString(path7) { - return node_path_1.win32.parse(path7).root; + getRootString(path6) { + return node_path_1.win32.parse(path6).root; } /** * @internal @@ -95861,8 +95861,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - getRootString(path7) { - return path7.startsWith("/") ? "/" : ""; + getRootString(path6) { + return path6.startsWith("/") ? 
"/" : ""; } /** * @internal @@ -95912,8 +95912,8 @@ var require_commonjs16 = __commonJS({ * * @internal */ - constructor(cwd = process.cwd(), pathImpl, sep4, { nocase, childrenCacheSize = 16 * 1024, fs: fs8 = defaultFS } = {}) { - this.#fs = fsFromOption(fs8); + constructor(cwd = process.cwd(), pathImpl, sep2, { nocase, childrenCacheSize = 16 * 1024, fs: fs7 = defaultFS } = {}) { + this.#fs = fsFromOption(fs7); if (cwd instanceof URL || cwd.startsWith("file://")) { cwd = (0, node_url_1.fileURLToPath)(cwd); } @@ -95923,7 +95923,7 @@ var require_commonjs16 = __commonJS({ this.#resolveCache = new ResolveCache(); this.#resolvePosixCache = new ResolveCache(); this.#children = new ChildrenCache(childrenCacheSize); - const split = cwdPath.substring(this.rootPath.length).split(sep4); + const split = cwdPath.substring(this.rootPath.length).split(sep2); if (split.length === 1 && !split[0]) { split.pop(); } @@ -95952,11 +95952,11 @@ var require_commonjs16 = __commonJS({ /** * Get the depth of a provided path, string, or the cwd */ - depth(path7 = this.cwd) { - if (typeof path7 === "string") { - path7 = this.cwd.resolve(path7); + depth(path6 = this.cwd) { + if (typeof path6 === "string") { + path6 = this.cwd.resolve(path6); } - return path7.depth(); + return path6.depth(); } /** * Return the cache of child entries. Exposed so subclasses can create @@ -96443,9 +96443,9 @@ var require_commonjs16 = __commonJS({ process2(); return results; } - chdir(path7 = this.cwd) { + chdir(path6 = this.cwd) { const oldCwd = this.cwd; - this.cwd = typeof path7 === "string" ? this.cwd.resolve(path7) : path7; + this.cwd = typeof path6 === "string" ? this.cwd.resolve(path6) : path6; this.cwd[setAsCwd](oldCwd); } }; @@ -96472,8 +96472,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - newRoot(fs8) { - return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs8 }); + newRoot(fs7) { + return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs7 }); } /** * Return true if the provided path string is an absolute path @@ -96502,8 +96502,8 @@ var require_commonjs16 = __commonJS({ /** * @internal */ - newRoot(fs8) { - return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs8 }); + newRoot(fs7) { + return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs7 }); } /** * Return true if the provided path string is an absolute path @@ -96766,10 +96766,10 @@ var require_ignore = __commonJS({ ignored(p) { const fullpath = p.fullpath(); const fullpaths = `${fullpath}/`; - const relative2 = p.relative() || "."; - const relatives = `${relative2}/`; + const relative = p.relative() || "."; + const relatives = `${relative}/`; for (const m of this.relative) { - if (m.match(relative2) || m.match(relatives)) + if (m.match(relative) || m.match(relatives)) return true; } for (const m of this.absolute) { @@ -96780,9 +96780,9 @@ var require_ignore = __commonJS({ } childrenIgnored(p) { const fullpath = p.fullpath() + "/"; - const relative2 = (p.relative() || ".") + "/"; + const relative = (p.relative() || ".") + "/"; for (const m of this.relativeChildren) { - if (m.match(relative2)) + if (m.match(relative)) return true; } for (const m of this.absoluteChildren) { @@ -96833,8 +96833,8 @@ var require_processor = __commonJS({ } // match, absolute, ifdir entries() { - return [...this.store.entries()].map(([path7, n]) => [ - path7, + return 
[...this.store.entries()].map(([path6, n]) => [ + path6, !!(n & 2), !!(n & 1) ]); @@ -97052,9 +97052,9 @@ var require_walker = __commonJS({ signal; maxDepth; includeChildMatches; - constructor(patterns, path7, opts) { + constructor(patterns, path6, opts) { this.patterns = patterns; - this.path = path7; + this.path = path6; this.opts = opts; this.#sep = !opts.posix && opts.platform === "win32" ? "\\" : "/"; this.includeChildMatches = opts.includeChildMatches !== false; @@ -97073,11 +97073,11 @@ var require_walker = __commonJS({ }); } } - #ignored(path7) { - return this.seen.has(path7) || !!this.#ignore?.ignored?.(path7); + #ignored(path6) { + return this.seen.has(path6) || !!this.#ignore?.ignored?.(path6); } - #childrenIgnored(path7) { - return !!this.#ignore?.childrenIgnored?.(path7); + #childrenIgnored(path6) { + return !!this.#ignore?.childrenIgnored?.(path6); } // backpressure mechanism pause() { @@ -97293,8 +97293,8 @@ var require_walker = __commonJS({ exports2.GlobUtil = GlobUtil; var GlobWalker = class extends GlobUtil { matches = /* @__PURE__ */ new Set(); - constructor(patterns, path7, opts) { - super(patterns, path7, opts); + constructor(patterns, path6, opts) { + super(patterns, path6, opts); } matchEmit(e) { this.matches.add(e); @@ -97332,8 +97332,8 @@ var require_walker = __commonJS({ exports2.GlobWalker = GlobWalker; var GlobStream = class extends GlobUtil { results; - constructor(patterns, path7, opts) { - super(patterns, path7, opts); + constructor(patterns, path6, opts) { + super(patterns, path6, opts); this.results = new minipass_1.Minipass({ signal: this.signal, objectMode: true @@ -97688,8 +97688,8 @@ var require_commonjs17 = __commonJS({ // node_modules/archiver-utils/file.js var require_file3 = __commonJS({ "node_modules/archiver-utils/file.js"(exports2, module2) { - var fs8 = require_graceful_fs(); - var path7 = require("path"); + var fs7 = require_graceful_fs(); + var path6 = require("path"); var flatten = require_flatten(); var difference = require_difference(); var union = require_union(); @@ -97714,8 +97714,8 @@ var require_file3 = __commonJS({ return result; }; file.exists = function() { - var filepath = path7.join.apply(path7, arguments); - return fs8.existsSync(filepath); + var filepath = path6.join.apply(path6, arguments); + return fs7.existsSync(filepath); }; file.expand = function(...args) { var options = isPlainObject(args[0]) ? 
args.shift() : {}; @@ -97728,12 +97728,12 @@ var require_file3 = __commonJS({ }); if (options.filter) { matches = matches.filter(function(filepath) { - filepath = path7.join(options.cwd || "", filepath); + filepath = path6.join(options.cwd || "", filepath); try { if (typeof options.filter === "function") { return options.filter(filepath); } else { - return fs8.statSync(filepath)[options.filter](); + return fs7.statSync(filepath)[options.filter](); } } catch (e) { return false; @@ -97745,7 +97745,7 @@ var require_file3 = __commonJS({ file.expandMapping = function(patterns, destBase, options) { options = Object.assign({ rename: function(destBase2, destPath) { - return path7.join(destBase2 || "", destPath); + return path6.join(destBase2 || "", destPath); } }, options); var files = []; @@ -97753,14 +97753,14 @@ var require_file3 = __commonJS({ file.expand(options, patterns).forEach(function(src) { var destPath = src; if (options.flatten) { - destPath = path7.basename(destPath); + destPath = path6.basename(destPath); } if (options.ext) { destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); } var dest = options.rename(destBase, destPath, options); if (options.cwd) { - src = path7.join(options.cwd, src); + src = path6.join(options.cwd, src); } dest = dest.replace(pathSeparatorRe, "/"); src = src.replace(pathSeparatorRe, "/"); @@ -97841,8 +97841,8 @@ var require_file3 = __commonJS({ // node_modules/archiver-utils/index.js var require_archiver_utils = __commonJS({ "node_modules/archiver-utils/index.js"(exports2, module2) { - var fs8 = require_graceful_fs(); - var path7 = require("path"); + var fs7 = require_graceful_fs(); + var path6 = require("path"); var isStream = require_is_stream(); var lazystream = require_lazystream(); var normalizePath = require_normalize_path(); @@ -97890,7 +97890,7 @@ var require_archiver_utils = __commonJS({ }; utils.lazyReadStream = function(filepath) { return new lazystream.Readable(function() { - return fs8.createReadStream(filepath); + return fs7.createReadStream(filepath); }); }; utils.normalizeInputSource = function(source) { @@ -97918,7 +97918,7 @@ var require_archiver_utils = __commonJS({ callback = base; base = dirpath; } - fs8.readdir(dirpath, function(err, list) { + fs7.readdir(dirpath, function(err, list) { var i = 0; var file; var filepath; @@ -97930,11 +97930,11 @@ var require_archiver_utils = __commonJS({ if (!file) { return callback(null, results); } - filepath = path7.join(dirpath, file); - fs8.stat(filepath, function(err2, stats) { + filepath = path6.join(dirpath, file); + fs7.stat(filepath, function(err2, stats) { results.push({ path: filepath, - relative: path7.relative(base, filepath).replace(/\\/g, "/"), + relative: path6.relative(base, filepath).replace(/\\/g, "/"), stats }); if (stats && stats.isDirectory()) { @@ -97993,10 +97993,10 @@ var require_error2 = __commonJS({ // node_modules/archiver/lib/core.js var require_core2 = __commonJS({ "node_modules/archiver/lib/core.js"(exports2, module2) { - var fs8 = require("fs"); + var fs7 = require("fs"); var glob2 = require_readdir_glob(); var async = require_async(); - var path7 = require("path"); + var path6 = require("path"); var util = require_archiver_utils(); var inherits = require("util").inherits; var ArchiverError = require_error2(); @@ -98057,7 +98057,7 @@ var require_core2 = __commonJS({ data.sourcePath = filepath; task.data = data; this._entriesCount++; - if (data.stats && data.stats instanceof fs8.Stats) { + if (data.stats && data.stats instanceof fs7.Stats) { task = 
this._updateQueueTaskWithStats(task, data.stats); if (task) { if (data.stats.size) { @@ -98228,7 +98228,7 @@ var require_core2 = __commonJS({ callback(); return; } - fs8.lstat(task.filepath, function(err, stats) { + fs7.lstat(task.filepath, function(err, stats) { if (this._state.aborted) { setImmediate(callback); return; @@ -98271,10 +98271,10 @@ var require_core2 = __commonJS({ task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else if (stats.isSymbolicLink() && this._moduleSupports("symlink")) { - var linkPath = fs8.readlinkSync(task.filepath); - var dirName = path7.dirname(task.filepath); + var linkPath = fs7.readlinkSync(task.filepath); + var dirName = path6.dirname(task.filepath); task.data.type = "symlink"; - task.data.linkname = path7.relative(dirName, path7.resolve(dirName, linkPath)); + task.data.linkname = path6.relative(dirName, path6.resolve(dirName, linkPath)); task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else { @@ -102724,8 +102724,8 @@ var require_context2 = __commonJS({ if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path7 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path7} does not exist${os_1.EOL}`); + const path6 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path6} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -103867,14 +103867,14 @@ var require_url_state_machine = __commonJS({ return url.replace(/\u0009|\u000A|\u000D/g, ""); } function shortenPath(url) { - const path7 = url.path; - if (path7.length === 0) { + const path6 = url.path; + if (path6.length === 0) { return; } - if (url.scheme === "file" && path7.length === 1 && isNormalizedWindowsDriveLetter(path7[0])) { + if (url.scheme === "file" && path6.length === 1 && isNormalizedWindowsDriveLetter(path6[0])) { return; } - path7.pop(); + path6.pop(); } function includesCredentials(url) { return url.username !== "" || url.password !== ""; @@ -108045,7 +108045,7 @@ var require_traverse = __commonJS({ })(this.value); }; function walk(root, cb, immutable) { - var path7 = []; + var path6 = []; var parents = []; var alive = true; return (function walker(node_) { @@ -108054,11 +108054,11 @@ var require_traverse = __commonJS({ var state = { node, node_, - path: [].concat(path7), + path: [].concat(path6), parent: parents.slice(-1)[0], - key: path7.slice(-1)[0], - isRoot: path7.length === 0, - level: path7.length, + key: path6.slice(-1)[0], + isRoot: path6.length === 0, + level: path6.length, circular: null, update: function(x) { if (!state.isRoot) { @@ -108113,7 +108113,7 @@ var require_traverse = __commonJS({ parents.push(state); var keys = Object.keys(state.node); keys.forEach(function(key, i2) { - path7.push(key); + path6.push(key); if (modifiers.pre) modifiers.pre.call(state, state.node[key], key); var child = walker(state.node[key]); if (immutable && Object.hasOwnProperty.call(state.node, key)) { @@ -108122,7 +108122,7 @@ var require_traverse = __commonJS({ child.isLast = i2 == keys.length - 1; child.isFirst = i2 == 0; if (modifiers.post) modifiers.post.call(state, child); - path7.pop(); + path6.pop(); }); parents.pop(); } @@ -109143,11 +109143,11 @@ var require_unzip_stream = __commonJS({ return requiredLength; case states.CENTRAL_DIRECTORY_FILE_HEADER_SUFFIX: var isUtf8 = (this.parsedEntity.flags & 2048) !== 0; - var path7 = this._decodeString(chunk.slice(0, 
this.parsedEntity.fileNameLength), isUtf8); + var path6 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); var extraDataBuffer = chunk.slice(this.parsedEntity.fileNameLength, this.parsedEntity.fileNameLength + this.parsedEntity.extraFieldLength); var extra = this._readExtraFields(extraDataBuffer); if (extra && extra.parsed && extra.parsed.path && !isUtf8) { - path7 = extra.parsed.path; + path6 = extra.parsed.path; } this.parsedEntity.extra = extra.parsed; var isUnix = (this.parsedEntity.versionMadeBy & 65280) >> 8 === 3; @@ -109159,7 +109159,7 @@ var require_unzip_stream = __commonJS({ } if (this.options.debug) { const debugObj = Object.assign({}, this.parsedEntity, { - path: path7, + path: path6, flags: "0x" + this.parsedEntity.flags.toString(16), unixAttrs: unixAttrs && "0" + unixAttrs.toString(8), isSymlink, @@ -109596,8 +109596,8 @@ var require_parser_stream = __commonJS({ // node_modules/mkdirp/index.js var require_mkdirp = __commonJS({ "node_modules/mkdirp/index.js"(exports2, module2) { - var path7 = require("path"); - var fs8 = require("fs"); + var path6 = require("path"); + var fs7 = require("fs"); var _0777 = parseInt("0777", 8); module2.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; function mkdirP(p, opts, f, made) { @@ -109608,7 +109608,7 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs8; + var xfs = opts.fs || fs7; if (mode === void 0) { mode = _0777; } @@ -109616,7 +109616,7 @@ var require_mkdirp = __commonJS({ var cb = f || /* istanbul ignore next */ function() { }; - p = path7.resolve(p); + p = path6.resolve(p); xfs.mkdir(p, mode, function(er) { if (!er) { made = made || p; @@ -109624,8 +109624,8 @@ var require_mkdirp = __commonJS({ } switch (er.code) { case "ENOENT": - if (path7.dirname(p) === p) return cb(er); - mkdirP(path7.dirname(p), opts, function(er2, made2) { + if (path6.dirname(p) === p) return cb(er); + mkdirP(path6.dirname(p), opts, function(er2, made2) { if (er2) cb(er2, made2); else mkdirP(p, opts, cb, made2); }); @@ -109647,19 +109647,19 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs8; + var xfs = opts.fs || fs7; if (mode === void 0) { mode = _0777; } if (!made) made = null; - p = path7.resolve(p); + p = path6.resolve(p); try { xfs.mkdirSync(p, mode); made = made || p; } catch (err0) { switch (err0.code) { case "ENOENT": - made = sync(path7.dirname(p), opts, made); + made = sync(path6.dirname(p), opts, made); sync(p, opts, made); break; // In the case of any other error, just see if there's a dir @@ -109684,8 +109684,8 @@ var require_mkdirp = __commonJS({ // node_modules/unzip-stream/lib/extract.js var require_extract2 = __commonJS({ "node_modules/unzip-stream/lib/extract.js"(exports2, module2) { - var fs8 = require("fs"); - var path7 = require("path"); + var fs7 = require("fs"); + var path6 = require("path"); var util = require("util"); var mkdirp = require_mkdirp(); var Transform = require("stream").Transform; @@ -109727,11 +109727,11 @@ var require_extract2 = __commonJS({ }; Extract.prototype._processEntry = function(entry) { var self2 = this; - var destPath = path7.join(this.opts.path, entry.path); - var directory = entry.isDirectory ? destPath : path7.dirname(destPath); + var destPath = path6.join(this.opts.path, entry.path); + var directory = entry.isDirectory ? 
destPath : path6.dirname(destPath); this.unfinishedEntries++; var writeFileFn = function() { - var pipedStream = fs8.createWriteStream(destPath); + var pipedStream = fs7.createWriteStream(destPath); pipedStream.on("close", function() { self2.unfinishedEntries--; self2._notifyAwaiter(); @@ -109855,10 +109855,10 @@ var require_download_artifact = __commonJS({ parsed.search = ""; return parsed.toString(); }; - function exists(path7) { + function exists(path6) { return __awaiter4(this, void 0, void 0, function* () { try { - yield promises_1.default.access(path7); + yield promises_1.default.access(path6); return true; } catch (error2) { if (error2.code === "ENOENT") { @@ -110089,12 +110089,12 @@ var require_dist_node24 = __commonJS({ octokit.log.debug("request", options); const start = Date.now(); const requestOptions = octokit.request.endpoint.parse(options); - const path7 = requestOptions.url.replace(options.baseUrl, ""); + const path6 = requestOptions.url.replace(options.baseUrl, ""); return request(options).then((response) => { - octokit.log.info(`${requestOptions.method} ${path7} - ${response.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path6} - ${response.status} in ${Date.now() - start}ms`); return response; }).catch((error2) => { - octokit.log.info(`${requestOptions.method} ${path7} - ${error2.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path6} - ${error2.status} in ${Date.now() - start}ms`); throw error2; }); }); @@ -110814,13 +110814,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.checkArtifactName = checkArtifactName; - function checkArtifactFilePath(path7) { - if (!path7) { - throw new Error(`Artifact path: ${path7}, is incorrectly provided`); + function checkArtifactFilePath(path6) { + if (!path6) { + throw new Error(`Artifact path: ${path6}, is incorrectly provided`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path7.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path7}. Contains the following character: ${errorMessageForCharacter} + if (path6.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path6}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -110866,25 +110866,25 @@ var require_upload_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadSpecification = void 0; - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var core_1 = require_core(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation2(); function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { const specifications = []; - if (!fs8.existsSync(rootDirectory)) { + if (!fs7.existsSync(rootDirectory)) { throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs8.statSync(rootDirectory).isDirectory()) { + if (!fs7.statSync(rootDirectory).isDirectory()) { throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); } rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of artifactFiles) { - if (!fs8.existsSync(file)) { + if (!fs7.existsSync(file)) { throw new Error(`File ${file} does not exist`); } - if (!fs8.statSync(file).isDirectory()) { + if (!fs7.statSync(file).isDirectory()) { file = (0, path_1.normalize)(file); file = (0, path_1.resolve)(file); if (!file.startsWith(rootDirectory)) { @@ -110909,11 +110909,11 @@ var require_upload_specification = __commonJS({ // node_modules/tmp/lib/tmp.js var require_tmp = __commonJS({ "node_modules/tmp/lib/tmp.js"(exports2, module2) { - var fs8 = require("fs"); + var fs7 = require("fs"); var os = require("os"); - var path7 = require("path"); + var path6 = require("path"); var crypto = require("crypto"); - var _c = { fs: fs8.constants, os: os.constants }; + var _c = { fs: fs7.constants, os: os.constants }; var RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; var TEMPLATE_PATTERN = /XXXXXX/; var DEFAULT_TRIES = 3; @@ -110925,13 +110925,13 @@ var require_tmp = __commonJS({ var FILE_MODE = 384; var EXIT = "exit"; var _removeObjects = []; - var FN_RMDIR_SYNC = fs8.rmdirSync.bind(fs8); + var FN_RMDIR_SYNC = fs7.rmdirSync.bind(fs7); var _gracefulCleanup = false; function rimraf(dirPath, callback) { - return fs8.rm(dirPath, { recursive: true }, callback); + return fs7.rm(dirPath, { recursive: true }, callback); } function FN_RIMRAF_SYNC(dirPath) { - return fs8.rmSync(dirPath, { recursive: true }); + return fs7.rmSync(dirPath, { recursive: true }); } function tmpName(options, callback) { const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; @@ -110941,7 +110941,7 @@ var require_tmp = __commonJS({ (function _getUniqueName() { try { const name = _generateTmpName(sanitizedOptions); - fs8.stat(name, function(err2) { + fs7.stat(name, function(err2) { if (!err2) { if (tries-- > 0) return _getUniqueName(); return cb(new Error("Could not get a unique tmp filename, max tries reached " + name)); @@ -110961,7 +110961,7 @@ var require_tmp = __commonJS({ do { const name = _generateTmpName(sanitizedOptions); try { - fs8.statSync(name); + fs7.statSync(name); } catch (e) { return name; } @@ -110972,10 +110972,10 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs8.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, 
function _fileCreated(err2, fd) { + fs7.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err2, fd) { if (err2) return cb(err2); if (opts.discardDescriptor) { - return fs8.close(fd, function _discardCallback(possibleErr) { + return fs7.close(fd, function _discardCallback(possibleErr) { return cb(possibleErr, name, void 0, _prepareTmpFileRemoveCallback(name, -1, opts, false)); }); } else { @@ -110989,9 +110989,9 @@ var require_tmp = __commonJS({ const args = _parseArguments(options), opts = args[0]; const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; const name = tmpNameSync(opts); - let fd = fs8.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + let fd = fs7.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); if (opts.discardDescriptor) { - fs8.closeSync(fd); + fs7.closeSync(fd); fd = void 0; } return { @@ -111004,7 +111004,7 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs8.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { + fs7.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { if (err2) return cb(err2); cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); }); @@ -111013,7 +111013,7 @@ var require_tmp = __commonJS({ function dirSync(options) { const args = _parseArguments(options), opts = args[0]; const name = tmpNameSync(opts); - fs8.mkdirSync(name, opts.mode || DIR_MODE); + fs7.mkdirSync(name, opts.mode || DIR_MODE); return { name, removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) @@ -111027,20 +111027,20 @@ var require_tmp = __commonJS({ next(); }; if (0 <= fdPath[0]) - fs8.close(fdPath[0], function() { - fs8.unlink(fdPath[1], _handler); + fs7.close(fdPath[0], function() { + fs7.unlink(fdPath[1], _handler); }); - else fs8.unlink(fdPath[1], _handler); + else fs7.unlink(fdPath[1], _handler); } function _removeFileSync(fdPath) { let rethrownException = null; try { - if (0 <= fdPath[0]) fs8.closeSync(fdPath[0]); + if (0 <= fdPath[0]) fs7.closeSync(fdPath[0]); } catch (e) { if (!_isEBADF(e) && !_isENOENT(e)) throw e; } finally { try { - fs8.unlinkSync(fdPath[1]); + fs7.unlinkSync(fdPath[1]); } catch (e) { if (!_isENOENT(e)) rethrownException = e; } @@ -111056,7 +111056,7 @@ var require_tmp = __commonJS({ return sync ? removeCallbackSync : removeCallback; } function _prepareTmpDirRemoveCallback(name, opts, sync) { - const removeFunction = opts.unsafeCleanup ? rimraf : fs8.rmdir.bind(fs8); + const removeFunction = opts.unsafeCleanup ? rimraf : fs7.rmdir.bind(fs7); const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); @@ -111118,35 +111118,35 @@ var require_tmp = __commonJS({ return [actualOptions, callback]; } function _resolvePath(name, tmpDir, cb) { - const pathToResolve = path7.isAbsolute(name) ? name : path7.join(tmpDir, name); - fs8.stat(pathToResolve, function(err) { + const pathToResolve = path6.isAbsolute(name) ? 
name : path6.join(tmpDir, name); + fs7.stat(pathToResolve, function(err) { if (err) { - fs8.realpath(path7.dirname(pathToResolve), function(err2, parentDir) { + fs7.realpath(path6.dirname(pathToResolve), function(err2, parentDir) { if (err2) return cb(err2); - cb(null, path7.join(parentDir, path7.basename(pathToResolve))); + cb(null, path6.join(parentDir, path6.basename(pathToResolve))); }); } else { - fs8.realpath(path7, cb); + fs7.realpath(path6, cb); } }); } function _resolvePathSync(name, tmpDir) { - const pathToResolve = path7.isAbsolute(name) ? name : path7.join(tmpDir, name); + const pathToResolve = path6.isAbsolute(name) ? name : path6.join(tmpDir, name); try { - fs8.statSync(pathToResolve); - return fs8.realpathSync(pathToResolve); + fs7.statSync(pathToResolve); + return fs7.realpathSync(pathToResolve); } catch (_err) { - const parentDir = fs8.realpathSync(path7.dirname(pathToResolve)); - return path7.join(parentDir, path7.basename(pathToResolve)); + const parentDir = fs7.realpathSync(path6.dirname(pathToResolve)); + return path6.join(parentDir, path6.basename(pathToResolve)); } } function _generateTmpName(opts) { const tmpDir = opts.tmpdir; if (!_isUndefined(opts.name)) { - return path7.join(tmpDir, opts.dir, opts.name); + return path6.join(tmpDir, opts.dir, opts.name); } if (!_isUndefined(opts.template)) { - return path7.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + return path6.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); } const name = [ opts.prefix ? opts.prefix : "tmp", @@ -111156,13 +111156,13 @@ var require_tmp = __commonJS({ _randomChars(12), opts.postfix ? "-" + opts.postfix : "" ].join(""); - return path7.join(tmpDir, opts.dir, name); + return path6.join(tmpDir, opts.dir, name); } function _assertOptionsBase(options) { if (!_isUndefined(options.name)) { const name = options.name; - if (path7.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); - const basename = path7.basename(name); + if (path6.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); + const basename = path6.basename(name); if (basename === ".." || basename === "." 
|| basename !== name) throw new Error(`name option must not contain a path, found "${name}".`); } @@ -111184,7 +111184,7 @@ var require_tmp = __commonJS({ if (_isUndefined(name)) return cb(null); _resolvePath(name, tmpDir, function(err, resolvedPath) { if (err) return cb(err); - const relativePath = path7.relative(tmpDir, resolvedPath); + const relativePath = path6.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { return cb(new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`)); } @@ -111194,7 +111194,7 @@ var require_tmp = __commonJS({ function _getRelativePathSync(option, name, tmpDir) { if (_isUndefined(name)) return; const resolvedPath = _resolvePathSync(name, tmpDir); - const relativePath = path7.relative(tmpDir, resolvedPath); + const relativePath = path6.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { throw new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`); } @@ -111241,10 +111241,10 @@ var require_tmp = __commonJS({ _gracefulCleanup = true; } function _getTmpDir(options, cb) { - return fs8.realpath(options && options.tmpdir || os.tmpdir(), cb); + return fs7.realpath(options && options.tmpdir || os.tmpdir(), cb); } function _getTmpDirSync(options) { - return fs8.realpathSync(options && options.tmpdir || os.tmpdir()); + return fs7.realpathSync(options && options.tmpdir || os.tmpdir()); } process.addListener(EXIT, _garbageCollector); Object.defineProperty(module2.exports, "tmpdir", { @@ -111274,14 +111274,14 @@ var require_tmp_promise = __commonJS({ var fileWithOptions = promisify( (options, cb) => tmp.file( options, - (err, path7, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path7, fd, cleanup: promisify(cleanup) }) + (err, path6, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path6, fd, cleanup: promisify(cleanup) }) ) ); module2.exports.file = async (options) => fileWithOptions(options); module2.exports.withFile = async function withFile(fn, options) { - const { path: path7, fd, cleanup } = await module2.exports.file(options); + const { path: path6, fd, cleanup } = await module2.exports.file(options); try { - return await fn({ path: path7, fd }); + return await fn({ path: path6, fd }); } finally { await cleanup(); } @@ -111290,14 +111290,14 @@ var require_tmp_promise = __commonJS({ var dirWithOptions = promisify( (options, cb) => tmp.dir( options, - (err, path7, cleanup) => err ? cb(err) : cb(void 0, { path: path7, cleanup: promisify(cleanup) }) + (err, path6, cleanup) => err ? 
cb(err) : cb(void 0, { path: path6, cleanup: promisify(cleanup) }) ) ); module2.exports.dir = async (options) => dirWithOptions(options); module2.exports.withDir = async function withDir(fn, options) { - const { path: path7, cleanup } = await module2.exports.dir(options); + const { path: path6, cleanup } = await module2.exports.dir(options); try { - return await fn({ path: path7 }); + return await fn({ path: path6 }); } finally { await cleanup(); } @@ -112098,10 +112098,10 @@ var require_upload_gzip = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createGZipFileInBuffer = exports2.createGZipFileOnDisk = void 0; - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var zlib = __importStar4(require("zlib")); var util_1 = require("util"); - var stat = (0, util_1.promisify)(fs8.stat); + var stat = (0, util_1.promisify)(fs7.stat); var gzipExemptFileExtensions = [ ".gz", ".gzip", @@ -112134,9 +112134,9 @@ var require_upload_gzip = __commonJS({ } } return new Promise((resolve5, reject) => { - const inputStream = fs8.createReadStream(originalFilePath); + const inputStream = fs7.createReadStream(originalFilePath); const gzip = zlib.createGzip(); - const outputStream = fs8.createWriteStream(tempFilePath); + const outputStream = fs7.createWriteStream(tempFilePath); inputStream.pipe(gzip).pipe(outputStream); outputStream.on("finish", () => __awaiter4(this, void 0, void 0, function* () { const size = (yield stat(tempFilePath)).size; @@ -112154,7 +112154,7 @@ var require_upload_gzip = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { return new Promise((resolve5) => __awaiter4(this, void 0, void 0, function* () { var _a, e_1, _b, _c; - const inputStream = fs8.createReadStream(originalFilePath); + const inputStream = fs7.createReadStream(originalFilePath); const gzip = zlib.createGzip(); inputStream.pipe(gzip); const chunks = []; @@ -112363,7 +112363,7 @@ var require_upload_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var core14 = __importStar4(require_core()); var tmp = __importStar4(require_tmp_promise()); var stream = __importStar4(require("stream")); @@ -112377,7 +112377,7 @@ var require_upload_http_client = __commonJS({ var http_manager_1 = require_http_manager(); var upload_gzip_1 = require_upload_gzip(); var requestUtils_1 = require_requestUtils2(); - var stat = (0, util_1.promisify)(fs8.stat); + var stat = (0, util_1.promisify)(fs7.stat); var UploadHttpClient = class { constructor() { this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), "@actions/artifact-upload"); @@ -112514,7 +112514,7 @@ var require_upload_http_client = __commonJS({ let openUploadStream; if (totalFileSize < buffer.byteLength) { core14.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. 
The original file will be uploaded as-is`); - openUploadStream = () => fs8.createReadStream(parameters.file); + openUploadStream = () => fs7.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { @@ -112560,7 +112560,7 @@ var require_upload_http_client = __commonJS({ failedChunkSizes += chunkSize; continue; } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs8.createReadStream(uploadFilePath, { + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs7.createReadStream(uploadFilePath, { start: startChunkIndex, end: endChunkIndex, autoClose: false @@ -112755,7 +112755,7 @@ var require_download_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var core14 = __importStar4(require_core()); var zlib = __importStar4(require("zlib")); var utils_1 = require_utils10(); @@ -112846,7 +112846,7 @@ var require_download_http_client = __commonJS({ return __awaiter4(this, void 0, void 0, function* () { let retryCount = 0; const retryLimit = (0, config_variables_1.getRetryLimit)(); - let destinationStream = fs8.createWriteStream(downloadPath); + let destinationStream = fs7.createWriteStream(downloadPath); const headers = (0, utils_1.getDownloadHeaders)("application/json", true, true); const makeDownloadRequest = () => __awaiter4(this, void 0, void 0, function* () { const client = this.downloadHttpManager.getClient(httpClientIndex); @@ -112888,7 +112888,7 @@ var require_download_http_client = __commonJS({ } }); yield (0, utils_1.rmFile)(fileDownloadPath); - destinationStream = fs8.createWriteStream(fileDownloadPath); + destinationStream = fs7.createWriteStream(fileDownloadPath); }); while (retryCount <= retryLimit) { let response; @@ -113005,21 +113005,21 @@ var require_download_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadSpecification = void 0; - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { const directories = /* @__PURE__ */ new Set(); const specifications = { - rootDownloadLocation: includeRootDirectory ? path7.join(downloadPath, artifactName) : downloadPath, + rootDownloadLocation: includeRootDirectory ? path6.join(downloadPath, artifactName) : downloadPath, directoryStructure: [], emptyFilesToCreate: [], filesToDownload: [] }; for (const entry of artifactEntries) { if (entry.path.startsWith(`${artifactName}/`) || entry.path.startsWith(`${artifactName}\\`)) { - const normalizedPathEntry = path7.normalize(entry.path); - const filePath = path7.join(downloadPath, includeRootDirectory ? normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); + const normalizedPathEntry = path6.normalize(entry.path); + const filePath = path6.join(downloadPath, includeRootDirectory ? 
normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); if (entry.itemType === "file") { - directories.add(path7.dirname(filePath)); + directories.add(path6.dirname(filePath)); if (entry.fileLength === 0) { specifications.emptyFilesToCreate.push(filePath); } else { @@ -113161,7 +113161,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz return uploadResponse; }); } - downloadArtifact(name, path7, options) { + downloadArtifact(name, path6, options) { return __awaiter4(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const artifacts = yield downloadHttpClient.listArtifacts(); @@ -113175,12 +113175,12 @@ Note: The size of downloaded zips can differ significantly from the reported siz throw new Error(`Unable to find an artifact with the name: ${name}`); } const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path7) { - path7 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path6) { + path6 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path7 = (0, path_1.normalize)(path7); - path7 = (0, path_1.resolve)(path7); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path7, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + path6 = (0, path_1.normalize)(path6); + path6 = (0, path_1.resolve)(path6); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path6, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { core14.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { @@ -113195,7 +113195,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz }; }); } - downloadAllArtifacts(path7) { + downloadAllArtifacts(path6) { return __awaiter4(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const response = []; @@ -113204,18 +113204,18 @@ Note: The size of downloaded zips can differ significantly from the reported siz core14.info("Unable to find any artifacts for the associated workflow"); return response; } - if (!path7) { - path7 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path6) { + path6 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path7 = (0, path_1.normalize)(path7); - path7 = (0, path_1.resolve)(path7); + path6 = (0, path_1.normalize)(path6); + path6 = (0, path_1.resolve)(path6); let downloadedArtifacts = 0; while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; core14.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path7, true); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path6, true); if (downloadSpecification.filesToDownload.length === 0) { core14.info(`No downloadable files were 
found for any artifact ${currentArtifactToDownload.name}`); } else { @@ -113356,7 +113356,7 @@ var require_internal_path_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -113364,7 +113364,7 @@ var require_internal_path_helper2 = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path7.dirname(p); + let result = path6.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -113402,7 +113402,7 @@ var require_internal_path_helper2 = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path7.sep; + root += path6.sep; } return root + itemPath; } @@ -113440,10 +113440,10 @@ var require_internal_path_helper2 = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path7.sep)) { + if (!p.endsWith(path6.sep)) { return p; } - if (p === path7.sep) { + if (p === path6.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -113594,7 +113594,7 @@ var require_internal_path2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -113609,12 +113609,12 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path7.sep); + this.segments = itemPath.split(path6.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path7.basename(remaining); + const basename = path6.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -113632,7 +113632,7 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path7.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path6.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -113643,12 +113643,12 @@ var require_internal_path2 = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path7.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path6.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path7.sep; + result += path6.sep; } result += this.segments[i]; } @@ 
-113696,7 +113696,7 @@ var require_internal_pattern2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os = __importStar4(require("os")); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -113725,7 +113725,7 @@ var require_internal_pattern2 = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path7.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path6.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -113749,8 +113749,8 @@ var require_internal_pattern2 = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path7.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path7.sep}`; + if (!itemPath.endsWith(path6.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path6.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -113785,9 +113785,9 @@ var require_internal_pattern2 = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path7.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path6.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path7.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path6.sep}`)) { homedir = homedir || os.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. 
Actual '${homedir}'`); @@ -113871,8 +113871,8 @@ var require_internal_search_state2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path7, level) { - this.path = path7; + constructor(path6, level) { + this.path = path6; this.level = level; } }; @@ -113996,9 +113996,9 @@ var require_internal_globber2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper2()); var internal_match_kind_1 = require_internal_match_kind2(); var internal_pattern_1 = require_internal_pattern2(); @@ -114050,7 +114050,7 @@ var require_internal_globber2 = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs8.promises.lstat(searchPath)); + yield __await4(fs7.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -114074,7 +114074,7 @@ var require_internal_globber2 = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path7.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path6.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -114084,7 +114084,7 @@ var require_internal_globber2 = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs8.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path7.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs7.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path6.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -114119,7 +114119,7 @@ var require_internal_globber2 = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs8.promises.stat(item.path); + stats = yield fs7.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -114131,10 +114131,10 @@ var require_internal_globber2 = __commonJS({ throw err; } } else { - stats = yield fs8.promises.lstat(item.path); + stats = yield fs7.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs8.promises.realpath(item.path); + const realPath = yield fs7.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -114233,10 +114233,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = void 0; var crypto = __importStar4(require("crypto")); var core14 = __importStar4(require_core()); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); - var path7 = __importStar4(require("path")); + var path6 = __importStar4(require("path")); function hashFiles2(globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; @@ -114252,17 +114252,17 @@ var 
require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path7.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path6.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs8.statSync(file).isDirectory()) { + if (fs7.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash = crypto.createHash("sha256"); const pipeline = util.promisify(stream.pipeline); - yield pipeline(fs8.createReadStream(file), hash); + yield pipeline(fs7.createReadStream(file), hash); result.write(hash.digest()); count++; if (!hasMatch) { @@ -114348,7 +114348,7 @@ var require_glob3 = __commonJS({ }); // src/analyze-action-post.ts -var fs7 = __toESM(require("fs")); +var fs6 = __toESM(require("fs")); var core13 = __toESM(require_core()); // src/actions-util.ts @@ -114372,21 +114372,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs8 = options.fs || await import("node:fs/promises"); + const fs7 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs8.lstat(itemPath, { bigint: true }) : await fs8.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs7.lstat(itemPath, { bigint: true }) : await fs7.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs8.readdir(itemPath) : await fs8.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs7.readdir(itemPath) : await fs7.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -117282,8 +117282,8 @@ async function getGitHubVersion() { } // src/codeql.ts -var fs5 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var fs4 = __toESM(require("fs")); +var path4 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -117525,8 +117525,8 @@ function wrapCliConfigurationError(cliError) { } // src/config-utils.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var fs3 = __toESM(require("fs")); +var path3 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -117548,10 +117548,6 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); -// src/diff-informed-analysis-utils.ts -var fs3 = __toESM(require("fs")); -var path3 = __toESM(require("path")); - // src/feature-flags.ts var semver4 = __toESM(require_semver2()); @@ -117649,8 +117645,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path7 = decodeGitFilePath(match[2]); - fileOidMap[path7] = oid; + const path6 = decodeGitFilePath(match[2]); + fileOidMap[path6] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -117765,42 +117761,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path2.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." 
); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -117827,23 +117822,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path2.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path2.sep).join("/"); - forced.add(rel); - } - return forced; -} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -118031,24 +118009,6 @@ var featureConfig = { } }; -// src/diff-informed-analysis-utils.ts -function getDiffRangesJsonFilePath() { - return path3.join(getTemporaryDirectory(), "pr-diff-range.json"); -} -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs3.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs3.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} - // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -118078,14 +118038,14 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path4.join(tempDir, "config"); + return path3.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs4.existsSync(configFile)) { + if (!fs3.existsSync(configFile)) { return void 0; } - const configString = fs4.readFileSync(configFile, "utf8"); + const configString = fs3.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -118194,17 +118154,17 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path5.join( + const tracingConfigPath = path4.join( extractorPath, "tools", "tracing-config.lua" ); - return fs5.existsSync(tracingConfigPath); + return fs4.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -118239,6 +118199,7 @@ async function getCodeQLForCmd(cmd, 
checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); @@ -118270,7 +118231,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path5.join( + const autobuildCmd = path4.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -118660,7 +118621,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs5.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs4.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -118683,7 +118644,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path5.resolve(config.tempDir, "user-config.yaml"); + return path4.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; @@ -118704,8 +118665,8 @@ async function getJobRunUuidSarifOptions(codeql) { } // src/debug-artifacts.ts -var fs6 = __toESM(require("fs")); -var path6 = __toESM(require("path")); +var fs5 = __toESM(require("fs")); +var path5 = __toESM(require("path")); var artifact = __toESM(require_artifact2()); var artifactLegacy = __toESM(require_artifact_client2()); var core12 = __toESM(require_core()); @@ -118736,14 +118697,14 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion logger.info( "Uploading available combined SARIF files as Actions debugging artifact..." ); - const baseTempDir = path6.resolve(tempDir, "combined-sarif"); + const baseTempDir = path5.resolve(tempDir, "combined-sarif"); const toUpload = []; - if (fs6.existsSync(baseTempDir)) { - const outputDirs = fs6.readdirSync(baseTempDir); + if (fs5.existsSync(baseTempDir)) { + const outputDirs = fs5.readdirSync(baseTempDir); for (const outputDir of outputDirs) { - const sarifFiles = fs6.readdirSync(path6.resolve(baseTempDir, outputDir)).filter((f) => path6.extname(f) === ".sarif"); + const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => path5.extname(f) === ".sarif"); for (const sarifFile of sarifFiles) { - toUpload.push(path6.resolve(baseTempDir, outputDir, sarifFile)); + toUpload.push(path5.resolve(baseTempDir, outputDir, sarifFile)); } } } @@ -118795,8 +118756,8 @@ async function uploadDebugArtifacts(logger, toUpload, rootDir, artifactName, ghV try { await artifactUploader.uploadArtifact( sanitizeArtifactName(`${artifactName}${suffix}`), - toUpload.map((file) => path6.normalize(file)), - path6.normalize(rootDir), + toUpload.map((file) => path5.normalize(file)), + path5.normalize(rootDir), { // ensure we don't keep the debug artifacts around for too long since they can be large. 
retentionDays: 7 @@ -118845,9 +118806,9 @@ async function runWrapper() { } } const javaTempDependencyDir = getJavaTempDependencyDir(); - if (fs7.existsSync(javaTempDependencyDir)) { + if (fs6.existsSync(javaTempDependencyDir)) { try { - fs7.rmSync(javaTempDependencyDir, { recursive: true }); + fs6.rmSync(javaTempDependencyDir, { recursive: true }); } catch (error2) { logger.info( `Failed to remove temporary Java dependencies directory: ${getErrorMessage(error2)}` diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 28b4706d9a..ae343d4713 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -27720,15 +27720,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative3 = []; + const relative2 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative3.push(pattern); + relative2.push(pattern); } } - return [absolute, relative3]; + return [absolute, relative2]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -80536,8 +80536,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative3, base) { - return useNativeURL ? new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); + function resolveUrl(relative2, base) { + return useNativeURL ? new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -90918,42 +90918,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." 
); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -90980,23 +90979,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path7.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path7.sep).join("/"); - forced.add(rel); - } - return forced; -} var CACHE_VERSION = 1; var CACHE_PREFIX = "codeql-overlay-base-database"; var MAX_CACHE_OPERATION_MS = 6e5; @@ -92831,7 +92813,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -92866,6 +92848,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index f02a8cd480..4e82146071 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto = __importStar4(require("crypto")); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var os2 = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs8.existsSync(filePath)) { + if (!fs7.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs8.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs7.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path8 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path7 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path8 && !path8.startsWith("/")) { - path8 = `/${path8}`; + if (path7 && !path7.startsWith("/")) { + path7 = `/${path7}`; } - url = new URL(origin + path8); + url = new URL(origin + path7); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path8) { - if (typeof path8 !== "string") { + module2.exports = function basename(path7) { + if (typeof path7 !== "string") { return ""; } - for (var i = path8.length - 1; i >= 0; --i) { - switch (path8.charCodeAt(i)) { + for (var i = path7.length - 1; i >= 0; --i) { + switch (path7.charCodeAt(i)) { case 47: // '/' case 92: - path8 = path8.slice(i + 1); - return path8 === ".." || path8 === "." ? "" : path8; + path7 = path7.slice(i + 1); + return path7 === ".." || path7 === "." ? "" : path7; } } - return path8 === ".." || path8 === "." ? "" : path8; + return path7 === ".." || path7 === "." ? "" : path7; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path8, + path: path7, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path8 !== "string") { + if (typeof path7 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path8[0] !== "/" && !(path8.startsWith("http://") || path8.startsWith("https://")) && method !== "CONNECT") { + } else if (path7[0] !== "/" && !(path7.startsWith("http://") || path7.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path8) !== null) { + } else if (invalidPathRegex.exec(path7) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path8, query) : path8; + this.path = query ? util.buildURL(path7, query) : path7; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path8 = search ? `${pathname}${search}` : pathname; + const path7 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path8; + this.opts.path = path7; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path8, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path7, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path8} HTTP/1.1\r + let header = `${method} ${path7} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path8, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path7, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path8; + headers[HTTP2_HEADER_PATH] = path7; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path8) { - if (typeof path8 !== "string") { - return path8; + function safeUrl(path7) { + if (typeof path7 !== "string") { + return path7; } - const pathSegments = path8.split("?"); + const pathSegments = path7.split("?"); if (pathSegments.length !== 2) { - return path8; + return path7; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path8, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path8); + function matchKey(mockDispatch2, { path: path7, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path7); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path8 }) => matchValue(safeUrl(path8), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path7 }) => matchValue(safeUrl(path7), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path8, method, body, headers, query } = opts; + const { path: path7, method, body, headers, query } = opts; return { - path: path8, + path: path7, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path8, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path7, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path8, + Path: path7, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path8) { - for (const char of path8) { + function validateCookiePath(path7) { + for (const char of path7) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path8 = opts.path; + let path7 = opts.path; if (!opts.path.startsWith("/")) { - path8 = `/${path8}`; + path7 = `/${path7}`; } - url = new URL(util.parseOrigin(url).origin + path8); + url = new URL(util.parseOrigin(url).origin + path7); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path8.sep); + return pth.replace(/[/\\]/g, path7.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs8 = __importStar4(require("fs")); - var path8 = __importStar4(require("path")); - _a = fs8.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs7 = __importStar4(require("fs")); + var path7 = __importStar4(require("path")); + _a = fs7.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs8.constants.O_RDONLY; + exports2.READONLY = fs7.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path8.extname(filePath).toUpperCase(); + const upperExt = path7.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path8.dirname(filePath); - const upperName = path8.basename(filePath).toUpperCase(); + const directory = path7.dirname(filePath); + const upperName = path7.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path8.join(directory, actualName); + filePath = path7.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var 
path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path8.join(dest, path8.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path7.join(dest, path7.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path8.relative(source, newDest) === "") { + if (path7.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path8.join(dest, path8.basename(source)); + dest = path7.join(dest, path7.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path8.dirname(dest)); + yield mkdirP(path7.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path8.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path7.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path8.sep)) { + if (tool.includes(path7.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path8.delimiter)) { + for (const p of process.env.PATH.split(path7.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path8.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path7.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os2 = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var io5 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path8.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path7.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io5.which(this.toolPath, true); return new Promise((resolve5, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var 
require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os2 = __importStar4(require("os")); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path8.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path7.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -19835,8 +19835,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path8 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path8} does not exist${os_1.EOL}`); + const path7 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path7} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -28203,7 +28203,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -28211,7 +28211,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path8.dirname(p); + let result = path7.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -28249,7 +28249,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path8.sep; + root += path7.sep; } return root + itemPath; } @@ -28287,10 +28287,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path8.sep)) { + if (!p.endsWith(path7.sep)) { return p; } - if (p === path8.sep) { + if (p === path7.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -28623,7 +28623,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path8 = (function() { + var path7 = (function() { try { return require("path"); } catch (e) { @@ -28631,7 +28631,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path8.sep; + minimatch.sep = path7.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -28720,8 +28720,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path8.sep !== "/") { - pattern = pattern.split(path8.sep).join("/"); + if (!options.allowWindowsEscape && path7.sep !== "/") { + pattern = pattern.split(path7.sep).join("/"); } 
this.options = options; this.set = []; @@ -29090,8 +29090,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path8.sep !== "/") { - f = f.split(path8.sep).join("/"); + if (path7.sep !== "/") { + f = f.split(path7.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -29223,7 +29223,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -29238,12 +29238,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path8.sep); + this.segments = itemPath.split(path7.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path8.basename(remaining); + const basename = path7.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -29261,7 +29261,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path8.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path7.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -29272,12 +29272,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path8.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path7.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path8.sep; + result += path7.sep; } result += this.segments[i]; } @@ -29321,7 +29321,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os2 = __importStar4(require("os")); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -29350,7 +29350,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path8.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path7.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -29374,8 +29374,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = 
pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path8.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path8.sep}`; + if (!itemPath.endsWith(path7.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path7.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -29410,9 +29410,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path8.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path7.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path8.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path7.sep}`)) { homedir = homedir || os2.homedir(); assert_1.default(homedir, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -29496,8 +29496,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path8, level) { - this.path = path8; + constructor(path7, level) { + this.path = path7; this.level = level; } }; @@ -29617,9 +29617,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -29669,7 +29669,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs8.promises.lstat(searchPath)); + yield __await4(fs7.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -29700,7 +29700,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs8.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path8.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs7.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path7.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -29735,7 +29735,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs8.promises.stat(item.path); + stats = yield fs7.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -29747,10 +29747,10 @@ var 
require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs8.promises.lstat(item.path); + stats = yield fs7.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs8.promises.realpath(item.path); + const realPath = yield fs7.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -31084,8 +31084,8 @@ var require_cacheUtils = __commonJS({ var glob = __importStar4(require_glob()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs8 = __importStar4(require("fs")); - var path8 = __importStar4(require("path")); + var fs7 = __importStar4(require("fs")); + var path7 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants7(); @@ -31105,16 +31105,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path8.join(baseLocation, "actions", "temp"); + tempDirectory = path7.join(baseLocation, "actions", "temp"); } - const dest = path8.join(tempDirectory, crypto.randomUUID()); + const dest = path7.join(tempDirectory, crypto.randomUUID()); yield io5.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs8.statSync(filePath).size; + return fs7.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -31131,7 +31131,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path8.relative(workspace, file).replace(new RegExp(`\\${path8.sep}`, "g"), "/"); + const relativeFile = path7.relative(workspace, file).replace(new RegExp(`\\${path7.sep}`, "g"), "/"); core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -31154,7 +31154,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs8.unlink)(filePath); + return util.promisify(fs7.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -31199,7 +31199,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs8.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs7.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -39037,15 +39037,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path8 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path8.startsWith("/")) { - path8 = path8.substring(1); + let path7 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path7.startsWith("/")) { + path7 = path7.substring(1); } - if (isAbsoluteUrl(path8)) { - requestUrl = path8; + if (isAbsoluteUrl(path7)) { + requestUrl = path7; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path8); + requestUrl = appendPath(requestUrl, path7); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -39093,9 
+39093,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path8 = pathToAppend.substring(0, searchStart); + const path7 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path8; + newPath = newPath + path7; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -42972,7 +42972,7 @@ var require_dist7 = __commonJS({ var stream = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs8 = require("fs"); + var fs7 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -42995,7 +42995,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs8); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs7); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -43244,10 +43244,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path8 = urlParsed.pathname; - path8 = path8 || "/"; - path8 = escape(path8); - urlParsed.pathname = path8; + let path7 = urlParsed.pathname; + path7 = path7 || "/"; + path7 = escape(path7); + urlParsed.pathname = path7; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -43332,9 +43332,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path8 = urlParsed.pathname; - path8 = path8 ? path8.endsWith("/") ? `${path8}${name}` : `${path8}/${name}` : name; - urlParsed.pathname = path8; + let path7 = urlParsed.pathname; + path7 = path7 ? path7.endsWith("/") ? 
`${path7}${name}` : `${path7}/${name}` : name; + urlParsed.pathname = path7; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -44415,9 +44415,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path8 = getURLPath(request.url) || "/"; + const path7 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path8}`; + canonicalizedResourceString += `/${this.factory.accountName}${path7}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -44710,9 +44710,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path8 = getURLPath(request.url) || "/"; + const path7 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path8}`; + canonicalizedResourceString += `/${options.accountName}${path7}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -64014,8 +64014,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path8 = getURLPath(subRequest.url); - if (!path8 || path8 === "") { + const path7 = getURLPath(subRequest.url); + if (!path7 || path7 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -64075,8 +64075,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path8 = getURLPath(url2); - if (path8 && path8 !== "/") { + const path7 = getURLPath(url2); + if (path7 && path7 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -66843,7 +66843,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -66954,7 +66954,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs8.createWriteStream(archivePath); + const writeStream = fs7.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -66980,7 +66980,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs8.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs7.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { 
socketTimeout: options.timeoutInMs, keepAlive: true @@ -67097,7 +67097,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs8.openSync(archivePath, "w"); + const fd = fs7.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -67115,12 +67115,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs8.writeFileSync(fd, result); + fs7.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs8.closeSync(fd); + fs7.closeSync(fd); } } }); @@ -67419,7 +67419,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -67557,7 +67557,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs8.openSync(archivePath, "r"); + const fd = fs7.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -67571,7 +67571,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs8.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs7.createReadStream(archivePath, { fd, start, end, @@ -67582,7 +67582,7 @@ Other caches with similar key:`); } }))); } finally { - fs8.closeSync(fd); + fs7.closeSync(fd); } return; }); @@ -72826,7 +72826,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io5 = __importStar4(require_io()); var fs_1 = require("fs"); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants7(); var IS_WINDOWS = process.platform === "win32"; @@ -72872,13 +72872,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path8.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path7.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -72924,7 +72924,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -72933,7 +72933,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path8.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path7.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -72948,7 +72948,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -72957,7 +72957,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path8.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path7.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -72997,7 +72997,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path8.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path7.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -73067,7 +73067,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core14 = __importStar4(require_core()); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -73164,7 +73164,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path8.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core14.isDebug()) { @@ -73233,7 +73233,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path8.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path7.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive path: ${archivePath}`); core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -73296,7 +73296,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path8.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73360,7 +73360,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path8.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path7.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73498,7 +73498,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os2 = require("os"); var cp = require("child_process"); - var fs8 = require("fs"); 
+ var fs7 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os2.platform(); @@ -73562,10 +73562,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs8.existsSync(lsbReleaseFile)) { - contents = fs8.readFileSync(lsbReleaseFile).toString(); - } else if (fs8.existsSync(osReleaseFile)) { - contents = fs8.readFileSync(osReleaseFile).toString(); + if (fs7.existsSync(lsbReleaseFile)) { + contents = fs7.readFileSync(lsbReleaseFile).toString(); + } else if (fs7.existsSync(osReleaseFile)) { + contents = fs7.readFileSync(osReleaseFile).toString(); } return contents; } @@ -73742,10 +73742,10 @@ var require_tool_cache = __commonJS({ var core14 = __importStar4(require_core()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs8 = __importStar4(require("fs")); + var fs7 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os2 = __importStar4(require("os")); - var path8 = __importStar4(require("path")); + var path7 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver8 = __importStar4(require_semver2()); var stream = __importStar4(require("stream")); @@ -73766,8 +73766,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path8.join(_getTempDirectory(), crypto.randomUUID()); - yield io5.mkdirP(path8.dirname(dest)); + dest = dest || path7.join(_getTempDirectory(), crypto.randomUUID()); + yield io5.mkdirP(path7.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -73789,7 +73789,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs8.existsSync(dest)) { + if (fs7.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -73813,7 +73813,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs8.createWriteStream(dest)); + yield pipeline(readStream, fs7.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -73854,7 +73854,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path8.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path7.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -74025,12 +74025,12 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs8.statSync(sourceDir).isDirectory()) { + if (!fs7.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield 
_createToolPath(tool, version, arch); - for (const itemName of fs8.readdirSync(sourceDir)) { - const s = path8.join(sourceDir, itemName); + for (const itemName of fs7.readdirSync(sourceDir)) { + const s = path7.join(sourceDir, itemName); yield io5.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch); @@ -74044,11 +74044,11 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch}`); core14.debug(`source file: ${sourceFile}`); - if (!fs8.statSync(sourceFile).isFile()) { + if (!fs7.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path8.join(destFolder, targetFile); + const destPath = path7.join(destFolder, targetFile); core14.debug(`destination file ${destPath}`); yield io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); @@ -74072,9 +74072,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; - const cachePath = path8.join(_getCacheDirectory(), toolName, versionSpec, arch); + const cachePath = path7.join(_getCacheDirectory(), toolName, versionSpec, arch); core14.debug(`checking cache: ${cachePath}`); - if (fs8.existsSync(cachePath) && fs8.existsSync(`${cachePath}.complete`)) { + if (fs7.existsSync(cachePath) && fs7.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { @@ -74087,13 +74087,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch) { const versions = []; arch = arch || os2.arch(); - const toolPath = path8.join(_getCacheDirectory(), toolName); - if (fs8.existsSync(toolPath)) { - const children = fs8.readdirSync(toolPath); + const toolPath = path7.join(_getCacheDirectory(), toolName); + if (fs7.existsSync(toolPath)) { + const children = fs7.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path8.join(toolPath, child, arch || ""); - if (fs8.existsSync(fullPath) && fs8.existsSync(`${fullPath}.complete`)) { + const fullPath = path7.join(toolPath, child, arch || ""); + if (fs7.existsSync(fullPath) && fs7.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -74147,7 +74147,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path8.join(_getTempDirectory(), crypto.randomUUID()); + dest = path7.join(_getTempDirectory(), crypto.randomUUID()); } yield io5.mkdirP(dest); return dest; @@ -74155,7 +74155,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path8.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); @@ -74165,9 +74165,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch) { - const folderPath = path8.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path7.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const 
markerPath = `${folderPath}.complete`; - fs8.writeFileSync(markerPath, ""); + fs7.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -74687,8 +74687,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); + function resolveUrl(relative, base) { + return useNativeURL ? new URL2(relative, base) : parseUrl(url.resolve(base, relative)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -74933,21 +74933,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs8 = options.fs || await import("node:fs/promises"); + const fs7 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs8.lstat(itemPath, { bigint: true }) : await fs8.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs7.lstat(itemPath, { bigint: true }) : await fs7.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs8.readdir(itemPath) : await fs8.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs7.readdir(itemPath) : await fs7.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -78024,8 +78024,8 @@ async function getAnalysisKey() { var core11 = __toESM(require_core()); // src/codeql.ts -var fs7 = __toESM(require("fs")); -var path7 = __toESM(require("path")); +var fs6 = __toESM(require("fs")); +var path6 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -78267,8 +78267,8 @@ function wrapCliConfigurationError(cliError) { } // src/config-utils.ts -var fs5 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var fs4 = __toESM(require("fs")); +var path4 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -78290,10 +78290,6 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); -// src/diff-informed-analysis-utils.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); - // src/feature-flags.ts var fs3 = __toESM(require("fs")); var path3 = __toESM(require("path")); @@ -78397,8 +78393,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path8 = decodeGitFilePath(match[2]); - fileOidMap[path8] = oid; + const path7 = decodeGitFilePath(match[2]); + fileOidMap[path7] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -78505,42 +78501,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path2.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." 
); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -78567,23 +78562,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path2.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path2.sep).join("/"); - forced.add(rel); - } - return forced; -} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -79033,24 +79011,6 @@ var GitHubFeatureFlags = class { } }; -// src/diff-informed-analysis-utils.ts -function getDiffRangesJsonFilePath() { - return path4.join(getTemporaryDirectory(), "pr-diff-range.json"); -} -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs4.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs4.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} - // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -79080,14 +79040,14 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path5.join(tempDir, "config"); + return path4.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs5.existsSync(configFile)) { + if (!fs4.existsSync(configFile)) { return void 0; } - const configString = fs5.readFileSync(configFile, "utf8"); + const configString = fs4.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -79142,8 +79102,8 @@ var semver6 = __toESM(require_semver2()); var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; // src/tracer-config.ts -var fs6 = __toESM(require("fs")); -var path6 = __toESM(require("path")); +var fs5 = __toESM(require("fs")); +var path5 = __toESM(require("path")); async function shouldEnableIndirectTracing(codeql, config) { if (config.buildMode === "none" /* None */) { return false; @@ -79158,18 +79118,18 @@ async function endTracingForCluster(codeql, config, logger) { logger.info( "Unsetting build tracing environment variables. Subsequent steps of this job will not be traced." 
); - const envVariablesFile = path6.resolve( + const envVariablesFile = path5.resolve( config.dbLocation, "temp/tracingEnvironment/end-tracing.json" ); - if (!fs6.existsSync(envVariablesFile)) { + if (!fs5.existsSync(envVariablesFile)) { throw new Error( `Environment file for ending tracing not found: ${envVariablesFile}` ); } try { const endTracingEnvVariables = JSON.parse( - fs6.readFileSync(envVariablesFile, "utf8") + fs5.readFileSync(envVariablesFile, "utf8") ); for (const [key, value] of Object.entries(endTracingEnvVariables)) { if (value !== null) { @@ -79229,17 +79189,17 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path7.join( + const tracingConfigPath = path6.join( extractorPath, "tools", "tracing-config.lua" ); - return fs7.existsSync(tracingConfigPath); + return fs6.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -79274,6 +79234,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); @@ -79305,7 +79266,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path7.join( + const autobuildCmd = path6.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -79695,7 +79656,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs7.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs6.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -79718,7 +79679,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path7.resolve(config.tempDir, "user-config.yaml"); + return path6.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? 
[`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 7fe5ff187e..fd8f7d18ab 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -27720,15 +27720,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative3 = []; + const relative2 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative3.push(pattern); + relative2.push(pattern); } } - return [absolute, relative3]; + return [absolute, relative2]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -80536,8 +80536,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative3, base) { - return useNativeURL ? new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); + function resolveUrl(relative2, base) { + return useNativeURL ? new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -84278,8 +84278,8 @@ var require_readdir_glob = __commonJS({ useStat = true; } const filename = dir + "/" + name; - const relative3 = filename.slice(1); - const absolute = path19 + "/" + relative3; + const relative2 = filename.slice(1); + const absolute = path19 + "/" + relative2; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -84291,12 +84291,12 @@ var require_readdir_glob = __commonJS({ stats = { isDirectory: () => false }; } if (stats.isDirectory()) { - if (!shouldSkip(relative3)) { - yield { relative: relative3, absolute, stats }; + if (!shouldSkip(relative2)) { + yield { relative: relative2, absolute, stats }; yield* exploreWalkAsync(filename, path19, followSymlinks, useStat, shouldSkip, false); } } else { - yield { relative: relative3, absolute, stats }; + yield { relative: relative2, absolute, stats }; } } } @@ -84366,11 +84366,11 @@ var require_readdir_glob = __commonJS({ } setTimeout(() => this._next(), 0); } - _shouldSkipDirectory(relative3) { - return this.skipMatchers.some((m) => m.match(relative3)); + _shouldSkipDirectory(relative2) { + return this.skipMatchers.some((m) => m.match(relative2)); } - _fileMatches(relative3, isDirectory2) { - const file = relative3 + (isDirectory2 ? "/" : ""); + _fileMatches(relative2, isDirectory2) { + const file = relative2 + (isDirectory2 ? 
"/" : ""); return (this.matchers.length === 0 || this.matchers.some((m) => m.match(file))) && !this.ignoreMatchers.some((m) => m.match(file)) && (!this.options.nodir || !isDirectory2); } _next() { @@ -84379,16 +84379,16 @@ var require_readdir_glob = __commonJS({ if (!obj.done) { const isDirectory2 = obj.value.stats.isDirectory(); if (this._fileMatches(obj.value.relative, isDirectory2)) { - let relative3 = obj.value.relative; + let relative2 = obj.value.relative; let absolute = obj.value.absolute; if (this.options.mark && isDirectory2) { - relative3 += "/"; + relative2 += "/"; absolute += "/"; } if (this.options.stat) { - this.emit("match", { relative: relative3, absolute, stat: obj.value.stats }); + this.emit("match", { relative: relative2, absolute, stat: obj.value.stats }); } else { - this.emit("match", { relative: relative3, absolute }); + this.emit("match", { relative: relative2, absolute }); } } this._next(this.iterator); @@ -89873,8 +89873,8 @@ var require_primordials = __commonJS({ ArrayPrototypeIndexOf(self2, el) { return self2.indexOf(el); }, - ArrayPrototypeJoin(self2, sep6) { - return self2.join(sep6); + ArrayPrototypeJoin(self2, sep5) { + return self2.join(sep5); }, ArrayPrototypeMap(self2, fn) { return self2.map(fn); @@ -101761,7 +101761,7 @@ var require_commonjs16 = __commonJS({ * * @internal */ - constructor(cwd = process.cwd(), pathImpl, sep6, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) { + constructor(cwd = process.cwd(), pathImpl, sep5, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) { this.#fs = fsFromOption(fs20); if (cwd instanceof URL || cwd.startsWith("file://")) { cwd = (0, node_url_1.fileURLToPath)(cwd); @@ -101772,7 +101772,7 @@ var require_commonjs16 = __commonJS({ this.#resolveCache = new ResolveCache(); this.#resolvePosixCache = new ResolveCache(); this.#children = new ChildrenCache(childrenCacheSize); - const split = cwdPath.substring(this.rootPath.length).split(sep6); + const split = cwdPath.substring(this.rootPath.length).split(sep5); if (split.length === 1 && !split[0]) { split.pop(); } @@ -102615,10 +102615,10 @@ var require_ignore2 = __commonJS({ ignored(p) { const fullpath = p.fullpath(); const fullpaths = `${fullpath}/`; - const relative3 = p.relative() || "."; - const relatives = `${relative3}/`; + const relative2 = p.relative() || "."; + const relatives = `${relative2}/`; for (const m of this.relative) { - if (m.match(relative3) || m.match(relatives)) + if (m.match(relative2) || m.match(relatives)) return true; } for (const m of this.absolute) { @@ -102629,9 +102629,9 @@ var require_ignore2 = __commonJS({ } childrenIgnored(p) { const fullpath = p.fullpath() + "/"; - const relative3 = (p.relative() || ".") + "/"; + const relative2 = (p.relative() || ".") + "/"; for (const m of this.relativeChildren) { - if (m.match(relative3)) + if (m.match(relative2)) return true; } for (const m of this.absoluteChildren) { @@ -129185,42 +129185,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; 
- let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." ); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -129247,23 +129246,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path7.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path7.sep).join("/"); - forced.add(rel); - } - return forced; -} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -130828,7 +130810,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -130863,6 +130845,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/init-action.js b/lib/init-action.js index 2448b3ff60..0ee3069126 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -29628,15 +29628,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative3 = []; + const relative2 = []; for (const pattern of patterns) { if (isAbsolute3(pattern)) { absolute.push(pattern); } else { - relative3.push(pattern); + 
relative2.push(pattern); } } - return [absolute, relative3]; + return [absolute, relative2]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute3(pattern) { @@ -81633,8 +81633,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative3, base) { - return useNativeURL ? new URL2(relative3, base) : parseUrl(url.resolve(base, relative3)); + function resolveUrl(relative2, base) { + return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -86808,42 +86808,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path8.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs6.existsSync(path8.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." 
); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path8.join( @@ -86870,23 +86869,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path8.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path8.sep).join("/"); - forced.add(rel); - } - return forced; -} var CACHE_VERSION = 1; var CACHE_PREFIX = "codeql-overlay-base-database"; var MAX_CACHE_OPERATION_MS = 6e5; @@ -87502,19 +87484,6 @@ function writeDiffRangesJsonFile(logger, ranges) { ${jsonContents}` ); } -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs8.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs8.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} async function getPullRequestEditedDiffRanges(branches, logger) { const fileDiffs = await getFileDiffsWithBasehead(branches, logger); if (fileDiffs === void 0) { @@ -89780,7 +89749,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -89815,6 +89784,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); @@ -90312,7 +90282,7 @@ async function initConfig2(inputs) { return await initConfig(inputs); }); } -async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, logger) { +async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { fs16.mkdirSync(config.dbLocation, { recursive: true }); await wrapEnvironment( databaseInitEnvironment, @@ -90321,6 +90291,7 @@ async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, s sourceRoot, processName, qlconfigFile, + prDiffChangedFiles, logger ) ); @@ -90885,6 +90856,7 @@ async function run() { initializeEnvironment(getActionVersion()); persistInputs(); let config; + let prDiffChangedFiles; let codeql; let 
toolsDownloadStatusReport; let toolsFeatureFlagsValid; @@ -91014,7 +90986,7 @@ async function run() { logger }); await checkInstallPython311(config.languages, codeql); - await computeAndPersistDiffRangesEarly(codeql, features, logger); + prDiffChangedFiles = await computeAndPersistDiffRanges(codeql, features, logger); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); core13.setFailed(error2.message); @@ -91242,6 +91214,7 @@ exec ${goBinaryPath} "$@"` sourceRoot, "Runner.Worker.exe", qlconfigFile, + prDiffChangedFiles, logger ); if (config.overlayDatabaseMode !== "none" /* None */ && !await checkPacksForOverlayCompatibility(codeql, config, logger)) { @@ -91259,6 +91232,7 @@ exec ${goBinaryPath} "$@"` sourceRoot, "Runner.Worker.exe", qlconfigFile, + prDiffChangedFiles, logger ); } @@ -91306,31 +91280,33 @@ exec ${goBinaryPath} "$@"` logger ); } -async function computeAndPersistDiffRangesEarly(codeql, features, logger) { +async function computeAndPersistDiffRanges(codeql, features, logger) { try { - await withGroupAsync("Compute PR diff ranges", async () => { + return await withGroupAsync("Compute PR diff ranges", async () => { const branches = await getDiffInformedAnalysisBranches( codeql, features, logger ); if (!branches) { - return; + return void 0; } const ranges = await getPullRequestEditedDiffRanges(branches, logger); if (ranges === void 0) { - return; + return void 0; } writeDiffRangesJsonFile(logger, ranges); - const distinctFiles = new Set(ranges.map((r) => r.path)).size; + const distinctFiles = new Set(ranges.map((r) => r.path)); logger.info( - `Persisted ${ranges.length} diff range(s) across ${distinctFiles} file(s) for reuse during analyze step.` + `Persisted ${ranges.length} diff range(s) across ${distinctFiles.size} file(s) for reuse during analyze step.` ); + return distinctFiles; }); } catch (e) { logger.warning( `Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}` ); + return void 0; } } function getTrapCachingEnabled() { diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index 26f781f2a1..e3b1867aac 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto = __importStar4(require("crypto")); - var fs6 = __importStar4(require("fs")); + var fs5 = __importStar4(require("fs")); var os2 = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs6.existsSync(filePath)) { + if (!fs5.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs6.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs5.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path6 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path5 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path6 && !path6.startsWith("/")) { - path6 = `/${path6}`; + if (path5 && !path5.startsWith("/")) { + path5 = `/${path5}`; } - url = new URL(origin + path6); + url = new URL(origin + path5); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path6) { - if (typeof path6 !== "string") { + module2.exports = function basename(path5) { + if (typeof path5 !== "string") { return ""; } - for (var i = path6.length - 1; i >= 0; --i) { - switch (path6.charCodeAt(i)) { + for (var i = path5.length - 1; i >= 0; --i) { + switch (path5.charCodeAt(i)) { case 47: // '/' case 92: - path6 = path6.slice(i + 1); - return path6 === ".." || path6 === "." ? "" : path6; + path5 = path5.slice(i + 1); + return path5 === ".." || path5 === "." ? "" : path5; } } - return path6 === ".." || path6 === "." ? "" : path6; + return path5 === ".." || path5 === "." ? "" : path5; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path6, + path: path5, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path6 !== "string") { + if (typeof path5 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path6[0] !== "/" && !(path6.startsWith("http://") || path6.startsWith("https://")) && method !== "CONNECT") { + } else if (path5[0] !== "/" && !(path5.startsWith("http://") || path5.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path6) !== null) { + } else if (invalidPathRegex.exec(path5) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path6, query) : path6; + this.path = query ? util.buildURL(path5, query) : path5; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path6 = search ? `${pathname}${search}` : pathname; + const path5 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path6; + this.opts.path = path5; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path6, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path5, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path6} HTTP/1.1\r + let header = `${method} ${path5} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path6, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path5, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path6; + headers[HTTP2_HEADER_PATH] = path5; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path6) { - if (typeof path6 !== "string") { - return path6; + function safeUrl(path5) { + if (typeof path5 !== "string") { + return path5; } - const pathSegments = path6.split("?"); + const pathSegments = path5.split("?"); if (pathSegments.length !== 2) { - return path6; + return path5; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path6, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path6); + function matchKey(mockDispatch2, { path: path5, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path5); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path6 }) => matchValue(safeUrl(path6), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path5 }) => matchValue(safeUrl(path5), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path6, method, body, headers, query } = opts; + const { path: path5, method, body, headers, query } = opts; return { - path: path6, + path: path5, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path6, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path5, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path6, + Path: path5, "Status code": statusCode, Persistent: persist ? "\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path6) { - for (const char of path6) { + function validateCookiePath(path5) { + for (const char of path5) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path6 = opts.path; + let path5 = opts.path; if (!opts.path.startsWith("/")) { - path6 = `/${path6}`; + path5 = `/${path5}`; } - url = new URL(util.parseOrigin(url).origin + path6); + url = new URL(util.parseOrigin(url).origin + path5); } else { if (!opts) { opts = typeof url === "object" ? 
url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path6.sep); + return pth.replace(/[/\\]/g, path5.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs6 = __importStar4(require("fs")); - var path6 = __importStar4(require("path")); - _a = fs6.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs5 = __importStar4(require("fs")); + var path5 = __importStar4(require("path")); + _a = fs5.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs6.constants.O_RDONLY; + exports2.READONLY = fs5.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path6.extname(filePath).toUpperCase(); + const upperExt = path5.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path6.dirname(filePath); - const upperName = path6.basename(filePath).toUpperCase(); + const directory = path5.dirname(filePath); + const upperName = path5.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path6.join(directory, actualName); + filePath = path5.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var 
path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path6.join(dest, path6.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path5.join(dest, path5.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path6.relative(source, newDest) === "") { + if (path5.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path6.join(dest, path6.basename(source)); + dest = path5.join(dest, path5.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path6.dirname(dest)); + yield mkdirP(path5.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path6.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path5.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path6.sep)) { + if (tool.includes(path5.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path6.delimiter)) { + for (const p of process.env.PATH.split(path5.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path6.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path5.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os2 = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var io5 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path6.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path5.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io5.which(this.toolPath, true); return new Promise((resolve4, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var 
require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os2 = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path6.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path5.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -19835,8 +19835,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path6 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path6} does not exist${os_1.EOL}`); + const path5 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path5} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -28203,7 +28203,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname2(p) { @@ -28211,7 +28211,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path6.dirname(p); + let result = path5.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -28249,7 +28249,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path6.sep; + root += path5.sep; } return root + itemPath; } @@ -28287,10 +28287,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path6.sep)) { + if (!p.endsWith(path5.sep)) { return p; } - if (p === path6.sep) { + if (p === path5.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -28623,7 +28623,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path6 = (function() { + var path5 = (function() { try { return require("path"); } catch (e) { @@ -28631,7 +28631,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path6.sep; + minimatch.sep = path5.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { @@ -28720,8 +28720,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path6.sep !== "/") { - pattern = pattern.split(path6.sep).join("/"); + if (!options.allowWindowsEscape && path5.sep !== "/") { + pattern = pattern.split(path5.sep).join("/"); } 
this.options = options; this.set = []; @@ -29090,8 +29090,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path6.sep !== "/") { - f = f.split(path6.sep).join("/"); + if (path5.sep !== "/") { + f = f.split(path5.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -29223,7 +29223,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -29238,12 +29238,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path6.sep); + this.segments = itemPath.split(path5.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path6.basename(remaining); + const basename = path5.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -29261,7 +29261,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - assert_1.default(!segment.includes(path6.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path5.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -29272,12 +29272,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path6.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path5.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path6.sep; + result += path5.sep; } result += this.segments[i]; } @@ -29321,7 +29321,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os2 = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -29350,7 +29350,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path6.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path5.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -29374,8 +29374,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = 
pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path6.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path6.sep}`; + if (!itemPath.endsWith(path5.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path5.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -29410,9 +29410,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path6.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path5.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path6.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path5.sep}`)) { homedir = homedir || os2.homedir(); assert_1.default(homedir, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -29496,8 +29496,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path6, level) { - this.path = path6; + constructor(path5, level) { + this.path = path5; this.level = level; } }; @@ -29617,9 +29617,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core13 = __importStar4(require_core()); - var fs6 = __importStar4(require("fs")); + var fs5 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -29669,7 +29669,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core13.debug(`Search path '${searchPath}'`); try { - yield __await4(fs6.promises.lstat(searchPath)); + yield __await4(fs5.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -29700,7 +29700,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs6.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path6.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs5.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path5.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -29735,7 +29735,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs6.promises.stat(item.path); + stats = yield fs5.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -29747,10 +29747,10 @@ var 
require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs6.promises.lstat(item.path); + stats = yield fs5.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs6.promises.realpath(item.path); + const realPath = yield fs5.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -31084,8 +31084,8 @@ var require_cacheUtils = __commonJS({ var glob = __importStar4(require_glob()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs6 = __importStar4(require("fs")); - var path6 = __importStar4(require("path")); + var fs5 = __importStar4(require("fs")); + var path5 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants7(); @@ -31105,16 +31105,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path6.join(baseLocation, "actions", "temp"); + tempDirectory = path5.join(baseLocation, "actions", "temp"); } - const dest = path6.join(tempDirectory, crypto.randomUUID()); + const dest = path5.join(tempDirectory, crypto.randomUUID()); yield io5.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs6.statSync(filePath).size; + return fs5.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -31131,7 +31131,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path6.relative(workspace, file).replace(new RegExp(`\\${path6.sep}`, "g"), "/"); + const relativeFile = path5.relative(workspace, file).replace(new RegExp(`\\${path5.sep}`, "g"), "/"); core13.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -31154,7 +31154,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs6.unlink)(filePath); + return util.promisify(fs5.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -31199,7 +31199,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs6.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs5.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -39037,15 +39037,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path6 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path6.startsWith("/")) { - path6 = path6.substring(1); + let path5 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path5.startsWith("/")) { + path5 = path5.substring(1); } - if (isAbsoluteUrl(path6)) { - requestUrl = path6; + if (isAbsoluteUrl(path5)) { + requestUrl = path5; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path6); + requestUrl = appendPath(requestUrl, path5); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -39093,9 
+39093,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path6 = pathToAppend.substring(0, searchStart); + const path5 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path6; + newPath = newPath + path5; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -42972,7 +42972,7 @@ var require_dist7 = __commonJS({ var stream = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs6 = require("fs"); + var fs5 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -42995,7 +42995,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs6); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs5); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -43244,10 +43244,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path6 = urlParsed.pathname; - path6 = path6 || "/"; - path6 = escape(path6); - urlParsed.pathname = path6; + let path5 = urlParsed.pathname; + path5 = path5 || "/"; + path5 = escape(path5); + urlParsed.pathname = path5; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -43332,9 +43332,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path6 = urlParsed.pathname; - path6 = path6 ? path6.endsWith("/") ? `${path6}${name}` : `${path6}/${name}` : name; - urlParsed.pathname = path6; + let path5 = urlParsed.pathname; + path5 = path5 ? path5.endsWith("/") ? 
`${path5}${name}` : `${path5}/${name}` : name; + urlParsed.pathname = path5; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -44415,9 +44415,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path6 = getURLPath(request.url) || "/"; + const path5 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path6}`; + canonicalizedResourceString += `/${this.factory.accountName}${path5}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -44710,9 +44710,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path6 = getURLPath(request.url) || "/"; + const path5 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path6}`; + canonicalizedResourceString += `/${options.accountName}${path5}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -64014,8 +64014,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path6 = getURLPath(subRequest.url); - if (!path6 || path6 === "") { + const path5 = getURLPath(subRequest.url); + if (!path5 || path5 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -64075,8 +64075,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path6 = getURLPath(url2); - if (path6 && path6 !== "/") { + const path5 = getURLPath(url2); + if (path5 && path5 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -66843,7 +66843,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs6 = __importStar4(require("fs")); + var fs5 = __importStar4(require("fs")); var stream = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -66954,7 +66954,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs6.createWriteStream(archivePath); + const writeStream = fs5.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -66980,7 +66980,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs6.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs5.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { 
socketTimeout: options.timeoutInMs, keepAlive: true @@ -67097,7 +67097,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs6.openSync(archivePath, "w"); + const fd = fs5.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -67115,12 +67115,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs6.writeFileSync(fd, result); + fs5.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs6.closeSync(fd); + fs5.closeSync(fd); } } }); @@ -67419,7 +67419,7 @@ var require_cacheHttpClient = __commonJS({ var core13 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs6 = __importStar4(require("fs")); + var fs5 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -67557,7 +67557,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs6.openSync(archivePath, "r"); + const fd = fs5.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -67571,7 +67571,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs6.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs5.createReadStream(archivePath, { fd, start, end, @@ -67582,7 +67582,7 @@ Other caches with similar key:`); } }))); } finally { - fs6.closeSync(fd); + fs5.closeSync(fd); } return; }); @@ -72826,7 +72826,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io5 = __importStar4(require_io()); var fs_1 = require("fs"); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants7(); var IS_WINDOWS = process.platform === "win32"; @@ -72872,13 +72872,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path6.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path5.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -72924,7 +72924,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -72933,7 +72933,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path6.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path5.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -72948,7 +72948,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -72957,7 +72957,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path6.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path5.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -72997,7 +72997,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path6.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path5.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -73067,7 +73067,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core13 = __importStar4(require_core()); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -73164,7 +73164,7 @@ var require_cache3 = __commonJS({ core13.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core13.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core13.isDebug()) { @@ -73233,7 +73233,7 @@ var require_cache3 = __commonJS({ core13.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path6.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core13.debug(`Archive path: ${archivePath}`); core13.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -73296,7 +73296,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73360,7 +73360,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path6.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -73498,7 +73498,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os2 = require("os"); var cp = require("child_process"); - var fs6 = require("fs"); 
+ var fs5 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os2.platform(); @@ -73562,10 +73562,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs6.existsSync(lsbReleaseFile)) { - contents = fs6.readFileSync(lsbReleaseFile).toString(); - } else if (fs6.existsSync(osReleaseFile)) { - contents = fs6.readFileSync(osReleaseFile).toString(); + if (fs5.existsSync(lsbReleaseFile)) { + contents = fs5.readFileSync(lsbReleaseFile).toString(); + } else if (fs5.existsSync(osReleaseFile)) { + contents = fs5.readFileSync(osReleaseFile).toString(); } return contents; } @@ -73742,10 +73742,10 @@ var require_tool_cache = __commonJS({ var core13 = __importStar4(require_core()); var io5 = __importStar4(require_io()); var crypto = __importStar4(require("crypto")); - var fs6 = __importStar4(require("fs")); + var fs5 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os2 = __importStar4(require("os")); - var path6 = __importStar4(require("path")); + var path5 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver8 = __importStar4(require_semver2()); var stream = __importStar4(require("stream")); @@ -73766,8 +73766,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path6.join(_getTempDirectory(), crypto.randomUUID()); - yield io5.mkdirP(path6.dirname(dest)); + dest = dest || path5.join(_getTempDirectory(), crypto.randomUUID()); + yield io5.mkdirP(path5.dirname(dest)); core13.debug(`Downloading ${url}`); core13.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -73789,7 +73789,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs6.existsSync(dest)) { + if (fs5.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -73813,7 +73813,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs6.createWriteStream(dest)); + yield pipeline(readStream, fs5.createWriteStream(dest)); core13.debug("download complete"); succeeded = true; return dest; @@ -73854,7 +73854,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path6.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path5.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -74025,12 +74025,12 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core13.debug(`Caching tool ${tool} ${version} ${arch}`); core13.debug(`source dir: ${sourceDir}`); - if (!fs6.statSync(sourceDir).isDirectory()) { + if (!fs5.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield 
_createToolPath(tool, version, arch); - for (const itemName of fs6.readdirSync(sourceDir)) { - const s = path6.join(sourceDir, itemName); + for (const itemName of fs5.readdirSync(sourceDir)) { + const s = path5.join(sourceDir, itemName); yield io5.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch); @@ -74044,11 +74044,11 @@ var require_tool_cache = __commonJS({ arch = arch || os2.arch(); core13.debug(`Caching tool ${tool} ${version} ${arch}`); core13.debug(`source file: ${sourceFile}`); - if (!fs6.statSync(sourceFile).isFile()) { + if (!fs5.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path6.join(destFolder, targetFile); + const destPath = path5.join(destFolder, targetFile); core13.debug(`destination file ${destPath}`); yield io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); @@ -74072,9 +74072,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver8.clean(versionSpec) || ""; - const cachePath = path6.join(_getCacheDirectory(), toolName, versionSpec, arch); + const cachePath = path5.join(_getCacheDirectory(), toolName, versionSpec, arch); core13.debug(`checking cache: ${cachePath}`); - if (fs6.existsSync(cachePath) && fs6.existsSync(`${cachePath}.complete`)) { + if (fs5.existsSync(cachePath) && fs5.existsSync(`${cachePath}.complete`)) { core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { @@ -74087,13 +74087,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch) { const versions = []; arch = arch || os2.arch(); - const toolPath = path6.join(_getCacheDirectory(), toolName); - if (fs6.existsSync(toolPath)) { - const children = fs6.readdirSync(toolPath); + const toolPath = path5.join(_getCacheDirectory(), toolName); + if (fs5.existsSync(toolPath)) { + const children = fs5.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path6.join(toolPath, child, arch || ""); - if (fs6.existsSync(fullPath) && fs6.existsSync(`${fullPath}.complete`)) { + const fullPath = path5.join(toolPath, child, arch || ""); + if (fs5.existsSync(fullPath) && fs5.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -74147,7 +74147,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path6.join(_getTempDirectory(), crypto.randomUUID()); + dest = path5.join(_getTempDirectory(), crypto.randomUUID()); } yield io5.mkdirP(dest); return dest; @@ -74155,7 +74155,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path5.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); core13.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); @@ -74165,9 +74165,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch) { - const folderPath = path6.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); + const folderPath = path5.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch || ""); const 
markerPath = `${folderPath}.complete`; - fs6.writeFileSync(markerPath, ""); + fs5.writeFileSync(markerPath, ""); core13.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -74687,8 +74687,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative2, base) { - return useNativeURL ? new URL2(relative2, base) : parseUrl(url.resolve(base, relative2)); + function resolveUrl(relative, base) { + return useNativeURL ? new URL2(relative, base) : parseUrl(url.resolve(base, relative)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -74933,21 +74933,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs6 = options.fs || await import("node:fs/promises"); + const fs5 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs6.lstat(itemPath, { bigint: true }) : await fs6.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs5.lstat(itemPath, { bigint: true }) : await fs5.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs6.readdir(itemPath) : await fs6.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs5.readdir(itemPath) : await fs5.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -78266,8 +78266,8 @@ function wrapCliConfigurationError(cliError) { } // src/config-utils.ts -var fs4 = __toESM(require("fs")); -var path4 = __toESM(require("path")); +var fs3 = __toESM(require("fs")); +var path3 = __toESM(require("path")); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -78289,10 +78289,6 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); -// src/diff-informed-analysis-utils.ts -var fs3 = __toESM(require("fs")); -var path3 = __toESM(require("path")); - // src/feature-flags.ts var semver4 = __toESM(require_semver2()); @@ -78390,8 +78386,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path6 = decodeGitFilePath(match[2]); - fileOidMap[path6] = oid; + const path5 = decodeGitFilePath(match[2]); + fileOidMap[path5] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -78498,42 +78494,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path2.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." 
); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -78560,23 +78555,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path2.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path2.sep).join("/"); - forced.add(rel); - } - return forced; -} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -78760,24 +78738,6 @@ var featureConfig = { } }; -// src/diff-informed-analysis-utils.ts -function getDiffRangesJsonFilePath() { - return path3.join(getTemporaryDirectory(), "pr-diff-range.json"); -} -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs3.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs3.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} - // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -78807,14 +78767,14 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; function getPathToParsedConfigFile(tempDir) { - return path4.join(tempDir, "config"); + return path3.join(tempDir, "config"); } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs4.existsSync(configFile)) { + if (!fs3.existsSync(configFile)) { return void 0; } - const configString = fs4.readFileSync(configFile, "utf8"); + const configString = fs3.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -78850,8 +78810,8 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { } // src/codeql.ts -var fs5 = __toESM(require("fs")); -var path5 = __toESM(require("path")); +var fs4 = __toESM(require("fs")); +var path4 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -78929,17 +78889,17 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path5.join( + const tracingConfigPath = path4.join( extractorPath, "tools", "tracing-config.lua" ); - return fs5.existsSync(tracingConfigPath); + return fs4.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, 
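
Note on the change above: writeOverlayChangesFile no longer reads the diff ranges itself; the caller now supplies the set of files touched by the PR diff (prDiffChangedFiles). A minimal sketch of how such a set could be built from the recorded diff ranges, mirroring the removed bundled helper; the helper name and import paths are assumptions and are not part of this patch:

import * as path from "path";

import { getRequiredInput } from "./actions-util";
import { readDiffRangesJsonFile } from "./diff-informed-analysis-utils";
import { Logger } from "./logging";

// Hypothetical helper (not part of this patch): convert the absolute paths
// recorded in pr-diff-range.json into repository-relative, forward-slash
// paths, which is the form writeOverlayChangesFile expects.
function computePrDiffChangedFiles(logger: Logger): Set<string> | undefined {
  const diffRanges = readDiffRangesJsonFile(logger);
  if (!diffRanges || diffRanges.length === 0) {
    return undefined;
  }
  const checkoutPath = getRequiredInput("checkout_path");
  const changedFiles = new Set<string>();
  for (const range of diffRanges) {
    if (!range.path) {
      continue;
    }
    const rel = path.relative(checkoutPath, range.path);
    // Ignore anything that falls outside the checkout directory.
    if (!rel || rel.startsWith("..")) {
      continue;
    }
    changedFiles.add(rel.split(path.sep).join("/"));
  }
  return changedFiles;
}

The resulting set would then be threaded through databaseInitCluster into writeOverlayChangesFile, as the signature changes below show.
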
- async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -78974,6 +78934,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); @@ -79005,7 +78966,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path5.join( + const autobuildCmd = path4.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -79395,7 +79356,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs5.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs4.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -79418,7 +79379,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path5.resolve(config.tempDir, "user-config.yaml"); + return path4.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? [`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; diff --git a/lib/upload-lib.js b/lib/upload-lib.js index e29c14fb7f..1ab41cd3d7 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -29017,15 +29017,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative3 = []; + const relative2 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative3.push(pattern); + relative2.push(pattern); } } - return [absolute, relative3]; + return [absolute, relative2]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -81833,8 +81833,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative3, base) { - return useNativeURL ? new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); + function resolveUrl(relative2, base) { + return useNativeURL ? 
new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -89289,42 +89289,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) { existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." 
); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -89351,23 +89350,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path7.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path7.sep).join("/"); - forced.add(rel); - } - return forced; -} // src/tools-features.ts var semver3 = __toESM(require_semver2()); @@ -90662,7 +90644,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -90697,6 +90679,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index dab1fc0514..c1a10c1f2a 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -27720,15 +27720,15 @@ var require_pattern = __commonJS({ exports2.removeDuplicateSlashes = removeDuplicateSlashes; function partitionAbsoluteAndRelative(patterns) { const absolute = []; - const relative3 = []; + const relative2 = []; for (const pattern of patterns) { if (isAbsolute2(pattern)) { absolute.push(pattern); } else { - relative3.push(pattern); + relative2.push(pattern); } } - return [absolute, relative3]; + return [absolute, relative2]; } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute2(pattern) { @@ -81833,8 +81833,8 @@ var require_follow_redirects = __commonJS({ } return parsed; } - function resolveUrl(relative3, base) { - return useNativeURL ? new URL2(relative3, base) : parseUrl(url2.resolve(base, relative3)); + function resolveUrl(relative2, base) { + return useNativeURL ? 
new URL2(relative2, base) : parseUrl(url2.resolve(base, relative2)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { @@ -88949,8 +88949,8 @@ function wrapApiConfigurationError(e) { } // src/feature-flags.ts -var fs7 = __toESM(require("fs")); -var path9 = __toESM(require("path")); +var fs6 = __toESM(require("fs")); +var path8 = __toESM(require("path")); var semver3 = __toESM(require_semver2()); // src/defaults.json @@ -88958,8 +88958,8 @@ var bundleVersion = "codeql-bundle-v2.23.2"; var cliVersion = "2.23.2"; // src/overlay-database-utils.ts -var fs6 = __toESM(require("fs")); -var path8 = __toESM(require("path")); +var fs5 = __toESM(require("fs")); +var path7 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); // src/git-utils.ts @@ -89159,26 +89159,6 @@ async function isAnalyzingDefaultBranch() { return currentRef === defaultBranch; } -// src/diff-informed-analysis-utils.ts -var fs5 = __toESM(require("fs")); -var path7 = __toESM(require("path")); -function getDiffRangesJsonFilePath() { - return path7.join(getTemporaryDirectory(), "pr-diff-range.json"); -} -function readDiffRangesJsonFile(logger) { - const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs5.existsSync(jsonFilePath)) { - logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); - return void 0; - } - const jsonContents = fs5.readFileSync(jsonFilePath, "utf8"); - logger.debug( - `Read pr-diff-range JSON file from ${jsonFilePath}: -${jsonContents}` - ); - return JSON.parse(jsonContents); -} - // src/logging.ts var core7 = __toESM(require_core()); function getActionsLogger() { @@ -89204,12 +89184,12 @@ async function writeBaseDatabaseOidsFile(config, sourceRoot) { const gitFileOids = await getFileOidsUnderPath(sourceRoot); const gitFileOidsJson = JSON.stringify(gitFileOids); const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); - await fs6.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson); + await fs5.promises.writeFile(baseDatabaseOidsFilePath, gitFileOidsJson); } async function readBaseDatabaseOidsFile(config, logger) { const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config); try { - const contents = await fs6.promises.readFile( + const contents = await fs5.promises.readFile( baseDatabaseOidsFilePath, "utf-8" ); @@ -89222,54 +89202,53 @@ async function readBaseDatabaseOidsFile(config, logger) { } } function getBaseDatabaseOidsFilePath(config) { - return path8.join(config.dbLocation, "base-database-oids.json"); + return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); const originalCount = changedFiles.length; - let forcedAddedCount = 0; + let extraAddedCount = 0; try { - const forced = getForcedOverlayFilesFromDiff(logger); - if (forced.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of forced) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { - if (overlayFileOids[f] !== void 0 || fs6.existsSync(path8.join(sourceRoot, f))) { + if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) { 
existing.add(f); changedFiles.push(f); - forcedAddedCount++; + extraAddedCount++; } } } - if (forcedAddedCount > 0) { + if (extraAddedCount > 0) { logger.debug( - `Force-included ${forcedAddedCount} file(s) from diff ranges into overlay: ${changedFiles.slice(-forcedAddedCount).join(", ")}` + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` ); } else { logger.debug( - "All diff range files were already present in natural overlay changes (or none applicable)." + "All diff range files were already present in the diff from the base database." ); } } } catch (e) { logger.debug( - `Failed while attempting to force-include diff range files in overlay: ${e.message || e}` + `Failed while attempting to add diff range files in overlay: ${e.message || e}` ); } logger.info( - `Found ${originalCount} natural changed file(s); force-added ${forcedAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); - const overlayChangesFile = path8.join( + const overlayChangesFile = path7.join( getTemporaryDirectory(), "overlay-changes.json" ); logger.debug( `Writing overlay changed files to ${overlayChangesFile}: ${changedFilesJson}` ); - await fs6.promises.writeFile(overlayChangesFile, changedFilesJson); + await fs5.promises.writeFile(overlayChangesFile, changedFilesJson); return overlayChangesFile; } function computeChangedFiles(baseFileOids, overlayFileOids) { @@ -89286,23 +89265,6 @@ function computeChangedFiles(baseFileOids, overlayFileOids) { } return changes; } -function getForcedOverlayFilesFromDiff(logger) { - const forced = /* @__PURE__ */ new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - const checkoutPath = getRequiredInput("checkout_path"); - for (const r of diffRanges) { - const absPath = r.path; - if (!absPath) continue; - let rel = path8.relative(checkoutPath, absPath); - if (!rel || rel.startsWith("..")) continue; - rel = rel.split(path8.sep).join("/"); - forced.add(rel); - } - return forced; -} // src/tools-features.ts var semver2 = __toESM(require_semver2()); @@ -89495,7 +89457,7 @@ var Features = class { this.gitHubFeatureFlags = new GitHubFeatureFlags( gitHubVersion, repositoryNwo, - path9.join(tempDir, FEATURE_FLAGS_FILE_NAME), + path8.join(tempDir, FEATURE_FLAGS_FILE_NAME), logger ); } @@ -89674,12 +89636,12 @@ var GitHubFeatureFlags = class { } async readLocalFlags() { try { - if (fs7.existsSync(this.featureFlagsFile)) { + if (fs6.existsSync(this.featureFlagsFile)) { this.logger.debug( `Loading feature flags from ${this.featureFlagsFile}` ); return JSON.parse( - fs7.readFileSync(this.featureFlagsFile, "utf8") + fs6.readFileSync(this.featureFlagsFile, "utf8") ); } } catch (e) { @@ -89692,7 +89654,7 @@ var GitHubFeatureFlags = class { async writeLocalFlags(flags) { try { this.logger.debug(`Writing feature flags to ${this.featureFlagsFile}`); - fs7.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); + fs6.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); } catch (e) { this.logger.warning( `Error writing cached feature flags file ${this.featureFlagsFile}: ${e}.` @@ -89773,6 +89735,26 @@ var PACK_IDENTIFIER_PATTERN = (function() { return new RegExp(`^${component}/${component}$`); })(); +// 
src/diff-informed-analysis-utils.ts +var fs7 = __toESM(require("fs")); +var path9 = __toESM(require("path")); +function getDiffRangesJsonFilePath() { + return path9.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function readDiffRangesJsonFile(logger) { + const jsonFilePath = getDiffRangesJsonFilePath(); + if (!fs7.existsSync(jsonFilePath)) { + logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); + return void 0; + } + const jsonContents = fs7.readFileSync(jsonFilePath, "utf8"); + logger.debug( + `Read pr-diff-range JSON file from ${jsonFilePath}: +${jsonContents}` + ); + return JSON.parse(jsonContents); +} + // src/trap-caching.ts var actionsCache2 = __toESM(require_cache3()); @@ -91334,7 +91316,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -91369,6 +91351,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/src/codeql.test.ts b/src/codeql.test.ts index a5422b1e38..57b9865f9f 100644 --- a/src/codeql.test.ts +++ b/src/codeql.test.ts @@ -517,6 +517,7 @@ const injectedConfigMacro = test.macro({ "", undefined, undefined, + undefined, getRunnerLogger(true), ); @@ -803,6 +804,7 @@ test("passes a code scanning config AND qlconfig to the CLI", async (t: Executio "", undefined, "/path/to/qlconfig.yml", + undefined, getRunnerLogger(true), ); @@ -831,6 +833,7 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu "", undefined, undefined, // undefined qlconfigFile + undefined, getRunnerLogger(true), ); @@ -1080,6 +1083,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP "sourceRoot", undefined, undefined, + undefined, getRunnerLogger(false), ); diff --git a/src/codeql.ts b/src/codeql.ts index 5241ff2476..c4b536ee4b 100644 --- a/src/codeql.ts +++ b/src/codeql.ts @@ -96,6 +96,7 @@ export interface CodeQL { sourceRoot: string, processName: string | undefined, qlconfigFile: string | undefined, + prDiffChangedFiles: Set | undefined, logger: Logger, ): Promise; /** @@ -560,6 +561,7 @@ export async function getCodeQLForCmd( sourceRoot: string, processName: string | undefined, qlconfigFile: string | undefined, + prDiffChangedFiles: Set | undefined, logger: Logger, ) { const extraArgs = config.languages.map( @@ -602,6 +604,7 @@ export async function getCodeQLForCmd( const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger, ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/src/init-action.ts b/src/init-action.ts index b140b34a01..d96c7ab525 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -180,6 +180,7 @@ async function run() { persistInputs(); let config: configUtils.Config | undefined; + let prDiffChangedFiles: Set | undefined; let codeql: CodeQL; let toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined; let toolsFeatureFlagsValid: boolean | undefined; @@ -342,7 +343,7 @@ async function run() { await checkInstallPython311(config.languages, codeql); - await 
computeAndPersistDiffRangesEarly(codeql, features, logger); + prDiffChangedFiles = await computeAndPersistDiffRanges(codeql, features, logger); } catch (unwrappedError) { const error = wrapError(unwrappedError); core.setFailed(error.message); @@ -669,6 +670,7 @@ async function run() { sourceRoot, "Runner.Worker.exe", qlconfigFile, + prDiffChangedFiles, logger, ); @@ -698,6 +700,7 @@ async function run() { sourceRoot, "Runner.Worker.exe", qlconfigFile, + prDiffChangedFiles, logger, ); } @@ -760,35 +763,37 @@ async function run() { * is enabled (feature flag + PR context). This writes the standard pr-diff-range.json * file for later reuse in the analyze step. Failures are logged but non-fatal. */ -async function computeAndPersistDiffRangesEarly( +async function computeAndPersistDiffRanges( codeql: CodeQL, features: Features, logger: Logger, -): Promise { +): Promise | undefined> { try { - await withGroupAsync("Compute PR diff ranges", async () => { + return await withGroupAsync("Compute PR diff ranges", async () => { const branches = await getDiffInformedAnalysisBranches( codeql, features, logger, ); if (!branches) { - return; + return undefined; } const ranges = await getPullRequestEditedDiffRanges(branches, logger); if (ranges === undefined) { - return; + return undefined; } writeDiffRangesJsonFile(logger, ranges); - const distinctFiles = new Set(ranges.map((r) => r.path)).size; + const distinctFiles = new Set(ranges.map((r) => r.path)); logger.info( - `Persisted ${ranges.length} diff range(s) across ${distinctFiles} file(s) for reuse during analyze step.`, + `Persisted ${ranges.length} diff range(s) across ${distinctFiles.size} file(s) for reuse during analyze step.`, ); + return distinctFiles; }); } catch (e) { logger.warning( `Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}`, ); + return undefined; } } diff --git a/src/init.ts b/src/init.ts index 687afc1227..8bb4f963e1 100644 --- a/src/init.ts +++ b/src/init.ts @@ -73,6 +73,7 @@ export async function runDatabaseInitCluster( sourceRoot: string, processName: string | undefined, qlconfigFile: string | undefined, + prDiffChangedFiles: Set | undefined, logger: Logger, ): Promise { fs.mkdirSync(config.dbLocation, { recursive: true }); @@ -84,6 +85,7 @@ export async function runDatabaseInitCluster( sourceRoot, processName, qlconfigFile, + prDiffChangedFiles, logger, ), ); diff --git a/src/overlay-database-utils.test.ts b/src/overlay-database-utils.test.ts index ca52f1d88a..b7b63340fd 100644 --- a/src/overlay-database-utils.test.ts +++ b/src/overlay-database-utils.test.ts @@ -69,6 +69,7 @@ test("writeOverlayChangesFile generates correct changes file", async (t) => { const changesFilePath = await writeOverlayChangesFile( config, sourceRoot, + new Set([]), // The PR didn't touch any files logger, ); getFileOidsStubForOverlay.restore(); diff --git a/src/overlay-database-utils.ts b/src/overlay-database-utils.ts index 32d52dba27..51f0f3f005 100644 --- a/src/overlay-database-utils.ts +++ b/src/overlay-database-utils.ts @@ -9,7 +9,6 @@ import { getAutomationID } from "./api-client"; import { type CodeQL } from "./codeql"; import { type Config } from "./config-utils"; import { getCommitOid, getFileOidsUnderPath } from "./git-utils"; -import { readDiffRangesJsonFile } from "./diff-informed-analysis-utils"; import { Logger, withGroupAsync } from "./logging"; import { isInTestMode, @@ -117,6 +116,7 @@ function getBaseDatabaseOidsFilePath(config: Config): string { export async function writeOverlayChangesFile( config: Config, 
sourceRoot: string, + prDiffChangedFiles: Set | undefined, logger: Logger, ): Promise { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); @@ -128,10 +128,9 @@ export async function writeOverlayChangesFile( const originalCount = changedFiles.length; let extraAddedCount = 0; try { - const diffChangedFiles = getFilesFromDiff(logger); - if (diffChangedFiles.size > 0) { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { const existing = new Set(changedFiles); - for (const f of diffChangedFiles) { + for (const f of prDiffChangedFiles) { if (!existing.has(f)) { // Only include if file still exists (added/modified) — skip deleted files that might appear in diff. if (overlayFileOids[f] !== undefined || fs.existsSync(path.join(sourceRoot, f))) { @@ -191,22 +190,6 @@ function computeChangedFiles( return changes; } -/** - * Derive the set of repository-relative file paths that have at least one edited range - * in the precomputed diff ranges JSON. Returns an empty set if no JSON exists. - */ -function getFilesFromDiff(logger: Logger): Set { - const forced = new Set(); - const diffRanges = readDiffRangesJsonFile(logger); - if (!diffRanges || diffRanges.length === 0) { - return forced; - } - for (const r of diffRanges) { - forced.add(r.path); - } - return forced; -} - // Constants for database caching const CACHE_VERSION = 1; const CACHE_PREFIX = "codeql-overlay-base-database"; From 5a8e345008b409b2af6cf7248196be41b0b19210 Mon Sep 17 00:00:00 2001 From: Alex Eyers-Taylor Date: Fri, 10 Oct 2025 13:06:10 +0100 Subject: [PATCH 6/6] Fix linting issues. --- lib/analyze-action.js | 4 +++- lib/init-action.js | 6 +++++- src/analyze.test.ts | 1 - src/analyze.ts | 14 ++++++++------ src/diff-informed-analysis-utils.test.ts | 7 ++++--- src/diff-informed-analysis-utils.ts | 6 +++--- src/init-action.ts | 16 ++++++++++------ src/overlay-database-utils.ts | 5 ++++- 8 files changed, 37 insertions(+), 22 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index ae343d4713..06b857bbdf 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -93637,7 +93637,9 @@ async function setupDiffInformedQueryRun(logger) { ); return void 0; } - const fileCount = new Set(diffRanges.filter((r) => r.path).map((r) => r.path)).size; + const fileCount = new Set( + diffRanges.filter((r) => r.path).map((r) => r.path) + ).size; logger.info( `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).` ); diff --git a/lib/init-action.js b/lib/init-action.js index 0ee3069126..056562fbb0 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -90986,7 +90986,11 @@ async function run() { logger }); await checkInstallPython311(config.languages, codeql); - prDiffChangedFiles = await computeAndPersistDiffRanges(codeql, features, logger); + prDiffChangedFiles = await computeAndPersistDiffRanges( + codeql, + features, + logger + ); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); core13.setFailed(error2.message); diff --git a/src/analyze.test.ts b/src/analyze.test.ts index afc7368177..b6880b43da 100644 --- a/src/analyze.test.ts +++ b/src/analyze.test.ts @@ -129,7 +129,6 @@ test("status report fields", async (t) => { }); }); - test("resolveQuerySuiteAlias", (t) => { // default query suite names should resolve to something language-specific ending in `.qls`. 
for (const suite of defaultSuites) { diff --git a/src/analyze.ts b/src/analyze.ts index a9daa82340..1a5da2f1bf 100644 --- a/src/analyze.ts +++ b/src/analyze.ts @@ -14,7 +14,10 @@ import { type CodeQL } from "./codeql"; import * as configUtils from "./config-utils"; import { getJavaTempDependencyDir } from "./dependency-caching"; import { addDiagnostic, makeDiagnostic } from "./diagnostics"; -import { DiffThunkRange, readDiffRangesJsonFile } from "./diff-informed-analysis-utils"; +import { + DiffThunkRange, + readDiffRangesJsonFile, +} from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { FeatureEnablement, Feature } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; @@ -303,12 +306,14 @@ export async function setupDiffInformedQueryRun( return undefined; } - const fileCount = new Set(diffRanges.filter((r) => r.path).map((r) => r.path)).size; + const fileCount = new Set( + diffRanges.filter((r) => r.path).map((r) => r.path), + ).size; logger.info( `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).`, ); - const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); + const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); if (packDir === undefined) { logger.warning( "Cannot create diff range extension pack for diff-informed queries; " + @@ -324,8 +329,6 @@ export async function setupDiffInformedQueryRun( ); } - - /** * Create an extension pack in the temporary directory that contains the file * line ranges that were added or modified in the pull request. @@ -406,7 +409,6 @@ extensions: `Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`, ); - return diffRangeDir; } diff --git a/src/diff-informed-analysis-utils.test.ts b/src/diff-informed-analysis-utils.test.ts index 99115aa311..a67b714b19 100644 --- a/src/diff-informed-analysis-utils.test.ts +++ b/src/diff-informed-analysis-utils.test.ts @@ -4,7 +4,10 @@ import * as sinon from "sinon"; import * as actionsUtil from "./actions-util"; import type { PullRequestBranches } from "./actions-util"; import * as apiClient from "./api-client"; -import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils"; +import { + shouldPerformDiffInformedAnalysis, + exportedForTesting as diffExportedForTesting, +} from "./diff-informed-analysis-utils"; import { Feature, Features } from "./feature-flags"; import { getRunnerLogger } from "./logging"; import { parseRepositoryNwo } from "./repository"; @@ -187,8 +190,6 @@ test( // --------------------------------------------------------------------------- // Tests for getDiffRanges (moved from analyze.test.ts after extraction) // --------------------------------------------------------------------------- -import { exportedForTesting as diffExportedForTesting } from "./diff-informed-analysis-utils"; - function runGetDiffRanges(changes: number, patch: string[] | undefined): any { sinon .stub(actionsUtil, "getRequiredInput") diff --git a/src/diff-informed-analysis-utils.ts b/src/diff-informed-analysis-utils.ts index f0cf8670e5..9395f4e316 100644 --- a/src/diff-informed-analysis-utils.ts +++ b/src/diff-informed-analysis-utils.ts @@ -1,15 +1,15 @@ import * as fs from "fs"; import * as path from "path"; -import * as actionsUtil from "./actions-util"; import type { PullRequestBranches } from "./actions-util"; +import * as actionsUtil from "./actions-util"; +import { getRequiredInput } from "./actions-util"; import { getGitHubVersion, getApiClient } from 
"./api-client"; import type { CodeQL } from "./codeql"; import { Feature, FeatureEnablement } from "./feature-flags"; import { Logger } from "./logging"; -import { GitHubVariant, satisfiesGHESVersion } from "./util"; import { getRepositoryNwoFromEnv } from "./repository"; -import { getRequiredInput } from "./actions-util"; +import { GitHubVariant, satisfiesGHESVersion } from "./util"; /** * This interface is an abbreviated version of the file diff object returned by diff --git a/src/init-action.ts b/src/init-action.ts index d96c7ab525..a481d28915 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -33,6 +33,11 @@ import { logUnwrittenDiagnostics, makeDiagnostic, } from "./diagnostics"; +import { + getPullRequestEditedDiffRanges, + writeDiffRangesJsonFile, + getDiffInformedAnalysisBranches, +} from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { Feature, Features } from "./feature-flags"; import { loadPropertiesFromApi } from "./feature-flags/properties"; @@ -66,11 +71,6 @@ import { ZstdAvailability } from "./tar"; import { ToolsDownloadStatusReport } from "./tools-download"; import { ToolsFeature } from "./tools-features"; import { getCombinedTracerConfig } from "./tracer-config"; -import { - getPullRequestEditedDiffRanges, - writeDiffRangesJsonFile, - getDiffInformedAnalysisBranches, -} from "./diff-informed-analysis-utils"; import { checkDiskUsage, checkForTimeout, @@ -343,7 +343,11 @@ async function run() { await checkInstallPython311(config.languages, codeql); - prDiffChangedFiles = await computeAndPersistDiffRanges(codeql, features, logger); + prDiffChangedFiles = await computeAndPersistDiffRanges( + codeql, + features, + logger, + ); } catch (unwrappedError) { const error = wrapError(unwrappedError); core.setFailed(error.message); diff --git a/src/overlay-database-utils.ts b/src/overlay-database-utils.ts index 51f0f3f005..b4a894a124 100644 --- a/src/overlay-database-utils.ts +++ b/src/overlay-database-utils.ts @@ -133,7 +133,10 @@ export async function writeOverlayChangesFile( for (const f of prDiffChangedFiles) { if (!existing.has(f)) { // Only include if file still exists (added/modified) — skip deleted files that might appear in diff. - if (overlayFileOids[f] !== undefined || fs.existsSync(path.join(sourceRoot, f))) { + if ( + overlayFileOids[f] !== undefined || + fs.existsSync(path.join(sourceRoot, f)) + ) { existing.add(f); changedFiles.push(f); extraAddedCount++;