diff --git a/lib/analyze-action.js b/lib/analyze-action.js index d514c96d92..4135aeff96 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -184,7 +184,7 @@ var require_file_command = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); var os5 = __importStar4(require("os")); var utils_1 = require_utils(); @@ -202,7 +202,7 @@ var require_file_command = __commonJS({ } exports2.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${crypto.randomUUID()}`; + const delimiter = `ghadelimiter_${crypto2.randomUUID()}`; const convertedValue = (0, utils_1.toCommandValue)(value); if (key.includes(delimiter)) { throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); @@ -3637,11 +3637,11 @@ var require_util2 = __commonJS({ var assert = require("assert"); var { isUint8Array } = require("util/types"); var supportedHashes = []; - var crypto; + var crypto2; try { - crypto = require("crypto"); + crypto2 = require("crypto"); const possibleRelevantHashes = ["sha256", "sha384", "sha512"]; - supportedHashes = crypto.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2)); + supportedHashes = crypto2.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2)); } catch { } function responseURL(response) { @@ -3918,7 +3918,7 @@ var require_util2 = __commonJS({ } } function bytesMatch(bytes, metadataList) { - if (crypto === void 0) { + if (crypto2 === void 0) { return true; } const parsedMetadata = parseMetadata(metadataList); @@ -3933,7 +3933,7 @@ var require_util2 = __commonJS({ for (const item of metadata) { const algorithm = item.algo; const expectedValue = item.hash; - let actualValue = crypto.createHash(algorithm).update(bytes).digest("base64"); + let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64"); if (actualValue[actualValue.length - 1] === "=") { if (actualValue[actualValue.length - 2] === "=") { actualValue = actualValue.slice(0, -2); @@ -5279,8 +5279,8 @@ var require_body = __commonJS({ var { parseMIMEType, serializeAMimeType } = require_dataURL(); var random; try { - const crypto = require("node:crypto"); - random = (max) => crypto.randomInt(0, max); + const crypto2 = require("node:crypto"); + random = (max) => crypto2.randomInt(0, max); } catch { random = (max) => Math.floor(Math.random(max)); } @@ -16330,9 +16330,9 @@ var require_connection = __commonJS({ channels.open = diagnosticsChannel.channel("undici:websocket:open"); channels.close = diagnosticsChannel.channel("undici:websocket:close"); channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error"); - var crypto; + var crypto2; try { - crypto = require("crypto"); + crypto2 = require("crypto"); } catch { } function establishWebSocketConnection(url2, protocols, ws, onEstablish, options) { @@ -16351,7 +16351,7 @@ var require_connection = __commonJS({ const headersList = new Headers(options.headers)[kHeadersList]; request.headersList = headersList; } - const keyValue = crypto.randomBytes(16).toString("base64"); + const keyValue = crypto2.randomBytes(16).toString("base64"); request.headersList.append("sec-websocket-key", keyValue); request.headersList.append("sec-websocket-version", "13"); for (const protocol of protocols) { @@ -16380,7 
+16380,7 @@ var require_connection = __commonJS({ return; } const secWSAccept = response.headersList.get("Sec-WebSocket-Accept"); - const digest = crypto.createHash("sha1").update(keyValue + uid).digest("base64"); + const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64"); if (secWSAccept !== digest) { failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header."); return; @@ -16460,9 +16460,9 @@ var require_frame = __commonJS({ "node_modules/undici/lib/websocket/frame.js"(exports2, module2) { "use strict"; var { maxUnsigned16Bit } = require_constants5(); - var crypto; + var crypto2; try { - crypto = require("crypto"); + crypto2 = require("crypto"); } catch { } var WebsocketFrameSend = class { @@ -16471,7 +16471,7 @@ var require_frame = __commonJS({ */ constructor(data) { this.frameData = data; - this.maskKey = crypto.randomBytes(4); + this.maskKey = crypto2.randomBytes(4); } createFrame(opcode) { const bodyLength = this.frameData?.byteLength ?? 0; @@ -36929,7 +36929,7 @@ var require_cacheUtils = __commonJS({ var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); var path20 = __importStar4(require("path")); var semver8 = __importStar4(require_semver3()); @@ -36953,7 +36953,7 @@ var require_cacheUtils = __commonJS({ } tempDirectory = path20.join(baseLocation, "actions", "temp"); } - const dest = path20.join(tempDirectory, crypto.randomUUID()); + const dest = path20.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); @@ -37069,7 +37069,7 @@ var require_cacheUtils = __commonJS({ components.push("windows-only"); } components.push(versionSalt); - return crypto.createHash("sha256").update(components.join("|")).digest("hex"); + return crypto2.createHash("sha256").update(components.join("|")).digest("hex"); } exports2.getCacheVersion = getCacheVersion; function getRuntimeToken() { @@ -48813,7 +48813,7 @@ var require_dist7 = __commonJS({ var coreXml = require_commonjs9(); var logger$1 = require_dist(); var abortController = require_commonjs10(); - var crypto = require("crypto"); + var crypto2 = require("crypto"); var coreTracing = require_commonjs4(); var stream2 = require("stream"); var coreLro = require_dist6(); @@ -50321,7 +50321,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + return crypto2.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; var AnonymousCredentialPolicy = class extends CredentialPolicy { @@ -50519,7 +50519,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), getHeaderValueToSign(request, HeaderConstants.RANGE) ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); - const signature = crypto.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + const signature = crypto2.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); } function getHeaderValueToSign(request, headerName) { @@ -64278,7 
+64278,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + return crypto2.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } }; function ipRangeToString(ipRange) { @@ -79549,7 +79549,7 @@ var require_tool_cache = __commonJS({ exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; var core15 = __importStar4(require_core()); var io7 = __importStar4(require_io()); - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var fs20 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os5 = __importStar4(require("os")); @@ -79574,7 +79574,7 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url2, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path20.join(_getTempDirectory(), crypto.randomUUID()); + dest = dest || path20.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path20.dirname(dest)); core15.debug(`Downloading ${url2}`); core15.debug(`Destination ${dest}`); @@ -79955,7 +79955,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path20.join(_getTempDirectory(), crypto.randomUUID()); + dest = path20.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; @@ -81563,7 +81563,7 @@ var require_internal_hash_files = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var core15 = __importStar4(require_core()); var fs20 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); @@ -81576,7 +81576,7 @@ var require_internal_hash_files = __commonJS({ const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); - const result = crypto.createHash("sha256"); + const result = crypto2.createHash("sha256"); let count = 0; try { for (var _e = true, _f = __asyncValues4(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { @@ -81592,7 +81592,7 @@ var require_internal_hash_files = __commonJS({ writeDelegate(`Skip directory '${file}'.`); continue; } - const hash2 = crypto.createHash("sha256"); + const hash2 = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); yield pipeline(fs20.createReadStream(file), hash2); result.write(hash2.digest()); @@ -90237,6 +90237,11 @@ async function getAnalysisKey() { core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } +async function getAutomationID() { + const analysis_key = await getAnalysisKey(); + const environment = getRequiredInput("matrix"); + return computeAutomationID(analysis_key, environment); +} function computeAutomationID(analysis_key, environment) { let automationID = `${analysis_key}/`; const matrix = parseMatrixInput(environment); @@ -90561,6 +90566,7 @@ var bundleVersion = "codeql-bundle-v2.23.0"; var cliVersion = "2.23.0"; // src/overlay-database-utils.ts +var crypto = __toESM(require("crypto")); var fs6 = __toESM(require("fs")); var path7 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); @@ -90908,14 +90914,18 @@ async function uploadOverlayBaseDatabaseToCache(codeql, config, logger) { } const codeQlVersion = (await codeql.getVersion()).version; const checkoutPath = getRequiredInput("checkout_path"); - const cacheKey3 = await generateCacheKey(config, codeQlVersion, checkoutPath); + const cacheSaveKey = await getCacheSaveKey( + config, + codeQlVersion, + checkoutPath + ); logger.info( - `Uploading overlay-base database to Actions cache with key ${cacheKey3}` + `Uploading overlay-base database to Actions cache with key ${cacheSaveKey}` ); try { const cacheId = await withTimeout( MAX_CACHE_OPERATION_MS, - actionsCache.saveCache([dbLocation], cacheKey3), + actionsCache.saveCache([dbLocation], cacheSaveKey), () => { } ); @@ -90932,13 +90942,26 @@ async function uploadOverlayBaseDatabaseToCache(codeql, config, logger) { logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`); return true; } -async function generateCacheKey(config, codeQlVersion, checkoutPath) { +async function getCacheSaveKey(config, codeQlVersion, checkoutPath) { const sha = await getCommitOid(checkoutPath); - return `${getCacheRestoreKey(config, codeQlVersion)}${sha}`; + const restoreKeyPrefix = await getCacheRestoreKeyPrefix( + config, + codeQlVersion + ); + return `${restoreKeyPrefix}${sha}`; } -function getCacheRestoreKey(config, codeQlVersion) { +async function getCacheRestoreKeyPrefix(config, codeQlVersion) { const languages = [...config.languages].sort().join("_"); - return `${CACHE_PREFIX}-${CACHE_VERSION}-${languages}-${codeQlVersion}-`; + const cacheKeyComponents = { + automationID: await getAutomationID() + // Add more components here as needed in the future + }; + const componentsHash = createCacheKeyHash(cacheKeyComponents); + return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`; +} +function createCacheKeyHash(components) { + const componentsJson = JSON.stringify(components); + return crypto.createHash("sha256").update(componentsJson).digest("hex").substring(0, 16); } // src/tools-features.ts @@ -95307,7 +95330,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo 
return JSON.parse(fs18.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { - const automationID = getAutomationID(category, analysis_key, environment); + const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { for (const run2 of sarif.runs || []) { if (run2.automationDetails === void 0) { @@ -95320,7 +95343,7 @@ function populateRunAutomationDetails(sarif, category, analysis_key, environment } return sarif; } -function getAutomationID(category, analysis_key, environment) { +function getAutomationID2(category, analysis_key, environment) { if (category !== void 0) { let automationID = category; if (!automationID.endsWith("/")) { diff --git a/lib/init-action-post.js b/lib/init-action-post.js index bd0902abb6..9ccfca2116 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -132789,7 +132789,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo return JSON.parse(fs17.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { - const automationID = getAutomationID(category, analysis_key, environment); + const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { for (const run2 of sarif.runs || []) { if (run2.automationDetails === void 0) { @@ -132802,7 +132802,7 @@ function populateRunAutomationDetails(sarif, category, analysis_key, environment } return sarif; } -function getAutomationID(category, analysis_key, environment) { +function getAutomationID2(category, analysis_key, environment) { if (category !== void 0) { let automationID = category; if (!automationID.endsWith("/")) { diff --git a/lib/init-action.js b/lib/init-action.js index 99e80044fb..3c3d617f06 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -184,7 +184,7 @@ var require_file_command = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var fs18 = __importStar4(require("fs")); var os5 = __importStar4(require("os")); var utils_1 = require_utils(); @@ -202,7 +202,7 @@ var require_file_command = __commonJS({ } exports2.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${crypto.randomUUID()}`; + const delimiter = `ghadelimiter_${crypto2.randomUUID()}`; const convertedValue = (0, utils_1.toCommandValue)(value); if (key.includes(delimiter)) { throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); @@ -3637,11 +3637,11 @@ var require_util2 = __commonJS({ var assert = require("assert"); var { isUint8Array } = require("util/types"); var supportedHashes = []; - var crypto; + var crypto2; try { - crypto = require("crypto"); + crypto2 = require("crypto"); const possibleRelevantHashes = ["sha256", "sha384", "sha512"]; - supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); + supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); } catch { } function responseURL(response) { @@ -3918,7 +3918,7 @@ var require_util2 = __commonJS({ } } function bytesMatch(bytes, metadataList) { - if (crypto === void 0) { + if (crypto2 === void 0) { return true; } const parsedMetadata = 
parseMetadata(metadataList); @@ -3933,7 +3933,7 @@ var require_util2 = __commonJS({ for (const item of metadata) { const algorithm = item.algo; const expectedValue = item.hash; - let actualValue = crypto.createHash(algorithm).update(bytes).digest("base64"); + let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64"); if (actualValue[actualValue.length - 1] === "=") { if (actualValue[actualValue.length - 2] === "=") { actualValue = actualValue.slice(0, -2); @@ -5279,8 +5279,8 @@ var require_body = __commonJS({ var { parseMIMEType, serializeAMimeType } = require_dataURL(); var random; try { - const crypto = require("node:crypto"); - random = (max) => crypto.randomInt(0, max); + const crypto2 = require("node:crypto"); + random = (max) => crypto2.randomInt(0, max); } catch { random = (max) => Math.floor(Math.random(max)); } @@ -16330,9 +16330,9 @@ var require_connection = __commonJS({ channels.open = diagnosticsChannel.channel("undici:websocket:open"); channels.close = diagnosticsChannel.channel("undici:websocket:close"); channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error"); - var crypto; + var crypto2; try { - crypto = require("crypto"); + crypto2 = require("crypto"); } catch { } function establishWebSocketConnection(url, protocols, ws, onEstablish, options) { @@ -16351,7 +16351,7 @@ var require_connection = __commonJS({ const headersList = new Headers(options.headers)[kHeadersList]; request.headersList = headersList; } - const keyValue = crypto.randomBytes(16).toString("base64"); + const keyValue = crypto2.randomBytes(16).toString("base64"); request.headersList.append("sec-websocket-key", keyValue); request.headersList.append("sec-websocket-version", "13"); for (const protocol of protocols) { @@ -16380,7 +16380,7 @@ var require_connection = __commonJS({ return; } const secWSAccept = response.headersList.get("Sec-WebSocket-Accept"); - const digest = crypto.createHash("sha1").update(keyValue + uid).digest("base64"); + const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64"); if (secWSAccept !== digest) { failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header."); return; @@ -16460,9 +16460,9 @@ var require_frame = __commonJS({ "node_modules/undici/lib/websocket/frame.js"(exports2, module2) { "use strict"; var { maxUnsigned16Bit } = require_constants5(); - var crypto; + var crypto2; try { - crypto = require("crypto"); + crypto2 = require("crypto"); } catch { } var WebsocketFrameSend = class { @@ -16471,7 +16471,7 @@ var require_frame = __commonJS({ */ constructor(data) { this.frameData = data; - this.maskKey = crypto.randomBytes(4); + this.maskKey = crypto2.randomBytes(4); } createFrame(opcode) { const bodyLength = this.frameData?.byteLength ?? 
0; @@ -36929,7 +36929,7 @@ var require_cacheUtils = __commonJS({ var exec2 = __importStar4(require_exec()); var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var fs18 = __importStar4(require("fs")); var path19 = __importStar4(require("path")); var semver9 = __importStar4(require_semver3()); @@ -36953,7 +36953,7 @@ var require_cacheUtils = __commonJS({ } tempDirectory = path19.join(baseLocation, "actions", "temp"); } - const dest = path19.join(tempDirectory, crypto.randomUUID()); + const dest = path19.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); @@ -37069,7 +37069,7 @@ var require_cacheUtils = __commonJS({ components.push("windows-only"); } components.push(versionSalt); - return crypto.createHash("sha256").update(components.join("|")).digest("hex"); + return crypto2.createHash("sha256").update(components.join("|")).digest("hex"); } exports2.getCacheVersion = getCacheVersion; function getRuntimeToken() { @@ -48813,7 +48813,7 @@ var require_dist7 = __commonJS({ var coreXml = require_commonjs9(); var logger$1 = require_dist(); var abortController = require_commonjs10(); - var crypto = require("crypto"); + var crypto2 = require("crypto"); var coreTracing = require_commonjs4(); var stream2 = require("stream"); var coreLro = require_dist6(); @@ -50321,7 +50321,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + return crypto2.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; var AnonymousCredentialPolicy = class extends CredentialPolicy { @@ -50519,7 +50519,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), getHeaderValueToSign(request, HeaderConstants.RANGE) ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); - const signature = crypto.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); + const signature = crypto2.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); } function getHeaderValueToSign(request, headerName) { @@ -64278,7 +64278,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + return crypto2.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } }; function ipRangeToString(ipRange) { @@ -80223,7 +80223,7 @@ var require_internal_hash_files = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = void 0; - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var core14 = __importStar4(require_core()); var fs18 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); @@ -80236,7 +80236,7 @@ var require_internal_hash_files = __commonJS({ const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? 
currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); - const result = crypto.createHash("sha256"); + const result = crypto2.createHash("sha256"); let count = 0; try { for (var _e = true, _f = __asyncValues4(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { @@ -80252,7 +80252,7 @@ var require_internal_hash_files = __commonJS({ writeDelegate(`Skip directory '${file}'.`); continue; } - const hash = crypto.createHash("sha256"); + const hash = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); yield pipeline(fs18.createReadStream(file), hash); result.write(hash.digest()); @@ -80646,7 +80646,7 @@ var require_tool_cache = __commonJS({ exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; var core14 = __importStar4(require_core()); var io7 = __importStar4(require_io()); - var crypto = __importStar4(require("crypto")); + var crypto2 = __importStar4(require("crypto")); var fs18 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os5 = __importStar4(require("os")); @@ -80671,7 +80671,7 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path19.join(_getTempDirectory(), crypto.randomUUID()); + dest = dest || path19.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path19.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); @@ -81052,7 +81052,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path19.join(_getTempDirectory(), crypto.randomUUID()); + dest = path19.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; @@ -85649,6 +85649,12 @@ function isHostedRunner() { process.env["RUNNER_TOOL_CACHE"]?.includes("hostedtoolcache") ); } +function parseMatrixInput(matrixInput) { + if (matrixInput === void 0 || matrixInput === "null") { + return void 0; + } + return JSON.parse(matrixInput); +} function wrapError(error2) { return error2 instanceof Error ? 
error2 : new Error(String(error2)); } @@ -86096,6 +86102,25 @@ async function getAnalysisKey() { core5.exportVariable(analysisKeyEnvVar, analysisKey); return analysisKey; } +async function getAutomationID() { + const analysis_key = await getAnalysisKey(); + const environment = getRequiredInput("matrix"); + return computeAutomationID(analysis_key, environment); +} +function computeAutomationID(analysis_key, environment) { + let automationID = `${analysis_key}/`; + const matrix = parseMatrixInput(environment); + if (matrix !== void 0) { + for (const entry of Object.entries(matrix).sort()) { + if (typeof entry[1] === "string") { + automationID += `${entry[0]}:${entry[1]}/`; + } else { + automationID += `${entry[0]}:/`; + } + } + } + return automationID; +} // src/caching-utils.ts var core6 = __toESM(require_core()); @@ -86178,6 +86203,7 @@ var bundleVersion = "codeql-bundle-v2.23.0"; var cliVersion = "2.23.0"; // src/overlay-database-utils.ts +var crypto = __toESM(require("crypto")); var fs6 = __toESM(require("fs")); var path7 = __toESM(require("path")); var actionsCache = __toESM(require_cache3()); @@ -86477,16 +86503,19 @@ async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) { } const dbLocation = config.dbLocation; const codeQlVersion = (await codeql.getVersion()).version; - const restoreKey = getCacheRestoreKey(config, codeQlVersion); + const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix( + config, + codeQlVersion + ); logger.info( - `Looking in Actions cache for overlay-base database with restore key ${restoreKey}` + `Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}` ); let databaseDownloadDurationMs = 0; try { const databaseDownloadStart = performance.now(); const foundKey = await withTimeout( MAX_CACHE_OPERATION_MS, - actionsCache.restoreCache([dbLocation], restoreKey), + actionsCache.restoreCache([dbLocation], cacheRestoreKeyPrefix), () => { logger.info("Timed out downloading overlay-base database from cache"); } @@ -86529,9 +86558,18 @@ async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) { databaseDownloadDurationMs }; } -function getCacheRestoreKey(config, codeQlVersion) { +async function getCacheRestoreKeyPrefix(config, codeQlVersion) { const languages = [...config.languages].sort().join("_"); - return `${CACHE_PREFIX}-${CACHE_VERSION}-${languages}-${codeQlVersion}-`; + const cacheKeyComponents = { + automationID: await getAutomationID() + // Add more components here as needed in the future + }; + const componentsHash = createCacheKeyHash(cacheKeyComponents); + return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`; +} +function createCacheKeyHash(components) { + const componentsJson = JSON.stringify(components); + return crypto.createHash("sha256").update(componentsJson).digest("hex").substring(0, 16); } // src/tools-features.ts diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 3b711ba536..fd01f27879 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -92180,7 +92180,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo return JSON.parse(fs13.readFileSync(outputFile, "utf8")); } function populateRunAutomationDetails(sarif, category, analysis_key, environment) { - const automationID = getAutomationID(category, analysis_key, environment); + const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { for (const run of sarif.runs || []) { if 
(run.automationDetails === void 0) {
@@ -92193,7 +92193,7 @@ function populateRunAutomationDetails(sarif, category, analysis_key, environment
 }
 return sarif;
 }
-function getAutomationID(category, analysis_key, environment) {
+function getAutomationID2(category, analysis_key, environment) {
 if (category !== void 0) {
 let automationID = category;
 if (!automationID.endsWith("/")) {
diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js
index 514c7265cc..236a0aa34d 100644
--- a/lib/upload-sarif-action.js
+++ b/lib/upload-sarif-action.js
@@ -92862,7 +92862,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
 return JSON.parse(fs14.readFileSync(outputFile, "utf8"));
 }
 function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
- const automationID = getAutomationID(category, analysis_key, environment);
+ const automationID = getAutomationID2(category, analysis_key, environment);
 if (automationID !== void 0) {
 for (const run2 of sarif.runs || []) {
 if (run2.automationDetails === void 0) {
@@ -92875,7 +92875,7 @@ function populateRunAutomationDetails(sarif, category, analysis_key, environment
 }
 return sarif;
 }
-function getAutomationID(category, analysis_key, environment) {
+function getAutomationID2(category, analysis_key, environment) {
 if (category !== void 0) {
 let automationID = category;
 if (!automationID.endsWith("/")) {
diff --git a/src/overlay-database-utils.test.ts b/src/overlay-database-utils.test.ts
index 61fcf48af9..ca52f1d88a 100644
--- a/src/overlay-database-utils.test.ts
+++ b/src/overlay-database-utils.test.ts
@@ -6,6 +6,7 @@ import test from "ava";
 import * as sinon from "sinon";
 import * as actionsUtil from "./actions-util";
+import * as apiClient from "./api-client";
 import * as gitUtils from "./git-utils";
 import { getRunnerLogger } from "./logging";
 import {
@@ -133,6 +134,11 @@ const testDownloadOverlayBaseDatabaseFromCache = test.macro({
 const stubs: sinon.SinonStub[] = [];
+ const getAutomationIDStub = sinon
+ .stub(apiClient, "getAutomationID")
+ .resolves("test-automation-id/");
+ stubs.push(getAutomationIDStub);
+
 const isInTestModeStub = sinon
 .stub(utils, "isInTestMode")
 .returns(testCase.isInTestMode);
diff --git a/src/overlay-database-utils.ts b/src/overlay-database-utils.ts
index 1df46aa785..c4f6ae7c00 100644
--- a/src/overlay-database-utils.ts
+++ b/src/overlay-database-utils.ts
@@ -1,9 +1,11 @@
+import * as crypto from "crypto";
 import * as fs from "fs";
 import * as path from "path";
 import * as actionsCache from "@actions/cache";
 import { getRequiredInput, getTemporaryDirectory } from "./actions-util";
+import { getAutomationID } from "./api-client";
 import { type CodeQL } from "./codeql";
 import { type Config } from "./config-utils";
 import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
@@ -251,15 +253,19 @@ export async function uploadOverlayBaseDatabaseToCache(
 const codeQlVersion = (await codeql.getVersion()).version;
 const checkoutPath = getRequiredInput("checkout_path");
- const cacheKey = await generateCacheKey(config, codeQlVersion, checkoutPath);
+ const cacheSaveKey = await getCacheSaveKey(
+ config,
+ codeQlVersion,
+ checkoutPath,
+ );
 logger.info(
- `Uploading overlay-base database to Actions cache with key ${cacheKey}`,
+ `Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`,
 );
 try {
 const cacheId = await withTimeout(
 MAX_CACHE_OPERATION_MS,
- actionsCache.saveCache([dbLocation], cacheKey),
+ actionsCache.saveCache([dbLocation], cacheSaveKey),
 () => {},
 );
 if (cacheId === undefined) {
@@ -322,10 +328,14 @@ export async function downloadOverlayBaseDatabaseFromCache(
 const dbLocation = config.dbLocation;
 const codeQlVersion = (await codeql.getVersion()).version;
- const restoreKey = getCacheRestoreKey(config, codeQlVersion);
+ const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
+ config,
+ codeQlVersion,
+ );
 logger.info(
- `Looking in Actions cache for overlay-base database with restore key ${restoreKey}`,
+ "Looking in Actions cache for overlay-base database with " +
+ `restore key ${cacheRestoreKeyPrefix}`,
 );
 let databaseDownloadDurationMs = 0;
@@ -333,7 +343,7 @@ export async function downloadOverlayBaseDatabaseFromCache(
 const databaseDownloadStart = performance.now();
 const foundKey = await withTimeout(
 MAX_CACHE_OPERATION_MS,
- actionsCache.restoreCache([dbLocation], restoreKey),
+ actionsCache.restoreCache([dbLocation], cacheRestoreKeyPrefix),
 () => {
 logger.info("Timed out downloading overlay-base database from cache");
 },
@@ -387,25 +397,87 @@ export async function downloadOverlayBaseDatabaseFromCache(
 };
 }
-async function generateCacheKey(
+/**
+ * Computes the cache key for saving the overlay-base database to the GitHub
+ * Actions cache.
+ *
+ * The key consists of the restore key prefix (which does not include the
+ * commit SHA) and the commit SHA of the current checkout.
+ */
+async function getCacheSaveKey(
 config: Config,
 codeQlVersion: string,
 checkoutPath: string,
): Promise<string> {
 const sha = await getCommitOid(checkoutPath);
- return `${getCacheRestoreKey(config, codeQlVersion)}${sha}`;
+ const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
+ config,
+ codeQlVersion,
+ );
+ return `${restoreKeyPrefix}${sha}`;
 }
-function getCacheRestoreKey(config: Config, codeQlVersion: string): string {
- // The restore key (prefix) specifies which cached overlay-base databases are
- // compatible with the current analysis: the cached database must have the
- // same cache version and the same CodeQL bundle version.
- //
- // Actions cache supports using multiple restore keys to indicate preference.
- // Technically we prefer a cached overlay-base database with the same SHA as
- // we are analyzing. However, since overlay-base databases are built from the
- // default branch and used in PR analysis, it is exceedingly unlikely that
- // the commit SHA will ever be the same, so we can just leave it out.
+/**
+ * Computes the cache key prefix for restoring the overlay-base database from
+ * the GitHub Actions cache.
+ *
+ * Actions cache supports using multiple restore keys to indicate preference,
+ * and this function could in principle take advantage of that feature by
+ * returning a list of restore key prefixes. However, since overlay-base
+ * databases are built from the default branch and used in PR analysis, it is
+ * exceedingly unlikely that the commit SHA will ever be the same.
+ *
+ * Therefore, this function returns only a single restore key prefix, which does
+ * not include the commit SHA. This allows us to restore the most recent
+ * compatible overlay-base database.
+ */
+async function getCacheRestoreKeyPrefix(
+ config: Config,
+ codeQlVersion: string,
+): Promise<string> {
 const languages = [...config.languages].sort().join("_");
- return `${CACHE_PREFIX}-${CACHE_VERSION}-${languages}-${codeQlVersion}-`;
+
+ const cacheKeyComponents = {
+ automationID: await getAutomationID(),
+ // Add more components here as needed in the future
+ };
+ const componentsHash = createCacheKeyHash(cacheKeyComponents);
+
+ // For a cached overlay-base database to be considered compatible for overlay
+ // analysis, all components in the cache restore key must match:
+ //
+ // CACHE_PREFIX: distinguishes overlay-base databases from other cache objects
+ // CACHE_VERSION: cache format version
+ // componentsHash: hash of additional components (see above for details)
+ // languages: the languages included in the overlay-base database
+ // codeQlVersion: CodeQL bundle version
+ //
+ // Technically we can also include languages and codeQlVersion in the
+ // componentsHash, but including them explicitly in the cache key makes it
+ // easier to debug and understand the cache key structure.
+ return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
+}
+
+/**
+ * Creates a SHA-256 hash of the cache key components to ensure uniqueness
+ * while keeping the cache key length manageable.
+ *
+ * @param components Object containing all components that should influence cache key uniqueness
+ * @returns A short SHA-256 hash (first 16 characters) of the components
+ */
+function createCacheKeyHash(components: Record<string, string>): string {
+ // From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify
+ //
+ // "Properties are visited using the same algorithm as Object.keys(), which
+ // has a well-defined order and is stable across implementations. For example,
+ // JSON.stringify on the same object will always produce the same string, and
+ // JSON.parse(JSON.stringify(obj)) would produce an object with the same key
+ // ordering as the original (assuming the object is completely
+ // JSON-serializable)."
+ const componentsJson = JSON.stringify(components);
+ return crypto
+ .createHash("sha256")
+ .update(componentsJson)
+ .digest("hex")
+ .substring(0, 16);
 }
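The TypeScript sketch below is an editorial illustration, not part of the change itself: it mirrors the computeAutomationID and parseMatrixInput logic added to the bundled actions above (the behavior that the new getAutomationID stub in the test stands in for). The sample analysis key and matrix values are made up for the example; the real action obtains the analysis key from the workflow context and the matrix from the "matrix" input.

// Sketch: how the automation ID that now feeds the cache key is built.
// Mirrors the computeAutomationID/parseMatrixInput helpers in this diff;
// the sample inputs below are made up.
function computeAutomationID(
  analysisKey: string,
  matrixInput: string | undefined,
): string {
  let automationID = `${analysisKey}/`;
  // An absent or "null" matrix input means the job has no matrix.
  const matrix: Record<string, unknown> | undefined =
    matrixInput === undefined || matrixInput === "null"
      ? undefined
      : JSON.parse(matrixInput);
  if (matrix !== undefined) {
    // Entries are sorted so the ID is stable regardless of matrix key order;
    // non-string values contribute an empty value, as in the bundled code.
    for (const [key, value] of Object.entries(matrix).sort()) {
      automationID +=
        typeof value === "string" ? `${key}:${value}/` : `${key}:/`;
    }
  }
  return automationID;
}

// Example with made-up values:
console.log(
  computeAutomationID(
    ".github/workflows/codeql.yml:analyze",
    JSON.stringify({ os: "ubuntu-latest", language: "javascript" }),
  ),
);
// -> ".github/workflows/codeql.yml:analyze/language:javascript/os:ubuntu-latest/"

Because the matrix entries are sorted and serialized the same way on every run, repeated runs of the same job produce the same automation ID, and therefore the same hashed cache-key component.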
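A second sketch, also illustrative rather than part of the diff, shows how the new key scheme in getCacheSaveKey, getCacheRestoreKeyPrefix, and createCacheKeyHash composes: the restore key is a prefix with no commit SHA, the save key appends the SHA, and the automation ID only enters the key through a truncated SHA-256 hash. The CACHE_PREFIX and CACHE_VERSION values, the plain parameters in place of the action's Config object, and the sample inputs are assumptions made for the example.

import * as crypto from "crypto";

// Assumed stand-ins for the action's real constants.
const CACHE_PREFIX = "codeql-overlay-base-database";
const CACHE_VERSION = 1;

// Hash the extra key components so the key stays short but still changes
// whenever any component (currently just the automation ID) changes.
function createCacheKeyHash(components: Record<string, string>): string {
  return crypto
    .createHash("sha256")
    .update(JSON.stringify(components))
    .digest("hex")
    .substring(0, 16);
}

// Restore-key prefix: everything except the commit SHA.
function getCacheRestoreKeyPrefix(
  automationID: string,
  languages: string[],
  codeQlVersion: string,
): string {
  const componentsHash = createCacheKeyHash({ automationID });
  const sortedLanguages = [...languages].sort().join("_");
  return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${sortedLanguages}-${codeQlVersion}-`;
}

// Save key: the restore-key prefix plus the commit SHA of the checkout.
function getCacheSaveKey(restoreKeyPrefix: string, commitSha: string): string {
  return `${restoreKeyPrefix}${commitSha}`;
}

const prefix = getCacheRestoreKeyPrefix(
  ".github/workflows/codeql.yml:analyze/language:javascript/",
  ["javascript", "python"],
  "2.23.0",
);
console.log(prefix);
// e.g. "codeql-overlay-base-database-1-<16 hex chars>-javascript_python-2.23.0-"
console.log(getCacheSaveKey(prefix, "0123456789abcdef0123456789abcdef01234567"));

Because every save key starts with the restore-key prefix, folding the automation ID into componentsHash means a cached overlay-base database is only considered compatible by jobs whose workflow and matrix configuration produce the same hash; changing the configuration changes the hash and effectively starts a fresh cache line.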