From ffcbb4c0c106f3f2c218ed4400e45ae5dd1ae3b7 Mon Sep 17 00:00:00 2001
From: "Michael B. Gale"
Date: Thu, 11 Sep 2025 15:32:30 +0100
Subject: [PATCH 1/3] Move `UPDATEJOB_PROXY` constants to `start-proxy.ts`

---
 lib/start-proxy-action.js |  6 +++---
 src/start-proxy-action.ts | 12 +++++++-----
 src/start-proxy.ts        |  5 +++++
 3 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js
index 6e121d96a7..3a24687b26 100644
--- a/lib/start-proxy-action.js
+++ b/lib/start-proxy-action.js
@@ -47800,6 +47800,9 @@ var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => {
 })(KnownLanguage || {});
 
 // src/start-proxy.ts
+var UPDATEJOB_PROXY = "update-job-proxy";
+var UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901";
+var UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.22.0/";
 var LANGUAGE_ALIASES = {
   c: "cpp" /* cpp */,
   "c++": "cpp" /* cpp */,
@@ -47894,9 +47897,6 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language
 }
 
 // src/start-proxy-action.ts
-var UPDATEJOB_PROXY = "update-job-proxy";
-var UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901";
-var UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.22.0/";
 var KEY_SIZE = 2048;
 var KEY_EXPIRY_YEARS = 2;
 var CERT_SUBJECT = [
diff --git a/src/start-proxy-action.ts b/src/start-proxy-action.ts
index 1efedb7d5c..73204edf56 100644
--- a/src/start-proxy-action.ts
+++ b/src/start-proxy-action.ts
@@ -7,13 +7,15 @@ import { pki } from "node-forge";
 
 import * as actionsUtil from "./actions-util";
 import { getActionsLogger, Logger } from "./logging";
-import { Credential, getCredentials } from "./start-proxy";
+import {
+  Credential,
+  getCredentials,
+  UPDATEJOB_PROXY,
+  UPDATEJOB_PROXY_URL_PREFIX,
+  UPDATEJOB_PROXY_VERSION,
+} from "./start-proxy";
 import * as util from "./util";
 
-const UPDATEJOB_PROXY = "update-job-proxy";
-const UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901";
-const UPDATEJOB_PROXY_URL_PREFIX =
-  "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.22.0/";
 const KEY_SIZE = 2048;
 const KEY_EXPIRY_YEARS = 2;
 
diff --git a/src/start-proxy.ts b/src/start-proxy.ts
index 18e8faa158..b29b143172 100644
--- a/src/start-proxy.ts
+++ b/src/start-proxy.ts
@@ -4,6 +4,11 @@ import { KnownLanguage } from "./languages";
 import { Logger } from "./logging";
 import { ConfigurationError, isDefined } from "./util";
 
+export const UPDATEJOB_PROXY = "update-job-proxy";
+export const UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901";
+export const UPDATEJOB_PROXY_URL_PREFIX =
+  "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.22.0/";
+
 export type Credential = {
   type: string;
   host?: string;

From 9df23425dc732362852b94732912a2cef4f1cc55 Mon Sep 17 00:00:00 2001
From: "Michael B.
Gale" Date: Thu, 11 Sep 2025 18:56:19 +0100 Subject: [PATCH 2/3] Search release pointed at by `defaults.json` for registry proxy artifact --- lib/start-proxy-action.js | 1770 +++++++++++++++++++++++++++++++++++-- src/start-proxy-action.ts | 24 +- src/start-proxy.test.ts | 68 ++ src/start-proxy.ts | 88 +- 4 files changed, 1875 insertions(+), 75 deletions(-) diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 3a24687b26..239f4662bb 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -19650,7 +19650,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable2(name, val) { + function exportVariable3(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -19659,7 +19659,7 @@ var require_core = __commonJS({ } (0, command_1.issueCommand)("set-env", { name }, convertedVal); } - exports2.exportVariable = exportVariable2; + exports2.exportVariable = exportVariable3; function setSecret2(secret) { (0, command_1.issueCommand)("add-mask", {}, secret); } @@ -21915,7 +21915,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core8 = __importStar(require_core()); + var core9 = __importStar(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -21938,10 +21938,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core8.info(err.message); + core9.info(err.message); } const seconds = this.getSleepAmount(); - core8.info(`Waiting ${seconds} seconds before trying again`); + core9.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -22021,7 +22021,7 @@ var require_tool_cache = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core8 = __importStar(require_core()); + var core9 = __importStar(require_core()); var io3 = __importStar(require_io()); var crypto = __importStar(require("crypto")); var fs = __importStar(require("fs")); @@ -22050,8 +22050,8 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { dest = dest || path2.join(_getTempDirectory(), crypto.randomUUID()); yield io3.mkdirP(path2.dirname(dest)); - core8.debug(`Downloading ${url}`); - core8.debug(`Destination ${dest}`); + core9.debug(`Downloading ${url}`); + core9.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -22078,7 +22078,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth) { - core8.debug("set auth"); + core9.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -22087,7 +22087,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { 
const err = new HTTPError(response.message.statusCode); - core8.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core9.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -22096,16 +22096,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs.createWriteStream(dest)); - core8.debug("download complete"); + core9.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core8.debug("download failed"); + core9.debug("download failed"); try { yield io3.rmRF(dest); } catch (err) { - core8.debug(`Failed to delete '${dest}'. ${err.message}`); + core9.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -22120,7 +22120,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core8.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core9.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", logLevel, @@ -22170,7 +22170,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core8.debug("Checking tar --version"); + core9.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -22180,7 +22180,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core8.debug(versionOutput.trim()); + core9.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -22188,7 +22188,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core8.isDebug() && !flags.includes("v")) { + if (core9.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -22220,7 +22220,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core8.isDebug()) { + if (core9.isDebug()) { args.push("-v"); } const xarPath = yield io3.which("xar", true); @@ -22265,7 +22265,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core8.debug(`Using pwsh at path: ${pwshPath}`); + core9.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -22285,7 +22285,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io3.which("powershell", true); - core8.debug(`Using powershell at path: ${powershellPath}`); + core9.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -22294,7 +22294,7 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { const unzipPath = yield io3.which("unzip", true); const args = [file]; - if (!core8.isDebug()) { + if (!core9.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -22305,8 +22305,8 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { version = semver2.clean(version) || version; arch = arch || os.arch(); - core8.debug(`Caching tool ${tool} ${version} ${arch}`); - core8.debug(`source dir: ${sourceDir}`); + core9.debug(`Caching tool ${tool} ${version} ${arch}`); + core9.debug(`source dir: ${sourceDir}`); if (!fs.statSync(sourceDir).isDirectory()) { 
throw new Error("sourceDir is not a directory"); } @@ -22324,14 +22324,14 @@ var require_tool_cache = __commonJS({ return __awaiter(this, void 0, void 0, function* () { version = semver2.clean(version) || version; arch = arch || os.arch(); - core8.debug(`Caching tool ${tool} ${version} ${arch}`); - core8.debug(`source file: ${sourceFile}`); + core9.debug(`Caching tool ${tool} ${version} ${arch}`); + core9.debug(`source file: ${sourceFile}`); if (!fs.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path2.join(destFolder, targetFile); - core8.debug(`destination file ${destPath}`); + core9.debug(`destination file ${destPath}`); yield io3.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -22355,12 +22355,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver2.clean(versionSpec) || ""; const cachePath = path2.join(_getCacheDirectory(), toolName, versionSpec, arch); - core8.debug(`checking cache: ${cachePath}`); + core9.debug(`checking cache: ${cachePath}`); if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { - core8.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core9.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core8.debug("not found"); + core9.debug("not found"); } } return toolPath; @@ -22391,7 +22391,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth) { - core8.debug("set auth"); + core9.debug("set auth"); headers.authorization = auth; } const response = yield http.getJson(treeUrl, headers); @@ -22412,7 +22412,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core8.debug("Invalid json"); + core9.debug("Invalid json"); } } return releases; @@ -22438,7 +22438,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter(this, void 0, void 0, function* () { const folderPath = path2.join(_getCacheDirectory(), tool, semver2.clean(version) || version, arch || ""); - core8.debug(`destination ${folderPath}`); + core9.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io3.rmRF(folderPath); yield io3.rmRF(markerPath); @@ -22450,19 +22450,19 @@ var require_tool_cache = __commonJS({ const folderPath = path2.join(_getCacheDirectory(), tool, semver2.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs.writeFileSync(markerPath, ""); - core8.debug("finished caching tool"); + core9.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver2.clean(versionSpec) || ""; - core8.debug(`isExplicit: ${c}`); + core9.debug(`isExplicit: ${c}`); const valid = semver2.valid(c) != null; - core8.debug(`explicit? ${valid}`); + core9.debug(`explicit? 
${valid}`); return valid; } exports2.isExplicitVersion = isExplicitVersion; function evaluateVersions(versions, versionSpec) { let version = ""; - core8.debug(`evaluating ${versions.length} versions`); + core9.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver2.gt(a, b)) { return 1; @@ -22478,9 +22478,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core8.debug(`matched: ${version}`); + core9.debug(`matched: ${version}`); } else { - core8.debug("match not found"); + core9.debug("match not found"); } return version; } @@ -44905,7 +44905,7 @@ var require_utils4 = __commonJS({ } }; exports2.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports2.defaults); - function getOctokitOptions(token, options) { + function getOctokitOptions2(token, options) { const opts = Object.assign({}, options || {}); const auth = Utils.getAuthString(token, opts); if (auth) { @@ -44913,7 +44913,7 @@ var require_utils4 = __commonJS({ } return opts; } - exports2.getOctokitOptions = getOctokitOptions; + exports2.getOctokitOptions = getOctokitOptions2; } }); @@ -45070,10 +45070,1580 @@ var require_package = __commonJS({ } }); +// node_modules/bottleneck/light.js +var require_light = __commonJS({ + "node_modules/bottleneck/light.js"(exports2, module2) { + (function(global2, factory) { + typeof exports2 === "object" && typeof module2 !== "undefined" ? module2.exports = factory() : typeof define === "function" && define.amd ? define(factory) : global2.Bottleneck = factory(); + })(exports2, (function() { + "use strict"; + var commonjsGlobal = typeof globalThis !== "undefined" ? globalThis : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : {}; + function getCjsExportFromNamespace(n) { + return n && n["default"] || n; + } + var load2 = function(received, defaults, onto = {}) { + var k, ref, v; + for (k in defaults) { + v = defaults[k]; + onto[k] = (ref = received[k]) != null ? 
ref : v; + } + return onto; + }; + var overwrite = function(received, defaults, onto = {}) { + var k, v; + for (k in received) { + v = received[k]; + if (defaults[k] !== void 0) { + onto[k] = v; + } + } + return onto; + }; + var parser = { + load: load2, + overwrite + }; + var DLList; + DLList = class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; + } + push(value) { + var node; + this.length++; + if (typeof this.incr === "function") { + this.incr(); + } + node = { + value, + prev: this._last, + next: null + }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } else { + this._first = this._last = node; + } + return void 0; + } + shift() { + var value; + if (this._first == null) { + return; + } else { + this.length--; + if (typeof this.decr === "function") { + this.decr(); + } + } + value = this._first.value; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } else { + this._last = null; + } + return value; + } + first() { + if (this._first != null) { + return this._first.value; + } + } + getArray() { + var node, ref, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, ref.value)); + } + return results; + } + forEachShift(cb) { + var node; + node = this.shift(); + while (node != null) { + cb(node), node = this.shift(); + } + return void 0; + } + debug() { + var node, ref, ref1, ref2, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, { + value: ref.value, + prev: (ref1 = ref.prev) != null ? ref1.value : void 0, + next: (ref2 = ref.next) != null ? ref2.value : void 0 + })); + } + return results; + } + }; + var DLList_1 = DLList; + var Events; + Events = class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) { + throw new Error("An Emitter already exists for this object"); + } + this.instance.on = (name, cb) => { + return this._addListener(name, "many", cb); + }; + this.instance.once = (name, cb) => { + return this._addListener(name, "once", cb); + }; + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + return delete this._events[name]; + } else { + return this._events = {}; + } + }; + } + _addListener(name, status, cb) { + var base; + if ((base = this._events)[name] == null) { + base[name] = []; + } + this._events[name].push({ cb, status }); + return this.instance; + } + listenerCount(name) { + if (this._events[name] != null) { + return this._events[name].length; + } else { + return 0; + } + } + async trigger(name, ...args) { + var e, promises; + try { + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + if (this._events[name] == null) { + return; + } + this._events[name] = this._events[name].filter(function(listener) { + return listener.status !== "none"; + }); + promises = this._events[name].map(async (listener) => { + var e2, returned; + if (listener.status === "none") { + return; + } + if (listener.status === "once") { + listener.status = "none"; + } + try { + returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; + if (typeof (returned != null ? 
returned.then : void 0) === "function") { + return await returned; + } else { + return returned; + } + } catch (error2) { + e2 = error2; + { + this.trigger("error", e2); + } + return null; + } + }); + return (await Promise.all(promises)).find(function(x) { + return x != null; + }); + } catch (error2) { + e = error2; + { + this.trigger("error", e); + } + return null; + } + } + }; + var Events_1 = Events; + var DLList$1, Events$1, Queues; + DLList$1 = DLList_1; + Events$1 = Events_1; + Queues = class Queues { + constructor(num_priorities) { + var i; + this.Events = new Events$1(this); + this._length = 0; + this._lists = (function() { + var j, ref, results; + results = []; + for (i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? ++j : --j) { + results.push(new DLList$1((() => { + return this.incr(); + }), (() => { + return this.decr(); + }))); + } + return results; + }).call(this); + } + incr() { + if (this._length++ === 0) { + return this.Events.trigger("leftzero"); + } + } + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); + } + } + push(job) { + return this._lists[job.options.priority].push(job); + } + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } else { + return this._length; + } + } + shiftAll(fn) { + return this._lists.forEach(function(list) { + return list.forEachShift(fn); + }); + } + getFirst(arr = this._lists) { + var j, len, list; + for (j = 0, len = arr.length; j < len; j++) { + list = arr[j]; + if (list.length > 0) { + return list; + } + } + return []; + } + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } + }; + var Queues_1 = Queues; + var BottleneckError; + BottleneckError = class BottleneckError extends Error { + }; + var BottleneckError_1 = BottleneckError; + var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; + NUM_PRIORITIES = 10; + DEFAULT_PRIORITY = 5; + parser$1 = parser; + BottleneckError$1 = BottleneckError_1; + Job = class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events2, _states, Promise2) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events2; + this._states = _states; + this.Promise = Promise2; + this.options = parser$1.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; + } + this.promise = new this.Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; + } + _sanitizePriority(priority) { + var sProperty; + sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } else { + return sProperty; + } + } + _randomIndex() { + return Math.random().toString(36).slice(2); + } + doDrop({ error: error2, message = "This job has been dropped by Bottleneck" } = {}) { + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error2 != null ? 
error2 : new BottleneckError$1(message)); + } + this.Events.trigger("dropped", { args: this.args, options: this.options, task: this.task, promise: this.promise }); + return true; + } else { + return false; + } + } + _assertStatus(expected) { + var status; + status = this._states.jobStatus(this.options.id); + if (!(status === expected || expected === "DONE" && status === null)) { + throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); + } + } + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", { args: this.args, options: this.options }); + } + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", { args: this.args, options: this.options, reachedHWM, blocked }); + } + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", { args: this.args, options: this.options }); + } + async doExecute(chained, clearGlobalState, run, free) { + var error2, eventInfo, passed; + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + this.Events.trigger("executing", eventInfo); + try { + passed = await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args)); + if (clearGlobalState()) { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._resolve(passed); + } + } catch (error1) { + error2 = error1; + return this._onFailure(error2, eventInfo, clearGlobalState, run, free); + } + } + doExpire(clearGlobalState, run, free) { + var error2, eventInfo; + if (this._states.jobStatus(this.options.id === "RUNNING")) { + this._states.next(this.options.id); + } + this._assertStatus("EXECUTING"); + eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + error2 = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); + return this._onFailure(error2, eventInfo, clearGlobalState, run, free); + } + async _onFailure(error2, eventInfo, clearGlobalState, run, free) { + var retry3, retryAfter; + if (clearGlobalState()) { + retry3 = await this.Events.trigger("failed", error2, eventInfo); + if (retry3 != null) { + retryAfter = ~~retry3; + this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); + this.retryCount++; + return run(retryAfter); + } else { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._reject(error2); + } + } + } + doDone(eventInfo) { + this._assertStatus("EXECUTING"); + this._states.next(this.options.id); + return this.Events.trigger("done", eventInfo); + } + }; + var Job_1 = Job; + var BottleneckError$2, LocalDatastore, parser$2; + parser$2 = parser; + BottleneckError$2 = BottleneckError_1; + LocalDatastore = class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser$2.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = 
this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = this.Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); + } + _startHeartbeat() { + var base; + if (this.heartbeat == null && (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null)) { + return typeof (base = this.heartbeat = setInterval(() => { + var amount, incr, maximum, now, reservoir; + now = Date.now(); + if (this.storeOptions.reservoirRefreshInterval != null && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + if (this.storeOptions.reservoirIncreaseInterval != null && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { + ({ + reservoirIncreaseAmount: amount, + reservoirIncreaseMaximum: maximum, + reservoir + } = this.storeOptions); + this._lastReservoirIncrease = now; + incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval)).unref === "function" ? base.unref() : void 0; + } else { + return clearInterval(this.heartbeat); + } + } + async __publish__(message) { + await this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); + } + async __disconnect__(flush) { + await this.yieldLoop(); + clearInterval(this.heartbeat); + return this.Promise.resolve(); + } + yieldLoop(t = 0) { + return new this.Promise(function(resolve2, reject) { + return setTimeout(resolve2, t); + }); + } + computePenalty() { + var ref; + return (ref = this.storeOptions.penalty) != null ? 
ref : 15 * this.storeOptions.minTime || 5e3; + } + async __updateSettings__(options) { + await this.yieldLoop(); + parser$2.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; + } + async __running__() { + await this.yieldLoop(); + return this._running; + } + async __queued__() { + await this.yieldLoop(); + return this.instance.queued(); + } + async __done__() { + await this.yieldLoop(); + return this._done; + } + async __groupCheck__(time) { + await this.yieldLoop(); + return this._nextRequest + this.timeout < time; + } + computeCapacity() { + var maxConcurrent, reservoir; + ({ maxConcurrent, reservoir } = this.storeOptions); + if (maxConcurrent != null && reservoir != null) { + return Math.min(maxConcurrent - this._running, reservoir); + } else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } else if (reservoir != null) { + return reservoir; + } else { + return null; + } + } + conditionsCheck(weight) { + var capacity; + capacity = this.computeCapacity(); + return capacity == null || weight <= capacity; + } + async __incrementReservoir__(incr) { + var reservoir; + await this.yieldLoop(); + reservoir = this.storeOptions.reservoir += incr; + this.instance._drainAll(this.computeCapacity()); + return reservoir; + } + async __currentReservoir__() { + await this.yieldLoop(); + return this.storeOptions.reservoir; + } + isBlocked(now) { + return this._unblockTime >= now; + } + check(weight, now) { + return this.conditionsCheck(weight) && this._nextRequest - now <= 0; + } + async __check__(weight) { + var now; + await this.yieldLoop(); + now = Date.now(); + return this.check(weight, now); + } + async __register__(index, weight, expiration) { + var now, wait; + await this.yieldLoop(); + now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if (this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; + } + wait = Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { + success: true, + wait, + reservoir: this.storeOptions.reservoir + }; + } else { + return { + success: false + }; + } + } + strategyIsBlock() { + return this.storeOptions.strategy === 3; + } + async __submit__(queueLength, weight) { + var blocked, now, reachedHWM; + await this.yieldLoop(); + if (this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); + } + now = Date.now(); + reachedHWM = this.storeOptions.highWater != null && queueLength === this.storeOptions.highWater && !this.check(weight, now); + blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); + } + return { + reachedHWM, + blocked, + strategy: this.storeOptions.strategy + }; + } + async __free__(index, weight) { + await this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { + running: this._running + }; + } + }; + var LocalDatastore_1 = LocalDatastore; + var BottleneckError$3, States; + BottleneckError$3 = BottleneckError_1; + States = class States { + constructor(status1) { + this.status = status1; 
+ this._jobs = {}; + this.counts = this.status.map(function() { + return 0; + }); + } + next(id) { + var current, next; + current = this._jobs[id]; + next = current + 1; + if (current != null && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + return this._jobs[id]++; + } else if (current != null) { + this.counts[current]--; + return delete this._jobs[id]; + } + } + start(id) { + var initial; + initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; + } + remove(id) { + var current; + current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + return current != null; + } + jobStatus(id) { + var ref; + return (ref = this.status[this._jobs[id]]) != null ? ref : null; + } + statusJobs(status) { + var k, pos, ref, results, v; + if (status != null) { + pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError$3(`status must be one of ${this.status.join(", ")}`); + } + ref = this._jobs; + results = []; + for (k in ref) { + v = ref[k]; + if (v === pos) { + results.push(k); + } + } + return results; + } else { + return Object.keys(this._jobs); + } + } + statusCounts() { + return this.counts.reduce(((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }), {}); + } + }; + var States_1 = States; + var DLList$2, Sync; + DLList$2 = DLList_1; + Sync = class Sync { + constructor(name, Promise2) { + this.schedule = this.schedule.bind(this); + this.name = name; + this.Promise = Promise2; + this._running = 0; + this._queue = new DLList$2(); + } + isEmpty() { + return this._queue.length === 0; + } + async _tryToRun() { + var args, cb, error2, reject, resolve2, returned, task; + if (this._running < 1 && this._queue.length > 0) { + this._running++; + ({ task, args, resolve: resolve2, reject } = this._queue.shift()); + cb = await (async function() { + try { + returned = await task(...args); + return function() { + return resolve2(returned); + }; + } catch (error1) { + error2 = error1; + return function() { + return reject(error2); + }; + } + })(); + this._running--; + this._tryToRun(); + return cb(); + } + } + schedule(task, ...args) { + var promise, reject, resolve2; + resolve2 = reject = null; + promise = new this.Promise(function(_resolve, _reject) { + resolve2 = _resolve; + return reject = _reject; + }); + this._queue.push({ task, args, resolve: resolve2, reject }); + this._tryToRun(); + return promise; + } + }; + var Sync_1 = Sync; + var version = "2.19.5"; + var version$1 = { + version + }; + var version$2 = /* @__PURE__ */ Object.freeze({ + version, + default: version$1 + }); + var require$$2 = () => console.log("You must import the full version of Bottleneck in order to use this feature."); + var require$$3 = () => console.log("You must import the full version of Bottleneck in order to use this feature."); + var require$$4 = () => console.log("You must import the full version of Bottleneck in order to use this feature."); + var Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3; + parser$3 = parser; + Events$2 = Events_1; + RedisConnection$1 = require$$2; + IORedisConnection$1 = require$$3; + Scripts$1 = require$$4; + Group = (function() { + class Group2 { + constructor(limiterOptions = {}) { + this.deleteKey = this.deleteKey.bind(this); + this.limiterOptions = limiterOptions; + parser$3.load(this.limiterOptions, this.defaults, this); + this.Events = new Events$2(this); + this.instances = {}; + this.Bottleneck = Bottleneck_1; + this._startAutoCleanup(); 
+ this.sharedConnection = this.connection != null; + if (this.connection == null) { + if (this.limiterOptions.datastore === "redis") { + this.connection = new RedisConnection$1(Object.assign({}, this.limiterOptions, { Events: this.Events })); + } else if (this.limiterOptions.datastore === "ioredis") { + this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, { Events: this.Events })); + } + } + } + key(key = "") { + var ref; + return (ref = this.instances[key]) != null ? ref : (() => { + var limiter; + limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, { + id: `${this.id}-${key}`, + timeout: this.timeout, + connection: this.connection + })); + this.Events.trigger("created", limiter, key); + return limiter; + })(); + } + async deleteKey(key = "") { + var deleted, instance; + instance = this.instances[key]; + if (this.connection) { + deleted = await this.connection.__runCommand__(["del", ...Scripts$1.allKeys(`${this.id}-${key}`)]); + } + if (instance != null) { + delete this.instances[key]; + await instance.disconnect(); + } + return instance != null || deleted > 0; + } + limiters() { + var k, ref, results, v; + ref = this.instances; + results = []; + for (k in ref) { + v = ref[k]; + results.push({ + key: k, + limiter: v + }); + } + return results; + } + keys() { + return Object.keys(this.instances); + } + async clusterKeys() { + var cursor, end, found, i, k, keys, len, next, start; + if (this.connection == null) { + return this.Promise.resolve(this.keys()); + } + keys = []; + cursor = null; + start = `b_${this.id}-`.length; + end = "_settings".length; + while (cursor !== 0) { + [next, found] = await this.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", `b_${this.id}-*_settings`, "count", 1e4]); + cursor = ~~next; + for (i = 0, len = found.length; i < len; i++) { + k = found[i]; + keys.push(k.slice(start, -end)); + } + } + return keys; + } + _startAutoCleanup() { + var base; + clearInterval(this.interval); + return typeof (base = this.interval = setInterval(async () => { + var e, k, ref, results, time, v; + time = Date.now(); + ref = this.instances; + results = []; + for (k in ref) { + v = ref[k]; + try { + if (await v._store.__groupCheck__(time)) { + results.push(this.deleteKey(k)); + } else { + results.push(void 0); + } + } catch (error2) { + e = error2; + results.push(v.Events.trigger("error", e)); + } + } + return results; + }, this.timeout / 2)).unref === "function" ? base.unref() : void 0; + } + updateSettings(options = {}) { + parser$3.overwrite(options, this.defaults, this); + parser$3.overwrite(options, options, this.limiterOptions); + if (options.timeout != null) { + return this._startAutoCleanup(); + } + } + disconnect(flush = true) { + var ref; + if (!this.sharedConnection) { + return (ref = this.connection) != null ? 
ref.disconnect(flush) : void 0; + } + } + } + Group2.prototype.defaults = { + timeout: 1e3 * 60 * 5, + connection: null, + Promise, + id: "group-key" + }; + return Group2; + }).call(commonjsGlobal); + var Group_1 = Group; + var Batcher, Events$3, parser$4; + parser$4 = parser; + Events$3 = Events_1; + Batcher = (function() { + class Batcher2 { + constructor(options = {}) { + this.options = options; + parser$4.load(this.options, this.defaults, this); + this.Events = new Events$3(this); + this._arr = []; + this._resetPromise(); + this._lastFlush = Date.now(); + } + _resetPromise() { + return this._promise = new this.Promise((res, rej) => { + return this._resolve = res; + }); + } + _flush() { + clearTimeout(this._timeout); + this._lastFlush = Date.now(); + this._resolve(); + this.Events.trigger("batch", this._arr); + this._arr = []; + return this._resetPromise(); + } + add(data) { + var ret; + this._arr.push(data); + ret = this._promise; + if (this._arr.length === this.maxSize) { + this._flush(); + } else if (this.maxTime != null && this._arr.length === 1) { + this._timeout = setTimeout(() => { + return this._flush(); + }, this.maxTime); + } + return ret; + } + } + Batcher2.prototype.defaults = { + maxTime: null, + maxSize: null, + Promise + }; + return Batcher2; + }).call(commonjsGlobal); + var Batcher_1 = Batcher; + var require$$4$1 = () => console.log("You must import the full version of Bottleneck in order to use this feature."); + var require$$8 = getCjsExportFromNamespace(version$2); + var Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5, splice = [].splice; + NUM_PRIORITIES$1 = 10; + DEFAULT_PRIORITY$1 = 5; + parser$5 = parser; + Queues$1 = Queues_1; + Job$1 = Job_1; + LocalDatastore$1 = LocalDatastore_1; + RedisDatastore$1 = require$$4$1; + Events$4 = Events_1; + States$1 = States_1; + Sync$1 = Sync_1; + Bottleneck = (function() { + class Bottleneck2 { + constructor(options = {}, ...invalid) { + var storeInstanceOptions, storeOptions; + this._addToQueue = this._addToQueue.bind(this); + this._validateOptions(options, invalid); + parser$5.load(options, this.instanceDefaults, this); + this._queues = new Queues$1(NUM_PRIORITIES$1); + this._scheduled = {}; + this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); + this._limiter = null; + this.Events = new Events$4(this); + this._submitLock = new Sync$1("submit", this.Promise); + this._registerLock = new Sync$1("register", this.Promise); + storeOptions = parser$5.load(options, this.storeDefaults, {}); + this._store = (function() { + if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) { + storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {}); + return new RedisDatastore$1(this, storeOptions, storeInstanceOptions); + } else if (this.datastore === "local") { + storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {}); + return new LocalDatastore$1(this, storeOptions, storeInstanceOptions); + } else { + throw new Bottleneck2.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`); + } + }).call(this); + this._queues.on("leftzero", () => { + var ref; + return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0; + }); + this._queues.on("zero", () => { + var ref; + return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? 
ref.unref() : void 0 : void 0; + }); + } + _validateOptions(options, invalid) { + if (!(options != null && typeof options === "object" && invalid.length === 0)) { + throw new Bottleneck2.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); + } + } + ready() { + return this._store.ready; + } + clients() { + return this._store.clients; + } + channel() { + return `b_${this.id}`; + } + channel_client() { + return `b_${this.id}_${this._store.clientId}`; + } + publish(message) { + return this._store.__publish__(message); + } + disconnect(flush = true) { + return this._store.__disconnect__(flush); + } + chain(_limiter) { + this._limiter = _limiter; + return this; + } + queued(priority) { + return this._queues.queued(priority); + } + clusterQueued() { + return this._store.__queued__(); + } + empty() { + return this.queued() === 0 && this._submitLock.isEmpty(); + } + running() { + return this._store.__running__(); + } + done() { + return this._store.__done__(); + } + jobStatus(id) { + return this._states.jobStatus(id); + } + jobs(status) { + return this._states.statusJobs(status); + } + counts() { + return this._states.statusCounts(); + } + _randomIndex() { + return Math.random().toString(36).slice(2); + } + check(weight = 1) { + return this._store.__check__(weight); + } + _clearGlobalState(index) { + if (this._scheduled[index] != null) { + clearTimeout(this._scheduled[index].expiration); + delete this._scheduled[index]; + return true; + } else { + return false; + } + } + async _free(index, job, options, eventInfo) { + var e, running; + try { + ({ running } = await this._store.__free__(index, options.weight)); + this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); + if (running === 0 && this.empty()) { + return this.Events.trigger("idle"); + } + } catch (error1) { + e = error1; + return this.Events.trigger("error", e); + } + } + _run(index, job, wait) { + var clearGlobalState, free, run; + job.doRun(); + clearGlobalState = this._clearGlobalState.bind(this, index); + run = this._run.bind(this, index, job); + free = this._free.bind(this, index, job); + return this._scheduled[index] = { + timeout: setTimeout(() => { + return job.doExecute(this._limiter, clearGlobalState, run, free); + }, wait), + expiration: job.options.expiration != null ? 
setTimeout(function() { + return job.doExpire(clearGlobalState, run, free); + }, wait + job.options.expiration) : void 0, + job + }; + } + _drainOne(capacity) { + return this._registerLock.schedule(() => { + var args, index, next, options, queue; + if (this.queued() === 0) { + return this.Promise.resolve(null); + } + queue = this._queues.getFirst(); + ({ options, args } = next = queue.first()); + if (capacity != null && options.weight > capacity) { + return this.Promise.resolve(null); + } + this.Events.trigger("debug", `Draining ${options.id}`, { args, options }); + index = this._randomIndex(); + return this._store.__register__(index, options.weight, options.expiration).then(({ success, wait, reservoir }) => { + var empty; + this.Events.trigger("debug", `Drained ${options.id}`, { success, args, options }); + if (success) { + queue.shift(); + empty = this.empty(); + if (empty) { + this.Events.trigger("empty"); + } + if (reservoir === 0) { + this.Events.trigger("depleted", empty); + } + this._run(index, next, wait); + return this.Promise.resolve(options.weight); + } else { + return this.Promise.resolve(null); + } + }); + }); + } + _drainAll(capacity, total = 0) { + return this._drainOne(capacity).then((drained) => { + var newCapacity; + if (drained != null) { + newCapacity = capacity != null ? capacity - drained : capacity; + return this._drainAll(newCapacity, total + drained); + } else { + return this.Promise.resolve(total); + } + }).catch((e) => { + return this.Events.trigger("error", e); + }); + } + _dropAllQueued(message) { + return this._queues.shiftAll(function(job) { + return job.doDrop({ message }); + }); + } + stop(options = {}) { + var done, waitForExecuting; + options = parser$5.load(options, this.stopDefaults); + waitForExecuting = (at) => { + var finished; + finished = () => { + var counts; + counts = this._states.counts; + return counts[0] + counts[1] + counts[2] + counts[3] === at; + }; + return new this.Promise((resolve2, reject) => { + if (finished()) { + return resolve2(); + } else { + return this.on("done", () => { + if (finished()) { + this.removeAllListeners("done"); + return resolve2(); + } + }); + } + }); + }; + done = options.dropWaitingJobs ? 
(this._run = function(index, next) { + return next.doDrop({ + message: options.dropErrorMessage + }); + }, this._drainOne = () => { + return this.Promise.resolve(null); + }, this._registerLock.schedule(() => { + return this._submitLock.schedule(() => { + var k, ref, v; + ref = this._scheduled; + for (k in ref) { + v = ref[k]; + if (this.jobStatus(v.job.options.id) === "RUNNING") { + clearTimeout(v.timeout); + clearTimeout(v.expiration); + v.job.doDrop({ + message: options.dropErrorMessage + }); + } + } + this._dropAllQueued(options.dropErrorMessage); + return waitForExecuting(0); + }); + })) : this.schedule({ + priority: NUM_PRIORITIES$1 - 1, + weight: 0 + }, () => { + return waitForExecuting(1); + }); + this._receive = function(job) { + return job._reject(new Bottleneck2.prototype.BottleneckError(options.enqueueErrorMessage)); + }; + this.stop = () => { + return this.Promise.reject(new Bottleneck2.prototype.BottleneckError("stop() has already been called")); + }; + return done; + } + async _addToQueue(job) { + var args, blocked, error2, options, reachedHWM, shifted, strategy; + ({ args, options } = job); + try { + ({ reachedHWM, blocked, strategy } = await this._store.__submit__(this.queued(), options.weight)); + } catch (error1) { + error2 = error1; + this.Events.trigger("debug", `Could not queue ${options.id}`, { args, options, error: error2 }); + job.doDrop({ error: error2 }); + return false; + } + if (blocked) { + job.doDrop(); + return true; + } else if (reachedHWM) { + shifted = strategy === Bottleneck2.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck2.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck2.prototype.strategy.OVERFLOW ? job : void 0; + if (shifted != null) { + shifted.doDrop(); + } + if (shifted == null || strategy === Bottleneck2.prototype.strategy.OVERFLOW) { + if (shifted == null) { + job.doDrop(); + } + return reachedHWM; + } + } + job.doQueue(reachedHWM, blocked); + this._queues.push(job); + await this._drainAll(); + return reachedHWM; + } + _receive(job) { + if (this._states.jobStatus(job.options.id) != null) { + job._reject(new Bottleneck2.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); + return false; + } else { + job.doReceive(); + return this._submitLock.schedule(this._addToQueue, job); + } + } + submit(...args) { + var cb, fn, job, options, ref, ref1, task; + if (typeof args[0] === "function") { + ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1); + options = parser$5.load({}, this.jobDefaults); + } else { + ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1); + options = parser$5.load(options, this.jobDefaults); + } + task = (...args2) => { + return new this.Promise(function(resolve2, reject) { + return fn(...args2, function(...args3) { + return (args3[0] != null ? reject : resolve2)(args3); + }); + }); + }; + job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); + job.promise.then(function(args2) { + return typeof cb === "function" ? cb(...args2) : void 0; + }).catch(function(args2) { + if (Array.isArray(args2)) { + return typeof cb === "function" ? cb(...args2) : void 0; + } else { + return typeof cb === "function" ? 
cb(args2) : void 0; + } + }); + return this._receive(job); + } + schedule(...args) { + var job, options, task; + if (typeof args[0] === "function") { + [task, ...args] = args; + options = {}; + } else { + [options, task, ...args] = args; + } + job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); + this._receive(job); + return job.promise; + } + wrap(fn) { + var schedule, wrapped; + schedule = this.schedule.bind(this); + wrapped = function(...args) { + return schedule(fn.bind(this), ...args); + }; + wrapped.withOptions = function(options, ...args) { + return schedule(options, fn, ...args); + }; + return wrapped; + } + async updateSettings(options = {}) { + await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults)); + parser$5.overwrite(options, this.instanceDefaults, this); + return this; + } + currentReservoir() { + return this._store.__currentReservoir__(); + } + incrementReservoir(incr = 0) { + return this._store.__incrementReservoir__(incr); + } + } + Bottleneck2.default = Bottleneck2; + Bottleneck2.Events = Events$4; + Bottleneck2.version = Bottleneck2.prototype.version = require$$8.version; + Bottleneck2.strategy = Bottleneck2.prototype.strategy = { + LEAK: 1, + OVERFLOW: 2, + OVERFLOW_PRIORITY: 4, + BLOCK: 3 + }; + Bottleneck2.BottleneckError = Bottleneck2.prototype.BottleneckError = BottleneckError_1; + Bottleneck2.Group = Bottleneck2.prototype.Group = Group_1; + Bottleneck2.RedisConnection = Bottleneck2.prototype.RedisConnection = require$$2; + Bottleneck2.IORedisConnection = Bottleneck2.prototype.IORedisConnection = require$$3; + Bottleneck2.Batcher = Bottleneck2.prototype.Batcher = Batcher_1; + Bottleneck2.prototype.jobDefaults = { + priority: DEFAULT_PRIORITY$1, + weight: 1, + expiration: null, + id: "" + }; + Bottleneck2.prototype.storeDefaults = { + maxConcurrent: null, + minTime: 0, + highWater: null, + strategy: Bottleneck2.prototype.strategy.LEAK, + penalty: null, + reservoir: null, + reservoirRefreshInterval: null, + reservoirRefreshAmount: null, + reservoirIncreaseInterval: null, + reservoirIncreaseAmount: null, + reservoirIncreaseMaximum: null + }; + Bottleneck2.prototype.localStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 250 + }; + Bottleneck2.prototype.redisStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 5e3, + clientTimeout: 1e4, + Redis: null, + clientOptions: {}, + clusterNodes: null, + clearDatastore: false, + connection: null + }; + Bottleneck2.prototype.instanceDefaults = { + datastore: "local", + connection: null, + id: "", + rejectOnDrop: true, + trackDoneStatus: false, + Promise + }; + Bottleneck2.prototype.stopDefaults = { + enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", + dropWaitingJobs: true, + dropErrorMessage: "This limiter has been stopped." 
+ }; + return Bottleneck2; + }).call(commonjsGlobal); + var Bottleneck_1 = Bottleneck; + var lib = Bottleneck_1; + return lib; + })); + } +}); + +// node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error/dist-node/index.js +var require_dist_node14 = __commonJS({ + "node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error/dist-node/index.js"(exports2, module2) { + "use strict"; + var __create2 = Object.create; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __getProtoOf2 = Object.getPrototypeOf; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export = (target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }; + var __copyProps2 = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }; + var __toESM2 = (mod, isNodeMode, target) => (target = mod != null ? __create2(__getProtoOf2(mod)) : {}, __copyProps2( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target, + mod + )); + var __toCommonJS = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod); + var dist_src_exports = {}; + __export(dist_src_exports, { + RequestError: () => RequestError + }); + module2.exports = __toCommonJS(dist_src_exports); + var import_deprecation = require_dist_node3(); + var import_once = __toESM2(require_once()); + var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); + var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); + var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + /(? { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }; + var __copyProps2 = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }; + var __toESM2 = (mod, isNodeMode, target) => (target = mod != null ? 
__create2(__getProtoOf2(mod)) : {}, __copyProps2( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target, + mod + )); + var __toCommonJS = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod); + var dist_src_exports = {}; + __export(dist_src_exports, { + VERSION: () => VERSION, + retry: () => retry3 + }); + module2.exports = __toCommonJS(dist_src_exports); + var import_core = require_dist_node11(); + async function errorRequest(state, octokit, error2, options) { + if (!error2.request || !error2.request.request) { + throw error2; + } + if (error2.status >= 400 && !state.doNotRetry.includes(error2.status)) { + const retries = options.request.retries != null ? options.request.retries : state.retries; + const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2); + throw octokit.retry.retryRequest(error2, retries, retryAfter); + } + throw error2; + } + var import_light = __toESM2(require_light()); + var import_request_error = require_dist_node14(); + async function wrapRequest(state, octokit, request, options) { + const limiter = new import_light.default(); + limiter.on("failed", function(error2, info3) { + const maxRetries = ~~error2.request.request.retries; + const after = ~~error2.request.request.retryAfter; + options.request.retryCount = info3.retryCount + 1; + if (maxRetries > info3.retryCount) { + return after * state.retryAfterBaseValue; + } + }); + return limiter.schedule( + requestWithGraphqlErrorHandling.bind(null, state, octokit, request), + options + ); + } + async function requestWithGraphqlErrorHandling(state, octokit, request, options) { + const response = await request(request, options); + if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test( + response.data.errors[0].message + )) { + const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, { + request: options, + response + }); + return errorRequest(state, octokit, error2, options); + } + return response; + } + var VERSION = "6.1.0"; + function retry3(octokit, octokitOptions) { + const state = Object.assign( + { + enabled: true, + retryAfterBaseValue: 1e3, + doNotRetry: [400, 401, 403, 404, 422, 451], + retries: 3 + }, + octokitOptions.retry + ); + if (state.enabled) { + octokit.hook.error("request", errorRequest.bind(null, state, octokit)); + octokit.hook.wrap("request", wrapRequest.bind(null, state, octokit)); + } + return { + retry: { + retryRequest: (error2, retries, retryAfter) => { + error2.request.request = Object.assign({}, error2.request.request, { + retries, + retryAfter + }); + return error2; + } + } + }; + } + retry3.VERSION = VERSION; + } +}); + +// node_modules/console-log-level/index.js +var require_console_log_level = __commonJS({ + "node_modules/console-log-level/index.js"(exports2, module2) { + "use strict"; + var util = require("util"); + var levels = ["trace", "debug", "info", "warn", "error", "fatal"]; + var noop = function() { + }; + module2.exports = function(opts) { + opts = opts || {}; + opts.level = opts.level || "info"; + var logger = {}; + var shouldLog = function(level) { + return levels.indexOf(level) >= 
levels.indexOf(opts.level); + }; + levels.forEach(function(level) { + logger[level] = shouldLog(level) ? log : noop; + function log() { + var prefix = opts.prefix; + var normalizedLevel; + if (opts.stderr) { + normalizedLevel = "error"; + } else { + switch (level) { + case "trace": + normalizedLevel = "info"; + break; + case "debug": + normalizedLevel = "info"; + break; + case "fatal": + normalizedLevel = "error"; + break; + default: + normalizedLevel = level; + } + } + if (prefix) { + if (typeof prefix === "function") prefix = prefix(level); + arguments[0] = util.format(prefix, arguments[0]); + } + console[normalizedLevel](util.format.apply(util, arguments)); + } + }); + return logger; + }; + } +}); + // src/start-proxy-action.ts var import_child_process = require("child_process"); var path = __toESM(require("path")); -var core7 = __toESM(require_core()); +var core8 = __toESM(require_core()); var toolcache = __toESM(require_tool_cache()); var import_node_forge = __toESM(require_lib2()); @@ -47759,6 +49329,13 @@ function isDefined(value) { // src/actions-util.ts var pkg = require_package(); +var getRequiredInput = function(name) { + const value = core4.getInput(name); + if (!value) { + throw new ConfigurationError(`Input required and not supplied: ${name}`); + } + return value; +}; var getOptionalInput = function(name) { const value = core4.getInput(name); return value.length > 0 ? value : void 0; @@ -47767,6 +49344,9 @@ function getTemporaryDirectory() { const value = process.env["CODEQL_ACTION_TEMP"]; return value !== void 0 && value !== "" ? value : getRequiredEnvParam("RUNNER_TEMP"); } +function getActionVersion() { + return pkg.version; +} var persistedInputsKey = "persisted_inputs"; var persistInputs = function() { const inputEnvironmentVariables = Object.entries(process.env).filter( @@ -47782,7 +49362,38 @@ function getActionsLogger() { } // src/start-proxy.ts +var core7 = __toESM(require_core()); + +// src/api-client.ts var core6 = __toESM(require_core()); +var githubUtils = __toESM(require_utils4()); +var retry = __toESM(require_dist_node15()); +var import_console_log_level = __toESM(require_console_log_level()); +function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { + const auth = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; + const retryingOctokit = githubUtils.GitHub.plugin(retry.retry); + return new retryingOctokit( + githubUtils.getOctokitOptions(auth, { + baseUrl: apiDetails.apiURL, + userAgent: `CodeQL-Action/${getActionVersion()}`, + log: (0, import_console_log_level.default)({ level: "debug" }) + }) + ); +} +function getApiDetails() { + return { + auth: getRequiredInput("token"), + url: getRequiredEnvParam("GITHUB_SERVER_URL"), + apiURL: getRequiredEnvParam("GITHUB_API_URL") + }; +} +function getApiClient() { + return createApiClientWithDetails(getApiDetails()); +} + +// src/defaults.json +var bundleVersion = "codeql-bundle-v2.23.0"; +var cliVersion = "2.23.0"; // src/languages.ts var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { @@ -47863,10 +49474,10 @@ function getCredentials(logger, registrySecrets, registriesCredentials, language throw new ConfigurationError("Invalid credentials - must be an object"); } if (isDefined(e.password)) { - core6.setSecret(e.password); + core7.setSecret(e.password); } if (isDefined(e.token)) { - core6.setSecret(e.token); + core7.setSecret(e.token); } if (!isDefined(e.url) && !isDefined(e.host)) { throw new ConfigurationError( @@ -47895,6 +49506,51 @@ function getCredentials(logger, 
registrySecrets, registriesCredentials, language } return out; } +function getProxyPackage() { + const platform = process.platform === "win32" ? "win64" : process.platform === "darwin" ? "osx64" : "linux64"; + return `${UPDATEJOB_PROXY}-${platform}.tar.gz`; +} +function getFallbackUrl(proxyPackage) { + return `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`; +} +async function getCurrentRelease() { + return getApiClient().rest.repos.getReleaseByTag({ + owner: "github", + repo: "codeql-action", + tag: bundleVersion + }); +} +async function getDownloadUrl(logger) { + const proxyPackage = getProxyPackage(); + try { + const cliRelease = await getCurrentRelease(); + for (const asset of cliRelease.data.assets) { + if (asset.name === proxyPackage) { + logger.info( + `Found '${proxyPackage}' in release '${bundleVersion}' at '${asset.url}'` + ); + return { + url: asset.url, + // The `update-job-proxy` doesn't have a version as such. Since we now bundle it + // with CodeQL CLI bundle releases, we use the corresponding CLI version to + // differentiate between (potentially) different versions of `update-job-proxy`. + version: cliVersion + }; + } + } + } catch (ex) { + logger.warning( + `Failed to retrieve information about the current release: ${getErrorMessage(ex)}` + ); + } + logger.info( + `Did not find '${proxyPackage}' in current release, falling back to hard-coded version.` + ); + return { + url: getFallbackUrl(proxyPackage), + version: UPDATEJOB_PROXY_VERSION + }; +} // src/start-proxy-action.ts var KEY_SIZE = 2048; @@ -47948,7 +49604,7 @@ async function runWrapper() { const logger = getActionsLogger(); const tempDir = getTemporaryDirectory(); const proxyLogFilePath = path.resolve(tempDir, "proxy.log"); - core7.saveState("proxy-log-file", proxyLogFilePath); + core8.saveState("proxy-log-file", proxyLogFilePath); const credentials = getCredentials( logger, getOptionalInput("registry_secrets"), @@ -47968,7 +49624,7 @@ async function runWrapper() { all_credentials: credentials, ca }; - const proxyBin = await getProxyBinaryPath(); + const proxyBin = await getProxyBinaryPath(logger); await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger); } async function startProxy(binPath, config, logFilePath, logger) { @@ -47989,7 +49645,7 @@ async function startProxy(binPath, config, logFilePath, logger) { ); subprocess.unref(); if (subprocess.pid) { - core7.saveState("proxy-process-pid", `${subprocess.pid}`); + core8.saveState("proxy-process-pid", `${subprocess.pid}`); } subprocess.on("error", (error2) => { subprocessError = error2; @@ -48008,31 +49664,29 @@ async function startProxy(binPath, config, logFilePath, logger) { throw subprocessError; } logger.info(`Proxy started on ${host}:${port}`); - core7.setOutput("proxy_host", host); - core7.setOutput("proxy_port", port.toString()); - core7.setOutput("proxy_ca_certificate", config.ca.cert); + core8.setOutput("proxy_host", host); + core8.setOutput("proxy_port", port.toString()); + core8.setOutput("proxy_ca_certificate", config.ca.cert); const registry_urls = config.all_credentials.filter((credential) => credential.url !== void 0).map((credential) => ({ type: credential.type, url: credential.url })); - core7.setOutput("proxy_urls", JSON.stringify(registry_urls)); + core8.setOutput("proxy_urls", JSON.stringify(registry_urls)); } catch (error2) { - core7.setFailed(`start-proxy action failed: ${getErrorMessage(error2)}`); + core8.setFailed(`start-proxy action failed: ${getErrorMessage(error2)}`); } } -async function getProxyBinaryPath() { +async function 
getProxyBinaryPath(logger) { const proxyFileName = process.platform === "win32" ? `${UPDATEJOB_PROXY}.exe` : UPDATEJOB_PROXY; - const platform = process.platform === "win32" ? "win64" : process.platform === "darwin" ? "osx64" : "linux64"; - const proxyPackage = `${UPDATEJOB_PROXY}-${platform}.tar.gz`; - const proxyURL = `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`; - let proxyBin = toolcache.find(proxyFileName, UPDATEJOB_PROXY_VERSION); + const proxyInfo = await getDownloadUrl(logger); + let proxyBin = toolcache.find(proxyFileName, proxyInfo.version); if (!proxyBin) { - const temp = await toolcache.downloadTool(proxyURL); + const temp = await toolcache.downloadTool(proxyInfo.url); const extracted = await toolcache.extractTar(temp); proxyBin = await toolcache.cacheDir( extracted, proxyFileName, - UPDATEJOB_PROXY_VERSION + proxyInfo.version ); } proxyBin = path.join(proxyBin, proxyFileName); diff --git a/src/start-proxy-action.ts b/src/start-proxy-action.ts index 73204edf56..6ce3b70ff4 100644 --- a/src/start-proxy-action.ts +++ b/src/start-proxy-action.ts @@ -10,9 +10,8 @@ import { getActionsLogger, Logger } from "./logging"; import { Credential, getCredentials, + getDownloadUrl, UPDATEJOB_PROXY, - UPDATEJOB_PROXY_URL_PREFIX, - UPDATEJOB_PROXY_VERSION, } from "./start-proxy"; import * as util from "./util"; @@ -121,7 +120,7 @@ async function runWrapper() { }; // Start the Proxy - const proxyBin = await getProxyBinaryPath(); + const proxyBin = await getProxyBinaryPath(logger); await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger); } @@ -186,26 +185,19 @@ async function startProxy( } } -async function getProxyBinaryPath(): Promise { +async function getProxyBinaryPath(logger: Logger): Promise { const proxyFileName = process.platform === "win32" ? `${UPDATEJOB_PROXY}.exe` : UPDATEJOB_PROXY; - const platform = - process.platform === "win32" - ? "win64" - : process.platform === "darwin" - ? "osx64" - : "linux64"; - const proxyPackage = `${UPDATEJOB_PROXY}-${platform}.tar.gz`; - const proxyURL = `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`; - - let proxyBin = toolcache.find(proxyFileName, UPDATEJOB_PROXY_VERSION); + const proxyInfo = await getDownloadUrl(logger); + + let proxyBin = toolcache.find(proxyFileName, proxyInfo.version); if (!proxyBin) { - const temp = await toolcache.downloadTool(proxyURL); + const temp = await toolcache.downloadTool(proxyInfo.url); const extracted = await toolcache.extractTar(temp); proxyBin = await toolcache.cacheDir( extracted, proxyFileName, - UPDATEJOB_PROXY_VERSION, + proxyInfo.version, ); } proxyBin = path.join(proxyBin, proxyFileName); diff --git a/src/start-proxy.test.ts b/src/start-proxy.test.ts index 3e2748aaf9..e344c17734 100644 --- a/src/start-proxy.test.ts +++ b/src/start-proxy.test.ts @@ -1,5 +1,8 @@ import test from "ava"; +import sinon from "sinon"; +import * as apiClient from "./api-client"; +import * as defaults from "./defaults.json"; import { KnownLanguage } from "./languages"; import { getRunnerLogger } from "./logging"; import * as startProxyExports from "./start-proxy"; @@ -197,3 +200,68 @@ test("parseLanguage", async (t) => { t.deepEqual(parseLanguage(" "), undefined); t.deepEqual(parseLanguage(""), undefined); }); + +function mockGetReleaseByTag(assets?: Array<{ name: string; url?: string }>) { + const mockClient = sinon.stub(apiClient, "getApiClient"); + const getReleaseByTag = + assets === undefined + ? 
sinon.stub().rejects() + : sinon.stub().resolves({ + status: 200, + data: { assets }, + headers: {}, + url: "GET /repos/:owner/:repo/releases/tags/:tag", + }); + + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + mockClient.returns({ + rest: { + repos: { + getReleaseByTag, + }, + }, + } as any); + return mockClient; +} + +test("getDownloadUrl returns fallback when `getCurrentRelease` rejects", async (t) => { + mockGetReleaseByTag(); + + const info = await startProxyExports.getDownloadUrl(getRunnerLogger(true)); + + t.is(info.version, startProxyExports.UPDATEJOB_PROXY_VERSION); + t.is( + info.url, + startProxyExports.getFallbackUrl(startProxyExports.getProxyPackage()), + ); +}); + +test("getDownloadUrl returns fallback when there's no matching release asset", async (t) => { + const testAssets = [[], [{ name: "foo" }]]; + + for (const assets of testAssets) { + const stub = mockGetReleaseByTag(assets); + const info = await startProxyExports.getDownloadUrl(getRunnerLogger(true)); + + t.is(info.version, startProxyExports.UPDATEJOB_PROXY_VERSION); + t.is( + info.url, + startProxyExports.getFallbackUrl(startProxyExports.getProxyPackage()), + ); + + stub.restore(); + } +}); + +test("getDownloadUrl returns matching release asset", async (t) => { + const assets = [ + { name: "foo", url: "other-url" }, + { name: startProxyExports.getProxyPackage(), url: "url-we-want" }, + ]; + mockGetReleaseByTag(assets); + + const info = await startProxyExports.getDownloadUrl(getRunnerLogger(true)); + + t.is(info.version, defaults.cliVersion); + t.is(info.url, "url-we-want"); +}); diff --git a/src/start-proxy.ts b/src/start-proxy.ts index b29b143172..2de9b5bbbe 100644 --- a/src/start-proxy.ts +++ b/src/start-proxy.ts @@ -1,8 +1,10 @@ import * as core from "@actions/core"; +import { getApiClient } from "./api-client"; +import * as defaults from "./defaults.json"; import { KnownLanguage } from "./languages"; import { Logger } from "./logging"; -import { ConfigurationError, isDefined } from "./util"; +import { ConfigurationError, getErrorMessage, isDefined } from "./util"; export const UPDATEJOB_PROXY = "update-job-proxy"; export const UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901"; @@ -171,3 +173,87 @@ export function getCredentials( } return out; } + +/** + * Gets the name of the proxy release asset for the current platform. + */ +export function getProxyPackage(): string { + const platform = + process.platform === "win32" + ? "win64" + : process.platform === "darwin" + ? "osx64" + : "linux64"; + return `${UPDATEJOB_PROXY}-${platform}.tar.gz`; +} + +/** + * Gets the fallback URL for downloading the proxy release asset. + * + * @param proxyPackage The asset name. + * @returns The full URL to download the specified asset from the fallback release. + */ +export function getFallbackUrl(proxyPackage: string): string { + return `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`; +} + +/** + * Uses the GitHub API to obtain information about the CodeQL CLI bundle release + * that is pointed at by `defaults.json`. + * + * @returns The response from the GitHub API. + */ +export async function getCurrentRelease() { + return getApiClient().rest.repos.getReleaseByTag({ + owner: "github", + repo: "codeql-action", + tag: defaults.bundleVersion, + }); +} + +/** + * Determines the URL of the proxy release asset that we should download if its not + * already in the toolcache, and its version. + * + * @param logger The logger to use. + * @returns Returns the download URL and version of the proxy package we plan to use. 
+ */ +export async function getDownloadUrl( + logger: Logger, +): Promise<{ url: string; version: string }> { + const proxyPackage = getProxyPackage(); + + try { + // Try to retrieve information about the CLI bundle release pointed at by `defaults.json`. + const cliRelease = await getCurrentRelease(); + + // Search the release's assets to find the one we are looking for. + for (const asset of cliRelease.data.assets) { + if (asset.name === proxyPackage) { + logger.info( + `Found '${proxyPackage}' in release '${defaults.bundleVersion}' at '${asset.url}'`, + ); + return { + url: asset.url, + // The `update-job-proxy` doesn't have a version as such. Since we now bundle it + // with CodeQL CLI bundle releases, we use the corresponding CLI version to + // differentiate between (potentially) different versions of `update-job-proxy`. + version: defaults.cliVersion, + }; + } + } + } catch (ex) { + logger.warning( + `Failed to retrieve information about the current release: ${getErrorMessage(ex)}`, + ); + } + + // Fallback to the hard-coded URL. + logger.info( + `Did not find '${proxyPackage}' in current release, falling back to hard-coded version.`, + ); + return { + url: getFallbackUrl(proxyPackage), + version: UPDATEJOB_PROXY_VERSION, + }; +} From e2636d2e4f6b1eb32a4bde1bbdb849c62c21436d Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Fri, 12 Sep 2025 11:15:03 +0100 Subject: [PATCH 3/3] Change "current release" to "linked release" --- lib/start-proxy-action.js | 8 ++++---- src/start-proxy.test.ts | 2 +- src/start-proxy.ts | 8 ++++---- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 239f4662bb..9d9d46230c 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -49513,7 +49513,7 @@ function getProxyPackage() { function getFallbackUrl(proxyPackage) { return `${UPDATEJOB_PROXY_URL_PREFIX}${proxyPackage}`; } -async function getCurrentRelease() { +async function getLinkedRelease() { return getApiClient().rest.repos.getReleaseByTag({ owner: "github", repo: "codeql-action", @@ -49523,7 +49523,7 @@ async function getCurrentRelease() { async function getDownloadUrl(logger) { const proxyPackage = getProxyPackage(); try { - const cliRelease = await getCurrentRelease(); + const cliRelease = await getLinkedRelease(); for (const asset of cliRelease.data.assets) { if (asset.name === proxyPackage) { logger.info( @@ -49540,11 +49540,11 @@ async function getDownloadUrl(logger) { } } catch (ex) { logger.warning( - `Failed to retrieve information about the current release: ${getErrorMessage(ex)}` + `Failed to retrieve information about the linked release: ${getErrorMessage(ex)}` ); } logger.info( - `Did not find '${proxyPackage}' in current release, falling back to hard-coded version.` + `Did not find '${proxyPackage}' in the linked release, falling back to hard-coded version.` ); return { url: getFallbackUrl(proxyPackage), diff --git a/src/start-proxy.test.ts b/src/start-proxy.test.ts index e344c17734..dfd55d72fc 100644 --- a/src/start-proxy.test.ts +++ b/src/start-proxy.test.ts @@ -224,7 +224,7 @@ function mockGetReleaseByTag(assets?: Array<{ name: string; url?: string }>) { return mockClient; } -test("getDownloadUrl returns fallback when `getCurrentRelease` rejects", async (t) => { +test("getDownloadUrl returns fallback when `getLinkedRelease` rejects", async (t) => { mockGetReleaseByTag(); const info = await startProxyExports.getDownloadUrl(getRunnerLogger(true)); diff --git a/src/start-proxy.ts b/src/start-proxy.ts 
index 2de9b5bbbe..dd1e443b76 100644 --- a/src/start-proxy.ts +++ b/src/start-proxy.ts @@ -203,7 +203,7 @@ export function getFallbackUrl(proxyPackage: string): string { * * @returns The response from the GitHub API. */ -export async function getCurrentRelease() { +export async function getLinkedRelease() { return getApiClient().rest.repos.getReleaseByTag({ owner: "github", repo: "codeql-action", @@ -225,7 +225,7 @@ export async function getDownloadUrl( try { // Try to retrieve information about the CLI bundle release pointed at by `defaults.json`. - const cliRelease = await getCurrentRelease(); + const cliRelease = await getLinkedRelease(); // Search the release's assets to find the one we are looking for. for (const asset of cliRelease.data.assets) { @@ -244,13 +244,13 @@ export async function getDownloadUrl( } } catch (ex) { logger.warning( - `Failed to retrieve information about the current release: ${getErrorMessage(ex)}`, + `Failed to retrieve information about the linked release: ${getErrorMessage(ex)}`, ); } // Fallback to the hard-coded URL. logger.info( - `Did not find '${proxyPackage}' in current release, falling back to hard-coded version.`, + `Did not find '${proxyPackage}' in the linked release, falling back to hard-coded version.`, ); return { url: getFallbackUrl(proxyPackage),
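
Reviewer note (not part of the patch series): the net effect of patches 2/3 and 3/3 is that `getProxyBinaryPath` now resolves the proxy artifact through `getDownloadUrl` instead of the previously hard-coded URL. The condensed TypeScript sketch below only summarises that resolution order using the helpers exported from `src/start-proxy.ts` in this series; the wrapper function `resolveProxyArtifact` is illustrative and does not exist in the codebase.

    import { getActionsLogger } from "./logging";
    import {
      getDownloadUrl,
      getFallbackUrl,
      getProxyPackage,
      UPDATEJOB_PROXY_VERSION,
    } from "./start-proxy";

    // Illustrative only: shows which URL/version the action would use.
    async function resolveProxyArtifact(): Promise<void> {
      const logger = getActionsLogger();

      // getDownloadUrl first queries the release tagged by `defaults.json`
      // (the "linked release") for `update-job-proxy-<platform>.tar.gz`;
      // if the API call fails or the asset is absent, it falls back to the
      // pinned codeql-bundle-v2.22.0 URL and UPDATEJOB_PROXY_VERSION.
      const { url, version } = await getDownloadUrl(logger);
      logger.info(`Would fetch ${getProxyPackage()} (${version}) from ${url}`);

      // The fallback that would have been used if the lookup had failed.
      logger.info(
        `Fallback: ${getFallbackUrl(getProxyPackage())} (${UPDATEJOB_PROXY_VERSION})`,
      );
    }

The returned version then keys the toolcache lookup in `getProxyBinaryPath` (`toolcache.find` / `downloadTool` / `extractTar` / `cacheDir`), so a proxy fetched from the linked release is cached under the CLI version rather than the hard-coded proxy version.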