From 0eea7b85d4e50c6ac8275a8b9fb0a00436c73f7e Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Sat, 7 Nov 2020 10:22:23 +0100 Subject: [PATCH] update dependencies and rebuild --- dist/restore/index.js | 2635 +++++++++++++++++++++------------------ dist/save/index.js | 2746 ++++++++++++++++++++++------------------- package-lock.json | 74 +- package.json | 8 +- tsconfig.json | 2 +- 5 files changed, 2915 insertions(+), 2550 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index ca5e51c..e0e35ed 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -6,152 +6,152 @@ module.exports = /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const path = __importStar(__webpack_require__(5622)); -const utils = __importStar(__webpack_require__(1518)); -const cacheHttpClient = __importStar(__webpack_require__(8245)); -const tar_1 = __webpack_require__(6490); -class ValidationError extends Error { - constructor(message) { - super(message); - this.name = 'ValidationError'; - Object.setPrototypeOf(this, ValidationError.prototype); - } -} -exports.ValidationError = ValidationError; -class ReserveCacheError extends Error { - constructor(message) { - super(message); - this.name = 'ReserveCacheError'; - Object.setPrototypeOf(this, ReserveCacheError.prototype); - } -} -exports.ReserveCacheError = ReserveCacheError; -function checkPaths(paths) { - if (!paths || paths.length === 0) { - throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); - } -} -function checkKey(key) { - if (key.length > 512) { - throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); - } - const regex = /^[^,]*$/; - if (!regex.test(key)) { - throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); - } -} -/** - * Restores cache from keys - * - * @param paths a list of file paths to restore from the cache - * @param primaryKey an explicit key for restoring the cache - * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key - * @param downloadOptions cache download options - * @returns string returns the key for the cache hit, otherwise returns undefined - */ -function restoreCache(paths, primaryKey, restoreKeys, options) { - return __awaiter(this, void 0, void 0, function* () { - checkPaths(paths); - restoreKeys = restoreKeys || []; - const keys = [primaryKey, 
...restoreKeys]; - core.debug('Resolved Keys:'); - core.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - const compressionMethod = yield utils.getCompressionMethod(); - // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; - } - const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core.debug(`Archive Path: ${archivePath}`); - try { - // Download the cache from the cache entry - yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); - core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield tar_1.extractTar(archivePath, compressionMethod); - } - finally { - // Try to delete the archive to save space - try { - yield utils.unlinkFile(archivePath); - } - catch (error) { - core.debug(`Failed to delete archive: ${error}`); - } - } - return cacheEntry.cacheKey; - }); -} -exports.restoreCache = restoreCache; -/** - * Saves a list of files with the specified key - * - * @param paths a list of file paths to be cached - * @param key an explicit key for restoring the cache - * @param options cache upload options - * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails - */ -function saveCache(paths, key, options) { - return __awaiter(this, void 0, void 0, function* () { - checkPaths(paths); - checkKey(key); - const compressionMethod = yield utils.getCompressionMethod(); - core.debug('Reserving Cache'); - const cacheId = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod - }); - if (cacheId === -1) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core.debug(`Cache ID: ${cacheId}`); - const cachePaths = yield utils.resolvePaths(paths); - core.debug('Cache Paths:'); - core.debug(`${JSON.stringify(cachePaths)}`); - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core.debug(`Archive Path: ${archivePath}`); - yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); - const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit - const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); - core.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`); - } - core.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, options); - return cacheId; - }); -} -exports.saveCache = saveCache; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const path = __importStar(__webpack_require__(5622)); +const utils = __importStar(__webpack_require__(1518)); +const cacheHttpClient = __importStar(__webpack_require__(8245)); +const tar_1 = __webpack_require__(6490); +class ValidationError extends Error { + constructor(message) { + super(message); + this.name = 'ValidationError'; + Object.setPrototypeOf(this, ValidationError.prototype); + } +} +exports.ValidationError = ValidationError; +class ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = 'ReserveCacheError'; + Object.setPrototypeOf(this, ReserveCacheError.prototype); + } +} +exports.ReserveCacheError = ReserveCacheError; +function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + } +} +function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + } +} +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +function restoreCache(paths, primaryKey, restoreKeys, options) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core.debug('Resolved Keys:'); + core.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + // path are needed to compute version + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { + // Cache not found + return undefined; + } + const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + // Download the cache from the cache entry + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield tar_1.extractTar(archivePath, compressionMethod); + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return cacheEntry.cacheKey; + }); +} +exports.restoreCache = restoreCache; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +function saveCache(paths, key, options) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + checkKey(key); + const compressionMethod = yield utils.getCompressionMethod(); + core.debug('Reserving Cache'); + const cacheId = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod + }); + if (cacheId === -1) { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); + } + core.debug(`Cache ID: ${cacheId}`); + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); + const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit + const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); + core.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`); + } + core.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, options); + return cacheId; + }); +} +exports.saveCache = saveCache; //# sourceMappingURL=cache.js.map /***/ }), @@ -160,217 +160,220 @@ exports.saveCache = saveCache; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const http_client_1 = __webpack_require__(9925); -const auth_1 = __webpack_require__(3702); -const crypto = __importStar(__webpack_require__(6417)); -const fs = __importStar(__webpack_require__(5747)); -const url_1 = __webpack_require__(8835); -const utils = __importStar(__webpack_require__(1518)); -const constants_1 = __webpack_require__(8840); -const downloadUtils_1 = __webpack_require__(5500); -const options_1 = __webpack_require__(6215); -const requestUtils_1 = __webpack_require__(3981); -const versionSalt = '1.0'; -function getCacheApiUrl(resource) { - // Ideally we just use ACTIONS_CACHE_URL - const baseUrl = (process.env['ACTIONS_CACHE_URL'] || - process.env['ACTIONS_RUNTIME_URL'] || - '').replace('pipelines', 'artifactcache'); - if (!baseUrl) { - throw new Error('Cache Service Url not found, unable to restore cache.'); - } - const url = `${baseUrl}_apis/artifactcache/${resource}`; - core.debug(`Resource Url: ${url}`); - return url; -} -function createAcceptHeader(type, apiVersion) { - return `${type};api-version=${apiVersion}`; -} -function getRequestOptions() { - const requestOptions = { - headers: { - Accept: createAcceptHeader('application/json', '6.0-preview.1') - } - }; - return requestOptions; -} -function createHttpClient() { - const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); -} -function getCacheVersion(paths, compressionMethod) { - const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip - ? [] - : [compressionMethod]); - // Add salt to cache version to support breaking changes in cache entry - components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); -} -exports.getCacheVersion = getCacheVersion; -function getCacheEntry(keys, paths, options) { - return __awaiter(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); - const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; - const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); - if (response.statusCode === 204) { - return null; - } - if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { - throw new Error(`Cache service responded with ${response.statusCode}`); - } - const cacheResult = response.result; - const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; - if (!cacheDownloadUrl) { - throw new Error('Cache not found.'); - } - core.setSecret(cacheDownloadUrl); - core.debug(`Cache Result:`); - core.debug(JSON.stringify(cacheResult)); - return cacheResult; - }); -} -exports.getCacheEntry = getCacheEntry; -function downloadCache(archiveLocation, archivePath, options) { - return __awaiter(this, void 0, void 0, function* () { - const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = options_1.getDownloadOptions(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); - } - else { - // Otherwise, download using the Actions http-client. - yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); - } - }); -} -exports.downloadCache = downloadCache; -// Reserve Cache -function reserveCache(key, paths, options) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); - const reserveCacheRequest = { - key, - version - }; - const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); - })); - return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1; - }); -} -exports.reserveCache = reserveCache; -function getContentRange(start, end) { - // Format: `bytes start-end/filesize - // start and end are inclusive - // filesize can be * - // For a 200 byte chunk starting at byte 0: - // Content-Range: bytes 0-199/* - return `bytes ${start}-${end}/*`; -} -function uploadChunk(httpClient, resourceUrl, openStream, start, end) { - return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); - const additionalHeaders = { - 'Content-Type': 'application/octet-stream', - 'Content-Range': getContentRange(start, end) - }; - yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { - return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); - })); - }); -} -function uploadFile(httpClient, cacheId, archivePath, options) { - return __awaiter(this, void 0, void 0, function* () { - // Upload Chunks - const fileSize = fs.statSync(archivePath).size; - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs.openSync(archivePath, 'r'); - const uploadOptions = options_1.getUploadOptions(options); - const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; - core.debug('Awaiting all uploads'); - let offset = 0; - try { - yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, 
maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs - .createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }) - .on('error', error => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), start, end); - } - }))); - } - finally { - fs.closeSync(fd); - } - return; - }); -} -function commitCache(httpClient, cacheId, filesize) { - return __awaiter(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; - return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); - })); - }); -} -function saveCache(cacheId, archivePath, options) { - return __awaiter(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - core.debug('Upload cache'); - yield uploadFile(httpClient, cacheId, archivePath, options); - // Commit Cache - core.debug('Commiting cache'); - const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath); - const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { - throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); - } - core.info('Cache saved successfully'); - }); -} -exports.saveCache = saveCache; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const http_client_1 = __webpack_require__(9925); +const auth_1 = __webpack_require__(3702); +const crypto = __importStar(__webpack_require__(6417)); +const fs = __importStar(__webpack_require__(5747)); +const url_1 = __webpack_require__(8835); +const utils = __importStar(__webpack_require__(1518)); +const constants_1 = __webpack_require__(8840); +const downloadUtils_1 = __webpack_require__(5500); +const options_1 = __webpack_require__(6215); +const requestUtils_1 = __webpack_require__(3981); +const versionSalt = '1.0'; +function getCacheApiUrl(resource) { + // Ideally we just use ACTIONS_CACHE_URL + const baseUrl = (process.env['ACTIONS_CACHE_URL'] || + process.env['ACTIONS_RUNTIME_URL'] || + '').replace('pipelines', 'artifactcache'); + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.'); + } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core.debug(`Resource Url: ${url}`); + return url; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + }; + return requestOptions; +} +function createHttpClient() { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); +} +function getCacheVersion(paths, compressionMethod) { + const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip + ? [] + : [compressionMethod]); + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt); + return crypto + .createHash('sha256') + .update(components.join('|')) + .digest('hex'); +} +exports.getCacheVersion = getCacheVersion; +function getCacheEntry(keys, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 204) { + return null; + } + if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error('Cache not found.'); + } + core.setSecret(cacheDownloadUrl); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(archiveLocation, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = options_1.getDownloadOptions(options); + if (downloadOptions.useAzureSdk && + archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + } + }); +} +exports.downloadCache = downloadCache; +// Reserve Cache +function reserveCache(key, paths, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const reserveCacheRequest = { + key, + version + }; + const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); + })); + return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1; + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + }; + const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + })); + if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + }); +} +function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = fs.statSync(archivePath).size; + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs.openSync(archivePath, 'r'); + const uploadOptions = options_1.getUploadOptions(options); + const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + 
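+        // Note on the fan-out below: each entry in `parallelUploads` becomes one
+        // upload worker, and all workers share the `offset` cursor. Because each
+        // claim (read `offset`, compute `start`/`end`, advance `offset`) runs
+        // synchronously before the first `yield`, workers never claim overlapping
+        // ranges. Illustrative chunking (assumed sizes, not library defaults):
+        // a 100 MiB archive with maxChunkSize = 32 MiB produces four PATCHes with
+        // inclusive ranges 0-33554431, 33554432-67108863, 67108864-100663295 and
+        // 100663296-104857599, each sent as `Content-Range: bytes <start>-<end>/*`.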
core.debug('Awaiting all uploads'); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(httpClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); +} +function saveCache(cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + core.debug('Upload cache'); + yield uploadFile(httpClient, cacheId, archivePath, options); + // Commit Cache + core.debug('Commiting cache'); + const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core.info('Cache saved successfully'); + }); +} +exports.saveCache = saveCache; //# sourceMappingURL=cacheHttpClient.js.map /***/ }), @@ -379,173 +382,173 @@ exports.saveCache = saveCache; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const exec = __importStar(__webpack_require__(1514)); -const glob = __importStar(__webpack_require__(8090)); -const io = __importStar(__webpack_require__(7436)); -const fs = __importStar(__webpack_require__(5747)); -const path = __importStar(__webpack_require__(5622)); -const semver = __importStar(__webpack_require__(5911)); -const util = __importStar(__webpack_require__(1669)); -const uuid_1 = __webpack_require__(2155); -const constants_1 = __webpack_require__(8840); -// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 -function createTempDirectory() { - return __awaiter(this, void 0, void 0, function* () { - const IS_WINDOWS = process.platform === 'win32'; - let tempDirectory = process.env['RUNNER_TEMP'] || ''; - if (!tempDirectory) { - let baseLocation; - if (IS_WINDOWS) { - // On Windows use the USERPROFILE env variable - baseLocation = process.env['USERPROFILE'] || 'C:\\'; - } - else { - if (process.platform === 'darwin') { - baseLocation = '/Users'; - } - else { - baseLocation = '/home'; - } - } - tempDirectory = path.join(baseLocation, 'actions', 'temp'); - } - const dest = path.join(tempDirectory, uuid_1.v4()); - yield io.mkdirP(dest); - return dest; - }); -} -exports.createTempDirectory = createTempDirectory; -function getArchiveFileSizeIsBytes(filePath) { - return fs.statSync(filePath).size; -} -exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes; -function resolvePaths(patterns) { - var e_1, _a; - var _b; - return __awaiter(this, void 0, void 0, function* () { - const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); - const globber = yield glob.create(patterns.join('\n'), { - implicitDescendants: false - }); - try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - const relativeFile = path.relative(workspace, file); - core.debug(`Matched: ${relativeFile}`); - // Paths are made relative so the tar entries are all relative to the root of the workspace. 
- paths.push(`${relativeFile}`); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); - } - finally { if (e_1) throw e_1.error; } - } - return paths; - }); -} -exports.resolvePaths = resolvePaths; -function unlinkFile(filePath) { - return __awaiter(this, void 0, void 0, function* () { - return util.promisify(fs.unlink)(filePath); - }); -} -exports.unlinkFile = unlinkFile; -function getVersion(app) { - return __awaiter(this, void 0, void 0, function* () { - core.debug(`Checking ${app} --version`); - let versionOutput = ''; - try { - yield exec.exec(`${app} --version`, [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => (versionOutput += data.toString()), - stderr: (data) => (versionOutput += data.toString()) - } - }); - } - catch (err) { - core.debug(err.message); - } - versionOutput = versionOutput.trim(); - core.debug(versionOutput); - return versionOutput; - }); -} -// Use zstandard if possible to maximize cache performance -function getCompressionMethod() { - return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } - const versionOutput = yield getVersion('zstd'); - const version = semver.clean(versionOutput); - if (!versionOutput.toLowerCase().includes('zstd command line interface')) { - // zstd is not installed - return constants_1.CompressionMethod.Gzip; - } - else if (!version || semver.lt(version, 'v1.3.2')) { - // zstd is installed but using a version earlier than v1.3.2 - // v1.3.2 is required to use the `--long` options in zstd - return constants_1.CompressionMethod.ZstdWithoutLong; - } - else { - return constants_1.CompressionMethod.Zstd; - } - }); -} -exports.getCompressionMethod = getCompressionMethod; -function getCacheFileName(compressionMethod) { - return compressionMethod === constants_1.CompressionMethod.Gzip - ? constants_1.CacheFilename.Gzip - : constants_1.CacheFilename.Zstd; -} -exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { - return __awaiter(this, void 0, void 0, function* () { - const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); - }); -} -exports.isGnuTarInstalled = isGnuTarInstalled; -function assertDefined(name, value) { - if (value === undefined) { - throw Error(`Expected ${name} but value was undefiend`); - } - return value; -} -exports.assertDefined = assertDefined; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const exec = __importStar(__webpack_require__(1514)); +const glob = __importStar(__webpack_require__(8090)); +const io = __importStar(__webpack_require__(7436)); +const fs = __importStar(__webpack_require__(5747)); +const path = __importStar(__webpack_require__(5622)); +const semver = __importStar(__webpack_require__(5911)); +const util = __importStar(__webpack_require__(1669)); +const uuid_1 = __webpack_require__(2155); +const constants_1 = __webpack_require__(8840); +// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + let tempDirectory = process.env['RUNNER_TEMP'] || ''; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\'; + } + else { + if (process.platform === 'darwin') { + baseLocation = '/Users'; + } + else { + baseLocation = '/home'; + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp'); + } + const dest = path.join(tempDirectory, uuid_1.v4()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function getArchiveFileSizeIsBytes(filePath) { + return fs.statSync(filePath).size; +} +exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes; +function resolvePaths(patterns) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const paths = []; + const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const globber = yield glob.create(patterns.join('\n'), { + implicitDescendants: false + }); + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + const relativeFile = path.relative(workspace, file); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
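+                // For illustration (hypothetical paths): with
+                // GITHUB_WORKSPACE=/home/runner/work/repo/repo, a matched file
+                // /home/runner/work/repo/repo/target/debug is pushed as the
+                // relative entry "target/debug".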
+ paths.push(`${relativeFile}`); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + return paths; + }); +} +exports.resolvePaths = resolvePaths; +function unlinkFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + return util.promisify(fs.unlink)(filePath); + }); +} +exports.unlinkFile = unlinkFile; +function getVersion(app) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Checking ${app} --version`); + let versionOutput = ''; + try { + yield exec.exec(`${app} --version`, [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + } + catch (err) { + core.debug(err.message); + } + versionOutput = versionOutput.trim(); + core.debug(versionOutput); + return versionOutput; + }); +} +// Use zstandard if possible to maximize cache performance +function getCompressionMethod() { + return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } + const versionOutput = yield getVersion('zstd'); + const version = semver.clean(versionOutput); + if (!versionOutput.toLowerCase().includes('zstd command line interface')) { + // zstd is not installed + return constants_1.CompressionMethod.Gzip; + } + else if (!version || semver.lt(version, 'v1.3.2')) { + // zstd is installed but using a version earlier than v1.3.2 + // v1.3.2 is required to use the `--long` options in zstd + return constants_1.CompressionMethod.ZstdWithoutLong; + } + else { + return constants_1.CompressionMethod.Zstd; + } + }); +} +exports.getCompressionMethod = getCompressionMethod; +function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip + ? constants_1.CacheFilename.Gzip + : constants_1.CacheFilename.Zstd; +} +exports.getCacheFileName = getCacheFileName; +function isGnuTarInstalled() { + return __awaiter(this, void 0, void 0, function* () { + const versionOutput = yield getVersion('tar'); + return versionOutput.toLowerCase().includes('gnu tar'); + }); +} +exports.isGnuTarInstalled = isGnuTarInstalled; +function assertDefined(name, value) { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefiend`); + } + return value; +} +exports.assertDefined = assertDefined; //# sourceMappingURL=cacheUtils.js.map /***/ }), @@ -554,25 +557,29 @@ exports.assertDefined = assertDefined; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var CacheFilename; -(function (CacheFilename) { - CacheFilename["Gzip"] = "cache.tgz"; - CacheFilename["Zstd"] = "cache.tzst"; -})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); -var CompressionMethod; -(function (CompressionMethod) { - CompressionMethod["Gzip"] = "gzip"; - // Long range mode was added to zstd in v1.3.2. - // This enum is for earlier version of zstd that does not have --long support - CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; - CompressionMethod["Zstd"] = "zstd"; -})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); -// Socket timeout in milliseconds during download. 
If no traffic is received -// over the socket during this period, the socket is destroyed and the download -// is aborted. -exports.SocketTimeout = 5000; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var CacheFilename; +(function (CacheFilename) { + CacheFilename["Gzip"] = "cache.tgz"; + CacheFilename["Zstd"] = "cache.tzst"; +})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); +var CompressionMethod; +(function (CompressionMethod) { + CompressionMethod["Gzip"] = "gzip"; + // Long range mode was added to zstd in v1.3.2. + // This enum is for earlier version of zstd that does not have --long support + CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod["Zstd"] = "zstd"; +})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +// The default number of retry attempts. +exports.DefaultRetryAttempts = 2; +// The default delay in milliseconds between retry attempts. +exports.DefaultRetryDelay = 5000; +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. +exports.SocketTimeout = 5000; //# sourceMappingURL=constants.js.map /***/ }), @@ -581,235 +588,235 @@ exports.SocketTimeout = 5000; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const http_client_1 = __webpack_require__(9925); -const storage_blob_1 = __webpack_require__(4100); -const buffer = __importStar(__webpack_require__(4293)); -const fs = __importStar(__webpack_require__(5747)); -const stream = __importStar(__webpack_require__(2413)); -const util = __importStar(__webpack_require__(1669)); -const utils = __importStar(__webpack_require__(1518)); -const constants_1 = __webpack_require__(8840); -const requestUtils_1 = __webpack_require__(3981); -/** - * Pipes the body of a HTTP response to a stream - * - * @param response the HTTP response - * @param output the writable stream - */ -function pipeResponseToStream(response, output) { - return __awaiter(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream.pipeline); - yield pipeline(response.message, output); - }); -} -/** - * Class for tracking the download state and displaying stats. 
- */ -class DownloadProgress { - constructor(contentLength) { - this.contentLength = contentLength; - this.segmentIndex = 0; - this.segmentSize = 0; - this.segmentOffset = 0; - this.receivedBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); - } - /** - * Progress to the next segment. Only call this method when the previous segment - * is complete. - * - * @param segmentSize the length of the next segment - */ - nextSegment(segmentSize) { - this.segmentOffset = this.segmentOffset + this.segmentSize; - this.segmentIndex = this.segmentIndex + 1; - this.segmentSize = segmentSize; - this.receivedBytes = 0; - core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); - } - /** - * Sets the number of bytes received for the current segment. - * - * @param receivedBytes the number of bytes received - */ - setReceivedBytes(receivedBytes) { - this.receivedBytes = receivedBytes; - } - /** - * Returns the total number of bytes transferred. - */ - getTransferredBytes() { - return this.segmentOffset + this.receivedBytes; - } - /** - * Returns true if the download is complete. - */ - isDone() { - return this.getTransferredBytes() === this.contentLength; - } - /** - * Prints the current download stats. Once the download completes, this will print one - * last line and then stop. - */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.segmentOffset + this.receivedBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const downloadSpeed = (transferredBytes / - (1024 * 1024) / - (elapsedTime / 1000)).toFixed(1); - core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } - } - /** - * Returns a function used to handle TransferProgressEvents. - */ - onProgress() { - return (progress) => { - this.setReceivedBytes(progress.loadedBytes); - }; - } - /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write - */ - startDisplayTimer(delayInMs = 1000) { - const displayCallback = () => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }; - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - /** - * Stops the timer that displays the stats. As this typically indicates the download - * is complete, this will display one last line, unless the last line has already - * been written. - */ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = undefined; - } - this.display(); - } -} -exports.DownloadProgress = DownloadProgress; -/** - * Download the cache using the Actions toolkit http-client - * - * @param archiveLocation the URL for the cache - * @param archivePath the local path where the cache is saved - */ -function downloadCacheHttpClient(archiveLocation, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - const writeStream = fs.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient('actions/cache'); - const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); - // Abort download if no traffic received over the socket. 
- downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - // Validate download size. - const contentLengthHeader = downloadResponse.message.headers['content-length']; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeIsBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } - } - else { - core.debug('Unable to validate download, no Content-Length header'); - } - }); -} -exports.downloadCacheHttpClient = downloadCacheHttpClient; -/** - * Download the cache using the Azure Storage SDK. Only call this method if the - * URL points to an Azure Storage endpoint. - * - * @param archiveLocation the URL for the cache - * @param archivePath the local path where the cache is saved - * @param options the download options with the defaults set - */ -function downloadCacheStorageSDK(archiveLocation, archivePath, options) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { - retryOptions: { - // Override the timeout used when downloading each 4 MB chunk - // The default is 2 min / MB, which is way too slow - tryTimeoutInMs: options.timeoutInMs - } - }); - const properties = yield client.getProperties(); - const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; - if (contentLength < 0) { - // We should never hit this condition, but just in case fall back to downloading the - // file as one large stream - core.debug('Unable to determine content length, downloading file with http-client...'); - yield downloadCacheHttpClient(archiveLocation, archivePath); - } - else { - // Use downloadToBuffer for faster downloads, since internally it splits the - // file into 4 MB chunks which can then be parallelized and retried independently - // - // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB - // on 64-bit systems), split the download into multiple segments - const maxSegmentSize = buffer.constants.MAX_LENGTH; - const downloadProgress = new DownloadProgress(contentLength); - const fd = fs.openSync(archivePath, 'w'); - try { - downloadProgress.startDisplayTimer(); - while (!downloadProgress.isDone()) { - const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; - const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); - downloadProgress.nextSegment(segmentSize); - const result = yield client.downloadToBuffer(segmentStart, segmentSize, { - concurrency: options.downloadConcurrency, - onProgress: downloadProgress.onProgress() - }); - fs.writeFileSync(fd, result); - } - } - finally { - downloadProgress.stopDisplayTimer(); - fs.closeSync(fd); - } - } - }); -} -exports.downloadCacheStorageSDK = downloadCacheStorageSDK; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const http_client_1 = __webpack_require__(9925); +const storage_blob_1 = __webpack_require__(4100); +const buffer = __importStar(__webpack_require__(4293)); +const fs = __importStar(__webpack_require__(5747)); +const stream = __importStar(__webpack_require__(2413)); +const util = __importStar(__webpack_require__(1669)); +const utils = __importStar(__webpack_require__(1518)); +const constants_1 = __webpack_require__(8840); +const requestUtils_1 = __webpack_require__(3981); +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +function pipeResponseToStream(response, output) { + return __awaiter(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); +} +/** + * Class for tracking the download state and displaying stats. + */ +class DownloadProgress { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); + } + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. 
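+     * (Each line has the form produced by the core.info call in the body:
+     * "Received <transferred> of <contentLength> (<percentage>%), <speed> MBs/sec".)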
+ */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000)).toFixed(1); + core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } + } + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = undefined; + } + this.display(); + } +} +exports.DownloadProgress = DownloadProgress; +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const writeStream = fs.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient('actions/cache'); + const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length']; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeIsBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug('Unable to validate download, no Content-Length header'); + } + }); +} +exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. 
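+ * The download is split into sequential segments of at most
+ * buffer.constants.MAX_LENGTH bytes, each fetched via downloadToBuffer (which
+ * internally parallelizes 4 MB chunks) and written to disk before the next
+ * segment starts. As an illustrative example, a 3 GB cache on a 64-bit runner,
+ * where MAX_LENGTH is roughly 2 GB, would be fetched as one ~2 GB segment
+ * followed by one ~1 GB segment.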
+ * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug('Unable to determine content length, downloading file with http-client...'); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } + else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + const maxSegmentSize = buffer.constants.MAX_LENGTH; + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs.openSync(archivePath, 'w'); + try { + downloadProgress.startDisplayTimer(); + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + }); + fs.writeFileSync(fd, result); + } + } + finally { + downloadProgress.stopDisplayTimer(); + fs.closeSync(fd); + } + } + }); +} +exports.downloadCacheStorageSDK = downloadCacheStorageSDK; //# sourceMappingURL=downloadUtils.js.map /***/ }), @@ -818,96 +825,124 @@ exports.downloadCacheStorageSDK = downloadCacheStorageSDK; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const http_client_1 = __webpack_require__(9925); -function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode >= 200 && statusCode < 300; -} -exports.isSuccessStatusCode = isSuccessStatusCode; -function isServerErrorStatusCode(statusCode) { - if (!statusCode) { - return true; - } - return statusCode >= 500; -} -exports.isServerErrorStatusCode = isServerErrorStatusCode; -function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; - } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.GatewayTimeout - ]; - return retryableStatusCodes.includes(statusCode); -} -exports.isRetryableStatusCode = isRetryableStatusCode; -function retry(name, method, getStatusCode, maxAttempts = 2) { - return __awaiter(this, void 0, void 0, function* () { - let response = undefined; - let statusCode = undefined; - let isRetryable = false; - let errorMessage = ''; - let attempt = 1; - while (attempt <= maxAttempts) { - try { - response = yield method(); - statusCode = getStatusCode(response); - if (!isServerErrorStatusCode(statusCode)) { - return response; - } - isRetryable = isRetryableStatusCode(statusCode); - errorMessage = `Cache service responded with ${statusCode}`; - } - catch (error) { - isRetryable = true; - errorMessage = error.message; - } - core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - if (!isRetryable) { - core.debug(`${name} - Error is not retryable`); - break; - } - attempt++; - } - throw Error(`${name} failed: ${errorMessage}`); - }); -} -exports.retry = retry; -function retryTypedResponse(name, method, maxAttempts = 2) { - return __awaiter(this, void 0, void 0, function* () { - return yield retry(name, method, (response) => response.statusCode, maxAttempts); - }); -} -exports.retryTypedResponse = retryTypedResponse; -function retryHttpClientResponse(name, method, maxAttempts = 2) { - return __awaiter(this, void 0, void 0, function* () { - return yield retry(name, method, (response) => response.message.statusCode, maxAttempts); - }); -} -exports.retryHttpClientResponse = retryHttpClientResponse; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const http_client_1 = __webpack_require__(9925); +const constants_1 = __webpack_require__(8840); +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} +exports.isServerErrorStatusCode = isServerErrorStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { + return __awaiter(this, void 0, void 0, function* () { + let errorMessage = ''; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + try { + response = yield method(); + } + catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, + // If the error object contains the statusCode property, extract it and return + // an ITypedResponse so it can be processed by the retry logic. 
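+        // Illustrative example: a thrown HttpClientError with statusCode 404 becomes
+        // { statusCode: 404, result: null, headers: {} } and is returned to the
+        // caller without further attempts, while a 503 keeps isRetryable true and
+        // the loop sleeps for `delay` before the next attempt.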
+ (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {} + }; + } + else { + return undefined; + } + }); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; //# sourceMappingURL=requestUtils.js.map /***/ }), @@ -916,130 +951,130 @@ exports.retryHttpClientResponse = retryHttpClientResponse; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const exec_1 = __webpack_require__(1514); -const io = __importStar(__webpack_require__(7436)); -const fs_1 = __webpack_require__(5747); -const path = __importStar(__webpack_require__(5622)); -const utils = __importStar(__webpack_require__(1518)); -const constants_1 = __webpack_require__(8840); -function getTarPath(args, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - const IS_WINDOWS = process.platform === 'win32'; - if (IS_WINDOWS) { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); - } - else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); - } - } - return yield io.which('tar', true); - }); -} -function execTar(args, compressionMethod, cwd) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } - }); -} -function getWorkingDirectory() { - var _a; - return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); -} -function extractTar(archivePath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - // Create directory to extract tar into - const workingDirectory = getWorkingDirectory(); - yield io.mkdirP(workingDirectory); - // --d: Decompress. 
- // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -d --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -d']; - default: - return ['-z']; - } - } - const args = [ - ...getCompressionProgram(), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); - }); -} -exports.extractTar = extractTar; -function createTar(archiveFolder, sourceDirectories, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -T0 --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -T0']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); - }); -} -exports.createTar = createTar; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const exec_1 = __webpack_require__(1514); +const io = __importStar(__webpack_require__(7436)); +const fs_1 = __webpack_require__(5747); +const path = __importStar(__webpack_require__(5622)); +const utils = __importStar(__webpack_require__(1518)); +const constants_1 = __webpack_require__(8840); +function getTarPath(args, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + if (IS_WINDOWS) { + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); + } + else if (fs_1.existsSync(systemTar)) { + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); + } + } + return yield io.which('tar', true); + }); +} +function execTar(args, compressionMethod, cwd) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } + }); +} +function getWorkingDirectory() { + var _a; + return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); +} +function extractTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); + }); +} +exports.extractTar = extractTar; +function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. 
If # is 0, attempt to detect and use the number of physical CPU cores. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -T0 --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -T0']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); + }); +} +exports.createTar = createTar; //# sourceMappingURL=tar.js.map /***/ }), @@ -1048,67 +1083,67 @@ exports.createTar = createTar; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -/** - * Returns a copy of the upload options with defaults filled in. - * - * @param copy the original upload options - */ -function getUploadOptions(copy) { - const result = { - uploadConcurrency: 4, - uploadChunkSize: 32 * 1024 * 1024 - }; - if (copy) { - if (typeof copy.uploadConcurrency === 'number') { - result.uploadConcurrency = copy.uploadConcurrency; - } - if (typeof copy.uploadChunkSize === 'number') { - result.uploadChunkSize = copy.uploadChunkSize; - } - } - core.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core.debug(`Upload chunk size: ${result.uploadChunkSize}`); - return result; -} -exports.getUploadOptions = getUploadOptions; -/** - * Returns a copy of the download options with defaults filled in. 
- * - * @param copy the original download options - */ -function getDownloadOptions(copy) { - const result = { - useAzureSdk: true, - downloadConcurrency: 8, - timeoutInMs: 30000 - }; - if (copy) { - if (typeof copy.useAzureSdk === 'boolean') { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.downloadConcurrency === 'number') { - result.downloadConcurrency = copy.downloadConcurrency; - } - if (typeof copy.timeoutInMs === 'number') { - result.timeoutInMs = copy.timeoutInMs; - } - } - core.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core.debug(`Download concurrency: ${result.downloadConcurrency}`); - core.debug(`Request timeout (ms): ${result.timeoutInMs}`); - return result; -} -exports.getDownloadOptions = getDownloadOptions; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +function getUploadOptions(copy) { + const result = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + core.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; +} +exports.getUploadOptions = getUploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +function getDownloadOptions(copy) { + const result = { + useAzureSdk: true, + downloadConcurrency: 8, + timeoutInMs: 30000 + }; + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs; + } + } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core.debug(`Download concurrency: ${result.downloadConcurrency}`); + core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + return result; +} +exports.getDownloadOptions = getDownloadOptions; //# sourceMappingURL=options.js.map /***/ }), @@ -3166,7 +3201,6 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(8835); const http = __webpack_require__(8605); const https = __webpack_require__(7211); const pm = __webpack_require__(6443); @@ -3215,7 +3249,7 @@ var MediaTypes; * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; @@ -3234,6 +3268,15 @@ const HttpResponseRetryCodes = [ const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; class HttpClientResponse { constructor(message) { this.message = message; @@ -3252,7 +3295,7 @@ class HttpClientResponse { } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { - let parsedUrl = url.parse(requestUrl); + let parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; @@ -3357,7 +3400,7 @@ class HttpClient { if (this._disposed) { throw new Error('Client has already been disposed.'); } - let parsedUrl = url.parse(requestUrl); + let parsedUrl = new URL(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 @@ -3396,7 +3439,7 @@ class HttpClient { // if there's no location to redirect to, we won't break; } - let parsedRedirectUrl = url.parse(redirectUrl); + let parsedRedirectUrl = new URL(redirectUrl); if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { @@ -3512,7 +3555,7 @@ class HttpClient { * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { - let parsedUrl = url.parse(serverUrl); + let parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } _prepareRequest(method, requestUrl, headers) { @@ -3585,7 +3628,7 @@ class HttpClient { maxSockets: maxSockets, keepAlive: this._keepAlive, proxy: { - proxyAuth: proxyUrl.auth, + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, host: proxyUrl.hostname, port: proxyUrl.port } @@ -3680,12 +3723,8 @@ class HttpClient { else { msg = 'Failed request: (' + statusCode + ')'; } - let err = new Error(msg); - // attach statusCode and body obj (if available) to the error object - err['statusCode'] = statusCode; - if (response.result) { - err['result'] = response.result; - } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; reject(err); } else { @@ -3700,12 +3739,11 @@ exports.HttpClient = HttpClient; /***/ }), /***/ 6443: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(8835); function getProxyUrl(reqUrl) { let usingSsl = reqUrl.protocol === 'https:'; let proxyUrl; @@ -3720,7 +3758,7 @@ function getProxyUrl(reqUrl) { proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; } if (proxyVar) { - proxyUrl = url.parse(proxyVar); + proxyUrl = new URL(proxyVar); } return proxyUrl; } @@ -4635,8 +4673,8 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } -var uuid = __webpack_require__(3206); var tslib = __webpack_require__(4331); +var uuid = __webpack_require__(3206); var tough = __webpack_require__(8165); var http = __webpack_require__(8605); var https = __webpack_require__(7211); @@ -4827,7 +4865,7 @@ var Constants = { * @const * @type {string} */ - coreHttpVersion: "1.1.8", + coreHttpVersion: "1.1.9", /** * Specifies HTTP. * @@ -4856,6 +4894,20 @@ var Constants = { * @type {string} */ HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + * + * @const + * @type {string} + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + * + * @const + * @type {string} + */ + ALL_PROXY: "ALL_PROXY", HttpConstants: { /** * Http Verbs @@ -5046,12 +5098,15 @@ function promiseToServiceCallback(promise) { }); }; } -function prepareXMLRootList(obj, elementName) { - var _a; +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + var _a, _b, _c; if (!Array.isArray(obj)) { obj = [obj]; } - return _a = {}, _a[elementName] = obj, _a; + if (!xmlNamespaceKey || !xmlNamespace) { + return _a = {}, _a[elementName] = obj, _a; + } + return _b = {}, _b[elementName] = obj, _b.$ = (_c = {}, _c[xmlNamespaceKey] = xmlNamespace, _c), _b; } /** * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor @@ -5093,6 +5148,15 @@ function replaceAll(value, searchValue, replaceValue) { function isPrimitiveType(value) { return (typeof value !== "object" && typeof value !== "function") || value === null; } +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} // Copyright (c) Microsoft Corporation. var Serializer = /** @class */ (function () { @@ -5214,13 +5278,13 @@ var Serializer = /** @class */ (function () { payload = serializeBase64UrlType(objectName, object); } else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName); + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML)); } else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName); + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML)); } else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName); + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML)); } } return payload; @@ -5494,7 +5558,8 @@ function serializeDateTypes(typeName, value, objectName) { } return value; } -function serializeSequenceType(serializer, mapper, object, objectName) { +function serializeSequenceType(serializer, mapper, object, objectName, isXml) { + var _a, _b; if (!Array.isArray(object)) { throw new Error(objectName + " must be of type Array."); } @@ -5505,11 +5570,26 @@ function serializeSequenceType(serializer, mapper, object, objectName) { } var tempArray = []; for (var i = 0; i < object.length; i++) { - tempArray[i] = serializer.serialize(elementType, object[i], objectName); + var serializedValue = serializer.serialize(elementType, object[i], objectName); + if (isXml && elementType.xmlNamespace) { + var xmlnsKey = elementType.xmlNamespacePrefix + ? 
"xmlns:" + elementType.xmlNamespacePrefix + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = tslib.__assign(tslib.__assign({}, serializedValue), { $: (_a = {}, _a[xmlnsKey] = elementType.xmlNamespace, _a) }); + } + else { + tempArray[i] = { _: serializedValue, $: (_b = {}, _b[xmlnsKey] = elementType.xmlNamespace, _b) }; + } + } + else { + tempArray[i] = serializedValue; + } } return tempArray; } -function serializeDictionaryType(serializer, mapper, object, objectName) { +function serializeDictionaryType(serializer, mapper, object, objectName, isXml) { + var _a; if (typeof object !== "object") { throw new Error(objectName + " must be of type object."); } @@ -5519,12 +5599,46 @@ function serializeDictionaryType(serializer, mapper, object, objectName) { ("mapper and it must of type \"object\" in " + objectName + ".")); } var tempDictionary = {}; - for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { - var key = _a[_i]; - tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + for (var _i = 0, _b = Object.keys(object); _i < _b.length; _i++) { + var key = _b[_i]; + var serializedValue = serializer.serialize(valueType, object[key], objectName); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + var xmlnsKey = mapper.xmlNamespacePrefix ? "xmlns:" + mapper.xmlNamespacePrefix : "xmlns"; + return tslib.__assign(tslib.__assign({}, tempDictionary), { $: (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a) }); } return tempDictionary; } +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + * @param objectName name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + var additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + * @param objectName name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + return serializer.modelMappers[className]; +} /** * Resolves a composite mapper's modelProperties. 
* @param serializer the serializer containing the entire set of mappers @@ -5533,32 +5647,28 @@ function serializeDictionaryType(serializer, mapper, object, objectName) { function resolveModelProperties(serializer, mapper, objectName) { var modelProps = mapper.type.modelProperties; if (!modelProps) { - var className = mapper.type.className; - if (!className) { - throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); - } - var modelMapper = serializer.modelMappers[className]; + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); if (!modelMapper) { - throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + throw new Error("mapper() cannot be null or undefined for model \"" + mapper.type.className + "\"."); } - modelProps = modelMapper.type.modelProperties; + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; if (!modelProps) { throw new Error("modelProperties cannot be null or undefined in the " + - ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + mapper.type.className + "\" for object \"" + objectName + "\".")); } } return modelProps; } -function serializeCompositeType(serializer, mapper, object, objectName) { - var _a; +function serializeCompositeType(serializer, mapper, object, objectName, isXml) { + var _a, _b; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); } if (object != undefined) { var payload = {}; var modelProps = resolveModelProperties(serializer, mapper, objectName); - for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { - var key = _b[_i]; + for (var _i = 0, _c = Object.keys(modelProps); _i < _c.length; _i++) { + var key = _c[_i]; var propertyMapper = modelProps[key]; if (propertyMapper.readOnly) { continue; @@ -5576,8 +5686,8 @@ function serializeCompositeType(serializer, mapper, object, objectName) { else { var paths = splitSerializeName(propertyMapper.serializedName); propName = paths.pop(); - for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { - var pathName = paths_1[_c]; + for (var _d = 0, paths_1 = paths; _d < paths_1.length; _d++) { + var pathName = paths_1[_d]; var childObject = parentObject[pathName]; if (childObject == undefined && (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { @@ -5587,6 +5697,12 @@ function serializeCompositeType(serializer, mapper, object, objectName) { } } if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + var xmlnsKey = mapper.xmlNamespacePrefix + ? "xmlns:" + mapper.xmlNamespacePrefix + : "xmlns"; + parentObject.$ = tslib.__assign(tslib.__assign({}, parentObject.$), (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a)); + } var propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." 
+ propertyMapper.serializedName : objectName; @@ -5599,23 +5715,24 @@ function serializeCompositeType(serializer, mapper, object, objectName) { } var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); if (serializedValue !== undefined && propName != undefined) { - if (propertyMapper.xmlIsAttribute) { + var value = getXmlObjectValue(propertyMapper, serializedValue, isXml); + if (isXml && propertyMapper.xmlIsAttribute) { // $ is the key attributes are kept under in xml2js. // This keeps things simple while preventing name collision // with names in user documents. parentObject.$ = parentObject.$ || {}; parentObject.$[propName] = serializedValue; } - else if (propertyMapper.xmlIsWrapped) { - parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_b = {}, _b[propertyMapper.xmlElementName] = value, _b); } else { - parentObject[propName] = serializedValue; + parentObject[propName] = value; } } } } - var additionalPropertiesMapper = mapper.type.additionalProperties; + var additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); if (additionalPropertiesMapper) { var propNames = Object.keys(modelProps); var _loop_1 = function (clientPropName) { @@ -5632,18 +5749,33 @@ function serializeCompositeType(serializer, mapper, object, objectName) { } return object; } +function getXmlObjectValue(propertyMapper, serializedValue, isXml) { + var _a; + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + var xmlnsKey = propertyMapper.xmlNamespacePrefix + ? "xmlns:" + propertyMapper.xmlNamespacePrefix + : "xmlns"; + var xmlNamespace = (_a = {}, _a[xmlnsKey] = propertyMapper.xmlNamespace, _a); + if (["Composite"].includes(propertyMapper.type.name)) { + return tslib.__assign({ $: xmlNamespace }, serializedValue); + } + return { _: serializedValue, $: xmlNamespace }; +} function isSpecialXmlProperty(propertyName) { return ["$", "_"].includes(propertyName); } function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + var _a; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); } var modelProps = resolveModelProperties(serializer, mapper, objectName); var instance = {}; var handledPropertyNames = []; - for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { - var key = _a[_i]; + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; var propertyMapper = modelProps[key]; var paths = splitSerializeName(modelProps[key].serializedName); handledPropertyNames.push(paths[0]); @@ -5655,8 +5787,8 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName) var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; if (headerCollectionPrefix) { var dictionary = {}; - for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { - var headerKey = _c[_b]; + for (var _c = 0, _d = Object.keys(responseBody); _c < _d.length; _c++) { + var headerKey = _d[_c]; if (headerKey.startsWith(headerCollectionPrefix)) { dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); } @@ -5670,16 +5802,29 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName) } else { var propertyName = xmlElementName 
|| xmlName || serializedName; - var unwrappedProperty = responseBody[propertyName]; if (propertyMapper.xmlIsWrapped) { - unwrappedProperty = responseBody[xmlName]; - unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; - var isEmptyWrappedList = unwrappedProperty === undefined; - if (isEmptyWrappedList) { - unwrappedProperty = []; - } + /* a list of <ElementName> wrapped by <XmlName> + For the xml example below + <Cors> + <CorsRule>...</CorsRule> + <CorsRule>...</CorsRule> + </Cors> + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is "CorsRule". + */ + var wrapped = responseBody[xmlName]; + var elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName); + } + else { + var property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName); } - instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); } } else { @@ -5687,8 +5832,8 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName) var propertyInstance = void 0; var res = responseBody; // traversing the object step by step. - for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { - var item = paths_2[_d]; + for (var _e = 0, paths_2 = paths; _e < paths_2.length; _e++) { + var item = paths_2[_e]; if (!res) break; res = res[item]; @@ -5739,8 +5884,8 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName) } } else if (responseBody) { - for (var _e = 0, _f = Object.keys(responseBody); _e < _f.length; _e++) { - var key = _f[_e]; + for (var _f = 0, _g = Object.keys(responseBody); _f < _g.length; _f++) { + var key = _g[_f]; if (instance[key] === undefined && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key)) { @@ -8524,23 +8669,57 @@ function retry$1(policy, request, operationResponse, err, retryData) { })(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); // Copyright (c) Microsoft Corporation. +var noProxyList = []; +var isNoProxyInitalized = false; +var byPassedList = new Map(); function loadEnvironmentProxyValue() { if (!process) { return undefined; } - if (process.env[Constants.HTTPS_PROXY]) { - return process.env[Constants.HTTPS_PROXY]; + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the given `uri` matches the noProxyList. If it matches, any request sent to that same `uri` won't set the proxy settings. 
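+// Illustrative example: with NO_PROXY=".example.com,localhost", a request to
+// https://api.example.com matches the leading-dot suffix rule and one to
+// http://localhost matches the exact-host rule, so neither request gets
+// proxySettings attached; https://example.org would still use the proxy.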
+function isBypassed(uri) { + if (byPassedList.has(uri)) { + return byPassedList.get(uri); } - else if (process.env[Constants.HTTPS_PROXY.toLowerCase()]) { - return process.env[Constants.HTTPS_PROXY.toLowerCase()]; + loadNoProxy(); + var isBypassed = false; + var host = URLBuilder.parse(uri).getHost(); + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var proxyString = noProxyList_1[_i]; + if (proxyString[0] === ".") { + if (uri.endsWith(proxyString)) { + isBypassed = true; + } + else { + if (host === proxyString.slice(1) && host.length === proxyString.length - 1) { + isBypassed = true; + } + } + } + else { + if (host === proxyString) { + isBypassed = true; + } + } } - else if (process.env[Constants.HTTP_PROXY]) { - return process.env[Constants.HTTP_PROXY]; + byPassedList.set(uri, isBypassed); + return isBypassed; +} +function loadNoProxy() { + if (isNoProxyInitalized) { + return; } - else if (process.env[Constants.HTTP_PROXY.toLowerCase()]) { - return process.env[Constants.HTTP_PROXY.toLowerCase()]; + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + var list = noProxy.split(","); + noProxyList = list.map(function (item) { return item.trim(); }).filter(function (item) { return item.length; }); } - return undefined; + isNoProxyInitalized = true; } function getDefaultProxySettings(proxyUrl) { if (!proxyUrl) { @@ -8596,7 +8775,7 @@ var ProxyPolicy = /** @class */ (function (_super) { return _this; } ProxyPolicy.prototype.sendRequest = function (request) { - if (!request.proxySettings) { + if (!request.proxySettings && !isBypassed(request.url)) { request.proxySettings = this.proxySettings; } return this._nextPolicy.sendRequest(request); @@ -8871,6 +9050,51 @@ var DisableResponseDecompressionPolicy = /** @class */ (function (_super) { return DisableResponseDecompressionPolicy; }(BaseRequestPolicy)); +// Copyright (c) Microsoft Corporation. +function ndJsonPolicy() { + return { + create: function (nextPolicy, options) { + return new NdJsonPolicy(nextPolicy, options); + } + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +var NdJsonPolicy = /** @class */ (function (_super) { + tslib.__extends(NdJsonPolicy, _super); + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy + * @param options + */ + function NdJsonPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; + } + /** + * Sends a request. + * + * @param request + */ + NdJsonPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var body; + return tslib.__generator(this, function (_a) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map(function (item) { return JSON.stringify(item) + "\n"; }).join(""); + } + } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return NdJsonPolicy; +}(BaseRequestPolicy)); + // Copyright (c) Microsoft Corporation. 
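+// Illustrative example of the NdJsonPolicy above: with sendStreamingJson
+// enabled, a request body of '[{"a":1},{"b":2}]' is parsed and rewritten to
+// '{"a":1}\n{"b":2}\n', one JSON document per line, as expected by
+// newline-delimited JSON (ndjson) endpoints.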
/** * @class @@ -9168,7 +9392,7 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op if (operationSpec.requestBody && operationSpec.requestBody.mapper) { httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); var bodyMapper = operationSpec.requestBody.mapper; - var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName, xmlNamespace = bodyMapper.xmlNamespace, xmlNamespacePrefix = bodyMapper.xmlNamespacePrefix; var typeName = bodyMapper.type.name; try { if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { @@ -9176,11 +9400,13 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); var isStream = typeName === MapperType.Stream; if (operationSpec.isXML) { + var xmlnsKey = xmlNamespacePrefix ? "xmlns:" + xmlNamespacePrefix : "xmlns"; + var value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body); if (typeName === MapperType.Sequence) { - httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName }); } else if (!isStream) { - httpRequest.body = stringifyXML(httpRequest.body, { + httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName }); } @@ -9212,6 +9438,18 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op } } } +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue) { + var _a; + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + return { _: serializedValue, $: (_a = {}, _a[xmlnsKey] = xmlNamespace, _a) }; + } + return serializedValue; +} function getValueOrFunctionResult(value, defaultValueCreator) { var result; if (typeof value === "string") { @@ -9254,6 +9492,9 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { } function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { var requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } var userAgentValue = undefined; if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { var userAgentInfo = []; @@ -12540,15 +12781,15 @@ __webpack_require__.r(__webpack_exports__); // EXPORTS __webpack_require__.d(__webpack_exports__, { + "NIL": () => /* reexport */ nil, + "parse": () => /* reexport */ esm_node_parse, + "stringify": () => /* reexport */ esm_node_stringify, "v1": () => /* reexport */ esm_node_v1, "v3": () => /* reexport */ esm_node_v3, "v4": () => /* reexport */ esm_node_v4, "v5": () => /* reexport 
*/ esm_node_v5, - "NIL": () => /* reexport */ nil, - "version": () => /* reexport */ esm_node_version, "validate": () => /* reexport */ esm_node_validate, - "stringify": () => /* reexport */ esm_node_stringify, - "parse": () => /* reexport */ esm_node_parse + "version": () => /* reexport */ esm_node_version }); // EXTERNAL MODULE: external "crypto" @@ -12557,9 +12798,16 @@ var external_crypto_default = /*#__PURE__*/__webpack_require__.n(external_crypto // CONCATENATED MODULE: ./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js -const rnds8 = new Uint8Array(16); +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; function rng() { - return external_crypto_default().randomFillSync(rnds8); + if (poolPtr > rnds8Pool.length - 16) { + external_crypto_default().randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); } // CONCATENATED MODULE: ./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js /* harmony default export */ const regex = (/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i); @@ -54418,21 +54666,16 @@ module.exports = v4; // ESM COMPAT FLAG __webpack_require__.r(__webpack_exports__); -// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js -var core = __webpack_require__(2186); - // EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js var cache = __webpack_require__(7799); - +// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js +var core = __webpack_require__(2186); // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js var exec = __webpack_require__(1514); - // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js var glob = __webpack_require__(8090); - // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js var io = __webpack_require__(7436); - // EXTERNAL MODULE: external "crypto" var external_crypto_ = __webpack_require__(6417); var external_crypto_default = /*#__PURE__*/__webpack_require__.n(external_crypto_); @@ -54450,13 +54693,6 @@ var external_path_ = __webpack_require__(5622); var external_path_default = /*#__PURE__*/__webpack_require__.n(external_path_); // CONCATENATED MODULE: ./src/common.ts -var __asyncValues = (undefined && undefined.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; @@ -54518,32 +54754,25 @@ async function getRustVersion() { } async function getCmdOutput(cmd, args = [], options = {}) { let stdout = ""; - await exec.exec(cmd, args, Object.assign({ silent: true, listeners: { + await exec.exec(cmd, args, { + silent: true, + listeners: { stdout(data) { stdout += data.toString(); }, - } }, options)); + }, + ...options, + }); return stdout; } async function getLockfileHash() { - var e_1, _a; const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false }); const files = await globber.glob(); files.sort((a, b) => a.localeCompare(b)); const hasher = external_crypto_default().createHash("sha1"); for (const file of files) { - try { - for (var _b = (e_1 = void 0, __asyncValues(external_fs_default().createReadStream(file))), _c; _c = await _b.next(), !_c.done;) { - const chunk = _c.value; - hasher.update(chunk); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) await _a.call(_b); - } - finally { if (e_1) throw e_1.error; } + for await (const chunk of external_fs_default().createReadStream(file)) { + hasher.update(chunk); } } return hasher.digest("hex").slice(0, 20); @@ -54559,28 +54788,17 @@ async function getPackages() { }); } async function cleanTarget(packages) { - var e_2, _a; await external_fs_default().promises.unlink("./target/.rustc_info.json"); await io.rmRF("./target/debug/examples"); await io.rmRF("./target/debug/incremental"); let dir; // remove all *files* from debug dir = await external_fs_default().promises.opendir("./target/debug"); - try { - for (var dir_1 = __asyncValues(dir), dir_1_1; dir_1_1 = await dir_1.next(), !dir_1_1.done;) { - const dirent = dir_1_1.value; - if (dirent.isFile()) { - await rm(dir.path, dirent); - } + for await (const dirent of dir) { + if (dirent.isFile()) { + await rm(dir.path, dirent); } } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (dir_1_1 && !dir_1_1.done && (_a = dir_1.return)) await _a.call(dir_1); - } - finally { if (e_2) throw e_2.error; } - } const keepPkg = new Set(packages.map((p) => p.name)); await rmExcept("./target/debug/build", keepPkg); await rmExcept("./target/debug/.fingerprint", keepPkg); @@ -54596,30 +54814,19 @@ async function cleanTarget(packages) { } const oneWeek = 7 * 24 * 3600 * 1000; async function rmExcept(dirName, keepPrefix) { - var e_3, _a; const dir = await external_fs_default().promises.opendir(dirName); - try { - for (var dir_2 = __asyncValues(dir), dir_2_1; dir_2_1 = await dir_2.next(), !dir_2_1.done;) { - const dirent = dir_2_1.value; - let name = dirent.name; - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - // we don’t really know - if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) { - await rm(dir.path, dirent); - } + for await (const dirent of dir) { + let name = dirent.name; + const idx = name.lastIndexOf("-"); + if (idx !== 
-1) { + name = name.slice(0, idx); } - } - catch (e_3_1) { e_3 = { error: e_3_1 }; } - finally { - try { - if (dir_2_1 && !dir_2_1.done && (_a = dir_2.return)) await _a.call(dir_2); + const fileName = external_path_default().join(dir.path, dirent.name); + const { mtime } = await external_fs_default().promises.stat(fileName); + // we can’t tell from the name alone whether an entry is still needed, so also evict by age + if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) { + await rm(dir.path, dirent); } - finally { if (e_3) throw e_3.error; } } } async function rm(parent, dirent) { @@ -54707,7 +54914,7 @@ module.exports = JSON.parse("[\"ac\",\"com.ac\",\"edu.ac\",\"gov.ac\",\"net.ac\" /***/ ((module) => { "use strict"; -module.exports = require("assert"); +module.exports = require("assert");; /***/ }), @@ -54715,7 +54922,7 @@ module.exports = require("assert"); /***/ ((module) => { "use strict"; -module.exports = require("buffer"); +module.exports = require("buffer");; /***/ }), @@ -54723,7 +54930,7 @@ module.exports = require("buffer"); /***/ ((module) => { "use strict"; -module.exports = require("child_process"); +module.exports = require("child_process");; /***/ }), @@ -54731,7 +54938,7 @@ module.exports = require("child_process"); /***/ ((module) => { "use strict"; -module.exports = require("crypto"); +module.exports = require("crypto");; /***/ }), @@ -54739,7 +54946,7 @@ module.exports = require("crypto"); /***/ ((module) => { "use strict"; -module.exports = require("events"); +module.exports = require("events");; /***/ }), @@ -54747,7 +54954,7 @@ module.exports = require("events"); /***/ ((module) => { "use strict"; -module.exports = require("fs"); +module.exports = require("fs");; /***/ }), @@ -54755,7 +54962,7 @@ module.exports = require("fs"); /***/ ((module) => { "use strict"; -module.exports = require("http"); +module.exports = require("http");; /***/ }), @@ -54763,7 +54970,7 @@ module.exports = require("http"); /***/ ((module) => { "use strict"; -module.exports = require("https"); +module.exports = require("https");; /***/ }), @@ -54771,7 +54978,7 @@ module.exports = require("https"); /***/ ((module) => { "use strict"; -module.exports = require("net"); +module.exports = require("net");; /***/ }), @@ -54779,7 +54986,7 @@ module.exports = require("net"); /***/ ((module) => { "use strict"; -module.exports = require("os"); +module.exports = require("os");; /***/ }), @@ -54787,7 +54994,7 @@ module.exports = require("os"); /***/ ((module) => { "use strict"; -module.exports = require("path"); +module.exports = require("path");; /***/ }), @@ -54795,7 +55002,7 @@ module.exports = require("path"); /***/ ((module) => { "use strict"; -module.exports = require("punycode"); +module.exports = require("punycode");; /***/ }), @@ -54803,7 +55010,7 @@ module.exports = require("punycode"); /***/ ((module) => { "use strict"; -module.exports = require("stream"); +module.exports = require("stream");; /***/ }), @@ -54811,7 +55018,7 @@ module.exports = require("stream"); /***/ ((module) => { "use strict"; -module.exports = require("string_decoder"); +module.exports = require("string_decoder");; /***/ }), @@ -54819,7 +55026,7 @@ module.exports = require("string_decoder"); /***/ ((module) => { "use strict"; -module.exports = require("timers"); +module.exports = require("timers");; /***/ }), @@ -54827,7 +55034,7 @@ module.exports = require("timers"); /***/ ((module) => { "use strict"; -module.exports = require("tls"); +module.exports = require("tls");; /***/ }), @@ -54835,7 +55042,7 @@ module.exports = require("tls"); /***/ ((module) => { "use strict"; 
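The hunks above replace TypeScript's downleveled `__asyncValues` iteration (the `e_1`/`e_2`/`e_3` try/finally scaffolding) with native `for await...of`, which the rebuilt bundle emits now that the compile target supports async iteration. A minimal sketch of the resulting directory-pruning pattern in `rmExcept`, assuming Node 14.14+ so `fs.promises.rm` is available (the bundle uses its own `rm` helper instead):

```ts
import * as fs from "fs";
import * as path from "path";

const oneWeek = 7 * 24 * 3600 * 1000;

// Iterate a directory with native `for await`, strip the trailing
// `-<hash>` suffix that cargo appends to artifact names, and delete
// entries whose prefix is unknown or that are older than a week.
async function rmExcept(dirName: string, keepPrefix: Set<string>): Promise<void> {
  const dir = await fs.promises.opendir(dirName);
  for await (const dirent of dir) {
    let name = dirent.name;
    const idx = name.lastIndexOf("-");
    if (idx !== -1) {
      name = name.slice(0, idx);
    }
    const fileName = path.join(dirName, dirent.name);
    const { mtime } = await fs.promises.stat(fileName);
    if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
      await fs.promises.rm(fileName, { recursive: true, force: true });
    }
  }
}
```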
-module.exports = require("url"); +module.exports = require("url");; /***/ }), @@ -54843,7 +55050,7 @@ module.exports = require("url"); /***/ ((module) => { "use strict"; -module.exports = require("util"); +module.exports = require("util");; /***/ }), @@ -54851,7 +55058,7 @@ module.exports = require("util"); /***/ ((module) => { "use strict"; -module.exports = require("zlib"); +module.exports = require("zlib");; /***/ }) diff --git a/dist/save/index.js b/dist/save/index.js index 65115c4..057ea96 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -6,152 +6,152 @@ module.exports = /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const path = __importStar(__webpack_require__(5622)); -const utils = __importStar(__webpack_require__(1518)); -const cacheHttpClient = __importStar(__webpack_require__(8245)); -const tar_1 = __webpack_require__(6490); -class ValidationError extends Error { - constructor(message) { - super(message); - this.name = 'ValidationError'; - Object.setPrototypeOf(this, ValidationError.prototype); - } -} -exports.ValidationError = ValidationError; -class ReserveCacheError extends Error { - constructor(message) { - super(message); - this.name = 'ReserveCacheError'; - Object.setPrototypeOf(this, ReserveCacheError.prototype); - } -} -exports.ReserveCacheError = ReserveCacheError; -function checkPaths(paths) { - if (!paths || paths.length === 0) { - throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); - } -} -function checkKey(key) { - if (key.length > 512) { - throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); - } - const regex = /^[^,]*$/; - if (!regex.test(key)) { - throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); - } -} -/** - * Restores cache from keys - * - * @param paths a list of file paths to restore from the cache - * @param primaryKey an explicit key for restoring the cache - * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key - * @param downloadOptions cache download options - * @returns string returns the key for the cache hit, otherwise returns undefined - */ -function restoreCache(paths, primaryKey, restoreKeys, options) { - return __awaiter(this, void 0, void 0, function* () { - checkPaths(paths); - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core.debug('Resolved 
Keys:'); - core.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - const compressionMethod = yield utils.getCompressionMethod(); - // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; - } - const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core.debug(`Archive Path: ${archivePath}`); - try { - // Download the cache from the cache entry - yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); - core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield tar_1.extractTar(archivePath, compressionMethod); - } - finally { - // Try to delete the archive to save space - try { - yield utils.unlinkFile(archivePath); - } - catch (error) { - core.debug(`Failed to delete archive: ${error}`); - } - } - return cacheEntry.cacheKey; - }); -} -exports.restoreCache = restoreCache; -/** - * Saves a list of files with the specified key - * - * @param paths a list of file paths to be cached - * @param key an explicit key for restoring the cache - * @param options cache upload options - * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails - */ -function saveCache(paths, key, options) { - return __awaiter(this, void 0, void 0, function* () { - checkPaths(paths); - checkKey(key); - const compressionMethod = yield utils.getCompressionMethod(); - core.debug('Reserving Cache'); - const cacheId = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod - }); - if (cacheId === -1) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core.debug(`Cache ID: ${cacheId}`); - const cachePaths = yield utils.resolvePaths(paths); - core.debug('Cache Paths:'); - core.debug(`${JSON.stringify(cachePaths)}`); - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core.debug(`Archive Path: ${archivePath}`); - yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); - const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit - const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); - core.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`); - } - core.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, options); - return cacheId; - }); -} -exports.saveCache = saveCache; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const path = __importStar(__webpack_require__(5622)); +const utils = __importStar(__webpack_require__(1518)); +const cacheHttpClient = __importStar(__webpack_require__(8245)); +const tar_1 = __webpack_require__(6490); +class ValidationError extends Error { + constructor(message) { + super(message); + this.name = 'ValidationError'; + Object.setPrototypeOf(this, ValidationError.prototype); + } +} +exports.ValidationError = ValidationError; +class ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = 'ReserveCacheError'; + Object.setPrototypeOf(this, ReserveCacheError.prototype); + } +} +exports.ReserveCacheError = ReserveCacheError; +function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + } +} +function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + } +} +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +function restoreCache(paths, primaryKey, restoreKeys, options) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core.debug('Resolved Keys:'); + core.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + // paths are needed to compute version + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { + // Cache not found + return undefined; + } + const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + // Download the cache from the cache entry + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield tar_1.extractTar(archivePath, compressionMethod); + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return cacheEntry.cacheKey; + }); +} +exports.restoreCache = restoreCache; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +function saveCache(paths, key, options) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + checkKey(key); + const compressionMethod = yield utils.getCompressionMethod(); + core.debug('Reserving Cache'); + const cacheId = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod + }); + if (cacheId === -1) { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); + } + core.debug(`Cache ID: ${cacheId}`); + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); + const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit + const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath); + core.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`); + } + core.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, options); + return cacheId; + }); +} +exports.saveCache = saveCache; //# sourceMappingURL=cache.js.map /***/ }), @@ -160,217 +160,220 @@ exports.saveCache = saveCache; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const http_client_1 = __webpack_require__(9925); -const auth_1 = __webpack_require__(3702); -const crypto = __importStar(__webpack_require__(6417)); -const fs = __importStar(__webpack_require__(5747)); -const url_1 = __webpack_require__(8835); -const utils = __importStar(__webpack_require__(1518)); -const constants_1 = __webpack_require__(8840); -const downloadUtils_1 = __webpack_require__(5500); -const options_1 = __webpack_require__(6215); -const requestUtils_1 = __webpack_require__(3981); -const versionSalt = '1.0'; -function getCacheApiUrl(resource) { - // Ideally we just use ACTIONS_CACHE_URL - const baseUrl = (process.env['ACTIONS_CACHE_URL'] || - process.env['ACTIONS_RUNTIME_URL'] || - '').replace('pipelines', 'artifactcache'); - if (!baseUrl) { - throw new Error('Cache Service Url not found, unable to restore cache.'); - } - const url = `${baseUrl}_apis/artifactcache/${resource}`; - core.debug(`Resource Url: ${url}`); - return url; -} -function createAcceptHeader(type, apiVersion) { - return `${type};api-version=${apiVersion}`; -} -function getRequestOptions() { - const requestOptions = { - headers: { - Accept: createAcceptHeader('application/json', '6.0-preview.1') - } - }; - return requestOptions; -} -function createHttpClient() { - const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); -} -function getCacheVersion(paths, compressionMethod) { - const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip - ? [] - : [compressionMethod]); - // Add salt to cache version to support breaking changes in cache entry - components.push(versionSalt); - return crypto - .createHash('sha256') - .update(components.join('|')) - .digest('hex'); -} -exports.getCacheVersion = getCacheVersion; -function getCacheEntry(keys, paths, options) { - return __awaiter(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); - const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; - const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); - if (response.statusCode === 204) { - return null; - } - if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { - throw new Error(`Cache service responded with ${response.statusCode}`); - } - const cacheResult = response.result; - const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; - if (!cacheDownloadUrl) { - throw new Error('Cache not found.'); - } - core.setSecret(cacheDownloadUrl); - core.debug(`Cache Result:`); - core.debug(JSON.stringify(cacheResult)); - return cacheResult; - }); -} -exports.getCacheEntry = getCacheEntry; -function downloadCache(archiveLocation, archivePath, options) { - return __awaiter(this, void 0, void 0, function* () { - const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = options_1.getDownloadOptions(options); - if (downloadOptions.useAzureSdk && - archiveUrl.hostname.endsWith('.blob.core.windows.net')) { - // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. - yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); - } - else { - // Otherwise, download using the Actions http-client. - yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); - } - }); -} -exports.downloadCache = downloadCache; -// Reserve Cache -function reserveCache(key, paths, options) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); - const reserveCacheRequest = { - key, - version - }; - const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); - })); - return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1; - }); -} -exports.reserveCache = reserveCache; -function getContentRange(start, end) { - // Format: `bytes start-end/filesize - // start and end are inclusive - // filesize can be * - // For a 200 byte chunk starting at byte 0: - // Content-Range: bytes 0-199/* - return `bytes ${start}-${end}/*`; -} -function uploadChunk(httpClient, resourceUrl, openStream, start, end) { - return __awaiter(this, void 0, void 0, function* () { - core.debug(`Uploading chunk of size ${end - - start + - 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); - const additionalHeaders = { - 'Content-Type': 'application/octet-stream', - 'Content-Range': getContentRange(start, end) - }; - yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { - return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); - })); - }); -} -function uploadFile(httpClient, cacheId, archivePath, options) { - return __awaiter(this, void 0, void 0, function* () { - // Upload Chunks - const fileSize = fs.statSync(archivePath).size; - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs.openSync(archivePath, 'r'); - const uploadOptions = options_1.getUploadOptions(options); - const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; - core.debug('Awaiting all uploads'); - let offset = 0; - try { - yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, 
maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs - .createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }) - .on('error', error => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), start, end); - } - }))); - } - finally { - fs.closeSync(fd); - } - return; - }); -} -function commitCache(httpClient, cacheId, filesize) { - return __awaiter(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; - return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); - })); - }); -} -function saveCache(cacheId, archivePath, options) { - return __awaiter(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - core.debug('Upload cache'); - yield uploadFile(httpClient, cacheId, archivePath, options); - // Commit Cache - core.debug('Commiting cache'); - const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath); - const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { - throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); - } - core.info('Cache saved successfully'); - }); -} -exports.saveCache = saveCache; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const http_client_1 = __webpack_require__(9925); +const auth_1 = __webpack_require__(3702); +const crypto = __importStar(__webpack_require__(6417)); +const fs = __importStar(__webpack_require__(5747)); +const url_1 = __webpack_require__(8835); +const utils = __importStar(__webpack_require__(1518)); +const constants_1 = __webpack_require__(8840); +const downloadUtils_1 = __webpack_require__(5500); +const options_1 = __webpack_require__(6215); +const requestUtils_1 = __webpack_require__(3981); +const versionSalt = '1.0'; +function getCacheApiUrl(resource) { + // Ideally we just use ACTIONS_CACHE_URL + const baseUrl = (process.env['ACTIONS_CACHE_URL'] || + process.env['ACTIONS_RUNTIME_URL'] || + '').replace('pipelines', 'artifactcache'); + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.'); + } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core.debug(`Resource Url: ${url}`); + return url; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + }; + return requestOptions; +} +function createHttpClient() { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); +} +function getCacheVersion(paths, compressionMethod) { + const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip + ? [] + : [compressionMethod]); + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt); + return crypto + .createHash('sha256') + .update(components.join('|')) + .digest('hex'); +} +exports.getCacheVersion = getCacheVersion; +function getCacheEntry(keys, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 204) { + return null; + } + if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error('Cache not found.'); + } + core.setSecret(cacheDownloadUrl); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(archiveLocation, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = options_1.getDownloadOptions(options); + if (downloadOptions.useAzureSdk && + archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + } + }); +} +exports.downloadCache = downloadCache; +// Reserve Cache +function reserveCache(key, paths, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const reserveCacheRequest = { + key, + version + }; + const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); + })); + return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1; + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + }; + const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + })); + if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + }); +} +function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = fs.statSync(archivePath).size; + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs.openSync(archivePath, 'r'); + const uploadOptions = options_1.getUploadOptions(options); + const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + 
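`uploadFile`, whose body continues below, fans the archive out across several concurrent chunk uploads that all pull from one shared offset counter. A sketch of that scheduling pattern, with `uploadChunk` as a hypothetical stand-in for the Content-Range `PATCH` issued above:

```ts
// Each of `concurrency` workers claims the next `maxChunkSize` window of
// the file until the shared offset passes the end. Claiming the window
// synchronously (before any await) is what keeps the shared counter safe
// in single-threaded JavaScript.
async function uploadInChunks(
  fileSize: number,
  concurrency: number,
  maxChunkSize: number,
  uploadChunk: (start: number, end: number) => Promise<void>,
): Promise<void> {
  let offset = 0;
  const workers = Array.from({ length: concurrency }, async () => {
    while (offset < fileSize) {
      const start = offset;
      const end = Math.min(offset + maxChunkSize, fileSize) - 1; // inclusive
      offset += maxChunkSize;
      await uploadChunk(start, end);
    }
  });
  await Promise.all(workers);
}
```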
core.debug('Awaiting all uploads'); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(httpClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); +} +function saveCache(cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + core.debug('Upload cache'); + yield uploadFile(httpClient, cacheId, archivePath, options); + // Commit Cache + core.debug('Committing cache'); + const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core.info('Cache saved successfully'); + }); +} +exports.saveCache = saveCache; //# sourceMappingURL=cacheHttpClient.js.map /***/ }), @@ -379,173 +382,173 @@ exports.saveCache = saveCache; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const exec = __importStar(__webpack_require__(1514)); -const glob = __importStar(__webpack_require__(8090)); -const io = __importStar(__webpack_require__(7436)); -const fs = __importStar(__webpack_require__(5747)); -const path = __importStar(__webpack_require__(5622)); -const semver = __importStar(__webpack_require__(5911)); -const util = __importStar(__webpack_require__(1669)); -const uuid_1 = __webpack_require__(2155); -const constants_1 = __webpack_require__(8840); -// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 -function createTempDirectory() { - return __awaiter(this, void 0, void 0, function* () { - const IS_WINDOWS = process.platform === 'win32'; - let tempDirectory = process.env['RUNNER_TEMP'] || ''; - if (!tempDirectory) { - let baseLocation; - if (IS_WINDOWS) { - // On Windows use the USERPROFILE env variable - baseLocation = process.env['USERPROFILE'] || 'C:\\'; - } - else { - if (process.platform === 'darwin') { - baseLocation = '/Users'; - } - else { - baseLocation = '/home'; - } - } - tempDirectory = path.join(baseLocation, 'actions', 'temp'); - } - const dest = path.join(tempDirectory, uuid_1.v4()); - yield io.mkdirP(dest); - return dest; - }); -} -exports.createTempDirectory = createTempDirectory; -function getArchiveFileSizeIsBytes(filePath) { - return fs.statSync(filePath).size; -} -exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes; -function resolvePaths(patterns) { - var e_1, _a; - var _b; - return __awaiter(this, void 0, void 0, function* () { - const paths = []; - const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); - const globber = yield glob.create(patterns.join('\n'), { - implicitDescendants: false - }); - try { - for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { - const file = _d.value; - const relativeFile = path.relative(workspace, file); - core.debug(`Matched: ${relativeFile}`); - // Paths are made relative so the tar entries are all relative to the root of the workspace. 
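`createTempDirectory` above prefers the runner-provided `RUNNER_TEMP` and only guesses a per-OS base location as a fallback. A sketch of the same fallback, assuming Node 14.17+ for `crypto.randomUUID` (the bundle pulls in the `uuid` package instead):

```ts
import * as path from "path";
import { promises as fs } from "fs";
import { randomUUID } from "crypto";

// Prefer the runner's temp dir; otherwise derive a platform-specific
// base and create a unique subdirectory under it.
async function createTempDirectory(): Promise<string> {
  let tempDirectory = process.env["RUNNER_TEMP"] || "";
  if (!tempDirectory) {
    const baseLocation =
      process.platform === "win32"
        ? process.env["USERPROFILE"] || "C:\\"
        : process.platform === "darwin"
          ? "/Users"
          : "/home";
    tempDirectory = path.join(baseLocation, "actions", "temp");
  }
  const dest = path.join(tempDirectory, randomUUID());
  await fs.mkdir(dest, { recursive: true });
  return dest;
}
```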
- paths.push(`${relativeFile}`); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); - } - finally { if (e_1) throw e_1.error; } - } - return paths; - }); -} -exports.resolvePaths = resolvePaths; -function unlinkFile(filePath) { - return __awaiter(this, void 0, void 0, function* () { - return util.promisify(fs.unlink)(filePath); - }); -} -exports.unlinkFile = unlinkFile; -function getVersion(app) { - return __awaiter(this, void 0, void 0, function* () { - core.debug(`Checking ${app} --version`); - let versionOutput = ''; - try { - yield exec.exec(`${app} --version`, [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => (versionOutput += data.toString()), - stderr: (data) => (versionOutput += data.toString()) - } - }); - } - catch (err) { - core.debug(err.message); - } - versionOutput = versionOutput.trim(); - core.debug(versionOutput); - return versionOutput; - }); -} -// Use zstandard if possible to maximize cache performance -function getCompressionMethod() { - return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } - const versionOutput = yield getVersion('zstd'); - const version = semver.clean(versionOutput); - if (!versionOutput.toLowerCase().includes('zstd command line interface')) { - // zstd is not installed - return constants_1.CompressionMethod.Gzip; - } - else if (!version || semver.lt(version, 'v1.3.2')) { - // zstd is installed but using a version earlier than v1.3.2 - // v1.3.2 is required to use the `--long` options in zstd - return constants_1.CompressionMethod.ZstdWithoutLong; - } - else { - return constants_1.CompressionMethod.Zstd; - } - }); -} -exports.getCompressionMethod = getCompressionMethod; -function getCacheFileName(compressionMethod) { - return compressionMethod === constants_1.CompressionMethod.Gzip - ? constants_1.CacheFilename.Gzip - : constants_1.CacheFilename.Zstd; -} -exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { - return __awaiter(this, void 0, void 0, function* () { - const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); - }); -} -exports.isGnuTarInstalled = isGnuTarInstalled; -function assertDefined(name, value) { - if (value === undefined) { - throw Error(`Expected ${name} but value was undefiend`); - } - return value; -} -exports.assertDefined = assertDefined; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const exec = __importStar(__webpack_require__(1514)); +const glob = __importStar(__webpack_require__(8090)); +const io = __importStar(__webpack_require__(7436)); +const fs = __importStar(__webpack_require__(5747)); +const path = __importStar(__webpack_require__(5622)); +const semver = __importStar(__webpack_require__(5911)); +const util = __importStar(__webpack_require__(1669)); +const uuid_1 = __webpack_require__(2155); +const constants_1 = __webpack_require__(8840); +// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + let tempDirectory = process.env['RUNNER_TEMP'] || ''; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\'; + } + else { + if (process.platform === 'darwin') { + baseLocation = '/Users'; + } + else { + baseLocation = '/home'; + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp'); + } + const dest = path.join(tempDirectory, uuid_1.v4()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function getArchiveFileSizeIsBytes(filePath) { + return fs.statSync(filePath).size; +} +exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes; +function resolvePaths(patterns) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const paths = []; + const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const globber = yield glob.create(patterns.join('\n'), { + implicitDescendants: false + }); + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + const relativeFile = path.relative(workspace, file); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
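`resolvePaths`, whose loop continues below, rewrites every glob match relative to `GITHUB_WORKSPACE` so the tar entries stay workspace-relative. A sketch of that pattern against the `@actions/glob` API:

```ts
import * as glob from "@actions/glob";
import * as path from "path";

// Expand the patterns and make every match relative to the workspace
// root, so archives created from the result use portable entry names.
async function resolveRelativePaths(patterns: string[]): Promise<string[]> {
  const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
  const globber = await glob.create(patterns.join("\n"), {
    implicitDescendants: false,
  });
  const paths: string[] = [];
  for await (const file of globber.globGenerator()) {
    paths.push(path.relative(workspace, file));
  }
  return paths;
}
```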
+ paths.push(`${relativeFile}`); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + return paths; + }); +} +exports.resolvePaths = resolvePaths; +function unlinkFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + return util.promisify(fs.unlink)(filePath); + }); +} +exports.unlinkFile = unlinkFile; +function getVersion(app) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Checking ${app} --version`); + let versionOutput = ''; + try { + yield exec.exec(`${app} --version`, [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + } + catch (err) { + core.debug(err.message); + } + versionOutput = versionOutput.trim(); + core.debug(versionOutput); + return versionOutput; + }); +} +// Use zstandard if possible to maximize cache performance +function getCompressionMethod() { + return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } + const versionOutput = yield getVersion('zstd'); + const version = semver.clean(versionOutput); + if (!versionOutput.toLowerCase().includes('zstd command line interface')) { + // zstd is not installed + return constants_1.CompressionMethod.Gzip; + } + else if (!version || semver.lt(version, 'v1.3.2')) { + // zstd is installed but using a version earlier than v1.3.2 + // v1.3.2 is required to use the `--long` options in zstd + return constants_1.CompressionMethod.ZstdWithoutLong; + } + else { + return constants_1.CompressionMethod.Zstd; + } + }); +} +exports.getCompressionMethod = getCompressionMethod; +function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip + ? constants_1.CacheFilename.Gzip + : constants_1.CacheFilename.Zstd; +} +exports.getCacheFileName = getCacheFileName; +function isGnuTarInstalled() { + return __awaiter(this, void 0, void 0, function* () { + const versionOutput = yield getVersion('tar'); + return versionOutput.toLowerCase().includes('gnu tar'); + }); +} +exports.isGnuTarInstalled = isGnuTarInstalled; +function assertDefined(name, value) { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefined`); + } + return value; +} +exports.assertDefined = assertDefined; //# sourceMappingURL=cacheUtils.js.map /***/ }), @@ -554,25 +557,29 @@ exports.assertDefined = assertDefined; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -var CacheFilename; -(function (CacheFilename) { - CacheFilename["Gzip"] = "cache.tgz"; - CacheFilename["Zstd"] = "cache.tzst"; -})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); -var CompressionMethod; -(function (CompressionMethod) { - CompressionMethod["Gzip"] = "gzip"; - // Long range mode was added to zstd in v1.3.2. - // This enum is for earlier version of zstd that does not have --long support - CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; - CompressionMethod["Zstd"] = "zstd"; -})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); -// Socket timeout in milliseconds during download. 
If no traffic is received -// over the socket during this period, the socket is destroyed and the download -// is aborted. -exports.SocketTimeout = 5000; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var CacheFilename; +(function (CacheFilename) { + CacheFilename["Gzip"] = "cache.tgz"; + CacheFilename["Zstd"] = "cache.tzst"; +})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); +var CompressionMethod; +(function (CompressionMethod) { + CompressionMethod["Gzip"] = "gzip"; + // Long range mode was added to zstd in v1.3.2. + // This enum is for earlier version of zstd that does not have --long support + CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod["Zstd"] = "zstd"; +})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +// The default number of retry attempts. +exports.DefaultRetryAttempts = 2; +// The default delay in milliseconds between retry attempts. +exports.DefaultRetryDelay = 5000; +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. +exports.SocketTimeout = 5000; //# sourceMappingURL=constants.js.map /***/ }), @@ -581,235 +588,235 @@ exports.SocketTimeout = 5000; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const http_client_1 = __webpack_require__(9925); -const storage_blob_1 = __webpack_require__(4100); -const buffer = __importStar(__webpack_require__(4293)); -const fs = __importStar(__webpack_require__(5747)); -const stream = __importStar(__webpack_require__(2413)); -const util = __importStar(__webpack_require__(1669)); -const utils = __importStar(__webpack_require__(1518)); -const constants_1 = __webpack_require__(8840); -const requestUtils_1 = __webpack_require__(3981); -/** - * Pipes the body of a HTTP response to a stream - * - * @param response the HTTP response - * @param output the writable stream - */ -function pipeResponseToStream(response, output) { - return __awaiter(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream.pipeline); - yield pipeline(response.message, output); - }); -} -/** - * Class for tracking the download state and displaying stats. 
- */ -class DownloadProgress { - constructor(contentLength) { - this.contentLength = contentLength; - this.segmentIndex = 0; - this.segmentSize = 0; - this.segmentOffset = 0; - this.receivedBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); - } - /** - * Progress to the next segment. Only call this method when the previous segment - * is complete. - * - * @param segmentSize the length of the next segment - */ - nextSegment(segmentSize) { - this.segmentOffset = this.segmentOffset + this.segmentSize; - this.segmentIndex = this.segmentIndex + 1; - this.segmentSize = segmentSize; - this.receivedBytes = 0; - core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); - } - /** - * Sets the number of bytes received for the current segment. - * - * @param receivedBytes the number of bytes received - */ - setReceivedBytes(receivedBytes) { - this.receivedBytes = receivedBytes; - } - /** - * Returns the total number of bytes transferred. - */ - getTransferredBytes() { - return this.segmentOffset + this.receivedBytes; - } - /** - * Returns true if the download is complete. - */ - isDone() { - return this.getTransferredBytes() === this.contentLength; - } - /** - * Prints the current download stats. Once the download completes, this will print one - * last line and then stop. - */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.segmentOffset + this.receivedBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const downloadSpeed = (transferredBytes / - (1024 * 1024) / - (elapsedTime / 1000)).toFixed(1); - core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } - } - /** - * Returns a function used to handle TransferProgressEvents. - */ - onProgress() { - return (progress) => { - this.setReceivedBytes(progress.loadedBytes); - }; - } - /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write - */ - startDisplayTimer(delayInMs = 1000) { - const displayCallback = () => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }; - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - /** - * Stops the timer that displays the stats. As this typically indicates the download - * is complete, this will display one last line, unless the last line has already - * been written. - */ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = undefined; - } - this.display(); - } -} -exports.DownloadProgress = DownloadProgress; -/** - * Download the cache using the Actions toolkit http-client - * - * @param archiveLocation the URL for the cache - * @param archivePath the local path where the cache is saved - */ -function downloadCacheHttpClient(archiveLocation, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - const writeStream = fs.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient('actions/cache'); - const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); - // Abort download if no traffic received over the socket. 
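`downloadCacheHttpClient` guards against stalled connections: the lines that follow destroy the response if the socket goes quiet for `SocketTimeout` milliseconds. A sketch of that idle-socket abort against a plain `http.IncomingMessage`:

```ts
import type { IncomingMessage } from "http";

const SocketTimeout = 5000;

// If no traffic arrives for SocketTimeout ms, destroy the response so
// the download fails fast instead of hanging on a half-dead connection.
function abortOnIdleSocket(message: IncomingMessage): void {
  message.socket.setTimeout(SocketTimeout, () => {
    message.destroy();
    console.debug(`Aborting download, socket timed out after ${SocketTimeout} ms`);
  });
}
```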
- downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - // Validate download size. - const contentLengthHeader = downloadResponse.message.headers['content-length']; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeIsBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } - } - else { - core.debug('Unable to validate download, no Content-Length header'); - } - }); -} -exports.downloadCacheHttpClient = downloadCacheHttpClient; -/** - * Download the cache using the Azure Storage SDK. Only call this method if the - * URL points to an Azure Storage endpoint. - * - * @param archiveLocation the URL for the cache - * @param archivePath the local path where the cache is saved - * @param options the download options with the defaults set - */ -function downloadCacheStorageSDK(archiveLocation, archivePath, options) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { - retryOptions: { - // Override the timeout used when downloading each 4 MB chunk - // The default is 2 min / MB, which is way too slow - tryTimeoutInMs: options.timeoutInMs - } - }); - const properties = yield client.getProperties(); - const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; - if (contentLength < 0) { - // We should never hit this condition, but just in case fall back to downloading the - // file as one large stream - core.debug('Unable to determine content length, downloading file with http-client...'); - yield downloadCacheHttpClient(archiveLocation, archivePath); - } - else { - // Use downloadToBuffer for faster downloads, since internally it splits the - // file into 4 MB chunks which can then be parallelized and retried independently - // - // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB - // on 64-bit systems), split the download into multiple segments - const maxSegmentSize = buffer.constants.MAX_LENGTH; - const downloadProgress = new DownloadProgress(contentLength); - const fd = fs.openSync(archivePath, 'w'); - try { - downloadProgress.startDisplayTimer(); - while (!downloadProgress.isDone()) { - const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; - const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); - downloadProgress.nextSegment(segmentSize); - const result = yield client.downloadToBuffer(segmentStart, segmentSize, { - concurrency: options.downloadConcurrency, - onProgress: downloadProgress.onProgress() - }); - fs.writeFileSync(fd, result); - } - } - finally { - downloadProgress.stopDisplayTimer(); - fs.closeSync(fd); - } - } - }); -} -exports.downloadCacheStorageSDK = downloadCacheStorageSDK; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const http_client_1 = __webpack_require__(9925); +const storage_blob_1 = __webpack_require__(4100); +const buffer = __importStar(__webpack_require__(4293)); +const fs = __importStar(__webpack_require__(5747)); +const stream = __importStar(__webpack_require__(2413)); +const util = __importStar(__webpack_require__(1669)); +const utils = __importStar(__webpack_require__(1518)); +const constants_1 = __webpack_require__(8840); +const requestUtils_1 = __webpack_require__(3981); +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +function pipeResponseToStream(response, output) { + return __awaiter(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); +} +/** + * Class for tracking the download state and displaying stats. + */ +class DownloadProgress { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); + } + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. 
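+     * Stats are computed from segmentOffset + receivedBytes, so the running
+     * total stays correct as the download advances from one segment to the next.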
+ */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000)).toFixed(1); + core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } + } + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = undefined; + } + this.display(); + } +} +exports.DownloadProgress = DownloadProgress; +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const writeStream = fs.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient('actions/cache'); + const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length']; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeIsBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug('Unable to validate download, no Content-Length header'); + } + }); +} +exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. 
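+ *
+ * The blob is fetched with downloadToBuffer in segments of at most
+ * buffer.constants.MAX_LENGTH bytes, so each segment fits in a single Buffer
+ * and its 4 MB chunks can be parallelized and retried independently.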
+ * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug('Unable to determine content length, downloading file with http-client...'); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } + else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + const maxSegmentSize = buffer.constants.MAX_LENGTH; + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs.openSync(archivePath, 'w'); + try { + downloadProgress.startDisplayTimer(); + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + }); + fs.writeFileSync(fd, result); + } + } + finally { + downloadProgress.stopDisplayTimer(); + fs.closeSync(fd); + } + } + }); +} +exports.downloadCacheStorageSDK = downloadCacheStorageSDK; //# sourceMappingURL=downloadUtils.js.map /***/ }), @@ -818,96 +825,124 @@ exports.downloadCacheStorageSDK = downloadCacheStorageSDK; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -const http_client_1 = __webpack_require__(9925); -function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode >= 200 && statusCode < 300; -} -exports.isSuccessStatusCode = isSuccessStatusCode; -function isServerErrorStatusCode(statusCode) { - if (!statusCode) { - return true; - } - return statusCode >= 500; -} -exports.isServerErrorStatusCode = isServerErrorStatusCode; -function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; - } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.GatewayTimeout - ]; - return retryableStatusCodes.includes(statusCode); -} -exports.isRetryableStatusCode = isRetryableStatusCode; -function retry(name, method, getStatusCode, maxAttempts = 2) { - return __awaiter(this, void 0, void 0, function* () { - let response = undefined; - let statusCode = undefined; - let isRetryable = false; - let errorMessage = ''; - let attempt = 1; - while (attempt <= maxAttempts) { - try { - response = yield method(); - statusCode = getStatusCode(response); - if (!isServerErrorStatusCode(statusCode)) { - return response; - } - isRetryable = isRetryableStatusCode(statusCode); - errorMessage = `Cache service responded with ${statusCode}`; - } - catch (error) { - isRetryable = true; - errorMessage = error.message; - } - core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - if (!isRetryable) { - core.debug(`${name} - Error is not retryable`); - break; - } - attempt++; - } - throw Error(`${name} failed: ${errorMessage}`); - }); -} -exports.retry = retry; -function retryTypedResponse(name, method, maxAttempts = 2) { - return __awaiter(this, void 0, void 0, function* () { - return yield retry(name, method, (response) => response.statusCode, maxAttempts); - }); -} -exports.retryTypedResponse = retryTypedResponse; -function retryHttpClientResponse(name, method, maxAttempts = 2) { - return __awaiter(this, void 0, void 0, function* () { - return yield retry(name, method, (response) => response.message.statusCode, maxAttempts); - }); -} -exports.retryHttpClientResponse = retryHttpClientResponse; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const http_client_1 = __webpack_require__(9925); +const constants_1 = __webpack_require__(8840); +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} +exports.isServerErrorStatusCode = isServerErrorStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { + return __awaiter(this, void 0, void 0, function* () { + let errorMessage = ''; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + try { + response = yield method(); + } + catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, + // If the error object contains the statusCode property, extract it and return + // an ITypedResponse so it can be processed by the retry logic. 
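+        // This folds a thrown HttpClientError back into the normal path: the
+        // retry loop applies the same status-code rules to it as to a returned
+        // response (non-server errors are returned, retryable 5xx codes retry).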
+ (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {} + }; + } + else { + return undefined; + } + }); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; //# sourceMappingURL=requestUtils.js.map /***/ }), @@ -916,130 +951,130 @@ exports.retryHttpClientResponse = retryHttpClientResponse; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const exec_1 = __webpack_require__(1514); -const io = __importStar(__webpack_require__(7436)); -const fs_1 = __webpack_require__(5747); -const path = __importStar(__webpack_require__(5622)); -const utils = __importStar(__webpack_require__(1518)); -const constants_1 = __webpack_require__(8840); -function getTarPath(args, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - const IS_WINDOWS = process.platform === 'win32'; - if (IS_WINDOWS) { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); - } - else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); - } - } - return yield io.which('tar', true); - }); -} -function execTar(args, compressionMethod, cwd) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); - } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); - } - }); -} -function getWorkingDirectory() { - var _a; - return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); -} -function extractTar(archivePath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - // Create directory to extract tar into - const workingDirectory = getWorkingDirectory(); - yield io.mkdirP(workingDirectory); - // --d: Decompress. 
- // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -d --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -d']; - default: - return ['-z']; - } - } - const args = [ - ...getCompressionProgram(), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); - }); -} -exports.extractTar = extractTar; -function createTar(archiveFolder, sourceDirectories, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return ['--use-compress-program', 'zstd -T0 --long=30']; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', 'zstd -T0']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); - }); -} -exports.createTar = createTar; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const exec_1 = __webpack_require__(1514); +const io = __importStar(__webpack_require__(7436)); +const fs_1 = __webpack_require__(5747); +const path = __importStar(__webpack_require__(5622)); +const utils = __importStar(__webpack_require__(1518)); +const constants_1 = __webpack_require__(8840); +function getTarPath(args, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + if (IS_WINDOWS) { + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); + } + else if (fs_1.existsSync(systemTar)) { + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); + } + } + return yield io.which('tar', true); + }); +} +function execTar(args, compressionMethod, cwd) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } + }); +} +function getWorkingDirectory() { + var _a; + return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); +} +function extractTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); + }); +} +exports.extractTar = extractTar; +function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. 
If # is 0, attempt to detect and use the number of physical CPU cores. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -T0 --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -T0']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); + }); +} +exports.createTar = createTar; //# sourceMappingURL=tar.js.map /***/ }), @@ -1048,67 +1083,67 @@ exports.createTar = createTar; /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const core = __importStar(__webpack_require__(2186)); -/** - * Returns a copy of the upload options with defaults filled in. - * - * @param copy the original upload options - */ -function getUploadOptions(copy) { - const result = { - uploadConcurrency: 4, - uploadChunkSize: 32 * 1024 * 1024 - }; - if (copy) { - if (typeof copy.uploadConcurrency === 'number') { - result.uploadConcurrency = copy.uploadConcurrency; - } - if (typeof copy.uploadChunkSize === 'number') { - result.uploadChunkSize = copy.uploadChunkSize; - } - } - core.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core.debug(`Upload chunk size: ${result.uploadChunkSize}`); - return result; -} -exports.getUploadOptions = getUploadOptions; -/** - * Returns a copy of the download options with defaults filled in. 
- * - * @param copy the original download options - */ -function getDownloadOptions(copy) { - const result = { - useAzureSdk: true, - downloadConcurrency: 8, - timeoutInMs: 30000 - }; - if (copy) { - if (typeof copy.useAzureSdk === 'boolean') { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.downloadConcurrency === 'number') { - result.downloadConcurrency = copy.downloadConcurrency; - } - if (typeof copy.timeoutInMs === 'number') { - result.timeoutInMs = copy.timeoutInMs; - } - } - core.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core.debug(`Download concurrency: ${result.downloadConcurrency}`); - core.debug(`Request timeout (ms): ${result.timeoutInMs}`); - return result; -} -exports.getDownloadOptions = getDownloadOptions; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +function getUploadOptions(copy) { + const result = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + core.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; +} +exports.getUploadOptions = getUploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +function getDownloadOptions(copy) { + const result = { + useAzureSdk: true, + downloadConcurrency: 8, + timeoutInMs: 30000 + }; + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs; + } + } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core.debug(`Download concurrency: ${result.downloadConcurrency}`); + core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + return result; +} +exports.getDownloadOptions = getDownloadOptions; //# sourceMappingURL=options.js.map /***/ }), @@ -3166,7 +3201,6 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(8835); const http = __webpack_require__(8605); const https = __webpack_require__(7211); const pm = __webpack_require__(6443); @@ -3215,7 +3249,7 @@ var MediaTypes; * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; @@ -3234,6 +3268,15 @@ const HttpResponseRetryCodes = [ const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; class HttpClientResponse { constructor(message) { this.message = message; @@ -3252,7 +3295,7 @@ class HttpClientResponse { } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { - let parsedUrl = url.parse(requestUrl); + let parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; @@ -3357,7 +3400,7 @@ class HttpClient { if (this._disposed) { throw new Error('Client has already been disposed.'); } - let parsedUrl = url.parse(requestUrl); + let parsedUrl = new URL(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 @@ -3396,7 +3439,7 @@ class HttpClient { // if there's no location to redirect to, we won't break; } - let parsedRedirectUrl = url.parse(redirectUrl); + let parsedRedirectUrl = new URL(redirectUrl); if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { @@ -3512,7 +3555,7 @@ class HttpClient { * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { - let parsedUrl = url.parse(serverUrl); + let parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } _prepareRequest(method, requestUrl, headers) { @@ -3585,7 +3628,7 @@ class HttpClient { maxSockets: maxSockets, keepAlive: this._keepAlive, proxy: { - proxyAuth: proxyUrl.auth, + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, host: proxyUrl.hostname, port: proxyUrl.port } @@ -3680,12 +3723,8 @@ class HttpClient { else { msg = 'Failed request: (' + statusCode + ')'; } - let err = new Error(msg); - // attach statusCode and body obj (if available) to the error object - err['statusCode'] = statusCode; - if (response.result) { - err['result'] = response.result; - } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; reject(err); } else { @@ -3700,12 +3739,11 @@ exports.HttpClient = HttpClient; /***/ }), /***/ 6443: -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -const url = __webpack_require__(8835); function getProxyUrl(reqUrl) { let usingSsl = reqUrl.protocol === 'https:'; let proxyUrl; @@ -3720,7 +3758,7 @@ function getProxyUrl(reqUrl) { proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; } if (proxyVar) { - proxyUrl = url.parse(proxyVar); + proxyUrl = new URL(proxyVar); } return proxyUrl; } @@ -4635,8 +4673,8 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } -var uuid = __webpack_require__(3206); var tslib = __webpack_require__(4331); +var uuid = __webpack_require__(3206); var tough = __webpack_require__(8165); var http = __webpack_require__(8605); var https = __webpack_require__(7211); @@ -4827,7 +4865,7 @@ var Constants = { * @const * @type {string} */ - coreHttpVersion: "1.1.8", + coreHttpVersion: "1.1.9", /** * Specifies HTTP. * @@ -4856,6 +4894,20 @@ var Constants = { * @type {string} */ HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + * + * @const + * @type {string} + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + * + * @const + * @type {string} + */ + ALL_PROXY: "ALL_PROXY", HttpConstants: { /** * Http Verbs @@ -5046,12 +5098,15 @@ function promiseToServiceCallback(promise) { }); }; } -function prepareXMLRootList(obj, elementName) { - var _a; +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + var _a, _b, _c; if (!Array.isArray(obj)) { obj = [obj]; } - return _a = {}, _a[elementName] = obj, _a; + if (!xmlNamespaceKey || !xmlNamespace) { + return _a = {}, _a[elementName] = obj, _a; + } + return _b = {}, _b[elementName] = obj, _b.$ = (_c = {}, _c[xmlNamespaceKey] = xmlNamespace, _c), _b; } /** * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor @@ -5093,6 +5148,15 @@ function replaceAll(value, searchValue, replaceValue) { function isPrimitiveType(value) { return (typeof value !== "object" && typeof value !== "function") || value === null; } +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} // Copyright (c) Microsoft Corporation. var Serializer = /** @class */ (function () { @@ -5214,13 +5278,13 @@ var Serializer = /** @class */ (function () { payload = serializeBase64UrlType(objectName, object); } else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName); + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML)); } else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName); + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML)); } else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName); + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML)); } } return payload; @@ -5494,7 +5558,8 @@ function serializeDateTypes(typeName, value, objectName) { } return value; } -function serializeSequenceType(serializer, mapper, object, objectName) { +function serializeSequenceType(serializer, mapper, object, objectName, isXml) { + var _a, _b; if (!Array.isArray(object)) { throw new Error(objectName + " must be of type Array."); } @@ -5505,11 +5570,26 @@ function serializeSequenceType(serializer, mapper, object, objectName) { } var tempArray = []; for (var i = 0; i < object.length; i++) { - tempArray[i] = serializer.serialize(elementType, object[i], objectName); + var serializedValue = serializer.serialize(elementType, object[i], objectName); + if (isXml && elementType.xmlNamespace) { + var xmlnsKey = elementType.xmlNamespacePrefix + ? 
"xmlns:" + elementType.xmlNamespacePrefix + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = tslib.__assign(tslib.__assign({}, serializedValue), { $: (_a = {}, _a[xmlnsKey] = elementType.xmlNamespace, _a) }); + } + else { + tempArray[i] = { _: serializedValue, $: (_b = {}, _b[xmlnsKey] = elementType.xmlNamespace, _b) }; + } + } + else { + tempArray[i] = serializedValue; + } } return tempArray; } -function serializeDictionaryType(serializer, mapper, object, objectName) { +function serializeDictionaryType(serializer, mapper, object, objectName, isXml) { + var _a; if (typeof object !== "object") { throw new Error(objectName + " must be of type object."); } @@ -5519,12 +5599,46 @@ function serializeDictionaryType(serializer, mapper, object, objectName) { ("mapper and it must of type \"object\" in " + objectName + ".")); } var tempDictionary = {}; - for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { - var key = _a[_i]; - tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + for (var _i = 0, _b = Object.keys(object); _i < _b.length; _i++) { + var key = _b[_i]; + var serializedValue = serializer.serialize(valueType, object[key], objectName); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + var xmlnsKey = mapper.xmlNamespacePrefix ? "xmlns:" + mapper.xmlNamespacePrefix : "xmlns"; + return tslib.__assign(tslib.__assign({}, tempDictionary), { $: (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a) }); } return tempDictionary; } +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + * @param objectName name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + var additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + * @param objectName name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + return serializer.modelMappers[className]; +} /** * Resolves a composite mapper's modelProperties. 
* @param serializer the serializer containing the entire set of mappers @@ -5533,32 +5647,28 @@ function serializeDictionaryType(serializer, mapper, object, objectName) { function resolveModelProperties(serializer, mapper, objectName) { var modelProps = mapper.type.modelProperties; if (!modelProps) { - var className = mapper.type.className; - if (!className) { - throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); - } - var modelMapper = serializer.modelMappers[className]; + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); if (!modelMapper) { - throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + throw new Error("mapper() cannot be null or undefined for model \"" + mapper.type.className + "\"."); } - modelProps = modelMapper.type.modelProperties; + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; if (!modelProps) { throw new Error("modelProperties cannot be null or undefined in the " + - ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + mapper.type.className + "\" for object \"" + objectName + "\".")); } } return modelProps; } -function serializeCompositeType(serializer, mapper, object, objectName) { - var _a; +function serializeCompositeType(serializer, mapper, object, objectName, isXml) { + var _a, _b; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); } if (object != undefined) { var payload = {}; var modelProps = resolveModelProperties(serializer, mapper, objectName); - for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { - var key = _b[_i]; + for (var _i = 0, _c = Object.keys(modelProps); _i < _c.length; _i++) { + var key = _c[_i]; var propertyMapper = modelProps[key]; if (propertyMapper.readOnly) { continue; @@ -5576,8 +5686,8 @@ function serializeCompositeType(serializer, mapper, object, objectName) { else { var paths = splitSerializeName(propertyMapper.serializedName); propName = paths.pop(); - for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { - var pathName = paths_1[_c]; + for (var _d = 0, paths_1 = paths; _d < paths_1.length; _d++) { + var pathName = paths_1[_d]; var childObject = parentObject[pathName]; if (childObject == undefined && (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { @@ -5587,6 +5697,12 @@ function serializeCompositeType(serializer, mapper, object, objectName) { } } if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + var xmlnsKey = mapper.xmlNamespacePrefix + ? "xmlns:" + mapper.xmlNamespacePrefix + : "xmlns"; + parentObject.$ = tslib.__assign(tslib.__assign({}, parentObject.$), (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a)); + } var propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." 
+ propertyMapper.serializedName : objectName; @@ -5599,23 +5715,24 @@ function serializeCompositeType(serializer, mapper, object, objectName) { } var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); if (serializedValue !== undefined && propName != undefined) { - if (propertyMapper.xmlIsAttribute) { + var value = getXmlObjectValue(propertyMapper, serializedValue, isXml); + if (isXml && propertyMapper.xmlIsAttribute) { // $ is the key attributes are kept under in xml2js. // This keeps things simple while preventing name collision // with names in user documents. parentObject.$ = parentObject.$ || {}; parentObject.$[propName] = serializedValue; } - else if (propertyMapper.xmlIsWrapped) { - parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_b = {}, _b[propertyMapper.xmlElementName] = value, _b); } else { - parentObject[propName] = serializedValue; + parentObject[propName] = value; } } } } - var additionalPropertiesMapper = mapper.type.additionalProperties; + var additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); if (additionalPropertiesMapper) { var propNames = Object.keys(modelProps); var _loop_1 = function (clientPropName) { @@ -5632,18 +5749,33 @@ function serializeCompositeType(serializer, mapper, object, objectName) { } return object; } +function getXmlObjectValue(propertyMapper, serializedValue, isXml) { + var _a; + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + var xmlnsKey = propertyMapper.xmlNamespacePrefix + ? "xmlns:" + propertyMapper.xmlNamespacePrefix + : "xmlns"; + var xmlNamespace = (_a = {}, _a[xmlnsKey] = propertyMapper.xmlNamespace, _a); + if (["Composite"].includes(propertyMapper.type.name)) { + return tslib.__assign({ $: xmlNamespace }, serializedValue); + } + return { _: serializedValue, $: xmlNamespace }; +} function isSpecialXmlProperty(propertyName) { return ["$", "_"].includes(propertyName); } function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + var _a; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); } var modelProps = resolveModelProperties(serializer, mapper, objectName); var instance = {}; var handledPropertyNames = []; - for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { - var key = _a[_i]; + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; var propertyMapper = modelProps[key]; var paths = splitSerializeName(modelProps[key].serializedName); handledPropertyNames.push(paths[0]); @@ -5655,8 +5787,8 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName) var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; if (headerCollectionPrefix) { var dictionary = {}; - for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { - var headerKey = _c[_b]; + for (var _c = 0, _d = Object.keys(responseBody); _c < _d.length; _c++) { + var headerKey = _d[_c]; if (headerKey.startsWith(headerCollectionPrefix)) { dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); } @@ -5670,16 +5802,29 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName) } else { var propertyName = xmlElementName 
|| xmlName || serializedName;
-                    var unwrappedProperty = responseBody[propertyName];
                     if (propertyMapper.xmlIsWrapped) {
-                        unwrappedProperty = responseBody[xmlName];
-                        unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName];
-                        var isEmptyWrappedList = unwrappedProperty === undefined;
-                        if (isEmptyWrappedList) {
-                            unwrappedProperty = [];
-                        }
+                        /* a list of <ElementName> wrapped by <XmlName>
+                        For the xml example below
+                        <Cors>
+                            <CorsRule>...</CorsRule>
+                            <CorsRule>...</CorsRule>
+                        </Cors>
+                        the responseBody has
+                        {
+                            Cors: {
+                                CorsRule: [{...}, {...}]
+                            }
+                        }
+                        xmlName is "Cors" and xmlElementName is "CorsRule".
+                        */
+                        var wrapped = responseBody[xmlName];
+                        var elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : [];
+                        instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName);
+                    }
+                    else {
+                        var property = responseBody[propertyName];
+                        instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName);
+                    }
-                    instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName);
                 }
             }
             else {
@@ -5687,8 +5832,8 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName)
                 var propertyInstance = void 0;
                 var res = responseBody;
                 // traversing the object step by step.
-                for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) {
-                    var item = paths_2[_d];
+                for (var _e = 0, paths_2 = paths; _e < paths_2.length; _e++) {
+                    var item = paths_2[_e];
                     if (!res)
                         break;
                     res = res[item];
@@ -5739,8 +5884,8 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName)
             }
         }
         else if (responseBody) {
-            for (var _e = 0, _f = Object.keys(responseBody); _e < _f.length; _e++) {
-                var key = _f[_e];
+            for (var _f = 0, _g = Object.keys(responseBody); _f < _g.length; _f++) {
+                var key = _g[_f];
                 if (instance[key] === undefined &&
                     !handledPropertyNames.includes(key) &&
                     !isSpecialXmlProperty(key)) {
@@ -8524,23 +8669,57 @@ function retry$1(policy, request, operationResponse, err, retryData) {
 })(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {}));
 // Copyright (c) Microsoft Corporation.
+var noProxyList = [];
+var isNoProxyInitalized = false;
+var byPassedList = new Map();
 function loadEnvironmentProxyValue() {
     if (!process) {
         return undefined;
     }
-    if (process.env[Constants.HTTPS_PROXY]) {
-        return process.env[Constants.HTTPS_PROXY];
+    var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);
+    var allProxy = getEnvironmentValue(Constants.ALL_PROXY);
+    var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);
+    return httpsProxy || allProxy || httpProxy;
+}
+// Check whether the given `uri` matches the noProxyList. If it matches, any request sent to that same `uri` won't set the proxy settings.
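+// An entry beginning with "." matches any uri that ends with that suffix (and
+// the bare host itself); any other entry must equal the parsed host exactly.
+// Results are memoized per uri in byPassedList.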
+function isBypassed(uri) { + if (byPassedList.has(uri)) { + return byPassedList.get(uri); } - else if (process.env[Constants.HTTPS_PROXY.toLowerCase()]) { - return process.env[Constants.HTTPS_PROXY.toLowerCase()]; + loadNoProxy(); + var isBypassed = false; + var host = URLBuilder.parse(uri).getHost(); + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var proxyString = noProxyList_1[_i]; + if (proxyString[0] === ".") { + if (uri.endsWith(proxyString)) { + isBypassed = true; + } + else { + if (host === proxyString.slice(1) && host.length === proxyString.length - 1) { + isBypassed = true; + } + } + } + else { + if (host === proxyString) { + isBypassed = true; + } + } } - else if (process.env[Constants.HTTP_PROXY]) { - return process.env[Constants.HTTP_PROXY]; + byPassedList.set(uri, isBypassed); + return isBypassed; +} +function loadNoProxy() { + if (isNoProxyInitalized) { + return; } - else if (process.env[Constants.HTTP_PROXY.toLowerCase()]) { - return process.env[Constants.HTTP_PROXY.toLowerCase()]; + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + var list = noProxy.split(","); + noProxyList = list.map(function (item) { return item.trim(); }).filter(function (item) { return item.length; }); } - return undefined; + isNoProxyInitalized = true; } function getDefaultProxySettings(proxyUrl) { if (!proxyUrl) { @@ -8596,7 +8775,7 @@ var ProxyPolicy = /** @class */ (function (_super) { return _this; } ProxyPolicy.prototype.sendRequest = function (request) { - if (!request.proxySettings) { + if (!request.proxySettings && !isBypassed(request.url)) { request.proxySettings = this.proxySettings; } return this._nextPolicy.sendRequest(request); @@ -8871,6 +9050,51 @@ var DisableResponseDecompressionPolicy = /** @class */ (function (_super) { return DisableResponseDecompressionPolicy; }(BaseRequestPolicy)); +// Copyright (c) Microsoft Corporation. +function ndJsonPolicy() { + return { + create: function (nextPolicy, options) { + return new NdJsonPolicy(nextPolicy, options); + } + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +var NdJsonPolicy = /** @class */ (function (_super) { + tslib.__extends(NdJsonPolicy, _super); + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy + * @param options + */ + function NdJsonPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; + } + /** + * Sends a request. + * + * @param request + */ + NdJsonPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var body; + return tslib.__generator(this, function (_a) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map(function (item) { return JSON.stringify(item) + "\n"; }).join(""); + } + } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return NdJsonPolicy; +}(BaseRequestPolicy)); + // Copyright (c) Microsoft Corporation. 
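The NdJsonPolicy above rewrites a request body that was serialized as a JSON
array into newline-delimited JSON before the request goes out. A minimal
standalone sketch of that transformation in TypeScript (toNdJson is an
illustrative name, not part of the SDK):

// Reserialize a JSON array payload as NDJSON: one JSON document per line,
// with no enclosing array brackets.
function toNdJson(body: string): string {
    // Mirror the policy's guard: only bodies that look like a serialized
    // array are rewritten; everything else passes through untouched.
    if (!body.startsWith("[")) {
        return body;
    }
    const parsed: unknown = JSON.parse(body);
    if (!Array.isArray(parsed)) {
        return body;
    }
    return parsed.map((item) => JSON.stringify(item) + "\n").join("");
}

// Example: toNdJson('[{"n":1},{"n":2}]') evaluates to '{"n":1}\n{"n":2}\n'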
/** * @class @@ -9168,7 +9392,7 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op if (operationSpec.requestBody && operationSpec.requestBody.mapper) { httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); var bodyMapper = operationSpec.requestBody.mapper; - var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName, xmlNamespace = bodyMapper.xmlNamespace, xmlNamespacePrefix = bodyMapper.xmlNamespacePrefix; var typeName = bodyMapper.type.name; try { if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { @@ -9176,11 +9400,13 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); var isStream = typeName === MapperType.Stream; if (operationSpec.isXML) { + var xmlnsKey = xmlNamespacePrefix ? "xmlns:" + xmlNamespacePrefix : "xmlns"; + var value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body); if (typeName === MapperType.Sequence) { - httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName }); } else if (!isStream) { - httpRequest.body = stringifyXML(httpRequest.body, { + httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName }); } @@ -9212,6 +9438,18 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op } } } +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue) { + var _a; + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + return { _: serializedValue, $: (_a = {}, _a[xmlnsKey] = xmlNamespace, _a) }; + } + return serializedValue; +} function getValueOrFunctionResult(value, defaultValueCreator) { var result; if (typeof value === "string") { @@ -9254,6 +9492,9 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { } function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { var requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } var userAgentValue = undefined; if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { var userAgentInfo = []; @@ -12540,15 +12781,15 @@ __webpack_require__.r(__webpack_exports__); // EXPORTS __webpack_require__.d(__webpack_exports__, { + "NIL": () => /* reexport */ nil, + "parse": () => /* reexport */ esm_node_parse, + "stringify": () => /* reexport */ esm_node_stringify, "v1": () => /* reexport */ esm_node_v1, "v3": () => /* reexport */ esm_node_v3, "v4": () => /* reexport */ esm_node_v4, "v5": () => /* reexport 
*/ esm_node_v5, - "NIL": () => /* reexport */ nil, - "version": () => /* reexport */ esm_node_version, "validate": () => /* reexport */ esm_node_validate, - "stringify": () => /* reexport */ esm_node_stringify, - "parse": () => /* reexport */ esm_node_parse + "version": () => /* reexport */ esm_node_version }); // EXTERNAL MODULE: external "crypto" @@ -12557,9 +12798,16 @@ var external_crypto_default = /*#__PURE__*/__webpack_require__.n(external_crypto // CONCATENATED MODULE: ./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js -const rnds8 = new Uint8Array(16); +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; function rng() { - return external_crypto_default().randomFillSync(rnds8); + if (poolPtr > rnds8Pool.length - 16) { + external_crypto_default().randomFillSync(rnds8Pool); + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); } // CONCATENATED MODULE: ./node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js /* harmony default export */ const regex = (/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i); @@ -54418,45 +54666,33 @@ module.exports = v4; // ESM COMPAT FLAG __webpack_require__.r(__webpack_exports__); -// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js -var core = __webpack_require__(2186); - // EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js var cache = __webpack_require__(7799); - +// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js +var core = __webpack_require__(2186); // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js var exec = __webpack_require__(1514); - // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js var glob = __webpack_require__(8090); - // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js var io = __webpack_require__(7436); - -// EXTERNAL MODULE: external "crypto" -var external_crypto_ = __webpack_require__(6417); -var external_crypto_default = /*#__PURE__*/__webpack_require__.n(external_crypto_); - // EXTERNAL MODULE: external "fs" var external_fs_ = __webpack_require__(5747); var external_fs_default = /*#__PURE__*/__webpack_require__.n(external_fs_); -// EXTERNAL MODULE: external "os" -var external_os_ = __webpack_require__(2087); -var external_os_default = /*#__PURE__*/__webpack_require__.n(external_os_); - // EXTERNAL MODULE: external "path" var external_path_ = __webpack_require__(5622); var external_path_default = /*#__PURE__*/__webpack_require__.n(external_path_); +// EXTERNAL MODULE: external "crypto" +var external_crypto_ = __webpack_require__(6417); +var external_crypto_default = /*#__PURE__*/__webpack_require__.n(external_crypto_); + +// EXTERNAL MODULE: external "os" +var external_os_ = __webpack_require__(2087); +var external_os_default = /*#__PURE__*/__webpack_require__.n(external_os_); + // CONCATENATED MODULE: ./src/common.ts -var __asyncValues = (undefined && undefined.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; @@ -54518,32 +54754,25 @@ async function getRustVersion() { } async function getCmdOutput(cmd, args = [], options = {}) { let stdout = ""; - await exec.exec(cmd, args, Object.assign({ silent: true, listeners: { + await exec.exec(cmd, args, { + silent: true, + listeners: { stdout(data) { stdout += data.toString(); }, - } }, options)); + }, + ...options, + }); return stdout; } async function getLockfileHash() { - var e_1, _a; const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock", { followSymbolicLinks: false }); const files = await globber.glob(); files.sort((a, b) => a.localeCompare(b)); const hasher = external_crypto_default().createHash("sha1"); for (const file of files) { - try { - for (var _b = (e_1 = void 0, __asyncValues(external_fs_default().createReadStream(file))), _c; _c = await _b.next(), !_c.done;) { - const chunk = _c.value; - hasher.update(chunk); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) await _a.call(_b); - } - finally { if (e_1) throw e_1.error; } + for await (const chunk of external_fs_default().createReadStream(file)) { + hasher.update(chunk); } } return hasher.digest("hex").slice(0, 20); @@ -54559,28 +54788,17 @@ async function getPackages() { }); } async function cleanTarget(packages) { - var e_2, _a; await external_fs_default().promises.unlink("./target/.rustc_info.json"); await io.rmRF("./target/debug/examples"); await io.rmRF("./target/debug/incremental"); let dir; // remove all *files* from debug dir = await external_fs_default().promises.opendir("./target/debug"); - try { - for (var dir_1 = __asyncValues(dir), dir_1_1; dir_1_1 = await dir_1.next(), !dir_1_1.done;) { - const dirent = dir_1_1.value; - if (dirent.isFile()) { - await rm(dir.path, dirent); - } + for await (const dirent of dir) { + if (dirent.isFile()) { + await rm(dir.path, dirent); } } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (dir_1_1 && !dir_1_1.done && (_a = dir_1.return)) await _a.call(dir_1); - } - finally { if (e_2) throw e_2.error; } - } const keepPkg = new Set(packages.map((p) => p.name)); await rmExcept("./target/debug/build", keepPkg); await rmExcept("./target/debug/.fingerprint", keepPkg); @@ -54596,30 +54814,19 @@ async function cleanTarget(packages) { } const oneWeek = 7 * 24 * 3600 * 1000; async function rmExcept(dirName, keepPrefix) { - var e_3, _a; const dir = await external_fs_default().promises.opendir(dirName); - try { - for (var dir_2 = __asyncValues(dir), dir_2_1; dir_2_1 = await dir_2.next(), !dir_2_1.done;) { - const dirent = dir_2_1.value; - let name = dirent.name; - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - // we don’t really know - if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) { - await rm(dir.path, dirent); - } + for await (const dirent of dir) { + let name = dirent.name; + const idx = name.lastIndexOf("-"); + if (idx !== 
-1) { + name = name.slice(0, idx); } - } - catch (e_3_1) { e_3 = { error: e_3_1 }; } - finally { - try { - if (dir_2_1 && !dir_2_1.done && (_a = dir_2.return)) await _a.call(dir_2); + const fileName = external_path_default().join(dir.path, dirent.name); + const { mtime } = await external_fs_default().promises.stat(fileName); + // we don’t really know + if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) { + await rm(dir.path, dirent); } - finally { if (e_3) throw e_3.error; } } } async function rm(parent, dirent) { @@ -54634,13 +54841,6 @@ async function rm(parent, dirent) { } // CONCATENATED MODULE: ./src/save.ts -var save_asyncValues = (undefined && undefined.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; @@ -54667,15 +54867,15 @@ async function run() { try { await cleanRegistry(registryName, packages); } - catch (_a) { } + catch { } try { await cleanGit(packages); } - catch (_b) { } + catch { } try { await cleanTarget(packages); } - catch (_c) { } + catch { } core.info(`Saving paths:\n ${savePaths.join("\n ")}`); core.info(`Using key "${key}".`); try { @@ -54704,28 +54904,16 @@ async function getRegistryName() { return external_path_default().basename(external_path_default().dirname(first)); } async function cleanRegistry(registryName, packages) { - var e_1, _a; await io.rmRF(external_path_default().join(paths.index, registryName, ".cache")); const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); const dir = await external_fs_default().promises.opendir(external_path_default().join(paths.cache, registryName)); - try { - for (var dir_1 = save_asyncValues(dir), dir_1_1; dir_1_1 = await dir_1.next(), !dir_1_1.done;) { - const dirent = dir_1_1.value; - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } + for await (const dirent of dir) { + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); } } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (dir_1_1 && !dir_1_1.done && (_a = dir_1.return)) await _a.call(dir_1); - } - finally { if (e_1) throw e_1.error; } - } } async function cleanGit(packages) { - var e_2, _a, e_3, _b, e_4, _c; const coPath = external_path_default().join(paths.git, "checkouts"); const dbPath = external_path_default().join(paths.git, "db"); const repos = new Map(); @@ -54747,59 +54935,29 @@ async function cleanGit(packages) { let dir; // clean the db dir = await external_fs_default().promises.opendir(dbPath); - try { - for (var dir_2 = save_asyncValues(dir), dir_2_1; dir_2_1 = await dir_2.next(), !dir_2_1.done;) { - const dirent = dir_2_1.value; - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); } } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (dir_2_1 && !dir_2_1.done && (_a = dir_2.return)) await 
_a.call(dir_2); - } - finally { if (e_2) throw e_2.error; } - } // clean the checkouts dir = await external_fs_default().promises.opendir(coPath); - try { - for (var dir_3 = save_asyncValues(dir), dir_3_1; dir_3_1 = await dir_3.next(), !dir_3_1.done;) { - const dirent = dir_3_1.value; - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); - try { - for (var refsDir_1 = (e_4 = void 0, save_asyncValues(refsDir)), refsDir_1_1; refsDir_1_1 = await refsDir_1.next(), !refsDir_1_1.done;) { - const dirent = refsDir_1_1.value; - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - catch (e_4_1) { e_4 = { error: e_4_1 }; } - finally { - try { - if (refsDir_1_1 && !refsDir_1_1.done && (_c = refsDir_1.return)) await _c.call(refsDir_1); - } - finally { if (e_4) throw e_4.error; } + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); } } } - catch (e_3_1) { e_3 = { error: e_3_1 }; } - finally { - try { - if (dir_3_1 && !dir_3_1.done && (_b = dir_3.return)) await _b.call(dir_3); - } - finally { if (e_3) throw e_3.error; } - } } async function macOsWorkaround() { try { @@ -54807,7 +54965,7 @@ async function macOsWorkaround() { // Also see https://github.com/rust-lang/cargo/issues/8603 await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); } - catch (_a) { } + catch { } } @@ -54841,7 +54999,7 @@ module.exports = JSON.parse("[\"ac\",\"com.ac\",\"edu.ac\",\"gov.ac\",\"net.ac\" /***/ ((module) => { "use strict"; -module.exports = require("assert"); +module.exports = require("assert");; /***/ }), @@ -54849,7 +55007,7 @@ module.exports = require("assert"); /***/ ((module) => { "use strict"; -module.exports = require("buffer"); +module.exports = require("buffer");; /***/ }), @@ -54857,7 +55015,7 @@ module.exports = require("buffer"); /***/ ((module) => { "use strict"; -module.exports = require("child_process"); +module.exports = require("child_process");; /***/ }), @@ -54865,7 +55023,7 @@ module.exports = require("child_process"); /***/ ((module) => { "use strict"; -module.exports = require("crypto"); +module.exports = require("crypto");; /***/ }), @@ -54873,7 +55031,7 @@ module.exports = require("crypto"); /***/ ((module) => { "use strict"; -module.exports = require("events"); +module.exports = require("events");; /***/ }), @@ -54881,7 +55039,7 @@ module.exports = require("events"); /***/ ((module) => { "use strict"; -module.exports = require("fs"); +module.exports = require("fs");; /***/ }), @@ -54889,7 +55047,7 @@ module.exports = require("fs"); /***/ ((module) => { "use strict"; -module.exports = require("http"); +module.exports = require("http");; /***/ }), @@ -54897,7 +55055,7 @@ module.exports = require("http"); /***/ ((module) => { "use strict"; -module.exports = require("https"); +module.exports = require("https");; /***/ }), @@ -54905,7 +55063,7 @@ module.exports = require("https"); /***/ ((module) => { "use strict"; -module.exports = require("net"); +module.exports = require("net");; /***/ }), @@ -54913,7 +55071,7 @@ 
module.exports = require("net"); /***/ ((module) => { "use strict"; -module.exports = require("os"); +module.exports = require("os");; /***/ }), @@ -54921,7 +55079,7 @@ module.exports = require("os"); /***/ ((module) => { "use strict"; -module.exports = require("path"); +module.exports = require("path");; /***/ }), @@ -54929,7 +55087,7 @@ module.exports = require("path"); /***/ ((module) => { "use strict"; -module.exports = require("punycode"); +module.exports = require("punycode");; /***/ }), @@ -54937,7 +55095,7 @@ module.exports = require("punycode"); /***/ ((module) => { "use strict"; -module.exports = require("stream"); +module.exports = require("stream");; /***/ }), @@ -54945,7 +55103,7 @@ module.exports = require("stream"); /***/ ((module) => { "use strict"; -module.exports = require("string_decoder"); +module.exports = require("string_decoder");; /***/ }), @@ -54953,7 +55111,7 @@ module.exports = require("string_decoder"); /***/ ((module) => { "use strict"; -module.exports = require("timers"); +module.exports = require("timers");; /***/ }), @@ -54961,7 +55119,7 @@ module.exports = require("timers"); /***/ ((module) => { "use strict"; -module.exports = require("tls"); +module.exports = require("tls");; /***/ }), @@ -54969,7 +55127,7 @@ module.exports = require("tls"); /***/ ((module) => { "use strict"; -module.exports = require("url"); +module.exports = require("url");; /***/ }), @@ -54977,7 +55135,7 @@ module.exports = require("url"); /***/ ((module) => { "use strict"; -module.exports = require("util"); +module.exports = require("util");; /***/ }), @@ -54985,7 +55143,7 @@ module.exports = require("util"); /***/ ((module) => { "use strict"; -module.exports = require("zlib"); +module.exports = require("zlib");; /***/ }) diff --git a/package-lock.json b/package-lock.json index ae0ed29..face14d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,26 +8,26 @@ "version": "1.0.2", "license": "LGPL-3.0", "dependencies": { - "@actions/cache": "^1.0.3", + "@actions/cache": "^1.0.4", "@actions/core": "^1.2.6", "@actions/exec": "^1.0.4", "@actions/glob": "^0.1.0", "@actions/io": "^1.0.2" }, "devDependencies": { - "@vercel/ncc": "^0.24.1", - "typescript": "^4.0.3" + "@vercel/ncc": "^0.25.1", + "typescript": "^4.0.5" }, "funding": { "url": "https://github.com/sponsors/Swatinem" } }, "node_modules/@actions/cache": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.3.tgz", - "integrity": "sha512-2DcdMrqbfj5LcsEYIh6ACgjb0bNMeMPP7y1a+bf/Sk0MFfkkrYRNFihkXRV6v0TNkNo0b5X38xmUguRZ5ZPLbw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.4.tgz", + "integrity": "sha512-1grYfbu8P6JDDHc40eOI5tQDRcAxMwq5HBWhaCqEg9o/ixDRZfwPHlQvQAop2ZzFCjF2ns0ENQOIBAH8GNn+zA==", "dependencies": { - "@actions/core": "^1.2.4", + "@actions/core": "^1.2.6", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^1.0.9", @@ -140,9 +140,9 @@ "integrity": "sha512-uZtkfKblCEQtZKBF6EBXVZeQNl82yqtDQdv+eck8u7tdPxjLu2/lp5/uPW+um2tpuxINHWy3GhiccY7QgEaVHQ==" }, "node_modules/@azure/core-http": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-1.1.9.tgz", - "integrity": "sha512-wM0HMRNQaE2NtTHb+9FXF7uxUqaAHFTMVu6OzlEll6gUGybcDqM7+9Oklp33BhEfq+ZumpCoqxq3njNbMHuf/w==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-1.2.0.tgz", + "integrity": 
"sha512-SQmyI1tpstWKePNmTseEUp8PMq1uNBslvGBrYF2zNM/fEfLD1q64XCatoH8nDQtSmDydEPsqlyyLSjjnuXrlOQ==", "dependencies": { "@azure/abort-controller": "^1.0.0", "@azure/core-auth": "^1.1.3", @@ -157,7 +157,7 @@ "tough-cookie": "^4.0.0", "tslib": "^2.0.0", "tunnel": "^0.0.6", - "uuid": "^8.1.0", + "uuid": "^8.3.0", "xml2js": "^0.4.19" }, "engines": { @@ -317,9 +317,9 @@ } }, "node_modules/@types/node": { - "version": "14.14.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.0.tgz", - "integrity": "sha512-BfbIHP9IapdupGhq/hc+jT5dyiBVZ2DdeC5WwJWQWDb0GijQlzUFAeIQn/2GtvZcd2HVUU7An8felIICFTC2qg==" + "version": "14.14.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.6.tgz", + "integrity": "sha512-6QlRuqsQ/Ox/aJEQWBEJG7A9+u7oSYl3mem/K8IzxXG/kAGbV1YPD9Bg9Zw3vyxC/YP+zONKwy8hGkSt1jxFMw==" }, "node_modules/@types/node-fetch": { "version": "2.5.7", @@ -352,9 +352,9 @@ } }, "node_modules/@vercel/ncc": { - "version": "0.24.1", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.24.1.tgz", - "integrity": "sha512-r9m7brz2hNmq5TF3sxrK4qR/FhXn44XIMglQUir4sT7Sh5GOaYXlMYikHFwJStf8rmQGTlvOoBXt4yHVonRG8A==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.25.1.tgz", + "integrity": "sha512-dGecC5+1wLof1MQpey4+6i2KZv4Sfs6WfXkl9KfO32GED4ZPiKxRfvtGPjbjZv0IbqMl6CxtcV1RotXYfd5SSA==", "dev": true, "bin": { "ncc": "dist/ncc/cli.js" @@ -550,9 +550,9 @@ } }, "node_modules/typescript": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.3.tgz", - "integrity": "sha512-tEu6DGxGgRJPb/mVPIZ48e69xCn2yRmCgYmDugAVwmJ6o+0u1RI18eO7E7WBTLYLaEVVOhwQmcdhQHweux/WPg==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.5.tgz", + "integrity": "sha512-ywmr/VrTVCmNTJ6iV2LwIrfG1P+lv6luD8sUJs+2eI9NLGigaN+nUQc13iHqisq7bra9lnmUSYqbJvegraBOPQ==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -601,11 +601,11 @@ }, "dependencies": { "@actions/cache": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.3.tgz", - "integrity": "sha512-2DcdMrqbfj5LcsEYIh6ACgjb0bNMeMPP7y1a+bf/Sk0MFfkkrYRNFihkXRV6v0TNkNo0b5X38xmUguRZ5ZPLbw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.4.tgz", + "integrity": "sha512-1grYfbu8P6JDDHc40eOI5tQDRcAxMwq5HBWhaCqEg9o/ixDRZfwPHlQvQAop2ZzFCjF2ns0ENQOIBAH8GNn+zA==", "requires": { - "@actions/core": "^1.2.4", + "@actions/core": "^1.2.6", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^1.0.9", @@ -713,9 +713,9 @@ } }, "@azure/core-http": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-1.1.9.tgz", - "integrity": "sha512-wM0HMRNQaE2NtTHb+9FXF7uxUqaAHFTMVu6OzlEll6gUGybcDqM7+9Oklp33BhEfq+ZumpCoqxq3njNbMHuf/w==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-1.2.0.tgz", + "integrity": "sha512-SQmyI1tpstWKePNmTseEUp8PMq1uNBslvGBrYF2zNM/fEfLD1q64XCatoH8nDQtSmDydEPsqlyyLSjjnuXrlOQ==", "requires": { "@azure/abort-controller": "^1.0.0", "@azure/core-auth": "^1.1.3", @@ -730,7 +730,7 @@ "tough-cookie": "^4.0.0", "tslib": "^2.0.0", "tunnel": "^0.0.6", - "uuid": "^8.1.0", + "uuid": "^8.3.0", "xml2js": "^0.4.19" }, "dependencies": { @@ -869,9 +869,9 @@ "integrity": "sha512-hZNKjKOYsckoOEgBziGMnBcX0M7EtstnCmwz5jZUOUYwlZ+/xxX6z3jPu1XVO2Jivk0eLfuP9GP+vFD49CMetw==" }, "@types/node": { - "version": "14.14.0", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-14.14.0.tgz", - "integrity": "sha512-BfbIHP9IapdupGhq/hc+jT5dyiBVZ2DdeC5WwJWQWDb0GijQlzUFAeIQn/2GtvZcd2HVUU7An8felIICFTC2qg==" + "version": "14.14.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.6.tgz", + "integrity": "sha512-6QlRuqsQ/Ox/aJEQWBEJG7A9+u7oSYl3mem/K8IzxXG/kAGbV1YPD9Bg9Zw3vyxC/YP+zONKwy8hGkSt1jxFMw==" }, "@types/node-fetch": { "version": "2.5.7", @@ -903,9 +903,9 @@ } }, "@vercel/ncc": { - "version": "0.24.1", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.24.1.tgz", - "integrity": "sha512-r9m7brz2hNmq5TF3sxrK4qR/FhXn44XIMglQUir4sT7Sh5GOaYXlMYikHFwJStf8rmQGTlvOoBXt4yHVonRG8A==", + "version": "0.25.1", + "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.25.1.tgz", + "integrity": "sha512-dGecC5+1wLof1MQpey4+6i2KZv4Sfs6WfXkl9KfO32GED4ZPiKxRfvtGPjbjZv0IbqMl6CxtcV1RotXYfd5SSA==", "dev": true }, "abort-controller": { @@ -1050,9 +1050,9 @@ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" }, "typescript": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.3.tgz", - "integrity": "sha512-tEu6DGxGgRJPb/mVPIZ48e69xCn2yRmCgYmDugAVwmJ6o+0u1RI18eO7E7WBTLYLaEVVOhwQmcdhQHweux/WPg==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.5.tgz", + "integrity": "sha512-ywmr/VrTVCmNTJ6iV2LwIrfG1P+lv6luD8sUJs+2eI9NLGigaN+nUQc13iHqisq7bra9lnmUSYqbJvegraBOPQ==", "dev": true }, "universalify": { diff --git a/package.json b/package.json index 6d7df72..8ed7535 100644 --- a/package.json +++ b/package.json @@ -22,17 +22,17 @@ }, "homepage": "https://github.com/Swatinem/rust-cache#readme", "dependencies": { - "@actions/cache": "^1.0.3", + "@actions/cache": "^1.0.4", "@actions/core": "^1.2.6", "@actions/exec": "^1.0.4", "@actions/glob": "^0.1.0", "@actions/io": "^1.0.2" }, "devDependencies": { - "@vercel/ncc": "^0.24.1", - "typescript": "^4.0.3" + "@vercel/ncc": "^0.25.1", + "typescript": "^4.0.5" }, "scripts": { - "prepare": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts" + "prepare": "ncc build --target es2020 -o dist/restore src/restore.ts && ncc build --target es2020 -o dist/save src/save.ts" } } diff --git a/tsconfig.json b/tsconfig.json index c0e5888..5a5b361 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -4,7 +4,7 @@ "diagnostics": true, "lib": ["esnext"], - "target": "es2017", + "target": "es2020", "resolveJsonModule": true, "moduleResolution": "node",