From 9b0be58822d00cbc2e7efa6780a8f0b9c273df2e Mon Sep 17 00:00:00 2001 From: Sampark Sharma Date: Fri, 23 Dec 2022 11:49:17 +0530 Subject: [PATCH] Release compression related changes for windows (#1039) * Release compression related changes for windows * Update license --- .licenses/npm/@actions/cache.dep.yml | Bin 1242 -> 1242 bytes README.md | 1 + RELEASES.md | 7 +- dist/restore-only/index.js | 334 +++++++++++++++++++-------- dist/restore/index.js | 334 +++++++++++++++++++-------- dist/save-only/index.js | 334 +++++++++++++++++++-------- dist/save/index.js | 334 +++++++++++++++++++-------- package-lock.json | 18 +- package.json | 4 +- tips-and-workarounds.md | 18 -- 10 files changed, 962 insertions(+), 422 deletions(-) diff --git a/.licenses/npm/@actions/cache.dep.yml b/.licenses/npm/@actions/cache.dep.yml index 75c2c69aef322e92b07dd74edd233b601f2d36ca..fa356cb1b112c03af4dcb8658cdf8d66d49340b4 100644 GIT binary patch delta 14 Vcmcb`d5d#`8ndCE!A8xEEC41L1fBo@ delta 14 Vcmcb`d5d#`8nc0(*+$KcEC41i1fu`| diff --git a/README.md b/README.md index fba677b..1562039 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,7 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac * Fix zstd not working for windows on gnu tar in issues. * Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes. * Two new actions available for granular control over caches - [restore](restore/action.yml) and [save](save/action.yml) +* Add support for cross os caching. For example, a cache saved on windows can be restored on ubuntu and vice versa. Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions diff --git a/RELEASES.md b/RELEASES.md index fae6244..8a03ae6 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -54,4 +54,9 @@ - Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache. ### 3.2.0 -- Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache \ No newline at end of file +- Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache + +### 3.2.1 +- Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984)) +- Added support for fallback to gzip to restore old caches on windows. +- Added logs for cache version in case of a cache miss. 
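As an illustration (not part of the applied patch): the 3.2.1 notes above describe the new Windows behavior, prefer GNU tar with zstd, fall back to BSD tar (still with zstd, via an intermediate tar file), and retry with gzip so caches written by older releases stay restorable. A minimal sketch of the tar selection, using the GnuTarPathOnWindows and SystemTarPathOnWindows values this patch introduces; the helper name and the final PATH fallback are illustrative simplifications:

const fs = require('fs');

// Illustrative sketch: pick the tar flavor on a hosted Windows runner.
function pickWindowsTar() {
  const gnuTar = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;       // GnuTarPathOnWindows
  const systemTar = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; // SystemTarPathOnWindows
  if (fs.existsSync(gnuTar)) {
    return { path: gnuTar, type: 'gnu' };    // preferred: GNU tar shipped with Git for Windows
  }
  if (fs.existsSync(systemTar)) {
    return { path: systemTar, type: 'bsd' }; // fallback: built-in BSD tar
  }
  return { path: 'tar', type: 'gnu' };       // last resort: whatever tar is on PATH
}

console.log(pickWindowsTar());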
\ No newline at end of file diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index 2de1cd7..189523d 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -1177,10 +1177,6 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3432,7 +3431,12 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3441,6 +3445,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -3450,6 +3455,22 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. 
There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); @@ -38202,21 +38223,19 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -38224,25 +38243,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === 
constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38250,91 +38336,116 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. 
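As an illustration (not part of the applied patch): the BSD_TAR_ZSTD branch below is the Windows workaround. When only BSD tar is available, zstd is not wired in through --use-compress-program; instead the archive is decompressed to an intermediate cache.tar first and tar only unpacks that file, and on create the order is reversed. A rough sketch of the command pairs this produces; the cache.tzst name and the d:/a/... paths are illustrative placeholders:

// Illustrative only: example command pairs for BSD tar + zstd on Windows.
// Extract: zstd writes cache.tar, then tar unpacks it.
const extractCommands = [
  'zstd -d --long=30 --force -o cache.tar d:/a/_temp/cache.tzst',
  '"C:\\Windows\\System32\\tar.exe" -xf cache.tar -P -C d:/a/_work/repo'
];
// Create: tar writes cache.tar from manifest.txt, then zstd compresses it.
const createCommands = [
  '"C:\\Windows\\System32\\tar.exe" --posix -cf cache.tar --exclude cache.tar -P -C d:/a/_work/repo --files-from manifest.txt',
  'zstd -T0 --long=30 --force -o d:/a/_temp/cache.tzst cache.tar'
];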
+ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47084,6 +47195,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); +const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47145,16 +47257,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - const compressionMethod = yield utils.getCompressionMethod(); + let cacheEntry; + let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; + // This is to support the old cache entry created by gzip on windows. + if (process.platform === 'win32' && + compressionMethod !== constants_1.CompressionMethod.Gzip) { + compressionMethod = constants_1.CompressionMethod.Gzip; + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return undefined; + } + core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); + } + else { + // Cache not found + return undefined; + } } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53235,6 +53362,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53243,6 +53375,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
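As an illustration (not part of the applied patch): the restoreCache change above retries the entry lookup with gzip when no zstd-versioned entry exists on Windows, so caches written by older action versions can still be restored. A condensed sketch of that control flow; lookup() is a hypothetical stand-in for cacheHttpClient.getCacheEntry:

// Condensed sketch of the Windows gzip fallback (lookup() is hypothetical).
async function findCacheEntry(keys, paths, lookup) {
  let compressionMethod = 'zstd';
  let entry = await lookup(keys, paths, { compressionMethod });
  if (!entry?.archiveLocation && process.platform === 'win32') {
    // Older Windows caches were saved with gzip; retry with that cache version.
    compressionMethod = 'gzip';
    entry = await lookup(keys, paths, { compressionMethod });
  }
  return entry?.archiveLocation ? { entry, compressionMethod } : undefined;
}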
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/restore/index.js b/dist/restore/index.js index 076b17e..2a6be56 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1177,10 +1177,6 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3432,7 +3431,12 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3441,6 +3445,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -3450,6 +3455,22 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); @@ -38115,21 +38136,19 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -38137,25 +38156,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + 
type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38163,91 +38249,116 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. 
Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47055,6 +47166,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); +const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47116,16 +47228,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - const compressionMethod = yield utils.getCompressionMethod(); + let cacheEntry; + let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; + // This is to support the old cache entry created by gzip on windows. + if (process.platform === 'win32' && + compressionMethod !== constants_1.CompressionMethod.Gzip) { + compressionMethod = constants_1.CompressionMethod.Gzip; + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return undefined; + } + core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); + } + else { + // Cache not found + return undefined; + } } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53235,6 +53362,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53243,6 +53375,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
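As an illustration (not part of the applied patch): with debug logging enabled, the new printCachesListForDiagnostics call above queries the caches list for the primary key after a 204 response and logs any near-miss entries (same key, different version or scope). A trimmed sketch of that diagnostic, with httpClient, getCacheApiUrl and core treated as injected dependencies rather than the bundle's module-level imports:

// Trimmed sketch of the cache-miss diagnostics; dependencies are passed in.
async function logNearMisses(key, version, httpClient, getCacheApiUrl, core) {
  const resource = `caches?key=${encodeURIComponent(key)}`;
  const response = await httpClient.getJson(getCacheApiUrl(resource));
  if (response.statusCode !== 200 || !response.result?.totalCount) {
    return; // nothing similar to report
  }
  core.debug(`No cache found for key '${key}' and version '${version}'; similar keys exist:`);
  for (const entry of response.result.artifactCaches ?? []) {
    core.debug(`Key: ${entry.cacheKey}, Version: ${entry.cacheVersion}, Scope: ${entry.scope}`);
  }
}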
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/save-only/index.js b/dist/save-only/index.js index b7da72d..37ab951 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -1233,10 +1233,6 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1260,13 +1256,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3488,7 +3487,12 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3497,6 +3501,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -3506,6 +3511,22 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); @@ -38166,21 +38187,19 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -38188,25 +38207,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + 
type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38214,91 +38300,116 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. 
Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47197,6 +47308,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); +const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47258,16 +47370,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - const compressionMethod = yield utils.getCompressionMethod(); + let cacheEntry; + let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; + // This is to support the old cache entry created by gzip on windows. + if (process.platform === 'win32' && + compressionMethod !== constants_1.CompressionMethod.Gzip) { + compressionMethod = constants_1.CompressionMethod.Gzip; + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return undefined; + } + core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); + } + else { + // Cache not found + return undefined; + } } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53270,6 +53397,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53278,6 +53410,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
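As an illustration (not part of the applied patch): because the BSD-tar-plus-zstd path yields two independent command strings, the new execCommands helper above runs each one as its own process and reports which program failed. The same logic, restated without the __awaiter transpilation for readability, using the @actions/exec package:

// Readable restatement of execCommands using @actions/exec.
const exec = require('@actions/exec');

async function execCommands(commands, cwd) {
  for (const command of commands) {
    try {
      await exec.exec(command, undefined, { cwd });
    } catch (error) {
      throw new Error(`${command.split(' ')[0]} failed with error: ${error?.message}`);
    }
  }
}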
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/save/index.js b/dist/save/index.js index 4b27928..9dc76f1 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1177,10 +1177,6 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { - if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { - // Disable zstd due to bug https://github.com/actions/cache/issues/301 - return constants_1.CompressionMethod.Gzip; - } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function isGnuTarInstalled() { +function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { + if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar'); + return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } -exports.isGnuTarInstalled = isGnuTarInstalled; +exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3432,7 +3431,12 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + // Cache not found if (response.statusCode === 204) { + // List cache for primary key only if cache miss occurs + if (core.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } return null; } if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { @@ -3441,6 +3445,7 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { + // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -3450,6 +3455,22 @@ function getCacheEntry(keys, paths, options) { }); } exports.getCacheEntry = getCacheEntry; +function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } + } + }); +} function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); @@ -38110,21 +38131,19 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -function getTarPath(args, compressionMethod) { +// Returns tar path and type: BSD or GNU +function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const systemTar = `${process.env['windir']}\\System32\\tar.exe`; - if (compressionMethod !== constants_1.CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local'); + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + // Use GNUtar as default on windows + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if (fs_1.existsSync(systemTar)) { - return systemTar; - } - else if (yield utils.isGnuTarInstalled()) { - args.push('--force-local'); + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } @@ -38132,25 +38151,92 @@ function getTarPath(args, compressionMethod) { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - args.push('--delay-directory-restore'); - return gnuTar; + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } + else { + return { + path: yield io.which('tar', true), + 
type: constants_1.ArchiveToolType.BSD + }; } - break; } default: break; } - return yield io.which('tar', true); + // Default assumption is GNU tar is present in path + return { + path: yield io.which('tar', true), + type: constants_1.ArchiveToolType.GNU + }; }); } -function execTar(args, compressionMethod, cwd) { +// Return arguments for tar as per tarPath, compressionMethod, method type and os +function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = 'cache.tar'; + const workingDirectory = getWorkingDirectory(); + // Speficic args for BSD tar on windows for workaround + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + // Method specific args + switch (type) { + case 'create': + args.push('--posix', '-cf', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD + ? tarFile + : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); + break; + case 'extract': + args.push('-xf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); + break; + case 'list': + args.push('-tf', BSD_TAR_ZSTD + ? tarFile + : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); + break; } - catch (error) { - throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + // Platform specific args + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case 'win32': + args.push('--force-local'); + break; + case 'darwin': + args.push('--delay-directory-restore'); + break; + } } + return args; + }); +} +// Returns commands to run tar and compression program +function getCommands(compressionMethod, type, archivePath = '') { + return __awaiter(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== 'create' + ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) + : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== 'create') { + args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; + } + else { + args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38158,91 +38244,116 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getCompressionProgram(compressionMethod) { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. 
Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; - default: - return ['-z']; - } +function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --long=30 --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -d --force -o', + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; + default: + return ['-z']; + } + }); } +// Used for creating the archive +// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. +// zstdmt is equivalent to 'zstd -T0' +// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. +// Using 30 here because we also support 32-bit self-hosted runners. +// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. +function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && + compressionMethod !== constants_1.CompressionMethod.Gzip && + IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --long=30 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : [ + '--use-compress-program', + IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD + ? [ + 'zstd -T0 --force -o', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + constants_1.TarFilename + ] + : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; + default: + return ['-z']; + } + }); +} +// Executes all commands as separate processes +function execCommands(commands, cwd) { + return __awaiter(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield exec_1.exec(command, undefined, { cwd }); + } + catch (error) { + throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + } + }); +} +// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const args = [ - ...getCompressionProgram(compressionMethod), - '-tf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P' - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'list', archivePath); + yield execCommands(commands); }); } exports.listTar = listTar; +// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const args = [ - ...getCompressionProgram(compressionMethod), - '-xf', - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ]; - yield execTar(args, compressionMethod); + const commands = yield getCommands(compressionMethod, 'extract', archivePath); + yield execCommands(commands); }); } exports.extractTar = extractTar; +// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = 'manifest.txt'; - const cacheFileName = utils.getCacheFileName(compressionMethod); - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); - const workingDirectory = getWorkingDirectory(); - // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. - // zstdmt is equivalent to 'zstd -T0' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. - function getCompressionProgram() { - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return [ - '--use-compress-program', - IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; - default: - return ['-z']; - } - } - const args = [ - '--posix', - ...getCompressionProgram(), - '-cf', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--exclude', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '-P', - '-C', - workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - '--files-from', - manifestFilename - ]; - yield execTar(args, compressionMethod, archiveFolder); + fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); + const commands = yield getCommands(compressionMethod, 'create'); + yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; @@ -47170,6 +47281,7 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); +const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47231,16 +47343,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - const compressionMethod = yield utils.getCompressionMethod(); + let cacheEntry; + let compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // Cache not found - return undefined; + // This is to support the old cache entry created by gzip on windows. + if (process.platform === 'win32' && + compressionMethod !== constants_1.CompressionMethod.Gzip) { + compressionMethod = constants_1.CompressionMethod.Gzip; + cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return undefined; + } + core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); + } + else { + // Cache not found + return undefined; + } } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53243,6 +53370,11 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +var ArchiveToolType; +(function (ArchiveToolType) { + ArchiveToolType["GNU"] = "gnu"; + ArchiveToolType["BSD"] = "bsd"; +})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53251,6 +53383,12 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; +// The default path of GNUtar on hosted Windows runners +exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; +// The default path of BSDtar on hosted Windows runners +exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; +exports.TarFilename = 'cache.tar'; +exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/package-lock.json b/package-lock.json index d219d01..71af18c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "cache", - "version": "3.2.0", + "version": "3.2.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "cache", - "version": "3.2.0", + "version": "3.2.1", "license": "MIT", "dependencies": { - "@actions/cache": "^3.0.6", + "@actions/cache": "^3.1.0", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,9 +36,9 @@ } }, "node_modules/@actions/cache": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz", - "integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz", + "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", @@ -9722,9 +9722,9 @@ }, "dependencies": { "@actions/cache": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz", - "integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz", + "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", diff --git a/package.json b/package.json index eaa0cd3..6035b0e 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "3.2.0", + "version": "3.2.1", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "^3.0.6", + "@actions/cache": "^3.1.0", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" diff --git a/tips-and-workarounds.md b/tips-and-workarounds.md index 0777bed..ac541d8 100644 --- a/tips-and-workarounds.md +++ b/tips-and-workarounds.md @@ -19,24 +19,6 @@ A cache today is immutable and cannot be updated. But some use cases require the ## Use cache across feature branches Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches. -## Improving cache restore performance on Windows/Using cross-os caching -Currently, cache restore is slow on Windows due to tar being inherently slow and the compression algorithm `gzip` in use. `zstd` is the default algorithm in use on linux and macos. 
It was disabled on Windows due to issues with bsd tar(libarchive), the tar implementation in use on Windows. - -To improve cache restore performance, we can re-enable `zstd` as the compression algorithm using the following workaround. Add the following step to your workflow before the cache step: - -```yaml - - if: ${{ runner.os == 'Windows' }} - name: Use GNU tar - shell: cmd - run: | - echo "Adding GNU tar to PATH" - echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%" -``` - -The `cache` action will use GNU tar instead of bsd tar on Windows. This should work on all Github Hosted runners as it is. For self-hosted runners, please ensure you have GNU tar and `zstd` installed. - -The above workaround is also needed if you wish to use cross-os caching since difference of compression algorithms will result in different cache versions for the same cache key. So the above workaround will ensure `zstd` is used for caching on all platforms thus resulting in the same cache version for the same cache key. - ## Force deletion of caches overriding default cache eviction policy Caches have a [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch use a lot of storage quota, more frequently used caches from the `default` branch may get thrashed. For example, if many pull requests on a repo are creating caches, those caches cannot be used in the default branch scope but will still occupy a lot of space until they are cleaned up by the [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). But sometimes we want to clean them up on a faster cadence to ensure the default branch caches are not thrashed. To achieve this, the [gh-actions-cache CLI](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches, as sketched below.
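To make the cleanup concrete, here is a minimal scheduled-workflow sketch for force-deleting the caches of a single branch. It assumes the `gh-actions-cache` extension's documented `list` and `delete` commands with tab-separated `list` output, the default `GITHUB_TOKEN` with `actions: write` permission, and a hypothetical branch ref `refs/pull/123/merge`; the cron cadence and branch selection are placeholders to adapt for your repository.

```yaml
name: Cleanup branch caches
on:
  schedule:
    - cron: '0 3 * * *'   # hypothetical daily cadence; pick what suits the repo

permissions:
  actions: write          # needed to delete caches via the Actions Cache API

jobs:
  cleanup:
    runs-on: ubuntu-latest
    steps:
      - name: Delete caches scoped to a branch
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          BRANCH: refs/pull/123/merge   # hypothetical branch; compute or pass this in practice
        run: |
          gh extension install actions/gh-actions-cache
          # List cache keys for the branch (first column of the list output), then delete each one
          cacheKeys=$(gh actions-cache list -R ${{ github.repository }} -B "$BRANCH" | cut -f 1)
          for key in $cacheKeys; do
            gh actions-cache delete "$key" -R ${{ github.repository }} -B "$BRANCH" --confirm
          done
```

The `--confirm` flag skips the interactive prompt so the delete command can run unattended in CI.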