From 501277cfd7e95763bb7a9e079b959b7e57713e71 Mon Sep 17 00:00:00 2001
From: Adam Dinwoodie
Date: Sat, 24 Dec 2022 13:15:08 +0000
Subject: [PATCH 1/2] README.md: remove outdated Windows cache tip link

As of 9b0be58 (Release compression related changes for windows (#1039),
2022-12-23), the section of tips-and-workarounds.md referring to improving
cache restore performance on Windows no longer exists, so don't link to it
from README.md.
---
 README.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/README.md b/README.md
index 1562039..7b0293f 100644
--- a/README.md
+++ b/README.md
@@ -245,7 +245,6 @@ Following are some of the known practices/workarounds which community has used t
 - [Cache segment restore timeout](./tips-and-workarounds.md#cache-segment-restore-timeout)
 - [Update a cache](./tips-and-workarounds.md#update-a-cache)
 - [Use cache across feature branches](./tips-and-workarounds.md#use-cache-across-feature-branches)
-- [Improving cache restore performance on Windows/Using cross-os caching](./tips-and-workarounds.md#improving-cache-restore-performance-on-windows-using-cross-os-caching)
 - [Force deletion of caches overriding default cache eviction policy](./tips-and-workarounds.md#force-deletion-of-caches-overriding-default-cache-eviction-policy)

 #### Windows environment variables

From 4723a57e26efda3a62cbde1812113b730952852d Mon Sep 17 00:00:00 2001
From: Sampark Sharma
Date: Tue, 27 Dec 2022 16:38:40 +0530
Subject: [PATCH 2/2] Revert compression changes related to windows but keep version logging (#1049)

* Revert compression changes related to windows due to symlink issues

* Added tips and workarounds for cross os
---
 .licenses/npm/@actions/cache.dep.yml | Bin 1242 -> 1242 bytes
 README.md                            |   2 +-
 RELEASES.md                          |   5 +-
 dist/restore-only/index.js           | 314 +++++++++------------
 dist/restore/index.js                | 314 +++++++++------------
 dist/save-only/index.js              | 314 +++++++++------------
 dist/save/index.js                   | 314 +++++++++------------
 package-lock.json                    |  18 +-
 package.json                         |   4 +-
 tips-and-workarounds.md              |  18 ++
 10 files changed, 426 insertions(+), 877 deletions(-)

diff --git a/.licenses/npm/@actions/cache.dep.yml b/.licenses/npm/@actions/cache.dep.yml
index fa356cb1b112c03af4dcb8658cdf8d66d49340b4..d35c8082786d1fe144a81a9e844d9dfb63c2eadd 100644
GIT binary patch
delta 12
Tcmcb`d5d#`2BYCd&5bMo9<&5N

delta 12
Tcmcb`d5d#`2BX17&5bMo9

 __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
- // Cache not found
 if (response.statusCode === 204) {
 // List cache for primary key only if cache miss occurs
 if (core.isDebug()) {
@@ -3445,7 +3445,6 @@ function getCacheEntry(keys, paths, options) {
 const cacheResult = response.result;
 const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
 if (!cacheDownloadUrl) {
- // Cache achiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38223,19 +38222,21 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Returns tar path and type: BSD or GNU -function getTarPath() { +function getTarPath(args, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - // Use GNUtar as default on windows - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); } else if (fs_1.existsSync(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); } break; } @@ -38243,92 +38244,25 @@ function getTarPath() { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } - else { - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.BSD - }; + args.push('--delay-directory-restore'); + return gnuTar; } + break; } default: break; } - // Default assumption is GNU tar is present in path - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.GNU - }; + return yield io.which('tar', true); }); } -// Return arguments for tar as per tarPath, compressionMethod, method type and os -function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { +function execTar(args, compressionMethod, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = 'cache.tar'; - const workingDirectory = getWorkingDirectory(); - // Speficic args for BSD tar on windows for workaround - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - // Method specific args - switch (type) { - case 'create': - args.push('--posix', '-cf', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); - break; - case 'extract': - args.push('-xf', BSD_TAR_ZSTD - ? tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); - break; - case 'list': - args.push('-tf', BSD_TAR_ZSTD - ? 
tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); - break; + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); } - // Platform specific args - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case 'win32': - args.push('--force-local'); - break; - case 'darwin': - args.push('--delay-directory-restore'); - break; - } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } - return args; - }); -} -// Returns commands to run tar and compression program -function getCommands(compressionMethod, type, archivePath = '') { - return __awaiter(this, void 0, void 0, function* () { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); - const compressionArgs = type !== 'create' - ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) - : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - if (BSD_TAR_ZSTD && type !== 'create') { - args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; - } - else { - args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38336,116 +38270,91 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --long=30 --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; - default: - return ['-z']; - } - }); +function getCompressionProgram(compressionMethod) { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -d' : 'unzstd']; + default: + return ['-z']; + } } -// Used for creating the archive -// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. -// zstdmt is equivalent to 'zstd -T0' -// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. -// Using 30 here because we also support 32-bit self-hosted runners. -// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. -function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --long=30 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; - default: - return ['-z']; - } - }); -} -// Executes all commands as separate processes -function execCommands(commands, cwd) { - return __awaiter(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield exec_1.exec(command, undefined, { cwd }); - } - catch (error) { - throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } - } - }); -} -// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, 'list', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); }); } exports.listTar = listTar; -// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, 'extract', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); }); } exports.extractTar = extractTar; -// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const commands = yield getCommands(compressionMethod, 'create'); - yield execCommands(commands, archiveFolder); + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // zstdmt is equivalent to 'zstd -T0' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--exclude', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); }); } exports.createTar = createTar; @@ -47195,7 +47104,6 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); -const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47257,31 +47165,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - let cacheEntry; - let compressionMethod = yield utils.getCompressionMethod(); + const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // This is to support the old cache entry created by gzip on windows. - if (process.platform === 'win32' && - compressionMethod !== constants_1.CompressionMethod.Gzip) { - compressionMethod = constants_1.CompressionMethod.Gzip; - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return undefined; - } - core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); - } - else { - // Cache not found - return undefined; - } + // Cache not found + return undefined; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53362,11 +53255,6 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); -var ArchiveToolType; -(function (ArchiveToolType) { - ArchiveToolType["GNU"] = "gnu"; - ArchiveToolType["BSD"] = "bsd"; -})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53375,12 +53263,6 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; -// The default path of GNUtar on hosted Windows runners -exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; -// The default path of BSDtar on hosted Windows runners -exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; -exports.TarFilename = 'cache.tar'; -exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/restore/index.js b/dist/restore/index.js index 2a6be56..81e33e9 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1177,6 +1177,10 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1200,16 +1204,13 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function getGnuTarPathOnWindows() { +function isGnuTarInstalled() { return __awaiter(this, void 0, void 0, function* () { - if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { - return constants_1.GnuTarPathOnWindows; - } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; + return versionOutput.toLowerCase().includes('gnu tar'); }); } -exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; +exports.isGnuTarInstalled = isGnuTarInstalled; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3431,7 +3432,6 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); - // Cache not found if (response.statusCode === 204) { // List cache for primary key only if cache miss occurs if (core.isDebug()) { @@ -3445,7 +3445,6 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { - // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38136,19 +38135,21 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Returns tar path and type: BSD or GNU -function getTarPath() { +function getTarPath(args, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - // Use GNUtar as default on windows - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); } else if (fs_1.existsSync(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); } break; } @@ -38156,92 +38157,25 @@ function getTarPath() { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } - else { - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.BSD - }; + args.push('--delay-directory-restore'); + return gnuTar; } + break; } default: break; } - // Default assumption is GNU tar is present in path - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.GNU - }; + return yield io.which('tar', true); }); } -// Return arguments for tar as per tarPath, compressionMethod, method type and os -function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { +function execTar(args, compressionMethod, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = 'cache.tar'; - const workingDirectory = getWorkingDirectory(); - // Speficic args for BSD tar on windows for workaround - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - // Method specific args - switch (type) { - case 'create': - args.push('--posix', '-cf', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); - break; - case 'extract': - args.push('-xf', BSD_TAR_ZSTD - ? tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); - break; - case 'list': - args.push('-tf', BSD_TAR_ZSTD - ? 
tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); - break; + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); } - // Platform specific args - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case 'win32': - args.push('--force-local'); - break; - case 'darwin': - args.push('--delay-directory-restore'); - break; - } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } - return args; - }); -} -// Returns commands to run tar and compression program -function getCommands(compressionMethod, type, archivePath = '') { - return __awaiter(this, void 0, void 0, function* () { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); - const compressionArgs = type !== 'create' - ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) - : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - if (BSD_TAR_ZSTD && type !== 'create') { - args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; - } - else { - args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38249,116 +38183,91 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --long=30 --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; - default: - return ['-z']; - } - }); +function getCompressionProgram(compressionMethod) { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -d' : 'unzstd']; + default: + return ['-z']; + } } -// Used for creating the archive -// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. -// zstdmt is equivalent to 'zstd -T0' -// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. -// Using 30 here because we also support 32-bit self-hosted runners. -// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. -function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --long=30 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; - default: - return ['-z']; - } - }); -} -// Executes all commands as separate processes -function execCommands(commands, cwd) { - return __awaiter(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield exec_1.exec(command, undefined, { cwd }); - } - catch (error) { - throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } - } - }); -} -// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, 'list', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); }); } exports.listTar = listTar; -// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, 'extract', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); }); } exports.extractTar = extractTar; -// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const commands = yield getCommands(compressionMethod, 'create'); - yield execCommands(commands, archiveFolder); + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // zstdmt is equivalent to 'zstd -T0' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--exclude', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); }); } exports.createTar = createTar; @@ -47166,7 +47075,6 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); -const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47228,31 +47136,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - let cacheEntry; - let compressionMethod = yield utils.getCompressionMethod(); + const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // This is to support the old cache entry created by gzip on windows. - if (process.platform === 'win32' && - compressionMethod !== constants_1.CompressionMethod.Gzip) { - compressionMethod = constants_1.CompressionMethod.Gzip; - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return undefined; - } - core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); - } - else { - // Cache not found - return undefined; - } + // Cache not found + return undefined; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53362,11 +53255,6 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); -var ArchiveToolType; -(function (ArchiveToolType) { - ArchiveToolType["GNU"] = "gnu"; - ArchiveToolType["BSD"] = "bsd"; -})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53375,12 +53263,6 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; -// The default path of GNUtar on hosted Windows runners -exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; -// The default path of BSDtar on hosted Windows runners -exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; -exports.TarFilename = 'cache.tar'; -exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/save-only/index.js b/dist/save-only/index.js index 37ab951..8573690 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -1233,6 +1233,10 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1256,16 +1260,13 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function getGnuTarPathOnWindows() { +function isGnuTarInstalled() { return __awaiter(this, void 0, void 0, function* () { - if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { - return constants_1.GnuTarPathOnWindows; - } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; + return versionOutput.toLowerCase().includes('gnu tar'); }); } -exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; +exports.isGnuTarInstalled = isGnuTarInstalled; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3487,7 +3488,6 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); - // Cache not found if (response.statusCode === 204) { // List cache for primary key only if cache miss occurs if (core.isDebug()) { @@ -3501,7 +3501,6 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { - // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38187,19 +38186,21 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Returns tar path and type: BSD or GNU -function getTarPath() { +function getTarPath(args, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - // Use GNUtar as default on windows - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); } else if (fs_1.existsSync(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); } break; } @@ -38207,92 +38208,25 @@ function getTarPath() { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } - else { - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.BSD - }; + args.push('--delay-directory-restore'); + return gnuTar; } + break; } default: break; } - // Default assumption is GNU tar is present in path - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.GNU - }; + return yield io.which('tar', true); }); } -// Return arguments for tar as per tarPath, compressionMethod, method type and os -function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { +function execTar(args, compressionMethod, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = 'cache.tar'; - const workingDirectory = getWorkingDirectory(); - // Speficic args for BSD tar on windows for workaround - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - // Method specific args - switch (type) { - case 'create': - args.push('--posix', '-cf', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); - break; - case 'extract': - args.push('-xf', BSD_TAR_ZSTD - ? tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); - break; - case 'list': - args.push('-tf', BSD_TAR_ZSTD - ? 
tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); - break; + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); } - // Platform specific args - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case 'win32': - args.push('--force-local'); - break; - case 'darwin': - args.push('--delay-directory-restore'); - break; - } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } - return args; - }); -} -// Returns commands to run tar and compression program -function getCommands(compressionMethod, type, archivePath = '') { - return __awaiter(this, void 0, void 0, function* () { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); - const compressionArgs = type !== 'create' - ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) - : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - if (BSD_TAR_ZSTD && type !== 'create') { - args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; - } - else { - args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38300,116 +38234,91 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --long=30 --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; - default: - return ['-z']; - } - }); +function getCompressionProgram(compressionMethod) { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -d' : 'unzstd']; + default: + return ['-z']; + } } -// Used for creating the archive -// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. -// zstdmt is equivalent to 'zstd -T0' -// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. -// Using 30 here because we also support 32-bit self-hosted runners. -// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. -function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --long=30 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; - default: - return ['-z']; - } - }); -} -// Executes all commands as separate processes -function execCommands(commands, cwd) { - return __awaiter(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield exec_1.exec(command, undefined, { cwd }); - } - catch (error) { - throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } - } - }); -} -// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, 'list', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); }); } exports.listTar = listTar; -// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, 'extract', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); }); } exports.extractTar = extractTar; -// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const commands = yield getCommands(compressionMethod, 'create'); - yield execCommands(commands, archiveFolder); + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // zstdmt is equivalent to 'zstd -T0' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--exclude', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); }); } exports.createTar = createTar; @@ -47308,7 +47217,6 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); -const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47370,31 +47278,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - let cacheEntry; - let compressionMethod = yield utils.getCompressionMethod(); + const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // This is to support the old cache entry created by gzip on windows. - if (process.platform === 'win32' && - compressionMethod !== constants_1.CompressionMethod.Gzip) { - compressionMethod = constants_1.CompressionMethod.Gzip; - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return undefined; - } - core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); - } - else { - // Cache not found - return undefined; - } + // Cache not found + return undefined; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53397,11 +53290,6 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); -var ArchiveToolType; -(function (ArchiveToolType) { - ArchiveToolType["GNU"] = "gnu"; - ArchiveToolType["BSD"] = "bsd"; -})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53410,12 +53298,6 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; -// The default path of GNUtar on hosted Windows runners -exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; -// The default path of BSDtar on hosted Windows runners -exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; -exports.TarFilename = 'cache.tar'; -exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/dist/save/index.js b/dist/save/index.js index 9dc76f1..9657785 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1177,6 +1177,10 @@ function getVersion(app) { // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } const versionOutput = yield getVersion('zstd'); const version = semver.clean(versionOutput); if (!versionOutput.toLowerCase().includes('zstd command line interface')) { @@ -1200,16 +1204,13 @@ function getCacheFileName(compressionMethod) { : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; -function getGnuTarPathOnWindows() { +function isGnuTarInstalled() { return __awaiter(this, void 0, void 0, function* () { - if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { - return constants_1.GnuTarPathOnWindows; - } const versionOutput = yield getVersion('tar'); - return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; + return versionOutput.toLowerCase().includes('gnu tar'); }); } -exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; +exports.isGnuTarInstalled = isGnuTarInstalled; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); @@ -3431,7 +3432,6 @@ function getCacheEntry(keys, paths, options) { const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); - // Cache not found if (response.statusCode === 204) { // List cache for primary key only if cache miss occurs if (core.isDebug()) { @@ -3445,7 +3445,6 @@ function getCacheEntry(keys, paths, options) { const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { - // Cache achiveLocation not found. This should never happen, and hence bail out. 
throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); @@ -38131,19 +38130,21 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const constants_1 = __webpack_require__(931); const IS_WINDOWS = process.platform === 'win32'; -// Returns tar path and type: BSD or GNU -function getTarPath() { +function getTarPath(args, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - // Use GNUtar as default on windows - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); } else if (fs_1.existsSync(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); } break; } @@ -38151,92 +38152,25 @@ function getTarPath() { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } - else { - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.BSD - }; + args.push('--delay-directory-restore'); + return gnuTar; } + break; } default: break; } - // Default assumption is GNU tar is present in path - return { - path: yield io.which('tar', true), - type: constants_1.ArchiveToolType.GNU - }; + return yield io.which('tar', true); }); } -// Return arguments for tar as per tarPath, compressionMethod, method type and os -function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { +function execTar(args, compressionMethod, cwd) { return __awaiter(this, void 0, void 0, function* () { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = 'cache.tar'; - const workingDirectory = getWorkingDirectory(); - // Speficic args for BSD tar on windows for workaround - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - // Method specific args - switch (type) { - case 'create': - args.push('--posix', '-cf', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD - ? tarFile - : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); - break; - case 'extract': - args.push('-xf', BSD_TAR_ZSTD - ? tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); - break; - case 'list': - args.push('-tf', BSD_TAR_ZSTD - ? 
tarFile - : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); - break; + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); } - // Platform specific args - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case 'win32': - args.push('--force-local'); - break; - case 'darwin': - args.push('--delay-directory-restore'); - break; - } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } - return args; - }); -} -// Returns commands to run tar and compression program -function getCommands(compressionMethod, type, archivePath = '') { - return __awaiter(this, void 0, void 0, function* () { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); - const compressionArgs = type !== 'create' - ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) - : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - if (BSD_TAR_ZSTD && type !== 'create') { - args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; - } - else { - args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(' ')]; }); } function getWorkingDirectory() { @@ -38244,116 +38178,91 @@ function getWorkingDirectory() { return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method -function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - // -d: Decompress. - // unzstd is equivalent to 'zstd -d' - // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. - // Using 30 here because we also support 32-bit self-hosted runners. - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --long=30 --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -d --force -o', - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; - default: - return ['-z']; - } - }); +function getCompressionProgram(compressionMethod) { + // -d: Decompress. + // unzstd is equivalent to 'zstd -d' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -d' : 'unzstd']; + default: + return ['-z']; + } } -// Used for creating the archive -// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. -// zstdmt is equivalent to 'zstd -T0' -// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. -// Using 30 here because we also support 32-bit self-hosted runners. -// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. -function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && - compressionMethod !== constants_1.CompressionMethod.Gzip && - IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --long=30 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : [ - '--use-compress-program', - IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD - ? [ - 'zstd -T0 --force -o', - cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), - constants_1.TarFilename - ] - : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; - default: - return ['-z']; - } - }); -} -// Executes all commands as separate processes -function execCommands(commands, cwd) { - return __awaiter(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield exec_1.exec(command, undefined, { cwd }); - } - catch (error) { - throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } - } - }); -} -// List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, 'list', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); }); } exports.listTar = listTar; -// Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, 'extract', archivePath); - yield execCommands(commands); + const args = [ + ...getCompressionProgram(compressionMethod), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); }); } exports.extractTar = extractTar; -// Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits - fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); - const commands = yield getCommands(compressionMethod, 'create'); - yield execCommands(commands, archiveFolder); + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // zstdmt is equivalent to 'zstd -T0' + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return [ + '--use-compress-program', + IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', IS_WINDOWS ? 
'zstd -T0' : 'zstdmt']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--exclude', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); }); } exports.createTar = createTar; @@ -47281,7 +47190,6 @@ const path = __importStar(__webpack_require__(622)); const utils = __importStar(__webpack_require__(15)); const cacheHttpClient = __importStar(__webpack_require__(114)); const tar_1 = __webpack_require__(434); -const constants_1 = __webpack_require__(931); class ValidationError extends Error { constructor(message) { super(message); @@ -47343,31 +47251,16 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { for (const key of keys) { checkKey(key); } - let cacheEntry; - let compressionMethod = yield utils.getCompressionMethod(); + const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - // This is to support the old cache entry created by gzip on windows. - if (process.platform === 'win32' && - compressionMethod !== constants_1.CompressionMethod.Gzip) { - compressionMethod = constants_1.CompressionMethod.Gzip; - cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return undefined; - } - core.info("Couldn't find cache entry with zstd compression, falling back to gzip compression."); - } - else { - // Cache not found - return undefined; - } + // Cache not found + return undefined; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); @@ -53370,11 +53263,6 @@ var CompressionMethod; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); -var ArchiveToolType; -(function (ArchiveToolType) { - ArchiveToolType["GNU"] = "gnu"; - ArchiveToolType["BSD"] = "bsd"; -})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. @@ -53383,12 +53271,6 @@ exports.DefaultRetryDelay = 5000; // over the socket during this period, the socket is destroyed and the download // is aborted. 
exports.SocketTimeout = 5000; -// The default path of GNUtar on hosted Windows runners -exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; -// The default path of BSDtar on hosted Windows runners -exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; -exports.TarFilename = 'cache.tar'; -exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), diff --git a/package-lock.json b/package-lock.json index 71af18c..008f657 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,15 +1,15 @@ { "name": "cache", - "version": "3.2.1", + "version": "3.2.2", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "cache", - "version": "3.2.1", + "version": "3.2.2", "license": "MIT", "dependencies": { - "@actions/cache": "^3.1.0", + "@actions/cache": "^3.1.1", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" @@ -36,9 +36,9 @@ } }, "node_modules/@actions/cache": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz", - "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz", + "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==", "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", @@ -9722,9 +9722,9 @@ }, "dependencies": { "@actions/cache": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0.tgz", - "integrity": "sha512-wKGJkpK3uFTgwy+KA0fxz0H3/ZPymdi0IlyhMmyoMeWd+CIv8xVPWdGlrPDDdN9bFgve2yvEPZVaKRb43Uwtyg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz", + "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==", "requires": { "@actions/core": "^1.10.0", "@actions/exec": "^1.0.1", diff --git a/package.json b/package.json index 6035b0e..a004e9b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "3.2.1", + "version": "3.2.2", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -23,7 +23,7 @@ "author": "GitHub", "license": "MIT", "dependencies": { - "@actions/cache": "^3.1.0", + "@actions/cache": "^3.1.1", "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", "@actions/io": "^1.1.2" diff --git a/tips-and-workarounds.md b/tips-and-workarounds.md index ac541d8..0777bed 100644 --- a/tips-and-workarounds.md +++ b/tips-and-workarounds.md @@ -19,6 +19,24 @@ A cache today is immutable and cannot be updated. But some use cases require the ## Use cache across feature branches Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches. +## Improving cache restore performance on Windows/Using cross-os caching +Currently, cache restore is slow on Windows due to tar being inherently slow and the compression algorithm `gzip` in use. `zstd` is the default algorithm in use on linux and macos. 
It was disabled on Windows due to issues with BSD tar (libarchive), the tar implementation in use on Windows. + +To improve cache restore performance, we can re-enable `zstd` as the compression algorithm using the following workaround. Add the following step to your workflow before the cache step: + +```yaml + - if: ${{ runner.os == 'Windows' }} + name: Use GNU tar + shell: cmd + run: | + echo "Adding GNU tar to PATH" + echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%" +``` + +The `cache` action will then use GNU tar instead of BSD tar on Windows. This should work as-is on all GitHub-hosted runners. For self-hosted runners, please ensure GNU tar and `zstd` are installed. + +The above workaround is also needed if you wish to use cross-os caching, since differing compression algorithms produce different cache versions for the same cache key. With the workaround in place, `zstd` is used on all platforms, so the same cache key resolves to the same cache version everywhere. + ## Force deletion of caches overriding default cache eviction policy Caches have [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch are using a lot of storage quota, it may result in more frequently used caches from the `default` branch getting thrashed. For example, if there are many pull requests happening on a repo and creating caches, these cannot be used in the default branch scope but will still occupy a lot of space until they get cleaned up by the [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). But sometimes we want to clean them up on a faster cadence to ensure the default branch caches are not thrashed. To achieve this, the [gh-actions-cache cli](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches, as sketched below.
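For illustration, a minimal cleanup sketch follows. It assumes the `list` and `delete` subcommands and the `-R`/`-B`/`--confirm` flags described in the gh-actions-cache README; the repository, branch, and cache key values are placeholders, not taken from this patch.

```yaml
# Sketch only: repository, branch, and cache key below are placeholders.
- name: Clean up caches for a feature branch
  env:
    GH_TOKEN: ${{ github.token }}  # gh needs a token that can manage Actions caches
  run: |
    gh extension install actions/gh-actions-cache
    # Inspect what is currently cached for the branch
    gh actions-cache list -R my-org/my-repo -B my-feature-branch
    # Delete a specific key without an interactive prompt
    gh actions-cache delete npm-deps-cache -R my-org/my-repo -B my-feature-branch --confirm
```

Running a step like this from a workflow that triggers when a pull request closes is one way to reclaim cache quota before the default eviction policy kicks in.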