Add support to opt in to cross-OS caching on Windows

Sampark Sharma authored on 2023-01-04 07:54:25 +00:00 (committed by GitHub)
parent 365406cb70
commit 5e66b6cac9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
13 changed files with 951 additions and 449 deletions


@@ -14,6 +14,10 @@ inputs:
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
     required: false
+  enableCrossOsArchive:
+    description: 'An optional boolean enabled to save and restore cache on windows which could be restored and saved on any platform'
+    default: 'false'
+    required: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'


@@ -1177,10 +1177,6 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
     return __awaiter(this, void 0, void 0, function* () {
-        if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
-            // Disable zstd due to bug https://github.com/actions/cache/issues/301
-            return constants_1.CompressionMethod.Gzip;
-        }
         const versionOutput = yield getVersion('zstd');
         const version = semver.clean(versionOutput);
         if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) {
         : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function isGnuTarInstalled() {
+function getGnuTarPathOnWindows() {
     return __awaiter(this, void 0, void 0, function* () {
+        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
+            return constants_1.GnuTarPathOnWindows;
+        }
         const versionOutput = yield getVersion('tar');
-        return versionOutput.toLowerCase().includes('gnu tar');
+        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
     });
 }
-exports.isGnuTarInstalled = isGnuTarInstalled;
+exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
 function assertDefined(name, value) {
     if (value === undefined) {
         throw Error(`Expected ${name} but value was undefiend`);
@@ -3384,7 +3383,6 @@ const crypto = __importStar(__webpack_require__(417));
 const fs = __importStar(__webpack_require__(747));
 const url_1 = __webpack_require__(414);
 const utils = __importStar(__webpack_require__(15));
-const constants_1 = __webpack_require__(931);
 const downloadUtils_1 = __webpack_require__(251);
 const options_1 = __webpack_require__(538);
 const requestUtils_1 = __webpack_require__(899);
@@ -3414,10 +3412,17 @@ function createHttpClient() {
     const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
     return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
 }
-function getCacheVersion(paths, compressionMethod) {
-    const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
-        ? []
-        : [compressionMethod]);
+function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
+    const components = paths;
+    // Add compression method to cache version to restore
+    // compressed cache as per compression method
+    if (compressionMethod) {
+        components.push(compressionMethod);
+    }
+    // Only check for windows platforms if enableCrossOsArchive is false
+    if (process.platform === 'win32' && !enableCrossOsArchive) {
+        components.push('windows-only');
+    }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
     return crypto
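The new parameter changes what feeds the hashed cache version. A minimal sketch of that logic in isolation, assuming the SHA-256 digest this client uses for versioning; the '|' separator and the salt literal are placeholders here, not the real constants:

const crypto = require('crypto');
// Sketch only: mirrors getCacheVersion above so the effect of the opt-in flag is visible.
function sketchCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
    const components = [...paths];
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    // Without the opt-in, Windows archives keep a 'windows-only' component, so their
    // version never matches a cache saved on Linux or macOS for the same key.
    if (process.platform === 'win32' && !enableCrossOsArchive) {
        components.push('windows-only');
    }
    components.push('<versionSalt>'); // placeholder for the real salt value
    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}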
@@ -3429,9 +3434,10 @@ exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+        // Cache not found
         if (response.statusCode === 204) {
             // List cache for primary key only if cache miss occurs
             if (core.isDebug()) {
@@ -3445,6 +3451,7 @@ function getCacheEntry(keys, paths, options) {
         const cacheResult = response.result;
         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
         if (!cacheDownloadUrl) {
+            // Cache achiveLocation not found. This should never happen, and hence bail out.
             throw new Error('Cache not found.');
         }
         core.setSecret(cacheDownloadUrl);
@@ -3490,7 +3497,7 @@ exports.downloadCache = downloadCache;
 function reserveCache(key, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
-        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+        const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
         const reserveCacheRequest = {
             key,
             version,
@@ -4970,7 +4977,8 @@ var Inputs;
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
-    Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
+    Inputs["UploadChunkSize"] = "upload-chunk-size";
+    Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {
@@ -10066,7 +10074,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(196);
@@ -10109,6 +10117,10 @@ function getInputAsInt(name, options) {
     return value;
 }
 exports.getInputAsInt = getInputAsInt;
+function getInputAsBool(name, options) {
+    return core.getBooleanInput(name, options);
+}
+exports.getInputAsBool = getInputAsBool;
 function isCacheFeatureAvailable() {
     if (cache.isFeatureAvailable()) {
         return true;
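As a usage sketch, the flag is then read through the new helper (names taken from the enum and action.yml above); core.getBooleanInput expects a true/false style value, and the 'false' default declared in action.yml keeps the lookup from failing when the input is omitted:

// Illustrative only: reading the opt-in flag inside the action.
const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
core.debug(`enableCrossOsArchive: ${enableCrossOsArchive}`);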
@@ -38216,27 +38228,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const exec_1 = __webpack_require__(986);
+const core_1 = __webpack_require__(470);
 const io = __importStar(__webpack_require__(1));
 const fs_1 = __webpack_require__(747);
 const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-function getTarPath(args, compressionMethod) {
+core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
+// Returns tar path and type: BSD or GNU
+function getTarPath() {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
             case 'win32': {
-                const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
-                if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
-                    // We only use zstandard compression on windows when gnu tar is installed due to
-                    // a bug with compressing large files with bsdtar + zstd
-                    args.push('--force-local');
+                const gnuTar = yield utils.getGnuTarPathOnWindows();
+                const systemTar = constants_1.SystemTarPathOnWindows;
+                if (gnuTar) {
+                    // Use GNUtar as default on windows
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
                 }
                 else if (fs_1.existsSync(systemTar)) {
-                    return systemTar;
-                }
-                else if (yield utils.isGnuTarInstalled()) {
-                    args.push('--force-local');
+                    return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
                 }
                 break;
             }
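On a hosted Windows runner the resolved result would typically look like one of the following; the concrete paths come from the defaults defined in the constants further down and are not guaranteed on self-hosted machines:

// Illustrative return values of getTarPath() on Windows:
// { path: 'C:\\Program Files\\Git\\usr\\bin\\tar.exe', type: 'gnu' }   // GNU tar from Git for Windows
// { path: 'C:\\Windows\\System32\\tar.exe', type: 'bsd' }              // fallback to the system BSD tar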
@@ -38244,25 +38256,92 @@ function getTarPath(args, compressionMethod) {
                 const gnuTar = yield io.which('gtar', false);
                 if (gnuTar) {
                     // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-                    args.push('--delay-directory-restore');
-                    return gnuTar;
-                }
-                break;
-            }
-            default:
-                break;
-        }
-        return yield io.which('tar', true);
-    });
-}
-function execTar(args, compressionMethod, cwd) {
-    return __awaiter(this, void 0, void 0, function* () {
-        try {
-            yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
+                }
+                else {
+                    return {
+                        path: yield io.which('tar', true),
+                        type: constants_1.ArchiveToolType.BSD
+                    };
+                }
+            }
+            default:
+                break;
+        }
+        // Default assumption is GNU tar is present in path
+        return {
+            path: yield io.which('tar', true),
+            type: constants_1.ArchiveToolType.GNU
+        };
+    });
+}
+// Return arguments for tar as per tarPath, compressionMethod, method type and os
+function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
+    return __awaiter(this, void 0, void 0, function* () {
+        const args = [`"${tarPath.path}"`];
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const tarFile = 'cache.tar';
+        const workingDirectory = getWorkingDirectory();
+        // Speficic args for BSD tar on windows for workaround
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        // Method specific args
+        switch (type) {
+            case 'create':
+                args.push('--posix', '-cf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
+                    ? tarFile
+                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
+                break;
+            case 'extract':
+                args.push('-xf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
+                break;
+            case 'list':
+                args.push('-tf', BSD_TAR_ZSTD
+                    ? tarFile
+                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
+                break;
+        }
+        // Platform specific args
+        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
+            switch (process.platform) {
+                case 'win32':
+                    args.push('--force-local');
+                    break;
+                case 'darwin':
+                    args.push('--delay-directory-restore');
+                    break;
+            }
+        }
+        return args;
+    });
+}
+// Returns commands to run tar and compression program
+function getCommands(compressionMethod, type, archivePath = '') {
+    return __awaiter(this, void 0, void 0, function* () {
+        let args;
+        const tarPath = yield getTarPath();
+        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
+        const compressionArgs = type !== 'create'
+            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
+            : yield getCompressionProgram(tarPath, compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        if (BSD_TAR_ZSTD && type !== 'create') {
+            args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
+        }
+        else {
+            args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
+        }
+        if (BSD_TAR_ZSTD) {
+            return args;
+        }
+        return [args.join(' ')];
     });
 }
 function getWorkingDirectory() {
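For the BSD-tar-with-zstd case on Windows, getCommands therefore returns two separate commands instead of one piped tar invocation: the archive is first decompressed to the intermediate cache.tar, then untarred. A rough example for an extract, with hypothetical runner paths:

// Possible result of getCommands(CompressionMethod.Zstd, 'extract', archivePath)
// when only BSD tar is available on Windows (paths are made up):
//   'zstd -d --long=30 --force -o cache.tar D:/a/_temp/cache.tzst'
//   '"C:\Windows\System32\tar.exe" -xf cache.tar -P -C D:/a/my-repo/my-repo'
// With GNU tar present, the same call collapses to a single command:
//   '"C:\Program Files\Git\usr\bin\tar.exe" -xf D:/a/_temp/cache.tzst -P -C D:/a/my-repo/my-repo --force-local --use-compress-program "zstd -d --long=30"'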
@@ -38270,91 +38349,116 @@ function getWorkingDirectory() {
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getCompressionProgram(compressionMethod) {
-    // -d: Decompress.
-    // unzstd is equivalent to 'zstd -d'
-    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-    // Using 30 here because we also support 32-bit self-hosted runners.
-    switch (compressionMethod) {
-        case constants_1.CompressionMethod.Zstd:
-            return [
-                '--use-compress-program',
-                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
-            ];
-        case constants_1.CompressionMethod.ZstdWithoutLong:
-            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
-        default:
-            return ['-z'];
-    }
-}
+function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // -d: Decompress.
+        // unzstd is equivalent to 'zstd -d'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --long=30 --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -d --force -o',
+                        constants_1.TarFilename,
+                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Used for creating the archive
+// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+// zstdmt is equivalent to 'zstd -T0'
+// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+// Using 30 here because we also support 32-bit self-hosted runners.
+// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+function getCompressionProgram(tarPath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
+            compressionMethod !== constants_1.CompressionMethod.Gzip &&
+            IS_WINDOWS;
+        switch (compressionMethod) {
+            case constants_1.CompressionMethod.Zstd:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --long=30 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : [
+                        '--use-compress-program',
+                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
+                    ];
+            case constants_1.CompressionMethod.ZstdWithoutLong:
+                return BSD_TAR_ZSTD
+                    ? [
+                        'zstd -T0 --force -o',
+                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+                        constants_1.TarFilename
+                    ]
+                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
+            default:
+                return ['-z'];
+        }
+    });
+}
+// Executes all commands as separate processes
+function execCommands(commands, cwd) {
+    return __awaiter(this, void 0, void 0, function* () {
+        for (const command of commands) {
+            try {
+                yield exec_1.exec(command, undefined, { cwd });
+            }
+            catch (error) {
+                throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+            }
+        }
+    });
+}
+// List the contents of a tar
 function listTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'list', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.listTar = listTar;
+// Extract a tar
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = [
-            ...getCompressionProgram(compressionMethod),
-            '-xf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
-        ];
-        yield execTar(args, compressionMethod);
+        const commands = yield getCommands(compressionMethod, 'extract', archivePath);
+        yield execCommands(commands);
     });
 }
 exports.extractTar = extractTar;
+// Create a tar
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
-        const manifestFilename = 'manifest.txt';
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
-        const workingDirectory = getWorkingDirectory();
-        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-        // zstdmt is equivalent to 'zstd -T0'
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return [
-                        '--use-compress-program',
-                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
-                    ];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            '--posix',
-            ...getCompressionProgram(),
-            '-cf',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--exclude',
-            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P',
-            '-C',
-            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '--files-from',
-            manifestFilename
-        ];
-        yield execTar(args, compressionMethod, archiveFolder);
+        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
+        const commands = yield getCommands(compressionMethod, 'create');
+        yield execCommands(commands, archiveFolder);
     });
 }
 exports.createTar = createTar;
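The create path is the mirror image: in the same BSD-tar-on-Windows scenario the tar command runs first, writing cache.tar, and zstd compresses it afterwards into the final archive. Roughly, again with hypothetical paths:

// Possible result of getCommands(CompressionMethod.Zstd, 'create'):
//   '"C:\Windows\System32\tar.exe" --posix -cf cache.tar --exclude cache.tar -P -C D:/a/my-repo/my-repo --files-from manifest.txt'
//   'zstd -T0 --long=30 --force -o cache.tzst cache.tar'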
@@ -47150,9 +47254,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
 * @param primaryKey an explicit key for restoring the cache
 * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
 * @param downloadOptions cache download options
+ * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
 * @returns string returns the key for the cache hit, otherwise returns undefined
 */
-function restoreCache(paths, primaryKey, restoreKeys, options) {
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         restoreKeys = restoreKeys || [];
@@ -47170,7 +47275,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         try {
             // path are needed to compute version
             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-                compressionMethod
+                compressionMethod,
+                enableCrossOsArchive
             });
             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
                 // Cache not found
@@ -47217,10 +47323,11 @@ exports.restoreCache = restoreCache;
 *
 * @param paths a list of file paths to be cached
 * @param key an explicit key for restoring the cache
+ * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
 * @param options cache upload options
 * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
 */
-function saveCache(paths, key, options) {
+function saveCache(paths, key, options, enableCrossOsArchive = false) {
     var _a, _b, _c, _d, _e;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
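A library consumer would opt in by passing the new trailing argument; the key and paths below are hypothetical:

// Illustrative usage of the updated signatures, inside an async function:
const cacheId = await cache.saveCache(['node_modules'], 'build-windows-key', undefined, true);
// A runner on any OS can then restore the same key, again opting in:
const restoredKey = await cache.restoreCache(['node_modules'], 'build-windows-key', [], {}, true);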
@@ -47251,6 +47358,7 @@ function saveCache(paths, key, options) {
             core.debug('Reserving Cache');
             const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
                 compressionMethod,
+                enableCrossOsArchive,
                 cacheSize: archiveFileSize
             });
             if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
@@ -50385,7 +50493,8 @@ function restoreImpl(stateProvider) {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
+        const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
+        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, {}, enableCrossOsArchive);
         if (!cacheKey) {
             core.info(`Cache not found for input keys: ${[
                 primaryKey,
@@ -53255,6 +53364,11 @@ var CompressionMethod;
     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
     CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
+var ArchiveToolType;
+(function (ArchiveToolType) {
+    ArchiveToolType["GNU"] = "gnu";
+    ArchiveToolType["BSD"] = "bsd";
+})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53263,6 +53377,12 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
+// The default path of GNUtar on hosted Windows runners
+exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
+// The default path of BSDtar on hosted Windows runners
+exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
+exports.TarFilename = 'cache.tar';
+exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map
 /***/ }),
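On hosted Windows runners these two environment variables usually resolve to the stock install locations shown below; self-hosted machines can differ:

// Typical resolved values (not guaranteed):
// exports.GnuTarPathOnWindows    -> 'C:\\Program Files\\Git\\usr\\bin\\tar.exe'
// exports.SystemTarPathOnWindows -> 'C:\\Windows\\System32\\tar.exe'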

dist/restore/index.js (vendored), 310 changed lines

View file

@ -1233,10 +1233,6 @@ function getVersion(app) {
// Use zstandard if possible to maximize cache performance // Use zstandard if possible to maximize cache performance
function getCompressionMethod() { function getCompressionMethod() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
// Disable zstd due to bug https://github.com/actions/cache/issues/301
return constants_1.CompressionMethod.Gzip;
}
const versionOutput = yield getVersion('zstd'); const versionOutput = yield getVersion('zstd');
const version = semver.clean(versionOutput); const version = semver.clean(versionOutput);
if (!versionOutput.toLowerCase().includes('zstd command line interface')) { if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@ -1260,13 +1256,16 @@ function getCacheFileName(compressionMethod) {
: constants_1.CacheFilename.Zstd; : constants_1.CacheFilename.Zstd;
} }
exports.getCacheFileName = getCacheFileName; exports.getCacheFileName = getCacheFileName;
function isGnuTarInstalled() { function getGnuTarPathOnWindows() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion('tar'); const versionOutput = yield getVersion('tar');
return versionOutput.toLowerCase().includes('gnu tar'); return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
}); });
} }
exports.isGnuTarInstalled = isGnuTarInstalled; exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
function assertDefined(name, value) { function assertDefined(name, value) {
if (value === undefined) { if (value === undefined) {
throw Error(`Expected ${name} but value was undefined`); throw Error(`Expected ${name} but value was undefined`);
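The new getGnuTarPathOnWindows helper above replaces isGnuTarInstalled: it prefers the Git-bundled GNU tar at its default location and only falls back to whatever tar is on PATH when `tar --version` reports GNU tar. A minimal sketch of the same lookup, written against @actions/exec and @actions/io rather than the bundled getVersion helper (which is internal); the default path constant is an assumption mirroring GnuTarPathOnWindows:

    import * as fs from 'fs';
    import * as io from '@actions/io';
    import { exec } from '@actions/exec';

    // Assumed default location, mirroring the new GnuTarPathOnWindows constant.
    const gnuTarDefault = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;

    async function findGnuTarOnWindows(): Promise<string> {
        if (fs.existsSync(gnuTarDefault)) {
            return gnuTarDefault;                              // Git for Windows bundles GNU tar here
        }
        let versionOutput = '';
        await exec('tar --version', [], {
            ignoreReturnCode: true,
            silent: true,
            listeners: { stdout: (data: Buffer) => { versionOutput += data.toString(); } }
        });
        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
    }

When this returns an empty string, getTarPath (further down in this diff) falls back to the system BSD tar, or to plain tar on PATH.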
@ -3440,7 +3439,6 @@ const crypto = __importStar(__webpack_require__(417));
const fs = __importStar(__webpack_require__(747)); const fs = __importStar(__webpack_require__(747));
const url_1 = __webpack_require__(835); const url_1 = __webpack_require__(835);
const utils = __importStar(__webpack_require__(15)); const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const downloadUtils_1 = __webpack_require__(251); const downloadUtils_1 = __webpack_require__(251);
const options_1 = __webpack_require__(538); const options_1 = __webpack_require__(538);
const requestUtils_1 = __webpack_require__(899); const requestUtils_1 = __webpack_require__(899);
@ -3470,10 +3468,17 @@ function createHttpClient() {
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
} }
function getCacheVersion(paths, compressionMethod) { function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip const components = paths;
? [] // Add compression method to cache version to restore
: [compressionMethod]); // compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod);
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry // Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt); components.push(versionSalt);
return crypto return crypto
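To make the versioning change concrete, here is a hedged sketch of how the new getCacheVersion derives the version hash. The versionSalt value and the sha256-over-joined-components step are assumptions inferred from the surrounding code (the hunk is truncated at `return crypto`); the point is that the extra 'windows-only' component keeps default Windows caches platform-scoped, and opting in via enableCrossOsArchive drops it so every platform computes the same version.

    import * as crypto from 'crypto';

    const versionSalt = '1.0';  // placeholder; the real salt is defined elsewhere in cacheHttpClient

    function getCacheVersionSketch(
        paths: string[],
        compressionMethod?: string,
        enableCrossOsArchive = false
    ): string {
        const components = [...paths];
        if (compressionMethod) {
            components.push(compressionMethod);        // archives only restore with the same compressor
        }
        if (process.platform === 'win32' && !enableCrossOsArchive) {
            components.push('windows-only');           // default: keep Windows caches separate
        }
        components.push(versionSalt);
        return crypto.createHash('sha256').update(components.join('|')).digest('hex');
    }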
@ -3485,9 +3490,10 @@ exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) { function getCacheEntry(keys, paths, options) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient(); const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
// Cache not found
if (response.statusCode === 204) { if (response.statusCode === 204) {
// List cache for primary key only if cache miss occurs // List cache for primary key only if cache miss occurs
if (core.isDebug()) { if (core.isDebug()) {
@ -3501,6 +3507,7 @@ function getCacheEntry(keys, paths, options) {
const cacheResult = response.result; const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) { if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.'); throw new Error('Cache not found.');
} }
core.setSecret(cacheDownloadUrl); core.setSecret(cacheDownloadUrl);
@ -3546,7 +3553,7 @@ exports.downloadCache = downloadCache;
function reserveCache(key, paths, options) { function reserveCache(key, paths, options) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient(); const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const reserveCacheRequest = { const reserveCacheRequest = {
key, key,
version, version,
@ -5026,7 +5033,8 @@ var Inputs;
Inputs["Key"] = "key"; Inputs["Key"] = "key";
Inputs["Path"] = "path"; Inputs["Path"] = "path";
Inputs["RestoreKeys"] = "restore-keys"; Inputs["RestoreKeys"] = "restore-keys";
Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action Inputs["UploadChunkSize"] = "upload-chunk-size";
Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
})(Inputs = exports.Inputs || (exports.Inputs = {})); })(Inputs = exports.Inputs || (exports.Inputs = {}));
var Outputs; var Outputs;
(function (Outputs) { (function (Outputs) {
@ -38180,27 +38188,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const exec_1 = __webpack_require__(986); const exec_1 = __webpack_require__(986);
const core_1 = __webpack_require__(470);
const io = __importStar(__webpack_require__(1)); const io = __importStar(__webpack_require__(1));
const fs_1 = __webpack_require__(747); const fs_1 = __webpack_require__(747);
const path = __importStar(__webpack_require__(622)); const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15)); const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931); const constants_1 = __webpack_require__(931);
const IS_WINDOWS = process.platform === 'win32'; const IS_WINDOWS = process.platform === 'win32';
function getTarPath(args, compressionMethod) { core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
// Returns tar path and type: BSD or GNU
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) { switch (process.platform) {
case 'win32': { case 'win32': {
const systemTar = `${process.env['windir']}\\System32\\tar.exe`; const gnuTar = yield utils.getGnuTarPathOnWindows();
if (compressionMethod !== constants_1.CompressionMethod.Gzip) { const systemTar = constants_1.SystemTarPathOnWindows;
// We only use zstandard compression on windows when gnu tar is installed due to if (gnuTar) {
// a bug with compressing large files with bsdtar + zstd // Use GNUtar as default on windows
args.push('--force-local'); return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
} }
else if (fs_1.existsSync(systemTar)) { else if (fs_1.existsSync(systemTar)) {
return systemTar; return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
}
else if (yield utils.isGnuTarInstalled()) {
args.push('--force-local');
} }
break; break;
} }
@ -38208,25 +38216,92 @@ function getTarPath(args, compressionMethod) {
const gnuTar = yield io.which('gtar', false); const gnuTar = yield io.which('gtar', false);
if (gnuTar) { if (gnuTar) {
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
args.push('--delay-directory-restore'); return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
return gnuTar; }
else {
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.BSD
};
} }
break;
} }
default: default:
break; break;
} }
return yield io.which('tar', true); // Default assumption is GNU tar is present in path
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.GNU
};
}); });
} }
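getTarPath now returns a small descriptor instead of a bare path, so callers can branch on whether the selected binary is GNU or BSD tar. A sketch of the assumed shape and of the BSD-tar-plus-zstd check that the following functions repeat (the ArchiveTool interface and helper name below are illustrative, not part of the bundled code):

    // Assumed shape, matching the object literals returned above.
    interface ArchiveTool {
        path: string;
        type: 'gnu' | 'bsd';   // ArchiveToolType.GNU / ArchiveToolType.BSD
    }

    function needsBsdZstdWorkaround(tool: ArchiveTool, compressionMethod: string): boolean {
        // BSD tar on Windows has a known issue streaming large files through zstd
        // (see the removed comment above), so compression and extraction are split
        // into separate steps and tar only ever sees a plain cache.tar.
        return tool.type === 'bsd'
            && compressionMethod !== 'gzip'
            && process.platform === 'win32';
    }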
function execTar(args, compressionMethod, cwd) { // Return arguments for tar as per tarPath, compressionMethod, method type and os
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
try { const args = [`"${tarPath.path}"`];
yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); const cacheFileName = utils.getCacheFileName(compressionMethod);
const tarFile = 'cache.tar';
const workingDirectory = getWorkingDirectory();
// Specific args for the BSD tar on Windows workaround
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
// Method specific args
switch (type) {
case 'create':
args.push('--posix', '-cf', BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
break;
case 'extract':
args.push('-xf', BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
break;
case 'list':
args.push('-tf', BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
break;
} }
catch (error) { // Platform specific args
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); if (tarPath.type === constants_1.ArchiveToolType.GNU) {
switch (process.platform) {
case 'win32':
args.push('--force-local');
break;
case 'darwin':
args.push('--delay-directory-restore');
break;
} }
}
return args;
});
}
// Returns commands to run tar and compression program
function getCommands(compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
let args;
const tarPath = yield getTarPath();
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
const compressionArgs = type !== 'create'
? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
: yield getCompressionProgram(tarPath, compressionMethod);
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
if (BSD_TAR_ZSTD && type !== 'create') {
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
}
else {
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
}
if (BSD_TAR_ZSTD) {
return args;
}
return [args.join(' ')];
}); });
} }
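As a worked illustration of what getCommands produces on Windows (the paths below are hypothetical; real values come from the runner): with only the system BSD tar available, a zstd extract becomes two separate commands run in order by execCommands, while with GNU tar everything stays in one command via --use-compress-program.

    // Hypothetical paths for illustration only.
    const archive = 'D:/a/_temp/cache.tzst';
    const workdir = 'D:/a/repo/repo';

    // BSD tar + zstd on Windows: decompress first, then untar the plain cache.tar.
    const bsdCommands = [
        `zstd -d --long=30 --force -o cache.tar ${archive}`,
        `"C:\\Windows\\System32\\tar.exe" -xf cache.tar -P -C ${workdir}`
    ];

    // GNU tar on Windows: a single command, zstd piped via --use-compress-program.
    const gnuCommands = [
        `"C:\\Program Files\\Git\\usr\\bin\\tar.exe" -xf ${archive} -P -C ${workdir} ` +
        `--force-local --use-compress-program "zstd -d --long=30"`
    ];

    console.log(bsdCommands, gnuCommands);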
function getWorkingDirectory() { function getWorkingDirectory() {
@ -38234,91 +38309,116 @@ function getWorkingDirectory() {
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
} }
// Common function for extractTar and listTar to get the compression method // Common function for extractTar and listTar to get the compression method
function getCompressionProgram(compressionMethod) { function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
// -d: Decompress. // -d: Decompress.
// unzstd is equivalent to 'zstd -d' // unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners. // Using 30 here because we also support 32-bit self-hosted runners.
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
switch (compressionMethod) { switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd: case constants_1.CompressionMethod.Zstd:
return [ return BSD_TAR_ZSTD
? [
'zstd -d --long=30 --force -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program', '--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
]; ];
case constants_1.CompressionMethod.ZstdWithoutLong: case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; return BSD_TAR_ZSTD
? [
'zstd -d --force -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
default: default:
return ['-z']; return ['-z'];
} }
});
} }
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram(tarPath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const cacheFileName = utils.getCacheFileName(compressionMethod);
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'zstd -T0 --long=30 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
]
: [
'--use-compress-program',
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'zstd -T0 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
default:
return ['-z'];
}
});
}
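One detail worth calling out in the GNU-tar branches above: because getCommands now joins everything into a single command line for exec (see execCommands below), the Windows zstd invocation is wrapped in quotes so that its flags stay attached to --use-compress-program instead of being parsed as extra tar arguments. Roughly:

    // Without the quotes, tar would see '--use-compress-program zstd -T0 --long=30'
    // and treat '-T0 --long=30' as its own flags; the quotes keep the zstd flags together.
    const compressProgramArgs = process.platform === 'win32'
        ? ['--use-compress-program', '"zstd -T0 --long=30"']   // quoted for the single command line
        : ['--use-compress-program', 'zstdmt --long=30'];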
// Executes all commands as separate processes
function execCommands(commands, cwd) {
return __awaiter(this, void 0, void 0, function* () {
for (const command of commands) {
try {
yield exec_1.exec(command, undefined, { cwd });
}
catch (error) {
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
}
});
}
// List the contents of a tar
function listTar(archivePath, compressionMethod) { function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const args = [ const commands = yield getCommands(compressionMethod, 'list', archivePath);
...getCompressionProgram(compressionMethod), yield execCommands(commands);
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
}); });
} }
exports.listTar = listTar; exports.listTar = listTar;
// Extract a tar
function extractTar(archivePath, compressionMethod) { function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into // Create directory to extract tar into
const workingDirectory = getWorkingDirectory(); const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory); yield io.mkdirP(workingDirectory);
const args = [ const commands = yield getCommands(compressionMethod, 'extract', archivePath);
...getCompressionProgram(compressionMethod), yield execCommands(commands);
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
];
yield execTar(args, compressionMethod);
}); });
} }
exports.extractTar = extractTar; exports.extractTar = extractTar;
// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) { function createTar(archiveFolder, sourceDirectories, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
// Write source directories to manifest.txt to avoid command length limits // Write source directories to manifest.txt to avoid command length limits
const manifestFilename = 'manifest.txt'; fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
const cacheFileName = utils.getCacheFileName(compressionMethod); const commands = yield getCommands(compressionMethod, 'create');
fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); yield execCommands(commands, archiveFolder);
const workingDirectory = getWorkingDirectory();
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
default:
return ['-z'];
}
}
const args = [
'--posix',
...getCompressionProgram(),
'-cf',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--exclude',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--files-from',
manifestFilename
];
yield execTar(args, compressionMethod, archiveFolder);
}); });
} }
exports.createTar = createTar; exports.createTar = createTar;
@ -38553,7 +38653,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
const cache = __importStar(__webpack_require__(692)); const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470)); const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196); const constants_1 = __webpack_require__(196);
@ -38596,6 +38696,10 @@ function getInputAsInt(name, options) {
return value; return value;
} }
exports.getInputAsInt = getInputAsInt; exports.getInputAsInt = getInputAsInt;
function getInputAsBool(name, options) {
return core.getBooleanInput(name, options);
}
exports.getInputAsBool = getInputAsBool;
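For reference, getInputAsBool is a thin wrapper over core.getBooleanInput, which accepts the YAML-style spellings true/True/TRUE and false/False/FALSE and throws on anything else, so the 'false' default declared in action.yml parses cleanly. A minimal sketch:

    import * as core from '@actions/core';

    // Input name mirrors Inputs.EnableCrossOsArchive above.
    const enableCrossOsArchive = core.getBooleanInput('enableCrossOsArchive', { required: false });
    // 'true' / 'True' / 'TRUE'    -> true
    // 'false' / 'False' / 'FALSE' -> false (also the action.yml default)
    // anything else               -> getBooleanInput throws a TypeError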
function isCacheFeatureAvailable() { function isCacheFeatureAvailable() {
if (cache.isFeatureAvailable()) { if (cache.isFeatureAvailable()) {
return true; return true;
@ -41075,9 +41179,8 @@ function saveImpl(stateProvider) {
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, { const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
required: true required: true
}); });
cacheId = yield cache.saveCache(cachePaths, primaryKey, { const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) cacheId = yield cache.saveCache(cachePaths, primaryKey, { uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) }, enableCrossOsArchive);
});
if (cacheId != -1) { if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`); core.info(`Cache saved with key: ${primaryKey}`);
} }
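For workflows or scripts that call @actions/cache directly (version 3.1.2 or later, per the dependency bump further down in this diff), the opt-in is the new trailing parameter on both entry points. A minimal hedged sketch with placeholder paths and keys:

    import * as cache from '@actions/cache';

    async function run(): Promise<void> {
        const paths = ['node_modules'];          // placeholder paths
        const key = 'npm-windows-abc123';        // placeholder key

        // Save on a Windows runner but allow the archive to be restored on any platform.
        await cache.saveCache(paths, key, { uploadChunkSize: 32 * 1024 * 1024 }, true);

        // Restore anywhere, opting in to archives that were saved cross-OS.
        const hitKey = await cache.restoreCache(paths, key, [], {}, true);
        console.log(hitKey ?? 'cache miss');
    }

    void run();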
@ -47263,9 +47366,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
* @param primaryKey an explicit key for restoring the cache * @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param downloadOptions cache download options * @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined * @returns string returns the key for the cache hit, otherwise returns undefined
*/ */
function restoreCache(paths, primaryKey, restoreKeys, options) { function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths); checkPaths(paths);
restoreKeys = restoreKeys || []; restoreKeys = restoreKeys || [];
@ -47283,7 +47387,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
try { try {
// path are needed to compute version // path are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod compressionMethod,
enableCrossOsArchive
}); });
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found // Cache not found
@ -47330,10 +47435,11 @@ exports.restoreCache = restoreCache;
* *
* @param paths a list of file paths to be cached * @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache * @param key an explicit key for restoring the cache
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @param options cache upload options * @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/ */
function saveCache(paths, key, options) { function saveCache(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e; var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths); checkPaths(paths);
@ -47364,6 +47470,7 @@ function saveCache(paths, key, options) {
core.debug('Reserving Cache'); core.debug('Reserving Cache');
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
compressionMethod, compressionMethod,
enableCrossOsArchive,
cacheSize: archiveFileSize cacheSize: archiveFileSize
}); });
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
@ -53290,6 +53397,11 @@ var CompressionMethod;
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd"; CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts. // The default number of retry attempts.
exports.DefaultRetryAttempts = 2; exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts. // The default delay in milliseconds between retry attempts.
@ -53298,6 +53410,12 @@ exports.DefaultRetryDelay = 5000;
// over the socket during this period, the socket is destroyed and the download // over the socket during this period, the socket is destroyed and the download
// is aborted. // is aborted.
exports.SocketTimeout = 5000; exports.SocketTimeout = 5000;
// The default path of GNUtar on hosted Windows runners
exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
// The default path of BSDtar on hosted Windows runners
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map //# sourceMappingURL=constants.js.map
/***/ }), /***/ }),

dist/save/index.js vendored
View file

@ -1177,10 +1177,6 @@ function getVersion(app) {
// Use zstandard if possible to maximize cache performance // Use zstandard if possible to maximize cache performance
function getCompressionMethod() { function getCompressionMethod() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
// Disable zstd due to bug https://github.com/actions/cache/issues/301
return constants_1.CompressionMethod.Gzip;
}
const versionOutput = yield getVersion('zstd'); const versionOutput = yield getVersion('zstd');
const version = semver.clean(versionOutput); const version = semver.clean(versionOutput);
if (!versionOutput.toLowerCase().includes('zstd command line interface')) { if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) {
: constants_1.CacheFilename.Zstd; : constants_1.CacheFilename.Zstd;
} }
exports.getCacheFileName = getCacheFileName; exports.getCacheFileName = getCacheFileName;
function isGnuTarInstalled() { function getGnuTarPathOnWindows() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion('tar'); const versionOutput = yield getVersion('tar');
return versionOutput.toLowerCase().includes('gnu tar'); return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
}); });
} }
exports.isGnuTarInstalled = isGnuTarInstalled; exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
function assertDefined(name, value) { function assertDefined(name, value) {
if (value === undefined) { if (value === undefined) {
throw Error(`Expected ${name} but value was undefined`); throw Error(`Expected ${name} but value was undefined`);
@ -3384,7 +3383,6 @@ const crypto = __importStar(__webpack_require__(417));
const fs = __importStar(__webpack_require__(747)); const fs = __importStar(__webpack_require__(747));
const url_1 = __webpack_require__(835); const url_1 = __webpack_require__(835);
const utils = __importStar(__webpack_require__(15)); const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const downloadUtils_1 = __webpack_require__(251); const downloadUtils_1 = __webpack_require__(251);
const options_1 = __webpack_require__(538); const options_1 = __webpack_require__(538);
const requestUtils_1 = __webpack_require__(899); const requestUtils_1 = __webpack_require__(899);
@ -3414,10 +3412,17 @@ function createHttpClient() {
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
} }
function getCacheVersion(paths, compressionMethod) { function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip const components = paths;
? [] // Add compression method to cache version to restore
: [compressionMethod]); // compressed cache as per compression method
if (compressionMethod) {
components.push(compressionMethod);
}
// Only check for windows platforms if enableCrossOsArchive is false
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry // Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt); components.push(versionSalt);
return crypto return crypto
@ -3429,9 +3434,10 @@ exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) { function getCacheEntry(keys, paths, options) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient(); const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
// Cache not found
if (response.statusCode === 204) { if (response.statusCode === 204) {
// List cache for primary key only if cache miss occurs // List cache for primary key only if cache miss occurs
if (core.isDebug()) { if (core.isDebug()) {
@ -3445,6 +3451,7 @@ function getCacheEntry(keys, paths, options) {
const cacheResult = response.result; const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) { if (!cacheDownloadUrl) {
// Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.'); throw new Error('Cache not found.');
} }
core.setSecret(cacheDownloadUrl); core.setSecret(cacheDownloadUrl);
@ -3490,7 +3497,7 @@ exports.downloadCache = downloadCache;
function reserveCache(key, paths, options) { function reserveCache(key, paths, options) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient(); const httpClient = createHttpClient();
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const reserveCacheRequest = { const reserveCacheRequest = {
key, key,
version, version,
@ -4970,7 +4977,8 @@ var Inputs;
Inputs["Key"] = "key"; Inputs["Key"] = "key";
Inputs["Path"] = "path"; Inputs["Path"] = "path";
Inputs["RestoreKeys"] = "restore-keys"; Inputs["RestoreKeys"] = "restore-keys";
Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action Inputs["UploadChunkSize"] = "upload-chunk-size";
Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; // Input for cache, restore, save action
})(Inputs = exports.Inputs || (exports.Inputs = {})); })(Inputs = exports.Inputs || (exports.Inputs = {}));
var Outputs; var Outputs;
(function (Outputs) { (function (Outputs) {
@ -38124,27 +38132,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const exec_1 = __webpack_require__(986); const exec_1 = __webpack_require__(986);
const core_1 = __webpack_require__(470);
const io = __importStar(__webpack_require__(1)); const io = __importStar(__webpack_require__(1));
const fs_1 = __webpack_require__(747); const fs_1 = __webpack_require__(747);
const path = __importStar(__webpack_require__(622)); const path = __importStar(__webpack_require__(622));
const utils = __importStar(__webpack_require__(15)); const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931); const constants_1 = __webpack_require__(931);
const IS_WINDOWS = process.platform === 'win32'; const IS_WINDOWS = process.platform === 'win32';
function getTarPath(args, compressionMethod) { core_1.exportVariable('MSYS', 'winsymlinks:nativestrict');
// Returns tar path and type: BSD or GNU
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) { switch (process.platform) {
case 'win32': { case 'win32': {
const systemTar = `${process.env['windir']}\\System32\\tar.exe`; const gnuTar = yield utils.getGnuTarPathOnWindows();
if (compressionMethod !== constants_1.CompressionMethod.Gzip) { const systemTar = constants_1.SystemTarPathOnWindows;
// We only use zstandard compression on windows when gnu tar is installed due to if (gnuTar) {
// a bug with compressing large files with bsdtar + zstd // Use GNUtar as default on windows
args.push('--force-local'); return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
} }
else if (fs_1.existsSync(systemTar)) { else if (fs_1.existsSync(systemTar)) {
return systemTar; return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
}
else if (yield utils.isGnuTarInstalled()) {
args.push('--force-local');
} }
break; break;
} }
@ -38152,25 +38160,92 @@ function getTarPath(args, compressionMethod) {
const gnuTar = yield io.which('gtar', false); const gnuTar = yield io.which('gtar', false);
if (gnuTar) { if (gnuTar) {
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
args.push('--delay-directory-restore'); return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
return gnuTar; }
else {
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.BSD
};
} }
break;
} }
default: default:
break; break;
} }
return yield io.which('tar', true); // Default assumption is GNU tar is present in path
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.GNU
};
}); });
} }
function execTar(args, compressionMethod, cwd) { // Return arguments for tar as per tarPath, compressionMethod, method type and os
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
try { const args = [`"${tarPath.path}"`];
yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); const cacheFileName = utils.getCacheFileName(compressionMethod);
const tarFile = 'cache.tar';
const workingDirectory = getWorkingDirectory();
// Specific args for the BSD tar on Windows workaround
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
// Method specific args
switch (type) {
case 'create':
args.push('--posix', '-cf', BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
break;
case 'extract':
args.push('-xf', BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
break;
case 'list':
args.push('-tf', BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
break;
} }
catch (error) { // Platform specific args
throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); if (tarPath.type === constants_1.ArchiveToolType.GNU) {
switch (process.platform) {
case 'win32':
args.push('--force-local');
break;
case 'darwin':
args.push('--delay-directory-restore');
break;
} }
}
return args;
});
}
// Returns commands to run tar and compression program
function getCommands(compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
let args;
const tarPath = yield getTarPath();
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
const compressionArgs = type !== 'create'
? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
: yield getCompressionProgram(tarPath, compressionMethod);
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
if (BSD_TAR_ZSTD && type !== 'create') {
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
}
else {
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
}
if (BSD_TAR_ZSTD) {
return args;
}
return [args.join(' ')];
}); });
} }
function getWorkingDirectory() { function getWorkingDirectory() {
@ -38178,91 +38253,116 @@ function getWorkingDirectory() {
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
} }
// Common function for extractTar and listTar to get the compression method // Common function for extractTar and listTar to get the compression method
function getCompressionProgram(compressionMethod) { function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
// -d: Decompress. // -d: Decompress.
// unzstd is equivalent to 'zstd -d' // unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners. // Using 30 here because we also support 32-bit self-hosted runners.
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
switch (compressionMethod) { switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd: case constants_1.CompressionMethod.Zstd:
return [ return BSD_TAR_ZSTD
? [
'zstd -d --long=30 --force -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program', '--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
]; ];
case constants_1.CompressionMethod.ZstdWithoutLong: case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; return BSD_TAR_ZSTD
? [
'zstd -d --force -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
default: default:
return ['-z']; return ['-z'];
} }
});
} }
// Used for creating the archive
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram(tarPath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const cacheFileName = utils.getCacheFileName(compressionMethod);
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'zstd -T0 --long=30 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
]
: [
'--use-compress-program',
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'zstd -T0 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
default:
return ['-z'];
}
});
}
// Executes all commands as separate processes
function execCommands(commands, cwd) {
return __awaiter(this, void 0, void 0, function* () {
for (const command of commands) {
try {
yield exec_1.exec(command, undefined, { cwd });
}
catch (error) {
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
}
});
}
// List the contents of a tar
function listTar(archivePath, compressionMethod) { function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const args = [ const commands = yield getCommands(compressionMethod, 'list', archivePath);
...getCompressionProgram(compressionMethod), yield execCommands(commands);
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
}); });
} }
exports.listTar = listTar; exports.listTar = listTar;
// Extract a tar
function extractTar(archivePath, compressionMethod) { function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into // Create directory to extract tar into
const workingDirectory = getWorkingDirectory(); const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory); yield io.mkdirP(workingDirectory);
const args = [ const commands = yield getCommands(compressionMethod, 'extract', archivePath);
...getCompressionProgram(compressionMethod), yield execCommands(commands);
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
];
yield execTar(args, compressionMethod);
}); });
} }
exports.extractTar = extractTar; exports.extractTar = extractTar;
// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) { function createTar(archiveFolder, sourceDirectories, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
// Write source directories to manifest.txt to avoid command length limits // Write source directories to manifest.txt to avoid command length limits
const manifestFilename = 'manifest.txt'; fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
const cacheFileName = utils.getCacheFileName(compressionMethod); const commands = yield getCommands(compressionMethod, 'create');
fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); yield execCommands(commands, archiveFolder);
const workingDirectory = getWorkingDirectory();
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
default:
return ['-z'];
}
}
const args = [
'--posix',
...getCompressionProgram(),
'-cf',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--exclude',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
'-C',
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'--files-from',
manifestFilename
];
yield execTar(args, compressionMethod, archiveFolder);
}); });
} }
exports.createTar = createTar; exports.createTar = createTar;
@ -38497,7 +38597,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
const cache = __importStar(__webpack_require__(692)); const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470)); const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196); const constants_1 = __webpack_require__(196);
@ -38540,6 +38640,10 @@ function getInputAsInt(name, options) {
return value; return value;
} }
exports.getInputAsInt = getInputAsInt; exports.getInputAsInt = getInputAsInt;
function getInputAsBool(name, options) {
return core.getBooleanInput(name, options);
}
exports.getInputAsBool = getInputAsBool;
function isCacheFeatureAvailable() { function isCacheFeatureAvailable() {
if (cache.isFeatureAvailable()) { if (cache.isFeatureAvailable()) {
return true; return true;
@ -41019,9 +41123,8 @@ function saveImpl(stateProvider) {
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, { const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
required: true required: true
}); });
cacheId = yield cache.saveCache(cachePaths, primaryKey, { const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) cacheId = yield cache.saveCache(cachePaths, primaryKey, { uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) }, enableCrossOsArchive);
});
if (cacheId != -1) { if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`); core.info(`Cache saved with key: ${primaryKey}`);
} }
@ -47236,9 +47339,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
* @param primaryKey an explicit key for restoring the cache * @param primaryKey an explicit key for restoring the cache
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @param downloadOptions cache download options * @param downloadOptions cache download options
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
* @returns string returns the key for the cache hit, otherwise returns undefined * @returns string returns the key for the cache hit, otherwise returns undefined
*/ */
function restoreCache(paths, primaryKey, restoreKeys, options) { function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths); checkPaths(paths);
restoreKeys = restoreKeys || []; restoreKeys = restoreKeys || [];
@ -47256,7 +47360,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
try { try {
// path are needed to compute version // path are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod compressionMethod,
enableCrossOsArchive
}); });
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found // Cache not found
@ -47303,10 +47408,11 @@ exports.restoreCache = restoreCache;
* *
* @param paths a list of file paths to be cached * @param paths a list of file paths to be cached
* @param key an explicit key for restoring the cache * @param key an explicit key for restoring the cache
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
* @param options cache upload options * @param options cache upload options
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
*/ */
function saveCache(paths, key, options) { function saveCache(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e; var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
checkPaths(paths); checkPaths(paths);
@ -47337,6 +47443,7 @@ function saveCache(paths, key, options) {
core.debug('Reserving Cache'); core.debug('Reserving Cache');
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
compressionMethod, compressionMethod,
enableCrossOsArchive,
cacheSize: archiveFileSize cacheSize: archiveFileSize
}); });
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
@ -53263,6 +53370,11 @@ var CompressionMethod;
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd"; CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts. // The default number of retry attempts.
exports.DefaultRetryAttempts = 2; exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts. // The default delay in milliseconds between retry attempts.
@ -53271,6 +53383,12 @@ exports.DefaultRetryDelay = 5000;
// over the socket during this period, the socket is destroyed and the download // over the socket during this period, the socket is destroyed and the download
// is aborted. // is aborted.
exports.SocketTimeout = 5000; exports.SocketTimeout = 5000;
// The default path of GNUtar on hosted Windows runners
exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
// The default path of BSDtar on hosted Windows runners
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map //# sourceMappingURL=constants.js.map
/***/ }), /***/ }),

package-lock.json generated
View file

@ -9,7 +9,7 @@
"version": "3.2.2", "version": "3.2.2",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/cache": "^3.1.1", "@actions/cache": "^3.1.2",
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1", "@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2" "@actions/io": "^1.1.2"
@ -36,9 +36,9 @@
} }
}, },
"node_modules/@actions/cache": { "node_modules/@actions/cache": {
"version": "3.1.1", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz", "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.2.tgz",
"integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==", "integrity": "sha512-3XeKcXIonfIbqvW7gPm/VLOhv1RHQ1dtTgSBCH6OUhCgSTii9bEVgu0PIms7UbLnXeMCKFzECfpbud8fJEvBbQ==",
"dependencies": { "dependencies": {
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/exec": "^1.0.1", "@actions/exec": "^1.0.1",
@ -9722,9 +9722,9 @@
}, },
"dependencies": { "dependencies": {
"@actions/cache": { "@actions/cache": {
"version": "3.1.1", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz", "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.2.tgz",
"integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==", "integrity": "sha512-3XeKcXIonfIbqvW7gPm/VLOhv1RHQ1dtTgSBCH6OUhCgSTii9bEVgu0PIms7UbLnXeMCKFzECfpbud8fJEvBbQ==",
"requires": { "requires": {
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/exec": "^1.0.1", "@actions/exec": "^1.0.1",

View file

@ -23,7 +23,7 @@
"author": "GitHub", "author": "GitHub",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/cache": "^3.1.1", "@actions/cache": "^3.1.2",
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1", "@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2" "@actions/io": "^1.1.2"

View file

@ -11,6 +11,10 @@ inputs:
restore-keys: restore-keys:
description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.' description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
required: false required: false
enableCrossOsArchive:
description: 'An optional boolean enabled to restore cache on windows which could be saved on any platform'
default: 'false'
required: false
outputs: outputs:
cache-hit: cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key' description: 'A boolean value to indicate an exact match was found for the primary key'

View file

@ -11,6 +11,10 @@ inputs:
upload-chunk-size: upload-chunk-size:
description: 'The chunk size used to split up large files during upload, in bytes' description: 'The chunk size used to split up large files during upload, in bytes'
required: false required: false
enableCrossOsArchive:
description: 'An optional boolean enabled to save cache on windows which could be restored on any platform'
default: 'false'
required: false
runs: runs:
using: 'node16' using: 'node16'
main: '../dist/save-only/index.js' main: '../dist/save-only/index.js'

View file

@ -2,7 +2,8 @@ export enum Inputs {
Key = "key", // Input for cache, restore, save action Key = "key", // Input for cache, restore, save action
Path = "path", // Input for cache, restore, save action Path = "path", // Input for cache, restore, save action
RestoreKeys = "restore-keys", // Input for cache, restore action RestoreKeys = "restore-keys", // Input for cache, restore action
UploadChunkSize = "upload-chunk-size" // Input for cache, save action UploadChunkSize = "upload-chunk-size", // Input for cache, save action
EnableCrossOsArchive = "enableCrossOsArchive" // Input for cache, restore, save action
} }
export enum Outputs { export enum Outputs {

View file

@ -17,8 +17,7 @@ async function restoreImpl(
// Validate inputs, this can cause task failure // Validate inputs, this can cause task failure
if (!utils.isValidEvent()) { if (!utils.isValidEvent()) {
utils.logWarning( utils.logWarning(
`Event Validation Error: The event type ${ `Event Validation Error: The event type ${process.env[Events.Key]
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.` } is not supported because it's not tied to a branch or tag ref.`
); );
return; return;
@ -31,11 +30,14 @@ async function restoreImpl(
const cachePaths = utils.getInputAsArray(Inputs.Path, { const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true required: true
}); });
const enableCrossOsArchive = utils.getInputAsBool(Inputs.EnableCrossOsArchive);
const cacheKey = await cache.restoreCache( const cacheKey = await cache.restoreCache(
cachePaths, cachePaths,
primaryKey, primaryKey,
restoreKeys restoreKeys,
{},
enableCrossOsArchive
); );
if (!cacheKey) { if (!cacheKey) {
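Because restoreCache keeps its options argument in the fourth position, restoreImpl passes an empty object there and the new boolean lands in the fifth parameter. A standalone sketch of the same call shape against @actions/cache ^3.1.2; the paths and keys are hypothetical placeholders:

import * as cache from "@actions/cache";

// Restore a cache that may have been saved on a different OS.
// All concrete values here are hypothetical.
async function restoreDeps(): Promise<string | undefined> {
    const enableCrossOsArchive = true; // parsed from the action input in restoreImpl
    return cache.restoreCache(
        ["node_modules"],    // paths the cache was saved from
        "npm-deps-v1",       // primary key
        ["npm-deps-"],       // restore-keys prefixes
        {},                  // download options left at their defaults
        enableCrossOsArchive // new fifth parameter, defaults to false
    );
}

The return value is the matched key, or undefined when nothing could be restored, which is what the if (!cacheKey) branch above checks.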

View file

@ -19,8 +19,7 @@ async function saveImpl(stateProvider: IStateProvider): Promise<number | void> {
if (!utils.isValidEvent()) { if (!utils.isValidEvent()) {
utils.logWarning( utils.logWarning(
`Event Validation Error: The event type ${ `Event Validation Error: The event type ${process.env[Events.Key]
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.` } is not supported because it's not tied to a branch or tag ref.`
); );
return; return;
@ -52,9 +51,14 @@ async function saveImpl(stateProvider: IStateProvider): Promise<number | void> {
required: true required: true
}); });
cacheId = await cache.saveCache(cachePaths, primaryKey, { const enableCrossOsArchive = utils.getInputAsBool(Inputs.EnableCrossOsArchive);
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
}); cacheId = await cache.saveCache(
cachePaths,
primaryKey,
{ uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize) },
enableCrossOsArchive
);
if (cacheId != -1) { if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`); core.info(`Cache saved with key: ${primaryKey}`);
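Putting the save-side pieces together, the action now reads two optional inputs and hands both to cache.saveCache, keeping -1 as its local sentinel for "nothing was saved". A hedged sketch of that flow, assuming the input names declared in the action.yml files above; the try/catch and logging mirror the shape of saveImpl rather than reproducing it exactly:

import * as cache from "@actions/cache";
import * as core from "@actions/core";

// Illustrative save flow; cachePaths and primaryKey come from elsewhere.
async function save(cachePaths: string[], primaryKey: string): Promise<number> {
    let cacheId = -1;
    try {
        const chunkSize = core.getInput("upload-chunk-size");
        cacheId = await cache.saveCache(
            cachePaths,
            primaryKey,
            { uploadChunkSize: chunkSize ? parseInt(chunkSize) : undefined },
            core.getBooleanInput("enableCrossOsArchive")
        );
    } catch (error) {
        core.warning((error as Error).message); // saveCache throws rather than returning -1 on failures
    }
    if (cacheId !== -1) {
        core.info(`Cache saved with key: ${primaryKey}`);
    }
    return cacheId;
}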

View file

@ -52,6 +52,13 @@ export function getInputAsInt(
return value; return value;
} }
export function getInputAsBool(
name: string,
options?: core.InputOptions
): boolean {
return core.getBooleanInput(name, options);
}
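The helper is a thin wrapper over core.getBooleanInput, which accepts the YAML 1.2 booleans true/True/TRUE and false/False/FALSE and throws a TypeError for anything else; the 'false' defaults declared in the action.yml files above therefore parse to a plain false. A small self-contained check, simulating how the runner exposes inputs as INPUT_* environment variables:

import * as core from "@actions/core";

// The runner passes inputs to the process as INPUT_<NAME> variables,
// so the default from action.yml can be simulated directly.
process.env["INPUT_ENABLECROSSOSARCHIVE"] = "false";
const crossOs = core.getBooleanInput("enableCrossOsArchive");
console.log(crossOs); // => false, so cross-OS archiving stays opt-in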
export function isCacheFeatureAvailable(): boolean { export function isCacheFeatureAvailable(): boolean {
if (cache.isFeatureAvailable()) { if (cache.isFeatureAvailable()) {
return true; return true;