v1.0.1 release binaries

commit 86dff562ab
parent 0f810ad45a
Author: Josh Gross
Date:   2019-11-05 15:43:33 -05:00

2 changed files with 7 additions and 4 deletions


@@ -2972,7 +2972,10 @@ function run() {
core.debug(`Cache Path: ${cachePath}`);
const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true });
core.saveState(constants_1.State.CacheKey, primaryKey);
-const restoreKeys = core.getInput(constants_1.Inputs.RestoreKeys).split("\n");
+const restoreKeys = core
+    .getInput(constants_1.Inputs.RestoreKeys)
+    .split("\n")
+    .filter(x => x !== "");
const keys = [primaryKey, ...restoreKeys];
core.debug("Resolved Keys:");
core.debug(JSON.stringify(keys));
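The new restore-keys handling matters because the input arrives as a single multi-line string: splitting on "\n" leaves an empty entry behind whenever the value ends with a newline, and that empty string would then be passed along as a cache key. A minimal standalone sketch of the same filtering, using a hypothetical parseRestoreKeys helper rather than the action's real input plumbing:

// Hypothetical helper, illustration only: mirrors the .split("\n").filter(...) added above.
function parseRestoreKeys(rawInput: string): string[] {
    return rawInput
        .split("\n")
        .filter(key => key !== ""); // drop empty entries left by trailing newlines
}

// "npm-cache-\nnpm-\n" splits into ["npm-cache-", "npm-", ""]; the filter removes the empty key.
console.log(parseRestoreKeys("npm-cache-\nnpm-\n")); // ["npm-cache-", "npm-"]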
@@ -2994,7 +2997,7 @@ function run() {
try {
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys);
if (!cacheEntry) {
-core.info(`Cache not found for input keys: ${JSON.stringify(keys)}.`);
+core.info(`Cache not found for input keys: ${keys.join(", ")}.`);
return;
}
let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
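The message change in this hunk is cosmetic: keys.join(", ") prints the keys as a plain comma-separated list instead of a JSON array. For example (illustrative key values only):

// Same key list, two log formats.
const keys = ["npm-cache-abc123", "npm-"];
console.log(`Cache not found for input keys: ${JSON.stringify(keys)}.`); // Cache not found for input keys: ["npm-cache-abc123","npm-"].
console.log(`Cache not found for input keys: ${keys.join(", ")}.`);      // Cache not found for input keys: npm-cache-abc123, npm-.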

dist/save/index.js vendored (4 changes)

@@ -2889,11 +2889,11 @@ function run() {
const tarPath = yield io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
yield exec_1.exec(`"${tarPath}"`, args);
-const fileSizeLimit = 200 * 1024 * 1024; // 200MB
+const fileSizeLimit = 400 * 1024 * 1024; // 400MB
const archiveFileSize = fs.statSync(archivePath).size;
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
-core.warning(`Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.`);
+core.warning(`Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`);
return;
}
const stream = fs.createReadStream(archivePath);
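The save step measures the tar archive with fs.statSync and skips the upload entirely when it exceeds the limit, which this release raises from 200MB to 400MB (400 * 1024 * 1024 = 419,430,400 bytes). A self-contained sketch of that guard, assuming a hypothetical shouldUpload helper and an archivePath produced elsewhere:

import * as fs from "fs";

const FILE_SIZE_LIMIT = 400 * 1024 * 1024; // 400MB, i.e. 419430400 bytes

// Illustration only: returns false when the archive exceeds the limit, matching the warning above.
function shouldUpload(archivePath: string): boolean {
    const archiveFileSize = fs.statSync(archivePath).size;
    if (archiveFileSize > FILE_SIZE_LIMIT) {
        console.warn(`Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`);
        return false;
    }
    return true;
}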