Merge pull request #424 from actions/dhadka/upload-chunk-size

Adds input for upload chunk size
Authored by David Hadka on 2020-10-07 15:55:25 -05:00, committed by GitHub
commit d1255ad936 (GPG key ID: 4AEE18F83AFDEB23)
11 changed files with 92 additions and 11 deletions


@@ -212,3 +212,23 @@ test("getInputAsArray handles empty lines correctly", () => {
testUtils.setInput("foo", "\n\nbar\n\nbaz\n\n");
expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
});
test("getInputAsInt returns undefined if input not set", () => {
expect(actionUtils.getInputAsInt("undefined")).toBeUndefined();
});
test("getInputAsInt returns value if input is valid", () => {
testUtils.setInput("foo", "8");
expect(actionUtils.getInputAsInt("foo")).toBe(8);
});
test("getInputAsInt returns undefined if input is invalid or NaN", () => {
testUtils.setInput("foo", "bar");
expect(actionUtils.getInputAsInt("foo")).toBeUndefined();
});
test("getInputAsInt throws if required and value missing", () => {
expect(() =>
actionUtils.getInputAsInt("undefined", { required: true })
).toThrowError();
});


@@ -27,6 +27,14 @@ beforeAll(() => {
}
);
jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
(name, options) => {
return jest
.requireActual("../src/utils/actionUtils")
.getInputAsInt(name, options);
}
);
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
return jest
@@ -193,7 +201,11 @@ test("save with large cache outputs warning", async () => {
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(saveCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
expect.anything()
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
@@ -236,7 +248,11 @@ test("save with reserve cache failure outputs warning", async () => {
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(saveCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
expect.anything()
);
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -274,7 +290,11 @@ test("save with server error outputs warning", async () => {
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(saveCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
expect.anything()
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -300,6 +320,7 @@ test("save with valid inputs uploads a cache", async () => {
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
testUtils.setInput(Inputs.UploadChunkSize, "4000000");
const cacheId = 4;
const saveCacheMock = jest
@@ -311,7 +332,9 @@
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
uploadChunkSize: 4000000
});
expect(failedMock).toHaveBeenCalledTimes(0);
});


@@ -11,6 +11,9 @@ inputs:
restore-keys:
description: 'An ordered list of keys to use for restoring the cache if no cache hit occurred for key'
required: false
upload-chunk-size:
description: 'The chunk size used to split up large files during upload, in bytes'
required: false
outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
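
For readers skimming the diff, a minimal usage sketch of the new input (not part of this commit): the step below assumes the published actions/cache@v2 tag, and the path, key, and 32 MiB chunk value are illustrative placeholders.

# Illustrative workflow step: cache node_modules and split large uploads
# into 32 MiB chunks (upload-chunk-size takes a byte count, as described above).
- uses: actions/cache@v2
  with:
    path: node_modules
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-node-
    upload-chunk-size: 33554432

When the input is omitted, getInputAsInt returns undefined, so no chunk size is passed and the @actions/cache toolkit falls back to its own default.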

dist/restore/index.js vendored

@@ -31296,7 +31296,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(694);
function isGhes() {
@@ -31353,6 +31353,14 @@ function getInputAsArray(name, options) {
.filter(x => x !== "");
}
exports.getInputAsArray = getInputAsArray;
function getInputAsInt(name, options) {
const value = parseInt(core.getInput(name, options));
if (isNaN(value) || value < 0) {
return undefined;
}
return value;
}
exports.getInputAsInt = getInputAsInt;
/***/ }),
@@ -38485,6 +38493,7 @@ var Inputs;
Inputs["Key"] = "key";
Inputs["Path"] = "path";
Inputs["RestoreKeys"] = "restore-keys";
Inputs["UploadChunkSize"] = "upload-chunk-size";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var Outputs;
(function (Outputs) {

dist/save/index.js vendored

@@ -31296,7 +31296,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(694);
function isGhes() {
@@ -31353,6 +31353,14 @@ function getInputAsArray(name, options) {
.filter(x => x !== "");
}
exports.getInputAsArray = getInputAsArray;
function getInputAsInt(name, options) {
const value = parseInt(core.getInput(name, options));
if (isNaN(value) || value < 0) {
return undefined;
}
return value;
}
exports.getInputAsInt = getInputAsInt;
/***/ }),
@@ -38353,7 +38361,9 @@ function run() {
required: true
});
try {
yield cache.saveCache(cachePaths, primaryKey);
yield cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
});
}
catch (error) {
if (error.name === cache.ValidationError.name) {
@@ -38574,6 +38584,7 @@ var Inputs;
Inputs["Key"] = "key";
Inputs["Path"] = "path";
Inputs["RestoreKeys"] = "restore-keys";
Inputs["UploadChunkSize"] = "upload-chunk-size";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var Outputs;
(function (Outputs) {

package-lock.json generated

@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "2.1.1",
"version": "2.1.2",
"lockfileVersion": 1,
"requires": true,
"dependencies": {


@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "2.1.1",
"version": "2.1.2",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",


@@ -1,7 +1,8 @@
export enum Inputs {
Key = "key",
Path = "path",
RestoreKeys = "restore-keys"
RestoreKeys = "restore-keys",
UploadChunkSize = "upload-chunk-size"
}
export enum Outputs {


@@ -41,7 +41,9 @@ async function run(): Promise<void> {
});
try {
await cache.saveCache(cachePaths, primaryKey);
await cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
});
} catch (error) {
if (error.name === cache.ValidationError.name) {
throw error;


@@ -63,3 +63,14 @@ export function getInputAsArray(
.map(s => s.trim())
.filter(x => x !== "");
}
export function getInputAsInt(
name: string,
options?: core.InputOptions
): number | undefined {
const value = parseInt(core.getInput(name, options));
if (isNaN(value) || value < 0) {
return undefined;
}
return value;
}


@@ -26,4 +26,5 @@ export function clearInputs(): void {
delete process.env[getInputName(Inputs.Path)];
delete process.env[getInputName(Inputs.Key)];
delete process.env[getInputName(Inputs.RestoreKeys)];
delete process.env[getInputName(Inputs.UploadChunkSize)];
}