Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-11-16 23:51:16 +01:00)

Merge branch 'main' into Phantsure-patch-1

Commit 7d403ca5c3: 34 changed files with 128699 additions and 6096 deletions
.devcontainer/devcontainer.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
    "name": "Node.js & TypeScript",
    "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
    // Features to add to the dev container. More info: https://containers.dev/implementors/features.
    // "features": {},
    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],
    // Use 'postCreateCommand' to run commands after the container is created.
    "postCreateCommand": "npm install"
    // Configure tool-specific properties.
    // "customizations": {},
    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
    // "remoteUser": "root"
}
.licenses/npm/@actions/cache.dep.yml (generated, binary)
Binary file not shown.
README.md
@@ -2,12 +2,19 @@

This action allows caching dependencies and build outputs to improve workflow execution time.

In addition to this `cache` action, two other actions are also available:

[Restore action](./restore/README.md)

[Save action](./save/README.md)

[![Tests](https://github.com/actions/cache/actions/workflows/workflow.yml/badge.svg)](https://github.com/actions/cache/actions/workflows/workflow.yml)

## Documentation

See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows).


## What's New
### v3
* Added support for caching from GHES 3.5.
@@ -20,6 +27,7 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac
* Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads.
* Fixed zstd not working on Windows with GNU tar (reported in issues).
* Allow users to provide a custom timeout for aborting the download of a cache segment via the `SEGMENT_DOWNLOAD_TIMEOUT_MINS` environment variable. Default is 60 minutes.
* Two new actions are available for granular control over caches - [restore](restore/action.yml) and [save](save/action.yml).

Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions.
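As a quick orientation before the test diffs below: the action and its new restore/save variants are thin wrappers around the `@actions/cache` toolkit calls exercised throughout these tests. The following is a minimal TypeScript sketch of those calls; the helper names, paths, and keys are illustrative, not taken from the repository's source.

import * as cache from "@actions/cache";
import * as core from "@actions/core";

// Hypothetical helpers; the call shapes mirror the expectations asserted in the tests below.
async function restoreNodeModules(): Promise<void> {
    const primaryKey = "node-test"; // illustrative primary key
    const restoreKeys = ["node-"];  // illustrative restore-key prefixes
    const matchedKey = await cache.restoreCache(
        ["node_modules"],
        primaryKey,
        restoreKeys
    );

    // An exact hit is reported only when the matched key equals the primary key.
    core.setOutput("cache-hit", String(matchedKey === primaryKey));
    if (matchedKey) {
        core.info(`Cache restored from key: ${matchedKey}`);
    }
}

async function saveNodeModules(primaryKey: string): Promise<void> {
    // uploadChunkSize is optional; 4000000 is the value used in the tests below.
    await cache.saveCache(["node_modules"], primaryKey, {
        uploadChunkSize: 4000000
    });
}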
RELEASES.md (15 changed lines)
@@ -40,3 +40,18 @@
### 3.0.11
- Update toolkit version to 3.0.5 to include `@actions/core@^1.10.0`
- Update `@actions/cache` to use updated `saveState` and `setOutput` functions from `@actions/core@^1.10.0`

### 3.1.0-beta.1
- Update `@actions/cache` on Windows to use GNU tar and zstd by default, falling back to bsdtar and zstd if GNU tar is not available. ([issue](https://github.com/actions/cache/issues/984))

### 3.1.0-beta.2
- Added support for falling back to gzip to restore old caches on Windows.

### 3.1.0-beta.3
- Bug fixes for the bsdtar fallback when GNU tar is not available and the gzip fallback when the cache was saved using the old cache action on Windows.

### 3.2.0-beta.1
- Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) - for granular control over caches.

### 3.2.0
- Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) - for granular control over caches.
__tests__/actionUtils.test.ts
@@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Outputs, RefKey, State } from "../src/constants";
import { Events, RefKey } from "../src/constants";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
@@ -79,83 +79,6 @@ test("isExactKeyMatch with same key and different casing returns true", () => {
    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
});

test("setOutputAndState with undefined entry to set cache-hit output", () => {
    const key = "linux-rust";
    const cacheKey = undefined;

    const setOutputMock = jest.spyOn(core, "setOutput");
    const saveStateMock = jest.spyOn(core, "saveState");

    actionUtils.setOutputAndState(key, cacheKey);

    expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
    expect(setOutputMock).toHaveBeenCalledTimes(1);

    expect(saveStateMock).toHaveBeenCalledTimes(0);
});

test("setOutputAndState with exact match to set cache-hit output and state", () => {
    const key = "linux-rust";
    const cacheKey = "linux-rust";

    const setOutputMock = jest.spyOn(core, "setOutput");
    const saveStateMock = jest.spyOn(core, "saveState");

    actionUtils.setOutputAndState(key, cacheKey);

    expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true");
    expect(setOutputMock).toHaveBeenCalledTimes(1);

    expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
    expect(saveStateMock).toHaveBeenCalledTimes(1);
});

test("setOutputAndState with no exact match to set cache-hit output and state", () => {
    const key = "linux-rust";
    const cacheKey = "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43";

    const setOutputMock = jest.spyOn(core, "setOutput");
    const saveStateMock = jest.spyOn(core, "saveState");

    actionUtils.setOutputAndState(key, cacheKey);

    expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
    expect(setOutputMock).toHaveBeenCalledTimes(1);

    expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
    expect(saveStateMock).toHaveBeenCalledTimes(1);
});

test("getCacheState with no state returns undefined", () => {
    const getStateMock = jest.spyOn(core, "getState");
    getStateMock.mockImplementation(() => {
        return "";
    });

    const state = actionUtils.getCacheState();

    expect(state).toBe(undefined);

    expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
    expect(getStateMock).toHaveBeenCalledTimes(1);
});

test("getCacheState with valid state", () => {
    const cacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";

    const getStateMock = jest.spyOn(core, "getState");
    getStateMock.mockImplementation(() => {
        return cacheKey;
    });

    const state = actionUtils.getCacheState();

    expect(state).toEqual(cacheKey);

    expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
    expect(getStateMock).toHaveBeenCalledTimes(1);
});

test("logWarning logs a message with a warning prefix", () => {
    const message = "A warning occurred.";
__tests__/restore.test.ts
@@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, RefKey } from "../src/constants";
import { Events, RefKey } from "../src/constants";
import run from "../src/restore";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
@@ -45,158 +45,6 @@ afterEach(() => {
    delete process.env[RefKey];
});

test("restore with invalid event outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");
    const invalidEvent = "commit_comment";
    process.env[Events.Key] = invalidEvent;
    delete process.env[RefKey];
    await run();
    expect(logWarningMock).toHaveBeenCalledWith(
        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore without AC available should no-op", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => false
    );

    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
});

test("restore on GHES without AC available should no-op", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => false
    );

    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
});

test("restore on GHES with AC available ", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    const path = "node_modules";
    const key = "node-test";
    testUtils.setInputs({
        path: path,
        key
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(key);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);

    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore with no path should fail", async () => {
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run();
    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
    expect(failedMock).not.toHaveBeenCalledWith(
        "Input required and not supplied: path"
    );
});

test("restore with no key", async () => {
    testUtils.setInput(Inputs.Path, "node_modules");
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run();
    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    expect(failedMock).toHaveBeenCalledWith(
        "Input required and not supplied: key"
    );
});

test("restore with too many keys should fail", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKeys = [...Array(20).keys()].map(x => x.toString());
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys
    });
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run();
    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys);
    expect(failedMock).toHaveBeenCalledWith(
        `Key Validation Error: Keys are limited to a maximum of 10.`
    );
});

test("restore with large key should fail", async () => {
    const path = "node_modules";
    const key = "foo".repeat(512); // Over the 512 character limit
    testUtils.setInputs({
        path: path,
        key
    });
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run();
    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
    expect(failedMock).toHaveBeenCalledWith(
        `Key Validation Error: ${key} cannot be larger than 512 characters.`
    );
});

test("restore with invalid key should fail", async () => {
    const path = "node_modules";
    const key = "comma,comma";
    testUtils.setInputs({
        path: path,
        key
    });
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run();
    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
    expect(failedMock).toHaveBeenCalledWith(
        `Key Validation Error: ${key} cannot contain commas.`
    );
});

test("restore with no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
@@ -220,6 +68,8 @@ test("restore with no cache found", async () => {
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(stateMock).toHaveBeenCalledTimes(1);

    expect(failedMock).toHaveBeenCalledTimes(0);

    expect(infoMock).toHaveBeenCalledWith(
@@ -252,6 +102,8 @@ test("restore with restore keys and no cache found", async () => {
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(stateMock).toHaveBeenCalledTimes(1);

    expect(failedMock).toHaveBeenCalledTimes(0);

    expect(infoMock).toHaveBeenCalledWith(
@@ -270,7 +122,7 @@ test("restore with cache found for key", async () => {
    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
@@ -283,8 +135,11 @@ test("restore with cache found for key", async () => {
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", key);
    expect(stateMock).toHaveBeenCalledTimes(2);

    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true");

    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
    expect(failedMock).toHaveBeenCalledTimes(0);
@@ -303,7 +158,7 @@ test("restore with cache found for restore key", async () => {
    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
@@ -316,9 +171,11 @@ test("restore with cache found for restore key", async () => {
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
    expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", restoreKey);
    expect(stateMock).toHaveBeenCalledTimes(2);

    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
    expect(infoMock).toHaveBeenCalledWith(
        `Cache restored from key: ${restoreKey}`
    );
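The hunks above capture how restore now reports results: the removed lines spy on the old boolean `actionUtils.setCacheHitOutput` helper, while the added lines spy on `core.setOutput` directly and additionally record the matched key as `CACHE_RESULT` state. A minimal sketch of the new pattern, inferred from these expectations rather than copied from the repository's src/ files:

import * as core from "@actions/core";

// Hypothetical helper illustrating the new reporting pattern asserted above.
function recordRestoreResult(primaryKey: string, matchedKey: string): void {
    // Saved so the post-job save step can compare it against CACHE_KEY later.
    core.saveState("CACHE_RESULT", matchedKey);
    core.setOutput("cache-hit", String(primaryKey === matchedKey));
}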
__tests__/restoreImpl.test.ts (new file, 326 lines)
@@ -0,0 +1,326 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/restoreImpl";
import { StateProvider } from "../src/stateProvider";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
            return actualUtils.isExactKeyMatch(key, cacheResult);
        }
    );

    jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.isValidEvent();
    });

    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
        (name, options) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
            return actualUtils.getInputAsArray(name, options);
        }
    );
});

beforeEach(() => {
    process.env[Events.Key] = Events.Push;
    process.env[RefKey] = "refs/heads/feature-branch";

    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => true
    );
});

afterEach(() => {
    testUtils.clearInputs();
    delete process.env[Events.Key];
    delete process.env[RefKey];
});

test("restore with invalid event outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");
    const invalidEvent = "commit_comment";
    process.env[Events.Key] = invalidEvent;
    delete process.env[RefKey];
    await run(new StateProvider());
    expect(logWarningMock).toHaveBeenCalledWith(
        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore without AC available should no-op", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => false
    );

    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
});

test("restore on GHES without AC available should no-op", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => false
    );

    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
});

test("restore on GHES with AC available ", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    const path = "node_modules";
    const key = "node-test";
    testUtils.setInputs({
        path: path,
        key
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(key);
        });

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true");

    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore with no path should fail", async () => {
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run(new StateProvider());
    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
    expect(failedMock).not.toHaveBeenCalledWith(
        "Input required and not supplied: path"
    );
});

test("restore with no key", async () => {
    testUtils.setInput(Inputs.Path, "node_modules");
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run(new StateProvider());
    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
    expect(failedMock).toHaveBeenCalledWith(
        "Input required and not supplied: key"
    );
});

test("restore with too many keys should fail", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKeys = [...Array(20).keys()].map(x => x.toString());
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys
    });
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run(new StateProvider());
    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys);
    expect(failedMock).toHaveBeenCalledWith(
        `Key Validation Error: Keys are limited to a maximum of 10.`
    );
});

test("restore with large key should fail", async () => {
    const path = "node_modules";
    const key = "foo".repeat(512); // Over the 512 character limit
    testUtils.setInputs({
        path: path,
        key
    });
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run(new StateProvider());
    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
    expect(failedMock).toHaveBeenCalledWith(
        `Key Validation Error: ${key} cannot be larger than 512 characters.`
    );
});

test("restore with invalid key should fail", async () => {
    const path = "node_modules";
    const key = "comma,comma";
    testUtils.setInputs({
        path: path,
        key
    });
    const failedMock = jest.spyOn(core, "setFailed");
    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
    await run(new StateProvider());
    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
    expect(failedMock).toHaveBeenCalledWith(
        `Key Validation Error: ${key} cannot contain commas.`
    );
});

test("restore with no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
    testUtils.setInputs({
        path: path,
        key
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(undefined);
        });

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(failedMock).toHaveBeenCalledTimes(0);

    expect(infoMock).toHaveBeenCalledWith(
        `Cache not found for input keys: ${key}`
    );
});

test("restore with restore keys and no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey]
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(undefined);
        });

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(failedMock).toHaveBeenCalledTimes(0);

    expect(infoMock).toHaveBeenCalledWith(
        `Cache not found for input keys: ${key}, ${restoreKey}`
    );
});

test("restore with cache found for key", async () => {
    const path = "node_modules";
    const key = "node-test";
    testUtils.setInputs({
        path: path,
        key
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(key);
        });

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true");

    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore with cache found for restore key", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey]
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(restoreKey);
        });

    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
    expect(infoMock).toHaveBeenCalledWith(
        `Cache restored from key: ${restoreKey}`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});
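The restoreImpl tests above drive `run` through a `StateProvider` object and assert only on the `core.saveState`/`core.setOutput` calls it forwards ("CACHE_KEY", "CACHE_RESULT", "cache-hit"). A rough TypeScript sketch of that idea, inferred from those assertions rather than copied from src/stateProvider.ts:

import * as core from "@actions/core";

// Shape inferred from the assertions above; the real interface may differ.
interface IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;
    setOutput(key: string, value: string): void;
}

// Workflow-run variant: persists intermediate results as step state so the
// post-job save step can read them back (e.g. "CACHE_KEY", "CACHE_RESULT").
class StateProviderSketch implements IStateProvider {
    setState(key: string, value: string): void {
        core.saveState(key, value);
    }
    getState(key: string): string {
        return core.getState(key);
    }
    setOutput(key: string, value: string): void {
        core.setOutput(key, value); // e.g. setOutput("cache-hit", "true")
    }
}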
__tests__/restoreOnly.test.ts (new file, 177 lines)
@@ -0,0 +1,177 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, RefKey } from "../src/constants";
import run from "../src/restoreOnly";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
            return actualUtils.isExactKeyMatch(key, cacheResult);
        }
    );

    jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.isValidEvent();
    });

    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
        (name, options) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
            return actualUtils.getInputAsArray(name, options);
        }
    );
});

beforeEach(() => {
    process.env[Events.Key] = Events.Push;
    process.env[RefKey] = "refs/heads/feature-branch";

    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => true
    );
});

afterEach(() => {
    testUtils.clearInputs();
    delete process.env[Events.Key];
    delete process.env[RefKey];
});

test("restore with no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
    testUtils.setInputs({
        path: path,
        key
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const outputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(undefined);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
    expect(outputMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);

    expect(infoMock).toHaveBeenCalledWith(
        `Cache not found for input keys: ${key}`
    );
});

test("restore with restore keys and no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey]
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const outputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(undefined);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
    expect(failedMock).toHaveBeenCalledTimes(0);

    expect(infoMock).toHaveBeenCalledWith(
        `Cache not found for input keys: ${key}, ${restoreKey}`
    );
});

test("restore with cache found for key", async () => {
    const path = "node_modules";
    const key = "node-test";
    testUtils.setInputs({
        path: path,
        key
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const outputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(key);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);

    expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
    expect(outputMock).toHaveBeenCalledWith("cache-hit", "true");
    expect(outputMock).toHaveBeenCalledWith("cache-matched-key", key);

    expect(outputMock).toHaveBeenCalledTimes(3);

    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore with cache found for restore key", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey]
    });

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const outputMock = jest.spyOn(core, "setOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(restoreKey);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
    expect(outputMock).toHaveBeenCalledWith("cache-hit", "false");
    expect(outputMock).toHaveBeenCalledWith("cache-matched-key", restoreKey);

    expect(outputMock).toHaveBeenCalledTimes(3);

    expect(infoMock).toHaveBeenCalledWith(
        `Cache restored from key: ${restoreKey}`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});
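The restoreOnly tests assert a three-output contract: `cache-primary-key` is always set, while `cache-hit` and `cache-matched-key` are only set when a cache was found. A small sketch of that output logic, with the helper name being hypothetical:

import * as core from "@actions/core";

// Hypothetical helper illustrating the outputs the restoreOnly tests check.
function reportRestoreOnlyResult(
    primaryKey: string,
    matchedKey: string | undefined
): void {
    core.setOutput("cache-primary-key", primaryKey);
    if (matchedKey === undefined) {
        core.info(`Cache not found for input keys: ${primaryKey}`);
        return;
    }
    core.setOutput("cache-hit", String(matchedKey === primaryKey));
    core.setOutput("cache-matched-key", matchedKey);
    core.info(`Cache restored from key: ${matchedKey}`);
}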
__tests__/save.test.ts
@@ -15,8 +15,8 @@ beforeAll(() => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });

    jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => {
        return jest.requireActual("../src/utils/actionUtils").getCacheState();
    jest.spyOn(core, "getState").mockImplementation(name => {
        return jest.requireActual("@actions/core").getState(name);
    });

    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
@@ -65,294 +65,6 @@ afterEach(() => {
    delete process.env[RefKey];
});

test("save with invalid event outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");
    const invalidEvent = "commit_comment";
    process.env[Events.Key] = invalidEvent;
    delete process.env[RefKey];
    await run();
    expect(logWarningMock).toHaveBeenCalledWith(
        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with no primary key in state outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return "";
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        `Error retrieving key from state.`
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save without AC available should no-op", async () => {
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => false
    );

    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
});

test("save on ghes without AC available should no-op", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => false
    );

    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
});

test("save on GHES with AC available", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
        uploadChunkSize: 4000000
    });

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with exact match returns early", async () => {
    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = primaryKey;

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(infoMock).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with missing input outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        "Input required and not supplied: path"
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with large cache outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            throw new Error(
                "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
            );
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        expect.anything()
    );

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith(
        "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with reserve cache failure outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            const actualCache = jest.requireActual("@actions/cache");
            const error = new actualCache.ReserveCacheError(
                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
            );
            throw error;
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        expect.anything()
    );

    expect(logWarningMock).toHaveBeenCalledWith(
        `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with server error outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            throw new Error("HTTP Error Occurred");
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        expect.anything()
    );

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");
@@ -362,11 +74,11 @@ test("save with valid inputs uploads a cache", async () => {
    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return primaryKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
            return savedCacheKey;
        });

    const inputPath = "node_modules";
386
__tests__/saveImpl.test.ts
Normal file
386
__tests__/saveImpl.test.ts
Normal file
|
@ -0,0 +1,386 @@
|
|||
import * as cache from "@actions/cache";
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import { Events, Inputs, RefKey } from "../src/constants";
|
||||
import run from "../src/saveImpl";
|
||||
import { StateProvider } from "../src/stateProvider";
|
||||
import * as actionUtils from "../src/utils/actionUtils";
|
||||
import * as testUtils from "../src/utils/testUtils";
|
||||
|
||||
jest.mock("@actions/core");
|
||||
jest.mock("@actions/cache");
|
||||
jest.mock("../src/utils/actionUtils");
|
||||
|
||||
beforeAll(() => {
|
||||
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
|
||||
return jest.requireActual("@actions/core").getInput(name, options);
|
||||
});
|
||||
|
||||
jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
|
||||
(name, options) => {
|
||||
return jest
|
||||
.requireActual("../src/utils/actionUtils")
|
||||
.getInputAsArray(name, options);
|
||||
}
|
||||
);
|
||||
|
||||
jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
|
||||
(name, options) => {
|
||||
return jest
|
||||
.requireActual("../src/utils/actionUtils")
|
||||
.getInputAsInt(name, options);
|
||||
}
|
||||
);
|
||||
|
||||
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
|
||||
(key, cacheResult) => {
|
||||
return jest
|
||||
.requireActual("../src/utils/actionUtils")
|
||||
.isExactKeyMatch(key, cacheResult);
|
||||
}
|
||||
);
|
||||
|
||||
jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
|
||||
const actualUtils = jest.requireActual("../src/utils/actionUtils");
|
||||
return actualUtils.isValidEvent();
|
||||
});
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
process.env[Events.Key] = Events.Push;
|
||||
process.env[RefKey] = "refs/heads/feature-branch";
|
||||
|
||||
jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
|
||||
jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
|
||||
() => true
|
||||
);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
testUtils.clearInputs();
|
||||
delete process.env[Events.Key];
|
||||
delete process.env[RefKey];
|
||||
});
|
||||
|
||||
test("save with invalid event outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
const invalidEvent = "commit_comment";
|
||||
process.env[Events.Key] = invalidEvent;
|
||||
delete process.env[RefKey];
|
||||
await run(new StateProvider());
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
|
||||
);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test("save with no primary key in state outputs warning", async () => {
|
||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return "";
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return savedCacheKey;
|
||||
});
|
||||
const saveCacheMock = jest.spyOn(cache, "saveCache");
|
||||
|
||||
await run(new StateProvider());
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0);
|
||||
expect(logWarningMock).toHaveBeenCalledWith(`Key is not specified.`);
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test("save without AC available should no-op", async () => {
|
||||
jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
|
||||
() => false
|
||||
);
|
||||
|
||||
const saveCacheMock = jest.spyOn(cache, "saveCache");
|
||||
|
||||
await run(new StateProvider());
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test("save on ghes without AC available should no-op", async () => {
|
||||
jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
|
||||
jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
|
||||
() => false
|
||||
);
|
||||
|
||||
const saveCacheMock = jest.spyOn(cache, "saveCache");
|
||||
|
||||
await run(new StateProvider());
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test("save on GHES with AC available", async () => {
|
||||
jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const savedCacheKey = "Linux-node-";
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return savedCacheKey;
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
testUtils.setInput(Inputs.UploadChunkSize, "4000000");
|
||||
|
||||
const cacheId = 4;
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cache, "saveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
await run(new StateProvider());
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
|
||||
uploadChunkSize: 4000000
|
||||
});
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test("save with exact match returns early", async () => {
|
||||
const infoMock = jest.spyOn(core, "info");
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const savedCacheKey = primaryKey;
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return savedCacheKey;
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
return primaryKey;
|
||||
});
|
||||
const saveCacheMock = jest.spyOn(cache, "saveCache");
|
||||
|
||||
await run(new StateProvider());
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0);
|
||||
expect(infoMock).toHaveBeenCalledWith(
|
||||
            `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
        );
        expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with missing input outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        "Input required and not supplied: path"
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with large cache outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            throw new Error(
                "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
            );
        });

    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        expect.anything()
    );

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith(
        "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with reserve cache failure outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            const actualCache = jest.requireActual("@actions/cache");
            const error = new actualCache.ReserveCacheError(
                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
            );
            throw error;
        });

    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        expect.anything()
    );

    expect(logWarningMock).toHaveBeenCalledWith(
        `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with server error outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            throw new Error("HTTP Error Occurred");
        });

    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        expect.anything()
    );

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
        uploadChunkSize: 4000000
    });

    expect(failedMock).toHaveBeenCalledTimes(0);
});
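For orientation before the new test files: a minimal sketch of the save flow these StateProvider-based tests exercise. It is inferred from the assertions above, not copied from the repository; the module path (`src/saveImpl.ts`) and the exact helper signatures are assumptions, since this diff only shows the implementation as bundled dist output.

// Hypothetical sketch -- the real implementation is assumed to live in
// src/saveImpl.ts and is only visible in this diff as bundled dist output.
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Inputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";
import * as utils from "./utils/actionUtils";

export default async function saveImpl(
    stateProvider: IStateProvider
): Promise<number | void> {
    try {
        // The restore step records the restored key ("cache entry state") and
        // the primary key ("cache key state"); the save step reads them back.
        const restoredKey = stateProvider.getCacheState();
        const primaryKey = stateProvider.getState(State.CachePrimaryKey);

        // An exact hit on the primary key means there is nothing new to upload.
        if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
            core.info(
                `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
            );
            return;
        }

        // Throws "Input required and not supplied: path" when the input is
        // missing, which the test above expects to surface as a warning.
        const cachePaths = utils.getInputAsArray(Inputs.Path, {
            required: true
        });

        return await cache.saveCache(cachePaths, primaryKey, {
            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
        });
    } catch (error: unknown) {
        // Size limits, reserve conflicts and HTTP errors are warnings, not
        // failures: setFailed is never called in the error-path tests.
        utils.logWarning((error as Error).message);
    }
}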
121 __tests__/saveOnly.test.ts Normal file
@@ -0,0 +1,121 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/saveOnly";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

jest.mock("@actions/core");
jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });

    jest.spyOn(core, "setOutput").mockImplementation((key, value) => {
        return jest.requireActual("@actions/core").getInput(key, value);
    });

    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
        (name, options) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .getInputAsArray(name, options);
        }
    );

    jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
        (name, options) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .getInputAsInt(name, options);
        }
    );

    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .isExactKeyMatch(key, cacheResult);
        }
    );

    jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.isValidEvent();
    });
});

beforeEach(() => {
    process.env[Events.Key] = Events.Push;
    process.env[RefKey] = "refs/heads/feature-branch";

    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => true
    );
});

afterEach(() => {
    testUtils.clearInputs();
    delete process.env[Events.Key];
    delete process.env[RefKey];
});

test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Key, primaryKey);
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
        uploadChunkSize: 4000000
    });

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save failing logs the warning message", async () => {
    const warningMock = jest.spyOn(core, "warning");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Key, primaryKey);
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = -1;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
        uploadChunkSize: 4000000
    });

    expect(warningMock).toHaveBeenCalledTimes(1);
    expect(warningMock).toHaveBeenCalledWith("Cache save failed.");
});
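The `run()` imported from `../src/saveOnly` is not visible in source form in this diff (its bundle below is suppressed), so the following is only a plausible sketch consistent with the two tests above; the input handling and the `-1` failure check mirror what the assertions require, everything else is assumed.

// Hypothetical sketch of a save-only entry point, inferred from the tests --
// not copied from src/saveOnly.ts, which this diff does not show in source form.
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Inputs } from "./constants";
import * as utils from "./utils/actionUtils";

export default async function run(): Promise<void> {
    try {
        if (!utils.isCacheFeatureAvailable()) {
            return;
        }

        // There is no restore step here, so the key comes straight from the
        // `key` input rather than from saved state.
        const primaryKey = core.getInput(Inputs.Key, { required: true });
        const cachePaths = utils.getInputAsArray(Inputs.Path, {
            required: true
        });

        const cacheId = await cache.saveCache(cachePaths, primaryKey, {
            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
        });

        // The second test treats a cache id of -1 as a failed save.
        if (cacheId === -1) {
            core.warning("Cache save failed.");
        }
    } catch (error: unknown) {
        utils.logWarning((error as Error).message);
    }
}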
89 __tests__/stateProvider.test.ts Normal file
@@ -0,0 +1,89 @@
import * as core from "@actions/core";

import { Events, RefKey, State } from "../src/constants";
import {
    IStateProvider,
    NullStateProvider,
    StateProvider
} from "../src/stateProvider";

jest.mock("@actions/core");

beforeAll(() => {
    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });

    jest.spyOn(core, "setOutput").mockImplementation((key, value) => {
        return jest.requireActual("@actions/core").setOutput(key, value);
    });
});

afterEach(() => {
    delete process.env[Events.Key];
    delete process.env[RefKey];
});

test("StateProvider saves states", async () => {
    const states = new Map<string, string>();
    const getStateMock = jest
        .spyOn(core, "getState")
        .mockImplementation(key => states.get(key) || "");

    const saveStateMock = jest
        .spyOn(core, "saveState")
        .mockImplementation((key, value) => {
            states.set(key, value);
        });

    const setOutputMock = jest
        .spyOn(core, "setOutput")
        .mockImplementation((key, value) => {
            return jest.requireActual("@actions/core").setOutput(key, value);
        });

    const cacheMatchedKey = "node-cache";

    const stateProvider: IStateProvider = new StateProvider();
    stateProvider.setState("stateKey", "stateValue");
    stateProvider.setState(State.CacheMatchedKey, cacheMatchedKey);
    const stateValue = stateProvider.getState("stateKey");
    const cacheStateValue = stateProvider.getCacheState();

    expect(stateValue).toBe("stateValue");
    expect(cacheStateValue).toBe(cacheMatchedKey);
    expect(getStateMock).toHaveBeenCalledTimes(2);
    expect(saveStateMock).toHaveBeenCalledTimes(2);
    expect(setOutputMock).toHaveBeenCalledTimes(0);
});

test("NullStateProvider saves outputs", async () => {
    const getStateMock = jest
        .spyOn(core, "getState")
        .mockImplementation(name =>
            jest.requireActual("@actions/core").getState(name)
        );

    const setOutputMock = jest
        .spyOn(core, "setOutput")
        .mockImplementation((key, value) => {
            return jest.requireActual("@actions/core").setOutput(key, value);
        });

    const saveStateMock = jest
        .spyOn(core, "saveState")
        .mockImplementation((key, value) => {
            return jest.requireActual("@actions/core").saveState(key, value);
        });

    const cacheMatchedKey = "node-cache";
    const nullStateProvider: IStateProvider = new NullStateProvider();
    nullStateProvider.setState(State.CacheMatchedKey, "outputValue");
    nullStateProvider.setState(State.CachePrimaryKey, cacheMatchedKey);
    nullStateProvider.getState("outputKey");
    nullStateProvider.getCacheState();

    expect(getStateMock).toHaveBeenCalledTimes(0);
    expect(setOutputMock).toHaveBeenCalledTimes(2);
    expect(saveStateMock).toHaveBeenCalledTimes(0);
});
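The compiled form of `src/stateProvider.ts` appears later in the bundled `dist/restore/index.js` hunks; for readers who stop at the tests, here is a TypeScript rendition of the same idea (member names follow the tests and the bundle, minor details simplified).

// TypeScript rendition of the state-provider abstraction; compare with the
// webpack module (id 309) in the bundled output below.
import * as core from "@actions/core";

import { Outputs, State } from "./constants";

export interface IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;
    getCacheState(): string | undefined;
}

abstract class StateProviderBase implements IStateProvider {
    // Subclasses replace these; the defaults are deliberate no-ops.
    setState: (key: string, value: string) => void = () => {};
    getState: (key: string) => string = () => "";

    getCacheState(): string | undefined {
        const cacheKey = this.getState(State.CacheMatchedKey);
        if (cacheKey) {
            core.debug(`Cache state/key: ${cacheKey}`);
            return cacheKey;
        }
        return undefined;
    }
}

// The combined action's main and post steps share data via saveState/getState.
export class StateProvider extends StateProviderBase {
    setState = core.saveState;
    getState = core.getState;
}

// The standalone restore/save actions have no post step, so writes become
// outputs and reads always come back empty.
export class NullStateProvider extends StateProviderBase {
    stateToOutputMap = new Map<string, string>([
        [State.CacheMatchedKey, Outputs.CacheMatchedKey],
        [State.CachePrimaryKey, Outputs.CachePrimaryKey]
    ]);

    setState = (key: string, value: string): void => {
        core.setOutput(this.stateToOutputMap.get(key) as string, value);
    };

    getState = (_key: string): string => "";
}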
@@ -21,7 +21,7 @@ runs:
   using: 'node16'
   main: 'dist/restore/index.js'
   post: 'dist/save/index.js'
-  post-if: 'success()'
+  post-if: success()
 branding:
   icon: 'archive'
   color: 'gray-dark'
61045 dist/restore-only/index.js vendored Normal file
File diff suppressed because one or more lines are too long
728 dist/restore/index.js vendored
|
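Most of the `dist/restore/index.js` hunks that follow are mechanical: the vendored `@azure/abort-controller` module drops its `tslib` dependency and its ES5 prototype/`Object.defineProperty` pattern in favour of native class syntax, and a few webpack module ids are renumbered (for example the `url` shim moving from 835 to 414). A tiny before/after illustration of that syntax change, using an invented `Timer` class rather than code from the bundle:

// Illustrative only -- `Timer` is an invented example, not part of the bundle.
//
// Down-levelled ES5 shape the bundler used to emit (sketched):
//
//     var Timer = /** @class */ (function () {
//         function Timer() { this.start = Date.now(); }
//         Object.defineProperty(Timer.prototype, "elapsed", {
//             get: function () { return Date.now() - this.start; },
//             enumerable: false,
//             configurable: true
//         });
//         return Timer;
//     }());
//
// Equivalent ES2015+ shape emitted after this change:
class Timer {
    private readonly start = Date.now();

    get elapsed(): number {
        return Date.now() - this.start;
    }
}

// Behaviour is unchanged; only the emitted syntax differs.
const t = new Timer();
console.log(t.elapsed >= 0);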
@ -3046,19 +3046,18 @@ exports.default = _default;
|
|||
/***/ }),
|
||||
/* 105 */,
|
||||
/* 106 */
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
||||
"use strict";
|
||||
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var tslib = __webpack_require__(640);
|
||||
|
||||
// Copyright (c) Microsoft Corporation.
|
||||
// Licensed under the MIT license.
|
||||
var listenersMap = new WeakMap();
|
||||
var abortedMap = new WeakMap();
|
||||
/// <reference path="../shims-public.d.ts" />
|
||||
const listenersMap = new WeakMap();
|
||||
const abortedMap = new WeakMap();
|
||||
/**
|
||||
* An aborter instance implements AbortSignal interface, can abort HTTP requests.
|
||||
*
|
||||
|
@ -3072,8 +3071,8 @@ var abortedMap = new WeakMap();
|
|||
* await doAsyncWork(AbortSignal.none);
|
||||
* ```
|
||||
*/
|
||||
var AbortSignal = /** @class */ (function () {
|
||||
function AbortSignal() {
|
||||
class AbortSignal {
|
||||
constructor() {
|
||||
/**
|
||||
* onabort event listener.
|
||||
*/
|
||||
|
@ -3081,74 +3080,65 @@ var AbortSignal = /** @class */ (function () {
|
|||
listenersMap.set(this, []);
|
||||
abortedMap.set(this, false);
|
||||
}
|
||||
Object.defineProperty(AbortSignal.prototype, "aborted", {
|
||||
/**
|
||||
* Status of whether aborted or not.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get: function () {
|
||||
if (!abortedMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
return abortedMap.get(this);
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
Object.defineProperty(AbortSignal, "none", {
|
||||
/**
|
||||
* Creates a new AbortSignal instance that will never be aborted.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get: function () {
|
||||
return new AbortSignal();
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
/**
|
||||
* Status of whether aborted or not.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get aborted() {
|
||||
if (!abortedMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
return abortedMap.get(this);
|
||||
}
|
||||
/**
|
||||
* Creates a new AbortSignal instance that will never be aborted.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
static get none() {
|
||||
return new AbortSignal();
|
||||
}
|
||||
/**
|
||||
* Added new "abort" event listener, only support "abort" event.
|
||||
*
|
||||
* @param _type - Only support "abort" event
|
||||
* @param listener - The listener to be added
|
||||
*/
|
||||
AbortSignal.prototype.addEventListener = function (
|
||||
addEventListener(
|
||||
// tslint:disable-next-line:variable-name
|
||||
_type, listener) {
|
||||
if (!listenersMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
var listeners = listenersMap.get(this);
|
||||
const listeners = listenersMap.get(this);
|
||||
listeners.push(listener);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Remove "abort" event listener, only support "abort" event.
|
||||
*
|
||||
* @param _type - Only support "abort" event
|
||||
* @param listener - The listener to be removed
|
||||
*/
|
||||
AbortSignal.prototype.removeEventListener = function (
|
||||
removeEventListener(
|
||||
// tslint:disable-next-line:variable-name
|
||||
_type, listener) {
|
||||
if (!listenersMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
var listeners = listenersMap.get(this);
|
||||
var index = listeners.indexOf(listener);
|
||||
const listeners = listenersMap.get(this);
|
||||
const index = listeners.indexOf(listener);
|
||||
if (index > -1) {
|
||||
listeners.splice(index, 1);
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Dispatches a synthetic event to the AbortSignal.
|
||||
*/
|
||||
AbortSignal.prototype.dispatchEvent = function (_event) {
|
||||
dispatchEvent(_event) {
|
||||
throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
|
||||
};
|
||||
return AbortSignal;
|
||||
}());
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.
|
||||
* Will try to trigger abort event for all linked AbortSignal nodes.
|
||||
|
@ -3166,12 +3156,12 @@ function abortSignal(signal) {
|
|||
if (signal.onabort) {
|
||||
signal.onabort.call(signal);
|
||||
}
|
||||
var listeners = listenersMap.get(signal);
|
||||
const listeners = listenersMap.get(signal);
|
||||
if (listeners) {
|
||||
// Create a copy of listeners so mutations to the array
|
||||
// (e.g. via removeListener calls) don't affect the listeners
|
||||
// we invoke.
|
||||
listeners.slice().forEach(function (listener) {
|
||||
listeners.slice().forEach((listener) => {
|
||||
listener.call(signal, { type: "abort" });
|
||||
});
|
||||
}
|
||||
|
@ -3197,15 +3187,12 @@ function abortSignal(signal) {
|
|||
* }
|
||||
* ```
|
||||
*/
|
||||
var AbortError = /** @class */ (function (_super) {
|
||||
tslib.__extends(AbortError, _super);
|
||||
function AbortError(message) {
|
||||
var _this = _super.call(this, message) || this;
|
||||
_this.name = "AbortError";
|
||||
return _this;
|
||||
class AbortError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = "AbortError";
|
||||
}
|
||||
return AbortError;
|
||||
}(Error));
|
||||
}
|
||||
/**
|
||||
* An AbortController provides an AbortSignal and the associated controls to signal
|
||||
* that an asynchronous operation should be aborted.
|
||||
|
@ -3240,10 +3227,9 @@ var AbortError = /** @class */ (function (_super) {
|
|||
* await doAsyncWork(aborter.withTimeout(25 * 1000));
|
||||
* ```
|
||||
*/
|
||||
var AbortController = /** @class */ (function () {
|
||||
class AbortController {
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
|
||||
function AbortController(parentSignals) {
|
||||
var _this = this;
|
||||
constructor(parentSignals) {
|
||||
this._signal = new AbortSignal();
|
||||
if (!parentSignals) {
|
||||
return;
|
||||
|
@ -3253,8 +3239,7 @@ var AbortController = /** @class */ (function () {
|
|||
// eslint-disable-next-line prefer-rest-params
|
||||
parentSignals = arguments;
|
||||
}
|
||||
for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {
|
||||
var parentSignal = parentSignals_1[_i];
|
||||
for (const parentSignal of parentSignals) {
|
||||
// if the parent signal has already had abort() called,
|
||||
// then call abort on this signal as well.
|
||||
if (parentSignal.aborted) {
|
||||
|
@ -3262,47 +3247,42 @@ var AbortController = /** @class */ (function () {
|
|||
}
|
||||
else {
|
||||
// when the parent signal aborts, this signal should as well.
|
||||
parentSignal.addEventListener("abort", function () {
|
||||
_this.abort();
|
||||
parentSignal.addEventListener("abort", () => {
|
||||
this.abort();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Object.defineProperty(AbortController.prototype, "signal", {
|
||||
/**
|
||||
* The AbortSignal associated with this controller that will signal aborted
|
||||
* when the abort method is called on this controller.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get: function () {
|
||||
return this._signal;
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
/**
|
||||
* The AbortSignal associated with this controller that will signal aborted
|
||||
* when the abort method is called on this controller.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get signal() {
|
||||
return this._signal;
|
||||
}
|
||||
/**
|
||||
* Signal that any operations passed this controller's associated abort signal
|
||||
* to cancel any remaining work and throw an `AbortError`.
|
||||
*/
|
||||
AbortController.prototype.abort = function () {
|
||||
abort() {
|
||||
abortSignal(this._signal);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Creates a new AbortSignal instance that will abort after the provided ms.
|
||||
* @param ms - Elapsed time in milliseconds to trigger an abort.
|
||||
*/
|
||||
AbortController.timeout = function (ms) {
|
||||
var signal = new AbortSignal();
|
||||
var timer = setTimeout(abortSignal, ms, signal);
|
||||
static timeout(ms) {
|
||||
const signal = new AbortSignal();
|
||||
const timer = setTimeout(abortSignal, ms, signal);
|
||||
// Prevent the active Timer from keeping the Node.js event loop active.
|
||||
if (typeof timer.unref === "function") {
|
||||
timer.unref();
|
||||
}
|
||||
return signal;
|
||||
};
|
||||
return AbortController;
|
||||
}());
|
||||
}
|
||||
}
|
||||
|
||||
exports.AbortController = AbortController;
|
||||
exports.AbortError = AbortError;
|
||||
|
@ -3402,7 +3382,7 @@ const http_client_1 = __webpack_require__(425);
|
|||
const auth_1 = __webpack_require__(554);
|
||||
const crypto = __importStar(__webpack_require__(417));
|
||||
const fs = __importStar(__webpack_require__(747));
|
||||
const url_1 = __webpack_require__(835);
|
||||
const url_1 = __webpack_require__(414);
|
||||
const utils = __importStar(__webpack_require__(15));
|
||||
const constants_1 = __webpack_require__(931);
|
||||
const downloadUtils_1 = __webpack_require__(251);
|
||||
|
@ -4970,11 +4950,13 @@ var Inputs;
|
|||
Inputs["Key"] = "key";
|
||||
Inputs["Path"] = "path";
|
||||
Inputs["RestoreKeys"] = "restore-keys";
|
||||
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
||||
Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
|
||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||
var Outputs;
|
||||
(function (Outputs) {
|
||||
Outputs["CacheHit"] = "cache-hit";
|
||||
Outputs["CachePrimaryKey"] = "cache-primary-key";
|
||||
Outputs["CacheMatchedKey"] = "cache-matched-key"; // Output from restore action
|
||||
})(Outputs = exports.Outputs || (exports.Outputs = {}));
|
||||
var State;
|
||||
(function (State) {
|
||||
|
@ -9344,7 +9326,80 @@ function expand(str, isTop) {
|
|||
/***/ }),
|
||||
/* 307 */,
|
||||
/* 308 */,
|
||||
/* 309 */,
|
||||
/* 309 */
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NullStateProvider = exports.StateProvider = void 0;
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
class StateProviderBase {
|
||||
constructor() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function
|
||||
this.setState = (key, value) => { };
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
this.getState = (key) => "";
|
||||
}
|
||||
getCacheState() {
|
||||
const cacheKey = this.getState(constants_1.State.CacheMatchedKey);
|
||||
if (cacheKey) {
|
||||
core.debug(`Cache state/key: ${cacheKey}`);
|
||||
return cacheKey;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
class StateProvider extends StateProviderBase {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.setState = core.saveState;
|
||||
this.getState = core.getState;
|
||||
}
|
||||
}
|
||||
exports.StateProvider = StateProvider;
|
||||
class NullStateProvider extends StateProviderBase {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.stateToOutputMap = new Map([
|
||||
[constants_1.State.CacheMatchedKey, constants_1.Outputs.CacheMatchedKey],
|
||||
[constants_1.State.CachePrimaryKey, constants_1.Outputs.CachePrimaryKey]
|
||||
]);
|
||||
this.setState = (key, value) => {
|
||||
core.setOutput(this.stateToOutputMap.get(key), value);
|
||||
};
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
this.getState = (key) => "";
|
||||
}
|
||||
}
|
||||
exports.NullStateProvider = NullStateProvider;
|
||||
|
||||
|
||||
/***/ }),
|
||||
/* 310 */,
|
||||
/* 311 */,
|
||||
/* 312 */
|
||||
|
@ -35018,7 +35073,7 @@ exports.Path = Path;
|
|||
*/
|
||||
|
||||
const punycode = __webpack_require__(815);
|
||||
const urlParse = __webpack_require__(835).parse;
|
||||
const urlParse = __webpack_require__(414).parse;
|
||||
const util = __webpack_require__(669);
|
||||
const pubsuffix = __webpack_require__(562);
|
||||
const Store = __webpack_require__(338).Store;
|
||||
|
@ -36813,7 +36868,12 @@ module.exports = __webpack_require__(141);
|
|||
|
||||
|
||||
/***/ }),
|
||||
/* 414 */,
|
||||
/* 414 */
|
||||
/***/ (function(module) {
|
||||
|
||||
module.exports = require("url");
|
||||
|
||||
/***/ }),
|
||||
/* 415 */,
|
||||
/* 416 */,
|
||||
/* 417 */
|
||||
|
@ -38422,7 +38482,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
|
||||
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
|
@ -38438,29 +38498,6 @@ function isExactKeyMatch(key, cacheKey) {
|
|||
}) === 0);
|
||||
}
|
||||
exports.isExactKeyMatch = isExactKeyMatch;
|
||||
function setCacheState(state) {
|
||||
core.saveState(constants_1.State.CacheMatchedKey, state);
|
||||
}
|
||||
exports.setCacheState = setCacheState;
|
||||
function setCacheHitOutput(isCacheHit) {
|
||||
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
|
||||
}
|
||||
exports.setCacheHitOutput = setCacheHitOutput;
|
||||
function setOutputAndState(key, cacheKey) {
|
||||
setCacheHitOutput(isExactKeyMatch(key, cacheKey));
|
||||
// Store the matched cache key if it exists
|
||||
cacheKey && setCacheState(cacheKey);
|
||||
}
|
||||
exports.setOutputAndState = setOutputAndState;
|
||||
function getCacheState() {
|
||||
const cacheKey = core.getState(constants_1.State.CacheMatchedKey);
|
||||
if (cacheKey) {
|
||||
core.debug(`Cache state/key: ${cacheKey}`);
|
||||
return cacheKey;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
exports.getCacheState = getCacheState;
|
||||
function logWarning(message) {
|
||||
const warningPrefix = "[warning]";
|
||||
core.info(`${warningPrefix}${message}`);
|
||||
|
@ -38612,7 +38649,7 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau
|
|||
|
||||
var Stream = _interopDefault(__webpack_require__(794));
|
||||
var http = _interopDefault(__webpack_require__(605));
|
||||
var Url = _interopDefault(__webpack_require__(835));
|
||||
var Url = _interopDefault(__webpack_require__(414));
|
||||
var whatwgUrl = _interopDefault(__webpack_require__(70));
|
||||
var https = _interopDefault(__webpack_require__(211));
|
||||
var zlib = _interopDefault(__webpack_require__(761));
|
||||
|
@ -44126,318 +44163,7 @@ exports.default = _default;
|
|||
|
||||
|
||||
/***/ }),
|
||||
/* 640 */
|
||||
/***/ (function(module) {
|
||||
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THIS SOFTWARE.
|
||||
***************************************************************************** */
|
||||
/* global global, define, System, Reflect, Promise */
|
||||
var __extends;
|
||||
var __assign;
|
||||
var __rest;
|
||||
var __decorate;
|
||||
var __param;
|
||||
var __metadata;
|
||||
var __awaiter;
|
||||
var __generator;
|
||||
var __exportStar;
|
||||
var __values;
|
||||
var __read;
|
||||
var __spread;
|
||||
var __spreadArrays;
|
||||
var __spreadArray;
|
||||
var __await;
|
||||
var __asyncGenerator;
|
||||
var __asyncDelegator;
|
||||
var __asyncValues;
|
||||
var __makeTemplateObject;
|
||||
var __importStar;
|
||||
var __importDefault;
|
||||
var __classPrivateFieldGet;
|
||||
var __classPrivateFieldSet;
|
||||
var __createBinding;
|
||||
(function (factory) {
|
||||
var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
|
||||
if (typeof define === "function" && define.amd) {
|
||||
define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
|
||||
}
|
||||
else if ( true && typeof module.exports === "object") {
|
||||
factory(createExporter(root, createExporter(module.exports)));
|
||||
}
|
||||
else {
|
||||
factory(createExporter(root));
|
||||
}
|
||||
function createExporter(exports, previous) {
|
||||
if (exports !== root) {
|
||||
if (typeof Object.create === "function") {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
}
|
||||
else {
|
||||
exports.__esModule = true;
|
||||
}
|
||||
}
|
||||
return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
|
||||
}
|
||||
})
|
||||
(function (exporter) {
|
||||
var extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
|
||||
|
||||
__extends = function (d, b) {
|
||||
if (typeof b !== "function" && b !== null)
|
||||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
|
||||
__assign = Object.assign || function (t) {
|
||||
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||||
s = arguments[i];
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
__rest = function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
__decorate = function (decorators, target, key, desc) {
|
||||
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
||||
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
||||
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
||||
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
||||
};
|
||||
|
||||
__param = function (paramIndex, decorator) {
|
||||
return function (target, key) { decorator(target, key, paramIndex); }
|
||||
};
|
||||
|
||||
__metadata = function (metadataKey, metadataValue) {
|
||||
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
|
||||
};
|
||||
|
||||
__awaiter = function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
|
||||
__generator = function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
|
||||
__exportStar = function(m, o) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
|
||||
};
|
||||
|
||||
__createBinding = Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
});
|
||||
|
||||
__values = function (o) {
|
||||
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
|
||||
if (m) return m.call(o);
|
||||
if (o && typeof o.length === "number") return {
|
||||
next: function () {
|
||||
if (o && i >= o.length) o = void 0;
|
||||
return { value: o && o[i++], done: !o };
|
||||
}
|
||||
};
|
||||
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
|
||||
};
|
||||
|
||||
__read = function (o, n) {
|
||||
var m = typeof Symbol === "function" && o[Symbol.iterator];
|
||||
if (!m) return o;
|
||||
var i = m.call(o), r, ar = [], e;
|
||||
try {
|
||||
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
|
||||
}
|
||||
catch (error) { e = { error: error }; }
|
||||
finally {
|
||||
try {
|
||||
if (r && !r.done && (m = i["return"])) m.call(i);
|
||||
}
|
||||
finally { if (e) throw e.error; }
|
||||
}
|
||||
return ar;
|
||||
};
|
||||
|
||||
/** @deprecated */
|
||||
__spread = function () {
|
||||
for (var ar = [], i = 0; i < arguments.length; i++)
|
||||
ar = ar.concat(__read(arguments[i]));
|
||||
return ar;
|
||||
};
|
||||
|
||||
/** @deprecated */
|
||||
__spreadArrays = function () {
|
||||
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
|
||||
for (var r = Array(s), k = 0, i = 0; i < il; i++)
|
||||
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
|
||||
r[k] = a[j];
|
||||
return r;
|
||||
};
|
||||
|
||||
__spreadArray = function (to, from, pack) {
|
||||
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
|
||||
if (ar || !(i in from)) {
|
||||
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
|
||||
ar[i] = from[i];
|
||||
}
|
||||
}
|
||||
return to.concat(ar || Array.prototype.slice.call(from));
|
||||
};
|
||||
|
||||
__await = function (v) {
|
||||
return this instanceof __await ? (this.v = v, this) : new __await(v);
|
||||
};
|
||||
|
||||
__asyncGenerator = function (thisArg, _arguments, generator) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var g = generator.apply(thisArg, _arguments || []), i, q = [];
|
||||
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
|
||||
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
|
||||
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
|
||||
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
|
||||
function fulfill(value) { resume("next", value); }
|
||||
function reject(value) { resume("throw", value); }
|
||||
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||
};
|
||||
|
||||
__asyncDelegator = function (o) {
|
||||
var i, p;
|
||||
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
|
||||
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
|
||||
};
|
||||
|
||||
__asyncValues = function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
|
||||
__makeTemplateObject = function (cooked, raw) {
|
||||
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
|
||||
return cooked;
|
||||
};
|
||||
|
||||
var __setModuleDefault = Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
};
|
||||
|
||||
__importStar = function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
|
||||
__importDefault = function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
|
||||
__classPrivateFieldGet = function (receiver, state, kind, f) {
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||
};
|
||||
|
||||
__classPrivateFieldSet = function (receiver, state, value, kind, f) {
|
||||
if (kind === "m") throw new TypeError("Private method is not writable");
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||||
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
||||
};
|
||||
|
||||
exporter("__extends", __extends);
|
||||
exporter("__assign", __assign);
|
||||
exporter("__rest", __rest);
|
||||
exporter("__decorate", __decorate);
|
||||
exporter("__param", __param);
|
||||
exporter("__metadata", __metadata);
|
||||
exporter("__awaiter", __awaiter);
|
||||
exporter("__generator", __generator);
|
||||
exporter("__exportStar", __exportStar);
|
||||
exporter("__createBinding", __createBinding);
|
||||
exporter("__values", __values);
|
||||
exporter("__read", __read);
|
||||
exporter("__spread", __spread);
|
||||
exporter("__spreadArrays", __spreadArrays);
|
||||
exporter("__spreadArray", __spreadArray);
|
||||
exporter("__await", __await);
|
||||
exporter("__asyncGenerator", __asyncGenerator);
|
||||
exporter("__asyncDelegator", __asyncDelegator);
|
||||
exporter("__asyncValues", __asyncValues);
|
||||
exporter("__makeTemplateObject", __makeTemplateObject);
|
||||
exporter("__importStar", __importStar);
|
||||
exporter("__importDefault", __importDefault);
|
||||
exporter("__classPrivateFieldGet", __classPrivateFieldGet);
|
||||
exporter("__classPrivateFieldSet", __classPrivateFieldSet);
|
||||
});
|
||||
|
||||
|
||||
/***/ }),
|
||||
/* 640 */,
|
||||
/* 641 */,
|
||||
/* 642 */,
|
||||
/* 643 */,
|
||||
|
@ -48951,29 +48677,6 @@ module.exports = function(dst, src) {
|
|||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
@ -48983,46 +48686,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
const utils = __importStar(__webpack_require__(443));
|
||||
const restoreImpl_1 = __importDefault(__webpack_require__(835));
|
||||
const stateProvider_1 = __webpack_require__(309);
|
||||
function run() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if (!utils.isCacheFeatureAvailable()) {
|
||||
utils.setCacheHitOutput(false);
|
||||
return;
|
||||
}
|
||||
// Validate inputs, this can cause task failure
|
||||
if (!utils.isValidEvent()) {
|
||||
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported because it's not tied to a branch or tag ref.`);
|
||||
return;
|
||||
}
|
||||
const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true });
|
||||
core.saveState(constants_1.State.CachePrimaryKey, primaryKey);
|
||||
const restoreKeys = utils.getInputAsArray(constants_1.Inputs.RestoreKeys);
|
||||
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
|
||||
required: true
|
||||
});
|
||||
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
|
||||
if (!cacheKey) {
|
||||
core.info(`Cache not found for input keys: ${[
|
||||
primaryKey,
|
||||
...restoreKeys
|
||||
].join(", ")}`);
|
||||
return;
|
||||
}
|
||||
// Store the matched cache key
|
||||
utils.setCacheState(cacheKey);
|
||||
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
|
||||
utils.setCacheHitOutput(isExactKeyMatch);
|
||||
core.info(`Cache restored from key: ${cacheKey}`);
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider());
|
||||
});
|
||||
}
|
||||
run();
|
||||
|
@ -49468,7 +49140,7 @@ var util = __webpack_require__(669);
|
|||
var path = __webpack_require__(622);
|
||||
var http = __webpack_require__(605);
|
||||
var https = __webpack_require__(211);
|
||||
var parseUrl = __webpack_require__(835).parse;
|
||||
var parseUrl = __webpack_require__(414).parse;
|
||||
var fs = __webpack_require__(747);
|
||||
var Stream = __webpack_require__(794).Stream;
|
||||
var mime = __webpack_require__(779);
|
||||
|
@ -50634,9 +50306,87 @@ exports.VERSION = '1.0.4';
|
|||
/* 833 */,
|
||||
/* 834 */,
|
||||
/* 835 */
|
||||
/***/ (function(module) {
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
const utils = __importStar(__webpack_require__(443));
|
||||
function restoreImpl(stateProvider) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if (!utils.isCacheFeatureAvailable()) {
|
||||
core.setOutput(constants_1.Outputs.CacheHit, "false");
|
||||
return;
|
||||
}
|
||||
// Validate inputs, this can cause task failure
|
||||
if (!utils.isValidEvent()) {
|
||||
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported because it's not tied to a branch or tag ref.`);
|
||||
return;
|
||||
}
|
||||
const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true });
|
||||
stateProvider.setState(constants_1.State.CachePrimaryKey, primaryKey);
|
||||
const restoreKeys = utils.getInputAsArray(constants_1.Inputs.RestoreKeys);
|
||||
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
|
||||
required: true
|
||||
});
|
||||
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
|
||||
if (!cacheKey) {
|
||||
core.info(`Cache not found for input keys: ${[
|
||||
primaryKey,
|
||||
...restoreKeys
|
||||
].join(", ")}`);
|
||||
return;
|
||||
}
|
||||
// Store the matched cache key in states
|
||||
stateProvider.setState(constants_1.State.CacheMatchedKey, cacheKey);
|
||||
const isExactKeyMatch = utils.isExactKeyMatch(core.getInput(constants_1.Inputs.Key, { required: true }), cacheKey);
|
||||
core.setOutput(constants_1.Outputs.CacheHit, isExactKeyMatch.toString());
|
||||
core.info(`Cache restored from key: ${cacheKey}`);
|
||||
return cacheKey;
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.default = restoreImpl;
|
||||
|
||||
module.exports = require("url");
|
||||
|
||||
/***/ }),
|
||||
/* 836 */,
|
||||
|
@ -55771,7 +55521,7 @@ var stream = __webpack_require__(794);
|
|||
var FormData = __webpack_require__(790);
|
||||
var node_fetch = __webpack_require__(454);
|
||||
var coreTracing = __webpack_require__(263);
|
||||
var url = __webpack_require__(835);
|
||||
var url = __webpack_require__(414);
|
||||
__webpack_require__(97);
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
|
61080 dist/save-only/index.js vendored Normal file
File diff suppressed because one or more lines are too long
725 dist/save/index.js vendored
|
@ -3046,19 +3046,18 @@ exports.default = _default;
|
|||
/***/ }),
|
||||
/* 105 */,
|
||||
/* 106 */
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
||||
"use strict";
|
||||
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var tslib = __webpack_require__(640);
|
||||
|
||||
// Copyright (c) Microsoft Corporation.
|
||||
// Licensed under the MIT license.
|
||||
var listenersMap = new WeakMap();
|
||||
var abortedMap = new WeakMap();
|
||||
/// <reference path="../shims-public.d.ts" />
|
||||
const listenersMap = new WeakMap();
|
||||
const abortedMap = new WeakMap();
|
||||
/**
|
||||
* An aborter instance implements AbortSignal interface, can abort HTTP requests.
|
||||
*
|
||||
|
@ -3072,8 +3071,8 @@ var abortedMap = new WeakMap();
|
|||
* await doAsyncWork(AbortSignal.none);
|
||||
* ```
|
||||
*/
|
||||
var AbortSignal = /** @class */ (function () {
|
||||
function AbortSignal() {
|
||||
class AbortSignal {
|
||||
constructor() {
|
||||
/**
|
||||
* onabort event listener.
|
||||
*/
|
||||
|
@ -3081,74 +3080,65 @@ var AbortSignal = /** @class */ (function () {
|
|||
listenersMap.set(this, []);
|
||||
abortedMap.set(this, false);
|
||||
}
|
||||
Object.defineProperty(AbortSignal.prototype, "aborted", {
|
||||
/**
|
||||
* Status of whether aborted or not.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get: function () {
|
||||
if (!abortedMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
return abortedMap.get(this);
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
Object.defineProperty(AbortSignal, "none", {
|
||||
/**
|
||||
* Creates a new AbortSignal instance that will never be aborted.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get: function () {
|
||||
return new AbortSignal();
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
/**
|
||||
* Status of whether aborted or not.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get aborted() {
|
||||
if (!abortedMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
return abortedMap.get(this);
|
||||
}
|
||||
/**
|
||||
* Creates a new AbortSignal instance that will never be aborted.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
static get none() {
|
||||
return new AbortSignal();
|
||||
}
|
||||
/**
|
||||
* Added new "abort" event listener, only support "abort" event.
|
||||
*
|
||||
* @param _type - Only support "abort" event
|
||||
* @param listener - The listener to be added
|
||||
*/
|
||||
AbortSignal.prototype.addEventListener = function (
|
||||
addEventListener(
|
||||
// tslint:disable-next-line:variable-name
|
||||
_type, listener) {
|
||||
if (!listenersMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
var listeners = listenersMap.get(this);
|
||||
const listeners = listenersMap.get(this);
|
||||
listeners.push(listener);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Remove "abort" event listener, only support "abort" event.
|
||||
*
|
||||
* @param _type - Only support "abort" event
|
||||
* @param listener - The listener to be removed
|
||||
*/
|
||||
AbortSignal.prototype.removeEventListener = function (
|
||||
removeEventListener(
|
||||
// tslint:disable-next-line:variable-name
|
||||
_type, listener) {
|
||||
if (!listenersMap.has(this)) {
|
||||
throw new TypeError("Expected `this` to be an instance of AbortSignal.");
|
||||
}
|
||||
var listeners = listenersMap.get(this);
|
||||
var index = listeners.indexOf(listener);
|
||||
const listeners = listenersMap.get(this);
|
||||
const index = listeners.indexOf(listener);
|
||||
if (index > -1) {
|
||||
listeners.splice(index, 1);
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Dispatches a synthetic event to the AbortSignal.
|
||||
*/
|
||||
AbortSignal.prototype.dispatchEvent = function (_event) {
|
||||
dispatchEvent(_event) {
|
||||
throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
|
||||
};
|
||||
return AbortSignal;
|
||||
}());
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.
|
||||
* Will try to trigger abort event for all linked AbortSignal nodes.
|
||||
|
@ -3166,12 +3156,12 @@ function abortSignal(signal) {
|
|||
if (signal.onabort) {
|
||||
signal.onabort.call(signal);
|
||||
}
|
||||
var listeners = listenersMap.get(signal);
|
||||
const listeners = listenersMap.get(signal);
|
||||
if (listeners) {
|
||||
// Create a copy of listeners so mutations to the array
|
||||
// (e.g. via removeListener calls) don't affect the listeners
|
||||
// we invoke.
|
||||
listeners.slice().forEach(function (listener) {
|
||||
listeners.slice().forEach((listener) => {
|
||||
listener.call(signal, { type: "abort" });
|
||||
});
|
||||
}
|
||||
|
@ -3197,15 +3187,12 @@ function abortSignal(signal) {
|
|||
* }
|
||||
* ```
|
||||
*/
|
||||
var AbortError = /** @class */ (function (_super) {
|
||||
tslib.__extends(AbortError, _super);
|
||||
function AbortError(message) {
|
||||
var _this = _super.call(this, message) || this;
|
||||
_this.name = "AbortError";
|
||||
return _this;
|
||||
class AbortError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = "AbortError";
|
||||
}
|
||||
return AbortError;
|
||||
}(Error));
|
||||
}
|
||||
/**
|
||||
* An AbortController provides an AbortSignal and the associated controls to signal
|
||||
* that an asynchronous operation should be aborted.
|
||||
|
@ -3240,10 +3227,9 @@ var AbortError = /** @class */ (function (_super) {
|
|||
* await doAsyncWork(aborter.withTimeout(25 * 1000));
|
||||
* ```
|
||||
*/
|
||||
var AbortController = /** @class */ (function () {
|
||||
class AbortController {
|
||||
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
|
||||
function AbortController(parentSignals) {
|
||||
var _this = this;
|
||||
constructor(parentSignals) {
|
||||
this._signal = new AbortSignal();
|
||||
if (!parentSignals) {
|
||||
return;
|
||||
|
@ -3253,8 +3239,7 @@ var AbortController = /** @class */ (function () {
|
|||
// eslint-disable-next-line prefer-rest-params
|
||||
parentSignals = arguments;
|
||||
}
|
||||
for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {
|
||||
var parentSignal = parentSignals_1[_i];
|
||||
for (const parentSignal of parentSignals) {
|
||||
// if the parent signal has already had abort() called,
|
||||
// then call abort on this signal as well.
|
||||
if (parentSignal.aborted) {
|
||||
|
@ -3262,47 +3247,42 @@ var AbortController = /** @class */ (function () {
|
|||
}
|
||||
else {
|
||||
// when the parent signal aborts, this signal should as well.
|
||||
parentSignal.addEventListener("abort", function () {
|
||||
_this.abort();
|
||||
parentSignal.addEventListener("abort", () => {
|
||||
this.abort();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Object.defineProperty(AbortController.prototype, "signal", {
|
||||
/**
|
||||
* The AbortSignal associated with this controller that will signal aborted
|
||||
* when the abort method is called on this controller.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get: function () {
|
||||
return this._signal;
|
||||
},
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
});
|
||||
/**
|
||||
* The AbortSignal associated with this controller that will signal aborted
|
||||
* when the abort method is called on this controller.
|
||||
*
|
||||
* @readonly
|
||||
*/
|
||||
get signal() {
|
||||
return this._signal;
|
||||
}
|
||||
/**
|
||||
* Signal that any operations passed this controller's associated abort signal
|
||||
* to cancel any remaining work and throw an `AbortError`.
|
||||
*/
|
||||
AbortController.prototype.abort = function () {
|
||||
abort() {
|
||||
abortSignal(this._signal);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Creates a new AbortSignal instance that will abort after the provided ms.
|
||||
* @param ms - Elapsed time in milliseconds to trigger an abort.
|
||||
*/
|
||||
AbortController.timeout = function (ms) {
|
||||
var signal = new AbortSignal();
|
||||
var timer = setTimeout(abortSignal, ms, signal);
|
||||
static timeout(ms) {
|
||||
const signal = new AbortSignal();
|
||||
const timer = setTimeout(abortSignal, ms, signal);
|
||||
// Prevent the active Timer from keeping the Node.js event loop active.
|
||||
if (typeof timer.unref === "function") {
|
||||
timer.unref();
|
||||
}
|
||||
return signal;
|
||||
};
|
||||
return AbortController;
|
||||
}());
|
||||
}
|
||||
}
|
||||
|
||||
exports.AbortController = AbortController;
|
||||
exports.AbortError = AbortError;
|
||||
|
@ -4970,11 +4950,13 @@ var Inputs;
|
|||
Inputs["Key"] = "key";
|
||||
Inputs["Path"] = "path";
|
||||
Inputs["RestoreKeys"] = "restore-keys";
|
||||
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
||||
Inputs["UploadChunkSize"] = "upload-chunk-size"; // Input for cache, save action
|
||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||
var Outputs;
|
||||
(function (Outputs) {
|
||||
Outputs["CacheHit"] = "cache-hit";
|
||||
Outputs["CachePrimaryKey"] = "cache-primary-key";
|
||||
Outputs["CacheMatchedKey"] = "cache-matched-key"; // Output from restore action
|
||||
})(Outputs = exports.Outputs || (exports.Outputs = {}));
|
||||
var State;
|
||||
(function (State) {
|
||||
|
@ -9344,7 +9326,80 @@ function expand(str, isTop) {
|
|||
/***/ }),
|
||||
/* 307 */,
|
||||
/* 308 */,
|
||||
/* 309 */,
|
||||
/* 309 */
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NullStateProvider = exports.StateProvider = void 0;
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
class StateProviderBase {
|
||||
constructor() {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function
|
||||
this.setState = (key, value) => { };
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
this.getState = (key) => "";
|
||||
}
|
||||
getCacheState() {
|
||||
const cacheKey = this.getState(constants_1.State.CacheMatchedKey);
|
||||
if (cacheKey) {
|
||||
core.debug(`Cache state/key: ${cacheKey}`);
|
||||
return cacheKey;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
class StateProvider extends StateProviderBase {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.setState = core.saveState;
|
||||
this.getState = core.getState;
|
||||
}
|
||||
}
|
||||
exports.StateProvider = StateProvider;
|
||||
class NullStateProvider extends StateProviderBase {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this.stateToOutputMap = new Map([
|
||||
[constants_1.State.CacheMatchedKey, constants_1.Outputs.CacheMatchedKey],
|
||||
[constants_1.State.CachePrimaryKey, constants_1.Outputs.CachePrimaryKey]
|
||||
]);
|
||||
this.setState = (key, value) => {
|
||||
core.setOutput(this.stateToOutputMap.get(key), value);
|
||||
};
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
this.getState = (key) => "";
|
||||
}
|
||||
}
|
||||
exports.NullStateProvider = NullStateProvider;
|
||||
|
||||
|
||||
/***/ }),
|
||||
/* 310 */,
|
||||
/* 311 */,
|
||||
/* 312 */
|
||||
|
@ -38422,7 +38477,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
|
||||
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0;
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
|
@ -38438,29 +38493,6 @@ function isExactKeyMatch(key, cacheKey) {
|
|||
}) === 0);
|
||||
}
|
||||
exports.isExactKeyMatch = isExactKeyMatch;
|
||||
function setCacheState(state) {
|
||||
core.saveState(constants_1.State.CacheMatchedKey, state);
|
||||
}
|
||||
exports.setCacheState = setCacheState;
|
||||
function setCacheHitOutput(isCacheHit) {
|
||||
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
|
||||
}
|
||||
exports.setCacheHitOutput = setCacheHitOutput;
|
||||
function setOutputAndState(key, cacheKey) {
|
||||
setCacheHitOutput(isExactKeyMatch(key, cacheKey));
|
||||
// Store the matched cache key if it exists
|
||||
cacheKey && setCacheState(cacheKey);
|
||||
}
|
||||
exports.setOutputAndState = setOutputAndState;
|
||||
function getCacheState() {
|
||||
const cacheKey = core.getState(constants_1.State.CacheMatchedKey);
|
||||
if (cacheKey) {
|
||||
core.debug(`Cache state/key: ${cacheKey}`);
|
||||
return cacheKey;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
exports.getCacheState = getCacheState;
|
||||
function logWarning(message) {
|
||||
const warningPrefix = "[warning]";
|
||||
core.info(`${warningPrefix}${message}`);
|
||||
|
@ -40892,7 +40924,98 @@ Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: functi
|
|||
//# sourceMappingURL=core.js.map
|
||||
|
||||
/***/ }),
|
||||
/* 471 */,
|
||||
/* 471 */
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
const utils = __importStar(__webpack_require__(443));
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||
// throw an uncaught exception. Instead of failing this action, just warn.
|
||||
process.on("uncaughtException", e => utils.logWarning(e.message));
|
||||
function saveImpl(stateProvider) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let cacheId = -1;
|
||||
try {
|
||||
if (!utils.isCacheFeatureAvailable()) {
|
||||
return;
|
||||
}
|
||||
if (!utils.isValidEvent()) {
|
||||
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported because it's not tied to a branch or tag ref.`);
|
||||
return;
|
||||
}
|
||||
// If restore has stored a primary key in state, reuse that
|
||||
// Else re-evaluate from inputs
|
||||
const primaryKey = stateProvider.getState(constants_1.State.CachePrimaryKey) ||
|
||||
core.getInput(constants_1.Inputs.Key);
|
||||
if (!primaryKey) {
|
||||
utils.logWarning(`Key is not specified.`);
|
||||
return;
|
||||
}
|
||||
// If matched restore key is same as primary key, then do not save cache
|
||||
// NO-OP in case of SaveOnly action
|
||||
const restoredKey = stateProvider.getCacheState();
|
||||
if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
|
||||
core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
|
||||
return;
|
||||
}
|
||||
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
|
||||
required: true
|
||||
});
|
||||
cacheId = yield cache.saveCache(cachePaths, primaryKey, {
|
||||
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
|
||||
});
|
||||
if (cacheId != -1) {
|
||||
core.info(`Cache saved with key: ${primaryKey}`);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
utils.logWarning(error.message);
|
||||
}
|
||||
return cacheId;
|
||||
});
|
||||
}
|
||||
exports.default = saveImpl;
|
||||
|
||||
|
||||
/***/ }),
|
||||
/* 472 */,
|
||||
/* 473 */,
|
||||
/* 474 */,
|
||||
|
@ -44126,318 +44249,7 @@ exports.default = _default;
|
|||
|
||||
|
||||
/***/ }),
|
||||
/* 640 */
|
||||
/***/ (function(module) {
|
||||
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THIS SOFTWARE.
|
||||
***************************************************************************** */
|
||||
/* global global, define, System, Reflect, Promise */
|
||||
var __extends;
|
||||
var __assign;
|
||||
var __rest;
|
||||
var __decorate;
|
||||
var __param;
|
||||
var __metadata;
|
||||
var __awaiter;
|
||||
var __generator;
|
||||
var __exportStar;
|
||||
var __values;
|
||||
var __read;
|
||||
var __spread;
|
||||
var __spreadArrays;
|
||||
var __spreadArray;
|
||||
var __await;
|
||||
var __asyncGenerator;
|
||||
var __asyncDelegator;
|
||||
var __asyncValues;
|
||||
var __makeTemplateObject;
|
||||
var __importStar;
|
||||
var __importDefault;
|
||||
var __classPrivateFieldGet;
|
||||
var __classPrivateFieldSet;
|
||||
var __createBinding;
|
||||
(function (factory) {
|
||||
var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
|
||||
if (typeof define === "function" && define.amd) {
|
||||
define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
|
||||
}
|
||||
else if ( true && typeof module.exports === "object") {
|
||||
factory(createExporter(root, createExporter(module.exports)));
|
||||
}
|
||||
else {
|
||||
factory(createExporter(root));
|
||||
}
|
||||
function createExporter(exports, previous) {
|
||||
if (exports !== root) {
|
||||
if (typeof Object.create === "function") {
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
}
|
||||
else {
|
||||
exports.__esModule = true;
|
||||
}
|
||||
}
|
||||
return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
|
||||
}
|
||||
})
|
||||
(function (exporter) {
|
||||
var extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
|
||||
|
||||
__extends = function (d, b) {
|
||||
if (typeof b !== "function" && b !== null)
|
||||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
|
||||
__assign = Object.assign || function (t) {
|
||||
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||||
s = arguments[i];
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
__rest = function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
|
||||
__decorate = function (decorators, target, key, desc) {
|
||||
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
||||
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
||||
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
||||
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
||||
};
|
||||
|
||||
__param = function (paramIndex, decorator) {
|
||||
return function (target, key) { decorator(target, key, paramIndex); }
|
||||
};
|
||||
|
||||
__metadata = function (metadataKey, metadataValue) {
|
||||
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
|
||||
};
|
||||
|
||||
__awaiter = function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
|
||||
__generator = function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
|
||||
__exportStar = function(m, o) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
|
||||
};
|
||||
|
||||
__createBinding = Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
});
|
||||
|
||||
__values = function (o) {
|
||||
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
|
||||
if (m) return m.call(o);
|
||||
if (o && typeof o.length === "number") return {
|
||||
next: function () {
|
||||
if (o && i >= o.length) o = void 0;
|
||||
return { value: o && o[i++], done: !o };
|
||||
}
|
||||
};
|
||||
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
|
||||
};
|
||||
|
||||
__read = function (o, n) {
|
||||
var m = typeof Symbol === "function" && o[Symbol.iterator];
|
||||
if (!m) return o;
|
||||
var i = m.call(o), r, ar = [], e;
|
||||
try {
|
||||
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
|
||||
}
|
||||
catch (error) { e = { error: error }; }
|
||||
finally {
|
||||
try {
|
||||
if (r && !r.done && (m = i["return"])) m.call(i);
|
||||
}
|
||||
finally { if (e) throw e.error; }
|
||||
}
|
||||
return ar;
|
||||
};
|
||||
|
||||
/** @deprecated */
|
||||
__spread = function () {
|
||||
for (var ar = [], i = 0; i < arguments.length; i++)
|
||||
ar = ar.concat(__read(arguments[i]));
|
||||
return ar;
|
||||
};
|
||||
|
||||
/** @deprecated */
|
||||
__spreadArrays = function () {
|
||||
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
|
||||
for (var r = Array(s), k = 0, i = 0; i < il; i++)
|
||||
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
|
||||
r[k] = a[j];
|
||||
return r;
|
||||
};
|
||||
|
||||
__spreadArray = function (to, from, pack) {
|
||||
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
|
||||
if (ar || !(i in from)) {
|
||||
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
|
||||
ar[i] = from[i];
|
||||
}
|
||||
}
|
||||
return to.concat(ar || Array.prototype.slice.call(from));
|
||||
};
|
||||
|
||||
__await = function (v) {
|
||||
return this instanceof __await ? (this.v = v, this) : new __await(v);
|
||||
};
|
||||
|
||||
__asyncGenerator = function (thisArg, _arguments, generator) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var g = generator.apply(thisArg, _arguments || []), i, q = [];
|
||||
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
|
||||
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
|
||||
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
|
||||
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
|
||||
function fulfill(value) { resume("next", value); }
|
||||
function reject(value) { resume("throw", value); }
|
||||
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||
};
|
||||
|
||||
__asyncDelegator = function (o) {
|
||||
var i, p;
|
||||
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
|
||||
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
|
||||
};
|
||||
|
||||
__asyncValues = function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
|
||||
__makeTemplateObject = function (cooked, raw) {
|
||||
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
|
||||
return cooked;
|
||||
};
|
||||
|
||||
var __setModuleDefault = Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
};
|
||||
|
||||
__importStar = function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
|
||||
__importDefault = function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
|
||||
__classPrivateFieldGet = function (receiver, state, kind, f) {
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||
};
|
||||
|
||||
__classPrivateFieldSet = function (receiver, state, value, kind, f) {
|
||||
if (kind === "m") throw new TypeError("Private method is not writable");
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||||
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
||||
};
|
||||
|
||||
exporter("__extends", __extends);
|
||||
exporter("__assign", __assign);
|
||||
exporter("__rest", __rest);
|
||||
exporter("__decorate", __decorate);
|
||||
exporter("__param", __param);
|
||||
exporter("__metadata", __metadata);
|
||||
exporter("__awaiter", __awaiter);
|
||||
exporter("__generator", __generator);
|
||||
exporter("__exportStar", __exportStar);
|
||||
exporter("__createBinding", __createBinding);
|
||||
exporter("__values", __values);
|
||||
exporter("__read", __read);
|
||||
exporter("__spread", __spread);
|
||||
exporter("__spreadArrays", __spreadArrays);
|
||||
exporter("__spreadArray", __spreadArray);
|
||||
exporter("__await", __await);
|
||||
exporter("__asyncGenerator", __asyncGenerator);
|
||||
exporter("__asyncDelegator", __asyncDelegator);
|
||||
exporter("__asyncValues", __asyncValues);
|
||||
exporter("__makeTemplateObject", __makeTemplateObject);
|
||||
exporter("__importStar", __importStar);
|
||||
exporter("__importDefault", __importDefault);
|
||||
exporter("__classPrivateFieldGet", __classPrivateFieldGet);
|
||||
exporter("__classPrivateFieldSet", __classPrivateFieldSet);
|
||||
});
|
||||
|
||||
|
||||
/***/ }),
|
||||
/* 640 */,
|
||||
/* 641 */,
|
||||
/* 642 */,
|
||||
/* 643 */,
|
||||
|
@ -47268,29 +47080,6 @@ exports.default = _default;
|
|||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
|
@ -47300,49 +47089,15 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
const utils = __importStar(__webpack_require__(443));
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||
// throw an uncaught exception. Instead of failing this action, just warn.
|
||||
process.on("uncaughtException", e => utils.logWarning(e.message));
|
||||
const saveImpl_1 = __importDefault(__webpack_require__(471));
|
||||
const stateProvider_1 = __webpack_require__(309);
|
||||
function run() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if (!utils.isCacheFeatureAvailable()) {
|
||||
return;
|
||||
}
|
||||
if (!utils.isValidEvent()) {
|
||||
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported because it's not tied to a branch or tag ref.`);
|
||||
return;
|
||||
}
|
||||
const state = utils.getCacheState();
|
||||
// Inputs are re-evaluted before the post action, so we want the original key used for restore
|
||||
const primaryKey = core.getState(constants_1.State.CachePrimaryKey);
|
||||
if (!primaryKey) {
|
||||
utils.logWarning(`Error retrieving key from state.`);
|
||||
return;
|
||||
}
|
||||
if (utils.isExactKeyMatch(primaryKey, state)) {
|
||||
core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
|
||||
return;
|
||||
}
|
||||
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
|
||||
required: true
|
||||
});
|
||||
const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
|
||||
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
|
||||
});
|
||||
if (cacheId != -1) {
|
||||
core.info(`Cache saved with key: ${primaryKey}`);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
utils.logWarning(error.message);
|
||||
}
|
||||
yield (0, saveImpl_1.default)(new stateProvider_1.StateProvider());
|
||||
});
|
||||
}
|
||||
run();
|
||||
|
|
|
@ -317,7 +317,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
|
|||
### Bash shell
|
||||
```yaml
|
||||
- name: Get npm cache directory
|
||||
id: npm-cache
|
||||
id: npm-cache-dir
|
||||
shell: bash
|
||||
run: echo "dir=$(npm config get cache)" >> ${GITHUB_OUTPUT}
|
||||
```
|
||||
|
@ -325,7 +325,7 @@ After [deprecation](https://github.blog/changelog/2022-10-11-github-actions-depr
|
|||
### PWSH shell
|
||||
```yaml
|
||||
- name: Get npm cache directory
|
||||
id: npm-cache
|
||||
id: npm-cache-dir
|
||||
shell: pwsh
|
||||
run: echo "dir=$(npm config get cache)" >> ${env:GITHUB_OUTPUT}
|
||||
```
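For context, a later step could consume this output when configuring `actions/cache`. The following is a minimal sketch, assuming the `npm-cache-dir` step id from the hunk above; the key and restore-keys shown are illustrative, not prescribed by this commit:

```yaml
- uses: actions/cache@v3
  id: npm-cache  # hypothetical id for the cache step itself
  with:
    # reuse the directory emitted by the npm-cache-dir step above
    path: ${{ steps.npm-cache-dir.outputs.dir }}
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-node-
```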
|
||||
|
|
38
package-lock.json
generated
|
@ -1,15 +1,15 @@
|
|||
{
|
||||
"name": "cache",
|
||||
"version": "3.0.11",
|
||||
"version": "3.2.0",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "cache",
|
||||
"version": "3.0.11",
|
||||
"version": "3.2.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/cache": "^3.0.5",
|
||||
"@actions/cache": "^3.0.6",
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/io": "^1.1.2"
|
||||
|
@ -36,15 +36,16 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@actions/cache": {
|
||||
"version": "3.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
|
||||
"integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
|
||||
"version": "3.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz",
|
||||
"integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.0.1",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"@actions/io": "^1.0.1",
|
||||
"@azure/abort-controller": "^1.1.0",
|
||||
"@azure/ms-rest-js": "^2.6.0",
|
||||
"@azure/storage-blob": "^12.8.0",
|
||||
"semver": "^6.1.0",
|
||||
|
@ -111,14 +112,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@azure/abort-controller": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz",
|
||||
"integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
|
||||
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.0.0"
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/abort-controller/node_modules/tslib": {
|
||||
|
@ -9721,15 +9722,16 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@actions/cache": {
|
||||
"version": "3.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
|
||||
"integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
|
||||
"version": "3.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz",
|
||||
"integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==",
|
||||
"requires": {
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.0.1",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"@actions/io": "^1.0.1",
|
||||
"@azure/abort-controller": "^1.1.0",
|
||||
"@azure/ms-rest-js": "^2.6.0",
|
||||
"@azure/storage-blob": "^12.8.0",
|
||||
"semver": "^6.1.0",
|
||||
|
@ -9792,11 +9794,11 @@
|
|||
}
|
||||
},
|
||||
"@azure/abort-controller": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz",
|
||||
"integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
|
||||
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
|
||||
"requires": {
|
||||
"tslib": "^2.0.0"
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"tslib": {
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
{
|
||||
"name": "cache",
|
||||
"version": "3.0.11",
|
||||
"version": "3.2.0",
|
||||
"private": true,
|
||||
"description": "Cache dependencies and build outputs",
|
||||
"main": "dist/restore/index.js",
|
||||
"scripts": {
|
||||
"build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
|
||||
"build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/restore-only src/restoreOnly.ts && ncc build -o dist/save-only src/saveOnly.ts",
|
||||
"test": "tsc --noEmit && jest --coverage",
|
||||
"lint": "eslint **/*.ts --cache",
|
||||
"format": "prettier --write **/*.ts",
|
||||
|
@ -23,7 +23,7 @@
|
|||
"author": "GitHub",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/cache": "^3.0.5",
|
||||
"@actions/cache": "^3.0.6",
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/io": "^1.1.2"
|
||||
|
|
131
restore/README.md
Normal file
|
@ -0,0 +1,131 @@
|
|||
# Restore action
|
||||
|
||||
The restore action, as the name suggests, restores a cache. It acts similar to the `cache` action except that it doesn't have a post step to save the cache. This action gives you granular control to restore a cache without necessarily saving it. It accepts the same set of inputs as the `cache` action.
|
||||
|
||||
## Inputs
|
||||
|
||||
* `path` - A list of files, directories, and wildcard patterns to cache and restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns.
|
||||
* `key` - The key that was used while saving the cache, used here to restore it
|
||||
* `restore-keys` - An ordered list of prefix-matched keys to use for restoring a stale cache if no cache hit occurred for `key`.
|
||||
|
||||
## Outputs
|
||||
|
||||
* `cache-hit` - A boolean value to indicate an exact match was found for the key.
|
||||
* `cache-primary-key` - Cache primary key passed in the input to use in subsequent steps of the workflow.
|
||||
* `cache-matched-key` - Key of the cache that was restored; it can either be the primary key on a cache hit, or a partial/complete match of one of the restore keys.
|
||||
|
||||
> **Note**
|
||||
`cache-hit` will be set to `true` only when a cache hit occurs for an exact `key` match. For a partial key match via `restore-keys` or a cache miss, it will be set to `false`.
|
||||
|
||||
### Environment Variables
|
||||
* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `60`). The download of a cache segment is aborted if it does not complete within this time; see the sketch below. [Read more](https://github.com/actions/cache/blob/main/workarounds.md#cache-segment-restore-timeout)
|
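A minimal sketch of overriding this timeout on a restore step, assuming a 10-minute limit is wanted (the value, path, and key are illustrative):

```yaml
- uses: actions/cache/restore@v3
  env:
    SEGMENT_DOWNLOAD_TIMEOUT_MINS: 10  # abort a segment download after 10 minutes
  with:
    path: path/to/dependencies
    key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
```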
||||
|
||||
## Use cases
|
||||
|
||||
As this is a newly introduced action intended to give users more control in their workflows, below are some use cases where it can be used.
|
||||
|
||||
### Only restore cache
|
||||
|
||||
If you use another workflow to create and save a cache that is reused by other jobs in your repository, this action takes care of restore-only needs.
|
||||
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/cache/restore@v3
|
||||
id: cache
|
||||
with:
|
||||
path: path/to/dependencies
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||
|
||||
- name: Install Dependencies
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
run: /install.sh
|
||||
|
||||
- name: Build
|
||||
run: /build.sh
|
||||
|
||||
- name: Publish package to public
|
||||
run: /publish.sh
|
||||
```
|
||||
|
||||
Once the cache is restored, this action won't run any post step for post-processing (unlike `actions/cache`), and the rest of the workflow will run as usual.
|
||||
|
||||
### Save intermediate private build artifacts
|
||||
|
||||
In multi-module projects, where the built artifact of one module needs to be reused in subsequent child modules, rebuilding the parent module with every build can be avoided. The `actions/cache` or `actions/cache/save` action can be used to build and save the parent module artifact once, and it can be restored multiple times while building the child modules.
|
||||
|
||||
|
||||
#### Step 1 - Build the parent module and save it
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build
|
||||
run: /build-parent-module.sh
|
||||
|
||||
- uses: actions/cache/save@v3
|
||||
id: cache
|
||||
with:
|
||||
path: path/to/dependencies
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||
```
|
||||
|
||||
#### Step 2 - Restore the built artifact from cache using the same key and path
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/cache/restore@v3
|
||||
id: cache
|
||||
with:
|
||||
path: path/to/dependencies
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||
|
||||
- name: Install Dependencies
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
run: /install.sh
|
||||
|
||||
- name: Build
|
||||
run: /build-child-module.sh
|
||||
|
||||
- name: Publish package to public
|
||||
run: /publish.sh
|
||||
```
|
||||
|
||||
### Exit workflow on cache miss
|
||||
|
||||
You can use the output of this action to exit the workflow on a cache miss. This way you can restrict your workflow to only initiate the build when a `cache-hit` occurs, in other words, when a cache with the exact key is found.
|
||||
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/cache/restore@v3
|
||||
id: cache
|
||||
with:
|
||||
path: path/to/dependencies
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||
|
||||
- name: Check cache hit
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
run: exit 1
|
||||
|
||||
- name: Build
|
||||
run: /build.sh
|
||||
```
|
||||
|
||||
## Tips
|
||||
|
||||
|
||||
#### Reusing primary key and restored key in the save action
|
||||
|
||||
Usually you may want to use the same `key` in both the `actions/cache/restore` and `actions/cache/save` actions. To achieve this, use the `outputs` from the restore action to reuse the same primary key (or the key of the cache that was restored), as shown in the sketch below.
|
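A minimal sketch, assuming a restore step with id `restore-cache` and the same `path` in both actions (the path and key shown are illustrative):

```yaml
- uses: actions/cache/restore@v3
  id: restore-cache
  with:
    path: path/to/dependencies
    key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}

# ... build steps ...

- uses: actions/cache/save@v3
  with:
    path: path/to/dependencies
    # reuse the primary key that the restore step resolved
    key: ${{ steps.restore-cache.outputs.cache-primary-key }}
```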
||||
|
||||
#### Using restore action outputs to make save action behave just like the cache action
|
||||
|
||||
The outputs `cache-primary-key` and `cache-matched-key` can be used to check whether the restored cache is the same as the given primary key. Alternatively, the `cache-hit` output can be used to check whether the restore was an exact match or only a partial restore, as sketched below.
|
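A minimal sketch of skipping the save on an exact key match, assuming a prior restore step with id `restore-cache` (the path is illustrative):

```yaml
- uses: actions/cache/save@v3
  # only save when the restore was not an exact hit, mirroring the cache action's post step
  if: steps.restore-cache.outputs.cache-hit != 'true'
  with:
    path: path/to/dependencies
    key: ${{ steps.restore-cache.outputs.cache-primary-key }}
```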
||||
|
||||
#### Ensuring proper restores and save happen across the actions
|
||||
|
||||
It is very important to use the same `key` and `path` that were used by either `actions/cache` or `actions/cache/save` while saving the cache. Learn more about cache key [naming](https://github.com/actions/cache#creating-a-cache-key) and [versioning](https://github.com/actions/cache#cache-version) here.
|
26
restore/action.yml
Normal file
|
@ -0,0 +1,26 @@
|
|||
name: 'Restore Cache'
|
||||
description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
|
||||
author: 'GitHub'
|
||||
inputs:
|
||||
path:
|
||||
description: 'A list of files, directories, and wildcard patterns to restore'
|
||||
required: true
|
||||
key:
|
||||
description: 'An explicit key for restoring the cache'
|
||||
required: true
|
||||
restore-keys:
|
||||
description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
|
||||
required: false
|
||||
outputs:
|
||||
cache-hit:
|
||||
description: 'A boolean value to indicate an exact match was found for the primary key'
|
||||
cache-primary-key:
|
||||
description: 'A resolved cache key for which cache match was attempted'
|
||||
cache-matched-key:
|
||||
description: 'Key of the cache that was restored, it could either be the primary key on cache-hit or a partial/complete match of one of the restore keys'
|
||||
runs:
|
||||
using: 'node16'
|
||||
main: '../dist/restore-only/index.js'
|
||||
branding:
|
||||
icon: 'archive'
|
||||
color: 'gray-dark'
|
84
save/README.md
Normal file
|
@ -0,0 +1,84 @@
|
|||
# Save action
|
||||
|
||||
The save action, as the name suggests, saves a cache. It acts similar to the `cache` action except that it doesn't necessarily first do a restore. This action gives you granular control to save a cache without necessarily restoring it, or to save a cache at any point in the workflow job rather than only in the post phase.
|
||||
|
||||
## Inputs
|
||||
|
||||
* `key` - An explicit key for saving the cache
|
||||
* `path` - A list of files, directories, and wildcard patterns to cache
|
||||
* `upload-chunk-size` - The chunk size used to split up large files during upload, in bytes
|
||||
|
||||
## Outputs
|
||||
|
||||
This action has no outputs.
|
||||
|
||||
## Use cases
|
||||
|
||||
|
||||
### Only save cache
|
||||
|
||||
If you use separate jobs to generate common artifacts and share them across different jobs, this action covers save-only needs.
|
||||
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Dependencies
|
||||
|
||||
run: /install.sh
|
||||
|
||||
- name: Build common artifacts
|
||||
run: /build.sh
|
||||
|
||||
- uses: actions/cache/save@v3
|
||||
id: cache
|
||||
with:
|
||||
path: path/to/dependencies
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||
```
|
||||
|
||||
### Re-evaluate cache key while saving
|
||||
|
||||
With the save action, the key can be re-evaluated while executing the action. This helps in cases where lockfiles are generated during the build.
|
||||
|
||||
Let's say we have a restore step that computes the key at runtime:
|
||||
|
||||
```yaml
|
||||
uses: actions/cache/restore@v3
|
||||
id: restore-cache
|
||||
with:
|
||||
key: cache-${{ hashFiles('**/lockfiles') }}
|
||||
```
|
||||
|
||||
Case 1: where a user wants to reuse the key as it is
|
||||
```yaml
|
||||
uses: actions/cache/save@v3
|
||||
with:
|
||||
key: ${{ steps.restore-cache.outputs.cache-primary-key }}
|
||||
```
|
||||
|
||||
Case 2: where the user wants to re-evaluate the key
|
||||
```yaml
|
||||
uses: actions/cache/save@v3
|
||||
with:
|
||||
key: npm-cache-${{ hashFiles('package-lock.json') }}
|
||||
```
|
||||
|
||||
### Always save cache
|
||||
|
||||
There are instances where flaky test cases fail the entire workflow, which is frustrating when a build has run for hours and the cache could not be saved because the workflow failed partway through. For such use cases, users now have the ability to use the `actions/cache/save` action to save the cache with an `if: always()` condition. This way the cache will always be saved if it was generated, or a warning will be logged that nothing was found on the cache path. Users can also use the `if` condition to execute the `actions/cache/save` action only depending on the output of previous steps, giving them more control over when to save the cache.
|
||||
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
.
|
||||
. # restore if need be
|
||||
.
|
||||
- name: Build
|
||||
run: /build.sh
|
||||
- uses: actions/cache/save@v3
|
||||
if: always() # or any other condition to invoke the save action
|
||||
with:
|
||||
path: path/to/dependencies
|
||||
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
|
||||
```
|
19
save/action.yml
Normal file
|
@ -0,0 +1,19 @@
|
|||
name: 'Save a cache'
|
||||
description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
|
||||
author: 'GitHub'
|
||||
inputs:
|
||||
path:
|
||||
description: 'A list of files, directories, and wildcard patterns to cache'
|
||||
required: true
|
||||
key:
|
||||
description: 'An explicit key for saving the cache'
|
||||
required: true
|
||||
upload-chunk-size:
|
||||
description: 'The chunk size used to split up large files during upload, in bytes'
|
||||
required: false
|
||||
runs:
|
||||
using: 'node16'
|
||||
main: '../dist/save-only/index.js'
|
||||
branding:
|
||||
icon: 'archive'
|
||||
color: 'gray-dark'
|
|
@ -1,12 +1,14 @@
|
|||
export enum Inputs {
|
||||
Key = "key",
|
||||
Path = "path",
|
||||
RestoreKeys = "restore-keys",
|
||||
UploadChunkSize = "upload-chunk-size"
|
||||
Key = "key", // Input for cache, restore, save action
|
||||
Path = "path", // Input for cache, restore, save action
|
||||
RestoreKeys = "restore-keys", // Input for cache, restore action
|
||||
UploadChunkSize = "upload-chunk-size" // Input for cache, save action
|
||||
}
|
||||
|
||||
export enum Outputs {
|
||||
CacheHit = "cache-hit"
|
||||
CacheHit = "cache-hit", // Output from cache, restore action
|
||||
CachePrimaryKey = "cache-primary-key", // Output from restore action
|
||||
CacheMatchedKey = "cache-matched-key" // Output from restore action
|
||||
}
|
||||
|
||||
export enum State {
|
||||
|
|
|
@ -1,60 +1,8 @@
|
|||
import * as cache from "@actions/cache";
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import { Events, Inputs, State } from "./constants";
|
||||
import * as utils from "./utils/actionUtils";
|
||||
import restoreImpl from "./restoreImpl";
|
||||
import { StateProvider } from "./stateProvider";
|
||||
|
||||
async function run(): Promise<void> {
|
||||
try {
|
||||
if (!utils.isCacheFeatureAvailable()) {
|
||||
utils.setCacheHitOutput(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate inputs, this can cause task failure
|
||||
if (!utils.isValidEvent()) {
|
||||
utils.logWarning(
|
||||
`Event Validation Error: The event type ${
|
||||
process.env[Events.Key]
|
||||
} is not supported because it's not tied to a branch or tag ref.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const primaryKey = core.getInput(Inputs.Key, { required: true });
|
||||
core.saveState(State.CachePrimaryKey, primaryKey);
|
||||
|
||||
const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
|
||||
const cachePaths = utils.getInputAsArray(Inputs.Path, {
|
||||
required: true
|
||||
});
|
||||
|
||||
const cacheKey = await cache.restoreCache(
|
||||
cachePaths,
|
||||
primaryKey,
|
||||
restoreKeys
|
||||
);
|
||||
|
||||
if (!cacheKey) {
|
||||
core.info(
|
||||
`Cache not found for input keys: ${[
|
||||
primaryKey,
|
||||
...restoreKeys
|
||||
].join(", ")}`
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Store the matched cache key
|
||||
utils.setCacheState(cacheKey);
|
||||
|
||||
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
|
||||
utils.setCacheHitOutput(isExactKeyMatch);
|
||||
core.info(`Cache restored from key: ${cacheKey}`);
|
||||
} catch (error: unknown) {
|
||||
core.setFailed((error as Error).message);
|
||||
}
|
||||
await restoreImpl(new StateProvider());
|
||||
}
|
||||
|
||||
run();
|
||||
|
|
69
src/restoreImpl.ts
Normal file
|
@ -0,0 +1,69 @@
|
|||
import * as cache from "@actions/cache";
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import { Events, Inputs, Outputs, State } from "./constants";
|
||||
import { IStateProvider } from "./stateProvider";
|
||||
import * as utils from "./utils/actionUtils";
|
||||
|
||||
async function restoreImpl(
|
||||
stateProvider: IStateProvider
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
if (!utils.isCacheFeatureAvailable()) {
|
||||
core.setOutput(Outputs.CacheHit, "false");
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate inputs, this can cause task failure
|
||||
if (!utils.isValidEvent()) {
|
||||
utils.logWarning(
|
||||
`Event Validation Error: The event type ${
|
||||
process.env[Events.Key]
|
||||
} is not supported because it's not tied to a branch or tag ref.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const primaryKey = core.getInput(Inputs.Key, { required: true });
|
||||
stateProvider.setState(State.CachePrimaryKey, primaryKey);
|
||||
|
||||
const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
|
||||
const cachePaths = utils.getInputAsArray(Inputs.Path, {
|
||||
required: true
|
||||
});
|
||||
|
||||
const cacheKey = await cache.restoreCache(
|
||||
cachePaths,
|
||||
primaryKey,
|
||||
restoreKeys
|
||||
);
|
||||
|
||||
if (!cacheKey) {
|
||||
core.info(
|
||||
`Cache not found for input keys: ${[
|
||||
primaryKey,
|
||||
...restoreKeys
|
||||
].join(", ")}`
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Store the matched cache key in states
|
||||
stateProvider.setState(State.CacheMatchedKey, cacheKey);
|
||||
|
||||
const isExactKeyMatch = utils.isExactKeyMatch(
|
||||
core.getInput(Inputs.Key, { required: true }),
|
||||
cacheKey
|
||||
);
|
||||
|
||||
core.setOutput(Outputs.CacheHit, isExactKeyMatch.toString());
|
||||
core.info(`Cache restored from key: ${cacheKey}`);
|
||||
|
||||
return cacheKey;
|
||||
} catch (error: unknown) {
|
||||
core.setFailed((error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
export default restoreImpl;
|
10
src/restoreOnly.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
import restoreImpl from "./restoreImpl";
|
||||
import { NullStateProvider } from "./stateProvider";
|
||||
|
||||
async function run(): Promise<void> {
|
||||
await restoreImpl(new NullStateProvider());
|
||||
}
|
||||
|
||||
run();
|
||||
|
||||
export default run;
|
57
src/save.ts
|
@ -1,59 +1,8 @@
|
|||
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on("uncaughtException", e => utils.logWarning(e.message));
import saveImpl from "./saveImpl";
import { StateProvider } from "./stateProvider";

async function run(): Promise<void> {
    try {
        if (!utils.isCacheFeatureAvailable()) {
            return;
        }

        if (!utils.isValidEvent()) {
            utils.logWarning(
                `Event Validation Error: The event type ${
                    process.env[Events.Key]
                } is not supported because it's not tied to a branch or tag ref.`
            );
            return;
        }

        const state = utils.getCacheState();

        // Inputs are re-evaluted before the post action, so we want the original key used for restore
        const primaryKey = core.getState(State.CachePrimaryKey);
        if (!primaryKey) {
            utils.logWarning(`Error retrieving key from state.`);
            return;
        }

        if (utils.isExactKeyMatch(primaryKey, state)) {
            core.info(
                `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
            );
            return;
        }

        const cachePaths = utils.getInputAsArray(Inputs.Path, {
            required: true
        });

        const cacheId = await cache.saveCache(cachePaths, primaryKey, {
            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
        });

        if (cacheId != -1) {
            core.info(`Cache saved with key: ${primaryKey}`);
        }
    } catch (error: unknown) {
        utils.logWarning((error as Error).message);
    }
    await saveImpl(new StateProvider());
}

run();
68
src/saveImpl.ts
Normal file
@@ -0,0 +1,68 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";
import * as utils from "./utils/actionUtils";

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on("uncaughtException", e => utils.logWarning(e.message));

async function saveImpl(stateProvider: IStateProvider): Promise<number | void> {
    let cacheId = -1;
    try {
        if (!utils.isCacheFeatureAvailable()) {
            return;
        }

        if (!utils.isValidEvent()) {
            utils.logWarning(
                `Event Validation Error: The event type ${
                    process.env[Events.Key]
                } is not supported because it's not tied to a branch or tag ref.`
            );
            return;
        }

        // If restore has stored a primary key in state, reuse that
        // Else re-evaluate from inputs
        const primaryKey =
            stateProvider.getState(State.CachePrimaryKey) ||
            core.getInput(Inputs.Key);

        if (!primaryKey) {
            utils.logWarning(`Key is not specified.`);
            return;
        }

        // If matched restore key is same as primary key, then do not save cache
        // NO-OP in case of SaveOnly action
        const restoredKey = stateProvider.getCacheState();

        if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
            core.info(
                `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
            );
            return;
        }

        const cachePaths = utils.getInputAsArray(Inputs.Path, {
            required: true
        });

        cacheId = await cache.saveCache(cachePaths, primaryKey, {
            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
        });

        if (cacheId != -1) {
            core.info(`Cache saved with key: ${primaryKey}`);
        }
    } catch (error: unknown) {
        utils.logWarning((error as Error).message);
    }
    return cacheId;
}

export default saveImpl;
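The primary key above is resolved by preferring whatever the restore step stashed in state and falling back to the `key` input, which is what lets the standalone `save` action run without a prior restore in the same job. A minimal sketch of that precedence, assuming only the constants and interface introduced in this commit (the helper name is illustrative, not part of the diff):

```typescript
// Minimal sketch of the key-resolution order used in saveImpl.
import * as core from "@actions/core";

import { Inputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";

function resolvePrimaryKey(stateProvider: IStateProvider): string {
    // StateProvider: returns the key recorded by the restore step (post job).
    // NullStateProvider: getState() is always "", so the `key` input wins.
    return (
        stateProvider.getState(State.CachePrimaryKey) ||
        core.getInput(Inputs.Key)
    );
}
```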
15
src/saveOnly.ts
Normal file
@@ -0,0 +1,15 @@
import * as core from "@actions/core";

import saveImpl from "./saveImpl";
import { NullStateProvider } from "./stateProvider";

async function run(): Promise<void> {
    const cacheId = await saveImpl(new NullStateProvider());
    if (cacheId === -1) {
        core.warning(`Cache save failed.`);
    }
}

run();

export default run;
46
src/stateProvider.ts
Normal file
@@ -0,0 +1,46 @@
import * as core from "@actions/core";

import { Outputs, State } from "./constants";

export interface IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;

    getCacheState(): string | undefined;
}

class StateProviderBase implements IStateProvider {
    getCacheState(): string | undefined {
        const cacheKey = this.getState(State.CacheMatchedKey);
        if (cacheKey) {
            core.debug(`Cache state/key: ${cacheKey}`);
            return cacheKey;
        }

        return undefined;
    }

    // eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function
    setState = (key: string, value: string) => {};

    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    getState = (key: string) => "";
}

export class StateProvider extends StateProviderBase {
    setState = core.saveState;
    getState = core.getState;
}

export class NullStateProvider extends StateProviderBase {
    stateToOutputMap = new Map<string, string>([
        [State.CacheMatchedKey, Outputs.CacheMatchedKey],
        [State.CachePrimaryKey, Outputs.CachePrimaryKey]
    ]);

    setState = (key: string, value: string) => {
        core.setOutput(this.stateToOutputMap.get(key) as string, value);
    };
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    getState = (key: string) => "";
}
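These providers are the seam between the combined `cache` action, which carries keys to its post step through runner state, and the new split actions, which have no post step and surface the same values as step outputs. A rough illustration, assuming only what this hunk shows (the concrete output names come from `./constants`, which is not part of this hunk, and the key value is just an example):

```typescript
// Rough illustration, not part of the commit: the same setState call is routed
// differently by the two providers.
import { NullStateProvider, StateProvider } from "./stateProvider";
import { State } from "./constants";

// Combined `cache` action: the key is persisted to runner state so the post
// step (save) can read it back via core.getState.
new StateProvider().setState(State.CachePrimaryKey, "npm-abc123");

// Standalone restore/save actions: no post step exists, so the same call is
// mapped through stateToOutputMap and emitted with core.setOutput instead.
new NullStateProvider().setState(State.CachePrimaryKey, "npm-abc123");
```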
@@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Outputs, RefKey, State } from "../constants";
import { RefKey } from "../constants";

export function isGhes(): boolean {
    const ghUrl = new URL(
@@ -19,30 +19,6 @@ export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
    );
}

export function setCacheState(state: string): void {
    core.saveState(State.CacheMatchedKey, state);
}

export function setCacheHitOutput(isCacheHit: boolean): void {
    core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}

export function setOutputAndState(key: string, cacheKey?: string): void {
    setCacheHitOutput(isExactKeyMatch(key, cacheKey));
    // Store the matched cache key if it exists
    cacheKey && setCacheState(cacheKey);
}

export function getCacheState(): string | undefined {
    const cacheKey = core.getState(State.CacheMatchedKey);
    if (cacheKey) {
        core.debug(`Cache state/key: ${cacheKey}`);
        return cacheKey;
    }

    return undefined;
}

export function logWarning(message: string): void {
    const warningPrefix = "[warning]";
    core.info(`${warningPrefix}${message}`);
@@ -14,7 +14,7 @@ A cache today is immutable and cannot be updated. But some use cases require the
    restore-keys: |
      primes-${{ runner.os }}
```
Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits).
Please note that this will create a new cache on every run and hence will consume the cache [quota](./README.md#cache-limits).

## Use cache across feature branches
Reusing a cache across feature branches is not allowed today, in order to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However, if both feature branches are created from the default branch, a good way to share a cache between them is to ensure that the default branch has a cache; that cache will then be consumable by both feature branches.