diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index f6c5448..629953d 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -4,51 +4,130 @@ on: pull_request: branches: - master + - releases/** paths-ignore: - '**.md' push: branches: - master + - releases/** paths-ignore: - '**.md' jobs: - test: - name: Test on ${{ matrix.os }} - + # Build and unit test + build: strategy: matrix: os: [ubuntu-latest, windows-latest, macOS-latest] fail-fast: false - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v1 - - - uses: actions/setup-node@v1 + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Node.js + uses: actions/setup-node@v1 with: node-version: '12.x' - - - name: Get npm cache directory + - name: Determine npm cache directory id: npm-cache run: | echo "::set-output name=dir::$(npm config get cache)" - - - uses: actions/cache@v1 + - name: Restore npm cache + uses: actions/cache@v1 with: path: ${{ steps.npm-cache.outputs.dir }} key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | ${{ runner.os }}-node- - - run: npm ci - - name: Prettier Format Check run: npm run format-check - - name: ESLint Check run: npm run lint - - name: Build & Test run: npm run test + + # End to end save and restore + test-save: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Generate files + shell: bash + run: __tests__/create-cache-files.sh ${{ runner.os }} + - name: Save cache + uses: ./ + with: + key: test-${{ runner.os }}-${{ github.run_id }} + path: test-cache + test-restore: + needs: test-save + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Restore cache + uses: ./ + with: + key: test-${{ runner.os }}-${{ github.run_id }} + path: test-cache + - name: Verify cache + shell: bash + run: __tests__/verify-cache-files.sh ${{ runner.os }} + + # End to end with proxy + test-proxy-save: + runs-on: ubuntu-latest + container: + image: ubuntu:latest + options: --dns 127.0.0.1 + services: + squid-proxy: + image: datadog/squid:latest + ports: + - 3128:3128 + env: + https_proxy: http://squid-proxy:3128 + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Generate files + run: __tests__/create-cache-files.sh proxy + - name: Save cache + uses: ./ + with: + key: test-proxy-${{ github.run_id }} + path: test-cache + test-proxy-restore: + needs: test-proxy-save + runs-on: ubuntu-latest + container: + image: ubuntu:latest + options: --dns 127.0.0.1 + services: + squid-proxy: + image: datadog/squid:latest + ports: + - 3128:3128 + env: + https_proxy: http://squid-proxy:3128 + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Restore cache + uses: ./ + with: + key: test-proxy-${{ github.run_id }} + path: test-cache + - name: Verify cache + run: __tests__/verify-cache-files.sh proxy \ No newline at end of file diff --git a/__tests__/cacheHttpsClient.test.ts b/__tests__/cacheHttpsClient.test.ts new file mode 100644 index 0000000..c9f4fac --- /dev/null +++ b/__tests__/cacheHttpsClient.test.ts @@ -0,0 +1,144 @@ +import { retry } from "../src/cacheHttpClient"; +import * as testUtils from "../src/utils/testUtils"; + +afterEach(() => { + testUtils.clearInputs(); +}); + +interface TestResponse { + statusCode: number; + result: string | 
null;
+}
+
+function handleResponse(
+    response: TestResponse | undefined
+): Promise<TestResponse> {
+    if (!response) {
+        fail("Retry method called too many times");
+    }
+
+    if (response.statusCode === 999) {
+        throw Error("Test Error");
+    } else {
+        return Promise.resolve(response);
+    }
+}
+
+async function testRetryExpectingResult(
+    responses: Array<TestResponse>,
+    expectedResult: string | null
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    const actualResult = await retry(
+        "test",
+        () => handleResponse(responses.pop()),
+        (response: TestResponse) => response.statusCode
+    );
+
+    expect(actualResult.result).toEqual(expectedResult);
+}
+
+async function testRetryExpectingError(
+    responses: Array<TestResponse>
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    expect(
+        retry(
+            "test",
+            () => handleResponse(responses.pop()),
+            (response: TestResponse) => response.statusCode
+        )
+    ).rejects.toBeInstanceOf(Error);
+}
+
+test("retry works on successful response", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry works after retryable status code", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 503,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry fails after exhausting retries", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry fails after non-retryable status code", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 500,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry works after error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 999,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry returns after client error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 400,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        null
+    );
+});
diff --git a/__tests__/create-cache-files.sh b/__tests__/create-cache-files.sh
new file mode 100755
index 0000000..885a5f2
--- /dev/null
+++ b/__tests__/create-cache-files.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+# Validate args
+prefix="$1"
+if [ -z "$prefix" ]; then
+    echo "Must supply prefix argument"
+    exit 1
+fi
+
+mkdir test-cache
+echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt
\ No newline at end of file
diff --git a/__tests__/tar.test.ts b/__tests__/tar.test.ts
index 55ff4c7..6de03c3 100644
--- a/__tests__/tar.test.ts
+++ b/__tests__/tar.test.ts
@@ -2,6 +2,8 @@ import * as exec from "@actions/exec";
 import * as io from "@actions/io";
 import * as tar from "../src/tar";
+import fs = require("fs");
+
 jest.mock("@actions/exec");
 jest.mock("@actions/io");

@@ -11,17 +13,19 @@ beforeAll(() => {
     });
 });

-test("extract tar", async () => {
+test("extract BSD tar", async () => {
     const mkdirMock = jest.spyOn(io, "mkdirP");
     const execMock = jest.spyOn(exec, "exec");
-    const archivePath = "cache.tar";
+    const IS_WINDOWS = process.platform === "win32";
+    const archivePath = IS_WINDOWS
+        ? 
`${process.env["windir"]}\\fakepath\\cache.tar` + : "cache.tar"; const targetDirectory = "~/.npm/cache"; await tar.extractTar(archivePath, targetDirectory); expect(mkdirMock).toHaveBeenCalledWith(targetDirectory); - const IS_WINDOWS = process.platform === "win32"; const tarPath = IS_WINDOWS ? `${process.env["windir"]}\\System32\\tar.exe` : "tar"; @@ -29,13 +33,37 @@ test("extract tar", async () => { expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ "-xz", "-f", - archivePath, + IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, "-C", - targetDirectory + IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory ]); }); -test("create tar", async () => { +test("extract GNU tar", async () => { + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + jest.spyOn(fs, "existsSync").mockReturnValueOnce(false); + jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true)); + + const execMock = jest.spyOn(exec, "exec"); + const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`; + const targetDirectory = "~/.npm/cache"; + + await tar.extractTar(archivePath, targetDirectory); + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [ + "-xz", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + targetDirectory?.replace(/\\/g, "/"), + "--force-local" + ]); + } +}); + +test("create BSD tar", async () => { const execMock = jest.spyOn(exec, "exec"); const archivePath = "cache.tar"; @@ -50,9 +78,9 @@ test("create tar", async () => { expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ "-cz", "-f", - archivePath, + IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, "-C", - sourceDirectory, + IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory, "." ]); }); diff --git a/__tests__/verify-cache-files.sh b/__tests__/verify-cache-files.sh new file mode 100755 index 0000000..c7b75ae --- /dev/null +++ b/__tests__/verify-cache-files.sh @@ -0,0 +1,30 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +# Sanity check GITHUB_RUN_ID defined +if [ -z "$GITHUB_RUN_ID" ]; then + echo "GITHUB_RUN_ID not defined" + exit 1 +fi + +# Verify file exists +file="test-cache/test-file.txt" +echo "Checking for $file" +if [ ! 
-e $file ]; then + echo "File does not exist" + exit 1 +fi + +# Verify file content +content="$(cat $file)" +echo "File content:\n$content" +if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then + echo "Unexpected file content" + exit 1 +fi \ No newline at end of file diff --git a/dist/restore/index.js b/dist/restore/index.js index a3ea855..b5e894b 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1252,9 +1252,12 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -const fs = __importStar(__webpack_require__(747)); -const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); +const auth_1 = __webpack_require__(226); +const fs = __importStar(__webpack_require__(747)); +const stream = __importStar(__webpack_require__(794)); +const util = __importStar(__webpack_require__(669)); +const constants_1 = __webpack_require__(694); const utils = __importStar(__webpack_require__(443)); function isSuccessStatusCode(statusCode) { if (!statusCode) { @@ -1262,6 +1265,12 @@ function isSuccessStatusCode(statusCode) { } return statusCode >= 200 && statusCode < 300; } +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; @@ -1301,12 +1310,56 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions()); } +function retry(name, method, getStatusCode, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + try { + response = yield method(); + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + catch (error) { + isRetryable = true; + errorMessage = error.message; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; function getCacheEntry(keys) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; - const response = yield httpClient.getJson(getCacheApiUrl(resource)); + const response = yield retryTypedResponse("getCacheEntry", () => httpClient.getJson(getCacheApiUrl(resource))); if 
(response.statusCode === 204) { return null; } @@ -1325,21 +1378,35 @@ function getCacheEntry(keys) { }); } exports.getCacheEntry = getCacheEntry; -function pipeResponseToStream(response, stream) { +function pipeResponseToStream(response, output) { return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => { - response.message.pipe(stream).on("close", () => { - resolve(); - }); - }); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); }); } function downloadCache(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const stream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield httpClient.get(archiveLocation); + const downloadResponse = yield retryHttpClientResponse("downloadCache", () => httpClient.get(archiveLocation)); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); yield pipeResponseToStream(downloadResponse, stream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSize(archivePath); + if (actualLength != expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug("Unable to validate download, no Content-Length header"); + } }); } exports.downloadCache = downloadCache; @@ -1351,7 +1418,7 @@ function reserveCache(key) { const reserveCacheRequest = { key }; - const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + const response = yield retryTypedResponse("reserveCache", () => httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest)); return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? 
_c : -1); }); } @@ -1364,7 +1431,7 @@ function getContentRange(start, end) { // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } -function uploadChunk(httpClient, resourceUrl, data, start, end) { +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + @@ -1373,21 +1440,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) }; - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders); - }); - const response = yield uploadChunkRequest(); - if (isSuccessStatusCode(response.message.statusCode)) { - return; - } - if (isRetryableStatusCode(response.message.statusCode)) { - core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`); - const retryResponse = yield uploadChunkRequest(); - if (isSuccessStatusCode(retryResponse.message.statusCode)) { - return; - } - } - throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`); + yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders)); }); } function parseEnvNumber(key) { @@ -1417,13 +1470,16 @@ function uploadFile(httpClient, cacheId, archivePath) { const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - const chunk = fs.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { fd, start, end, autoClose: false - }); - yield uploadChunk(httpClient, resourceUrl, chunk, start, end); + }) + .on("error", error => { + throw new Error(`Cache upload failed because file read failed with ${error.Message}`); + }), start, end); } }))); } @@ -1436,7 +1492,7 @@ function uploadFile(httpClient, cacheId, archivePath) { function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; - return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + return yield retryTypedResponse("commitCache", () => httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest)); }); } function saveCache(cacheId, archivePath) { @@ -2721,6 +2777,10 @@ var Events; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; })(Events = exports.Events || (exports.Events = {})); +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. 
+exports.SocketTimeout = 5000; /***/ }), @@ -2861,6 +2921,13 @@ run(); exports.default = run; +/***/ }), + +/***/ 794: +/***/ (function(module) { + +module.exports = require("stream"); + /***/ }), /***/ 826: @@ -2928,10 +2995,30 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); -function getTarPath() { +const path = __importStar(__webpack_require__(622)); +const tar = __importStar(__webpack_require__(943)); +function isGnuTar() { + return __awaiter(this, void 0, void 0, function* () { + core.debug("Checking tar --version"); + let versionOutput = ""; + yield exec_1.exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + return versionOutput.toUpperCase().includes("GNU TAR"); + }); +} +exports.isGnuTar = isGnuTar; +function getTarPath(args) { return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2940,22 +3027,21 @@ function getTarPath() { if (fs_1.existsSync(systemTar)) { return systemTar; } + else if (yield tar.isGnuTar()) { + args.push("--force-local"); + } } return yield io.which("tar", true); }); } function execTar(args) { - var _a, _b; + var _a; return __awaiter(this, void 0, void 0, function* () { try { - yield exec_1.exec(`"${yield getTarPath()}"`, args); + yield exec_1.exec(`"${yield getTarPath(args)}"`, args); } catch (error) { - const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); - } - throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); + throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`); } }); } @@ -2963,14 +3049,27 @@ function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); - const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + const args = [ + "-xz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") + ]; yield execTar(args); }); } exports.extractTar = extractTar; function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { - const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + const args = [ + "-cz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), + "." 
+ ]; yield execTar(args); }); } diff --git a/dist/save/index.js b/dist/save/index.js index e7e0eae..a90a6e4 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1252,9 +1252,12 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -const fs = __importStar(__webpack_require__(747)); -const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); +const auth_1 = __webpack_require__(226); +const fs = __importStar(__webpack_require__(747)); +const stream = __importStar(__webpack_require__(794)); +const util = __importStar(__webpack_require__(669)); +const constants_1 = __webpack_require__(694); const utils = __importStar(__webpack_require__(443)); function isSuccessStatusCode(statusCode) { if (!statusCode) { @@ -1262,6 +1265,12 @@ function isSuccessStatusCode(statusCode) { } return statusCode >= 200 && statusCode < 300; } +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; @@ -1301,12 +1310,56 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions()); } +function retry(name, method, getStatusCode, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + try { + response = yield method(); + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + catch (error) { + isRetryable = true; + errorMessage = error.message; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; function getCacheEntry(keys) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; - const response = yield httpClient.getJson(getCacheApiUrl(resource)); + const response = yield retryTypedResponse("getCacheEntry", () => httpClient.getJson(getCacheApiUrl(resource))); if (response.statusCode === 204) { return null; } @@ -1325,21 +1378,35 @@ function getCacheEntry(keys) { }); } exports.getCacheEntry = getCacheEntry; -function pipeResponseToStream(response, stream) { +function pipeResponseToStream(response, output) { return __awaiter(this, void 0, 
void 0, function* () { - return new Promise(resolve => { - response.message.pipe(stream).on("close", () => { - resolve(); - }); - }); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); }); } function downloadCache(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const stream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield httpClient.get(archiveLocation); + const downloadResponse = yield retryHttpClientResponse("downloadCache", () => httpClient.get(archiveLocation)); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); yield pipeResponseToStream(downloadResponse, stream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSize(archivePath); + if (actualLength != expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug("Unable to validate download, no Content-Length header"); + } }); } exports.downloadCache = downloadCache; @@ -1351,7 +1418,7 @@ function reserveCache(key) { const reserveCacheRequest = { key }; - const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + const response = yield retryTypedResponse("reserveCache", () => httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest)); return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? 
_c : -1); }); } @@ -1364,7 +1431,7 @@ function getContentRange(start, end) { // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } -function uploadChunk(httpClient, resourceUrl, data, start, end) { +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + @@ -1373,21 +1440,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) }; - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders); - }); - const response = yield uploadChunkRequest(); - if (isSuccessStatusCode(response.message.statusCode)) { - return; - } - if (isRetryableStatusCode(response.message.statusCode)) { - core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`); - const retryResponse = yield uploadChunkRequest(); - if (isSuccessStatusCode(retryResponse.message.statusCode)) { - return; - } - } - throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`); + yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders)); }); } function parseEnvNumber(key) { @@ -1417,13 +1470,16 @@ function uploadFile(httpClient, cacheId, archivePath) { const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - const chunk = fs.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { fd, start, end, autoClose: false - }); - yield uploadChunk(httpClient, resourceUrl, chunk, start, end); + }) + .on("error", error => { + throw new Error(`Cache upload failed because file read failed with ${error.Message}`); + }), start, end); } }))); } @@ -1436,7 +1492,7 @@ function uploadFile(httpClient, cacheId, archivePath) { function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; - return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + return yield retryTypedResponse("commitCache", () => httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest)); }); } function saveCache(cacheId, archivePath) { @@ -2802,6 +2858,10 @@ var Events; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; })(Events = exports.Events || (exports.Events = {})); +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. 
+exports.SocketTimeout = 5000; /***/ }), @@ -2844,6 +2904,13 @@ module.exports = require("fs"); /***/ }), +/***/ 794: +/***/ (function(module) { + +module.exports = require("stream"); + +/***/ }), + /***/ 826: /***/ (function(module, __unusedexports, __webpack_require__) { @@ -2909,10 +2976,30 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); -function getTarPath() { +const path = __importStar(__webpack_require__(622)); +const tar = __importStar(__webpack_require__(943)); +function isGnuTar() { + return __awaiter(this, void 0, void 0, function* () { + core.debug("Checking tar --version"); + let versionOutput = ""; + yield exec_1.exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + return versionOutput.toUpperCase().includes("GNU TAR"); + }); +} +exports.isGnuTar = isGnuTar; +function getTarPath(args) { return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2921,22 +3008,21 @@ function getTarPath() { if (fs_1.existsSync(systemTar)) { return systemTar; } + else if (yield tar.isGnuTar()) { + args.push("--force-local"); + } } return yield io.which("tar", true); }); } function execTar(args) { - var _a, _b; + var _a; return __awaiter(this, void 0, void 0, function* () { try { - yield exec_1.exec(`"${yield getTarPath()}"`, args); + yield exec_1.exec(`"${yield getTarPath(args)}"`, args); } catch (error) { - const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); - } - throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); + throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`); } }); } @@ -2944,14 +3030,27 @@ function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); - const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + const args = [ + "-xz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") + ]; yield execTar(args); }); } exports.extractTar = extractTar; function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { - const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + const args = [ + "-cz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), + "." 
+        ];
         yield execTar(args);
     });
 }
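The bundled dist/restore and dist/save files above are generated output; the TypeScript source they are compiled from follows. The core addition in both is the generic retry helper, whose contract the unit tests pin down: thrown errors and retryable 5xx responses earn another attempt, any non-5xx response (including 4xx client errors) is returned to the caller unchanged, and other 5xx responses fail immediately. A self-contained sketch of that control flow — the function name and the 502/503/504 retryable set are illustrative assumptions, not the action's exports:

```ts
// Standalone sketch of the retry contract exercised by the tests above.
// retrySketch and the [502, 503, 504] set are assumptions for illustration.
async function retrySketch<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";

    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let isRetryable = false;
        try {
            const response = await method();
            const statusCode = getStatusCode(response);

            // Anything below 500 — including 4xx client errors — is handed
            // back to the caller unchanged.
            if (statusCode !== undefined && statusCode < 500) {
                return response;
            }

            // Assume only gateway-style 5xx codes are transient.
            isRetryable = [502, 503, 504].includes(statusCode ?? 0);
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            // A thrown error (e.g. a socket reset) is always worth a retry.
            isRetryable = true;
            errorMessage = (error as Error).message;
        }

        if (!isRetryable) {
            break;
        }
    }

    throw new Error(`${name} failed: ${errorMessage}`);
}
```

This is why the tests expect a 400 response to resolve (with a null result) while a bare 500 rejects after a single attempt.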
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index 62ae2c1..19124e9 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -1,12 +1,16 @@
 import * as core from "@actions/core";
-import * as fs from "fs";
-import { BearerCredentialHandler } from "@actions/http-client/auth";
 import { HttpClient, HttpCodes } from "@actions/http-client";
+import { BearerCredentialHandler } from "@actions/http-client/auth";
 import {
     IHttpClientResponse,
     IRequestOptions,
     ITypedResponse
 } from "@actions/http-client/interfaces";
+import * as fs from "fs";
+import * as stream from "stream";
+import * as util from "util";
+
+import { SocketTimeout } from "./constants";
 import {
     ArtifactCacheEntry,
     CommitCacheRequest,
@@ -22,6 +26,13 @@ function isSuccessStatusCode(statusCode?: number): boolean {
     return statusCode >= 200 && statusCode < 300;
 }

+function isServerErrorStatusCode(statusCode?: number): boolean {
+    if (!statusCode) {
+        return true;
+    }
+    return statusCode >= 500;
+}
+
 function isRetryableStatusCode(statusCode?: number): boolean {
     if (!statusCode) {
         return false;
@@ -77,14 +88,83 @@ function createHttpClient(): HttpClient {
     );
 }

+export async function retry<T>(
+    name: string,
+    method: () => Promise<T>,
+    getStatusCode: (T) => number | undefined,
+    maxAttempts = 2
+): Promise<T> {
+    let response: T | undefined = undefined;
+    let statusCode: number | undefined = undefined;
+    let isRetryable = false;
+    let errorMessage = "";
+    let attempt = 1;
+
+    while (attempt <= maxAttempts) {
+        try {
+            response = await method();
+            statusCode = getStatusCode(response);
+
+            if (!isServerErrorStatusCode(statusCode)) {
+                return response;
+            }
+
+            isRetryable = isRetryableStatusCode(statusCode);
+            errorMessage = `Cache service responded with ${statusCode}`;
+        } catch (error) {
+            isRetryable = true;
+            errorMessage = error.message;
+        }
+
+        core.debug(
+            `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
+        );
+
+        if (!isRetryable) {
+            core.debug(`${name} - Error is not retryable`);
+            break;
+        }
+
+        attempt++;
+    }
+
+    throw Error(`${name} failed: ${errorMessage}`);
+}
+
+export async function retryTypedResponse<T>(
+    name: string,
+    method: () => Promise<ITypedResponse<T>>,
+    maxAttempts = 2
+): Promise<ITypedResponse<T>> {
+    return await retry(
+        name,
+        method,
+        (response: ITypedResponse<T>) => response.statusCode,
+        maxAttempts
+    );
+}
+
+export async function retryHttpClientResponse<T>(
+    name: string,
+    method: () => Promise<IHttpClientResponse>,
+    maxAttempts = 2
+): Promise<IHttpClientResponse> {
+    return await retry(
+        name,
+        method,
+        (response: IHttpClientResponse) => response.message.statusCode,
+        maxAttempts
+    );
+}
+
 export async function getCacheEntry(
     keys: string[]
 ): Promise<ArtifactCacheEntry | null> {
     const httpClient = createHttpClient();
     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;

-    const response = await httpClient.getJson<ArtifactCacheEntry>(
-        getCacheApiUrl(resource)
+    const response = await retryTypedResponse("getCacheEntry", () =>
+        httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
     );
     if (response.statusCode === 204) {
         return null;
@@ -107,13 +187,10 @@
 async function pipeResponseToStream(
     response: IHttpClientResponse,
-    stream: NodeJS.WritableStream
+    output: NodeJS.WritableStream
 ): Promise<void> {
-    return new Promise(resolve => {
-        response.message.pipe(stream).on("close", () => {
-            resolve();
-        });
-    });
+    const pipeline = util.promisify(stream.pipeline);
+    await pipeline(response.message, output);
 }

 export async function downloadCache(
@@ -122,8 +199,37 @@
 ): Promise<void> {
     const stream = fs.createWriteStream(archivePath);
     const httpClient = new HttpClient("actions/cache");
-    const downloadResponse = await httpClient.get(archiveLocation);
+    const downloadResponse = await retryHttpClientResponse(
+        "downloadCache",
+        () => httpClient.get(archiveLocation)
+    );
+
+    // Abort download if no traffic received over the socket.
+    downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
+        downloadResponse.message.destroy();
+        core.debug(
+            `Aborting download, socket timed out after ${SocketTimeout} ms`
+        );
+    });
+
     await pipeResponseToStream(downloadResponse, stream);
+
+    // Validate download size.
+    const contentLengthHeader =
+        downloadResponse.message.headers["content-length"];
+
+    if (contentLengthHeader) {
+        const expectedLength = parseInt(contentLengthHeader);
+        const actualLength = utils.getArchiveFileSize(archivePath);
+
+        if (actualLength != expectedLength) {
+            throw new Error(
+                `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
+            );
+        }
+    } else {
+        core.debug("Unable to validate download, no Content-Length header");
+    }
 }

 // Reserve Cache
@@ -133,9 +239,11 @@ export async function reserveCache(key: string): Promise<number> {
     const reserveCacheRequest: ReserveCacheRequest = {
         key
     };
-    const response = await httpClient.postJson<ReserveCacheResponse>(
-        getCacheApiUrl("caches"),
-        reserveCacheRequest
+    const response = await retryTypedResponse("reserveCache", () =>
+        httpClient.postJson<ReserveCacheResponse>(
+            getCacheApiUrl("caches"),
+            reserveCacheRequest
+        )
     );
     return response?.result?.cacheId ?? -1;
 }
@@ -152,7 +260,7 @@ function getContentRange(start: number, end: number): string {
 async function uploadChunk(
     httpClient: HttpClient,
     resourceUrl: string,
-    data: NodeJS.ReadableStream,
+    openStream: () => NodeJS.ReadableStream,
     start: number,
     end: number
 ): Promise<void> {
@@ -169,32 +277,15 @@ async function uploadChunk(
         "Content-Range": getContentRange(start, end)
     };

-    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
-        return await httpClient.sendStream(
-            "PATCH",
-            resourceUrl,
-            data,
-            additionalHeaders
-        );
-    };
-
-    const response = await uploadChunkRequest();
-    if (isSuccessStatusCode(response.message.statusCode)) {
-        return;
-    }
-
-    if (isRetryableStatusCode(response.message.statusCode)) {
-        core.debug(
-            `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
-        );
-        const retryResponse = await uploadChunkRequest();
-        if (isSuccessStatusCode(retryResponse.message.statusCode)) {
-            return;
-        }
-    }
-
-    throw new Error(
-        `Cache service responded with ${response.message.statusCode} during chunk upload.`
+    await retryHttpClientResponse(
+        `uploadChunk (start: ${start}, end: ${end})`,
+        () =>
+            httpClient.sendStream(
+                "PATCH",
+                resourceUrl,
+                openStream(),
+                additionalHeaders
+            )
     );
 }

@@ -236,17 +327,23 @@ async function uploadFile(
                     const start = offset;
                     const end = offset + chunkSize - 1;
                     offset += MAX_CHUNK_SIZE;
-                    const chunk = fs.createReadStream(archivePath, {
-                        fd,
-                        start,
-                        end,
-                        autoClose: false
-                    });

                     await uploadChunk(
                         httpClient,
                         resourceUrl,
-                        chunk,
+                        () =>
+                            fs
+                                .createReadStream(archivePath, {
+                                    fd,
+                                    start,
+                                    end,
+                                    autoClose: false
+                                })
+                                .on("error", error => {
+                                    throw new Error(
+                                        `Cache upload failed because file read failed with ${error.message}`
+                                    );
+                                }),
                         start,
                         end
                     );
@@ -265,9 +362,11 @@ async function commitCache(
     filesize: number
 ): Promise<ITypedResponse<null>> {
     const commitCacheRequest: CommitCacheRequest = { size: filesize };
-    return await httpClient.postJson<null>(
-        getCacheApiUrl(`caches/${cacheId.toString()}`),
-        commitCacheRequest
+    return await retryTypedResponse("commitCache", () =>
+        httpClient.postJson<null>(
+            getCacheApiUrl(`caches/${cacheId.toString()}`),
+            commitCacheRequest
+        )
     );
 }
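Two details of the upload path above are easy to miss. uploadChunk now receives a () => NodeJS.ReadableStream factory instead of a stream, because a partially consumed stream cannot be replayed: each retry has to re-open a fresh read stream over the same byte range. And every PATCH carries an inclusive byte range in its Content-Range header, with the total left as "*" until commitCache reports the final size. A short sketch of that range arithmetic, using a hypothetical 4 MiB chunk size in place of the action's MAX_CHUNK_SIZE constant and its environment override:

```ts
// Sketch of the Content-Range chunking used by uploadFile.
// CHUNK_SIZE is a hypothetical stand-in for MAX_CHUNK_SIZE.
const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MiB

function getContentRange(start: number, end: number): string {
    // Inclusive byte range; total size is unknown until commit, hence "*".
    return `bytes ${start}-${end}/*`;
}

function chunkRanges(fileSize: number): Array<{ start: number; end: number }> {
    const ranges: Array<{ start: number; end: number }> = [];
    for (let offset = 0; offset < fileSize; offset += CHUNK_SIZE) {
        // The last chunk may be short; end stays inclusive either way.
        const chunkSize = Math.min(fileSize - offset, CHUNK_SIZE);
        ranges.push({ start: offset, end: offset + chunkSize - 1 });
    }
    return ranges;
}

// A 10 MiB archive produces three PATCHes:
//   bytes 0-4194303/*, bytes 4194304-8388607/*, bytes 8388608-10485759/*
```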
diff --git a/src/constants.ts b/src/constants.ts
index 5f26e8c..2e60e34 100644
--- a/src/constants.ts
+++ b/src/constants.ts
@@ -18,3 +18,8 @@ export enum Events {
     Push = "push",
     PullRequest = "pull_request"
 }
+
+// Socket timeout in milliseconds during download. If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+export const SocketTimeout = 5000;
diff --git a/src/tar.ts b/src/tar.ts
index 1f572d1..00bed5a 100644
--- a/src/tar.ts
+++ b/src/tar.ts
@@ -1,14 +1,36 @@
+import * as core from "@actions/core";
 import { exec } from "@actions/exec";
 import * as io from "@actions/io";
 import { existsSync } from "fs";
+import * as path from "path";

+import * as tar from "./tar";

-async function getTarPath(): Promise<string> {
+export async function isGnuTar(): Promise<boolean> {
+    core.debug("Checking tar --version");
+    let versionOutput = "";
+    await exec("tar --version", [], {
+        ignoreReturnCode: true,
+        silent: true,
+        listeners: {
+            stdout: (data: Buffer): string =>
+                (versionOutput += data.toString()),
+            stderr: (data: Buffer): string => (versionOutput += data.toString())
+        }
+    });
+
+    core.debug(versionOutput.trim());
+    return versionOutput.toUpperCase().includes("GNU TAR");
+}
+
+async function getTarPath(args: string[]): Promise<string> {
     // Explicitly use BSD Tar on Windows
     const IS_WINDOWS = process.platform === "win32";
     if (IS_WINDOWS) {
         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
         if (existsSync(systemTar)) {
             return systemTar;
+        } else if (await tar.isGnuTar()) {
+            args.push("--force-local");
         }
     }
     return await io.which("tar", true);
@@ -16,14 +38,8 @@ async function getTarPath(): Promise<string> {

 async function execTar(args: string[]): Promise<void> {
     try {
-        await exec(`"${await getTarPath()}"`, args);
+        await exec(`"${await getTarPath(args)}"`, args);
     } catch (error) {
-        const IS_WINDOWS = process.platform === "win32";
-        if (IS_WINDOWS) {
-            throw new Error(
-                `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
-            );
-        }
         throw new Error(`Tar failed with error: ${error?.message}`);
     }
 }
@@ -34,7 +50,13 @@ export async function extractTar(
 ): Promise<void> {
     // Create directory to extract tar into
     await io.mkdirP(targetDirectory);
-    const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+    const args = [
+        "-xz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "-C",
+        targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+    ];
     await execTar(args);
 }

@@ -42,6 +64,13 @@ export async function createTar(
     archivePath: string,
     sourceDirectory: string
 ): Promise<void> {
-    const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+    const args = [
+        "-cz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "-C",
+        sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "."
+    ];
     await execTar(args);
 }
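One last note on the tar changes. On Windows, when the system BSD tar.exe is absent and a GNU tar is found on the PATH instead, --force-local is appended so GNU tar does not interpret the colon in a drive-letter path as a remote host separator. Independently of that, archive and directory paths are normalized to forward slashes before tar is invoked. A minimal sketch of that normalization — toTarPath is a hypothetical name, since src/tar.ts inlines the replace call:

```ts
import * as path from "path";

// Mirrors the separator rewrite in src/tar.ts. On win32 path.sep is "\\",
// so the pattern becomes /\\/g and every backslash turns into "/";
// on POSIX it is a harmless rewrite of "/" to "/".
function toTarPath(p: string, sep: string = path.sep): string {
    return p.replace(new RegExp("\\" + sep, "g"), "/");
}

// Hypothetical example with an explicit Windows separator, so the
// behavior is demonstrable from any platform:
//   toTarPath("C:\\Users\\runner\\cache.tar", "\\")
//   => "C:/Users/runner/cache.tar"
```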