Mirror of https://code.forgejo.org/actions/cache.git, synced 2024-11-22 18:41:16 +01:00

Release v1.1.1
Commit fe1055e9d1

12 changed files with 1512 additions and 5965 deletions
.github/workflows/workflow.yml (vendored, 1 changed line)

@@ -19,6 +19,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false

     runs-on: ${{ matrix.os }}

.gitignore (vendored, 3 changed lines)

@@ -94,3 +94,6 @@ typings/

 # DynamoDB Local files
 .dynamodb/
+
+# Text editor files
+.vscode/
README.md (11 changed lines)

@@ -1,6 +1,6 @@
 # cache

-This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
+This action allows caching dependencies and build outputs to improve workflow execution time.

 <a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>

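The hunk above only touches the README's one-line description of what the action does. For context, a minimal workflow step using the action typically looks like the sketch below; it is not part of this commit, and the `path` and `key` values are placeholder choices for an npm project, not something the diff prescribes.

```yaml
steps:
  - uses: actions/checkout@v1

  # Illustrative placeholder: cache npm's download cache between runs
  - name: Cache node modules
    uses: actions/cache@v1
    with:
      path: ~/.npm
      key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
      restore-keys: |
        ${{ runner.os }}-npm-
```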
@@ -63,21 +63,24 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
 - [C# - Nuget](./examples.md#c---nuget)
 - [Elixir - Mix](./examples.md#elixir---mix)
 - [Go - Modules](./examples.md#go---modules)
+- [Haskell - Cabal](./examples.md#haskell---cabal)
 - [Java - Gradle](./examples.md#java---gradle)
 - [Java - Maven](./examples.md#java---maven)
 - [Node - npm](./examples.md#node---npm)
 - [Node - Yarn](./examples.md#node---yarn)
 - [PHP - Composer](./examples.md#php---composer)
 - [Python - pip](./examples.md#python---pip)
-- [Ruby - Gem](./examples.md#ruby---gem)
+- [R - renv](./examples.md#r---renv)
+- [Ruby - Bundler](./examples.md#ruby---bundler)
 - [Rust - Cargo](./examples.md#rust---cargo)
+- [Scala - SBT](./examples.md#scala---sbt)
 - [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
 - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
+- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)

 ## Cache Limits

-Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
+A repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.

 ## Skipping steps based on cache-hit

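The final context line above is the README's "Skipping steps based on cache-hit" heading; its body lies outside this hunk. As a hedged sketch of that pattern (the step id, cached path, and install command are illustrative assumptions), the action's `cache-hit` output is usually consumed like this:

```yaml
steps:
  - uses: actions/cache@v1
    id: cache               # the id lets later steps read this step's outputs
    with:
      path: ~/.npm
      key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}

  # Skip the install when the cache key matched exactly
  - name: Install dependencies
    if: steps.cache.outputs.cache-hit != 'true'
    run: npm ci
```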
@@ -194,7 +194,7 @@ test("save with large cache outputs warning", async () => {

     const createTarMock = jest.spyOn(tar, "createTar");

-    const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit
+    const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });

@@ -208,7 +208,7 @@ test("save with large cache outputs warning", async () => {

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
+        "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );

     expect(failedMock).toHaveBeenCalledTimes(0);
@@ -1,5 +1,5 @@
-name: 'Cache'
-description: 'Cache dependencies and build outputs to improve workflow execution time'
+name: 'Cache Artifacts'
+description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
 author: 'GitHub'
 inputs:
   path:
@@ -21,4 +21,4 @@ runs:
   post-if: 'success()'
 branding:
   icon: 'archive'
   color: 'gray-dark'
dist/restore/index.js (vendored, 3598 changed lines) — file diff suppressed because it is too large

dist/save/index.js (vendored, 3602 changed lines) — file diff suppressed because it is too large
examples.md (109 changed lines)

@@ -3,16 +3,20 @@
 - [C# - NuGet](#c---nuget)
 - [Elixir - Mix](#elixir---mix)
 - [Go - Modules](#go---modules)
+- [Haskell - Cabal](#haskell---cabal)
 - [Java - Gradle](#java---gradle)
 - [Java - Maven](#java---maven)
 - [Node - npm](#node---npm)
 - [Node - Yarn](#node---yarn)
 - [PHP - Composer](#php---composer)
 - [Python - pip](#python---pip)
-- [Ruby - Gem](#ruby---gem)
+- [R - renv](#r---renv)
+- [Ruby - Bundler](#ruby---bundler)
 - [Rust - Cargo](#rust---cargo)
+- [Scala - SBT](#scala---sbt)
 - [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
 - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
+- [Swift - Swift Package Manager](#swift---swift-package-manager)

 ## C# - NuGet
 Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):

@@ -62,6 +66,28 @@ steps:
       ${{ runner.os }}-go-
 ```

+## Haskell - Cabal
+
+We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
+
+```yaml
+- uses: actions/cache@v1
+  name: Cache ~/.cabal/packages
+  with:
+    path: ~/.cabal/packages
+    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
+- uses: actions/cache@v1
+  name: Cache ~/.cabal/store
+  with:
+    path: ~/.cabal/store
+    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
+- uses: actions/cache@v1
+  name: Cache dist-newstyle
+  with:
+    path: dist-newstyle
+    key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
+```
+
 ## Java - Gradle

 ```yaml

@@ -224,15 +250,64 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
       ${{ runner.os }}-pip-
 ```

-## Ruby - Gem
+## R - renv

+For renv, the cache directory will vary by OS. Look at https://rstudio.github.io/renv/articles/renv.html#cache
+
+Locations:
+- Ubuntu: `~/.local/share/renv`
+- macOS: `~/Library/Application Support/renv`
+- Windows: `%LOCALAPPDATA%/renv`
+
+### Simple example
+```yaml
+- uses: actions/cache@v1
+  with:
+    path: ~/.local/share/renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+```
+
+Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
+
+### Multiple OS's in a workflow
+
+```yaml
+- uses: actions/cache@v1
+  if: startsWith(runner.os, 'Linux')
+  with:
+    path: ~/.local/share/renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+
+- uses: actions/cache@v1
+  if: startsWith(runner.os, 'macOS')
+  with:
+    path: ~/Library/Application Support/renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+
+- uses: actions/cache@v1
+  if: startsWith(runner.os, 'Windows')
+  with:
+    path: ~\AppData\Local\renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+```
+
+## Ruby - Bundler
+
 ```yaml
 - uses: actions/cache@v1
   with:
     path: vendor/bundle
-    key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
+    key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
     restore-keys: |
-      ${{ runner.os }}-gem-
+      ${{ runner.os }}-gems-
 ```
 When dependencies are installed later in the workflow, we must specify the same path for the bundler.

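The hunk above ends on the note that later install steps must point Bundler at the same cached path; the install step itself is not part of this hunk. A sketch of such a step, assuming the `vendor/bundle` path cached above (the jobs/retry flags are illustrative), could look like:

```yaml
- name: Bundle install
  run: |
    # Point Bundler at the cached path before installing
    bundle config path vendor/bundle
    bundle install --jobs 4 --retry 3
```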
@@ -263,6 +338,21 @@ When dependencies are installed later in the workflow, we must specify the same
     key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
 ```

+## Scala - SBT
+
+```yaml
+- name: Cache SBT ivy cache
+  uses: actions/cache@v1
+  with:
+    path: ~/.ivy2/cache
+    key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
+- name: Cache SBT
+  uses: actions/cache@v1
+  with:
+    path: ~/.sbt
+    key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
+```
+
 ## Swift, Objective-C - Carthage

 ```yaml

@@ -284,3 +374,14 @@ When dependencies are installed later in the workflow, we must specify the same
     restore-keys: |
       ${{ runner.os }}-pods-
 ```
+
+## Swift - Swift Package Manager
+
+```yaml
+- uses: actions/cache@v1
+  with:
+    path: .build
+    key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
+    restore-keys: |
+      ${{ runner.os }}-spm-
+```
package-lock.json (generated, 30 changed lines)

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "1.1.0",
+  "version": "1.1.1",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

@@ -14,6 +14,14 @@
       "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.1.tgz",
       "integrity": "sha512-nvFkxwiicvpzNiCBF4wFBDfnBvi7xp/as7LE1hBxBxKG2L29+gkIPBiLKMVORL+Hg3JNf07AKRfl0V5djoypjQ=="
     },
+    "@actions/http-client": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz",
+      "integrity": "sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA==",
+      "requires": {
+        "tunnel": "0.0.6"
+      }
+    },
     "@actions/io": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz",

@@ -5933,9 +5941,9 @@
       }
     },
     "tunnel": {
-      "version": "0.0.4",
-      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
-      "integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM="
+      "version": "0.0.6",
+      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
     },
     "tunnel-agent": {
       "version": "0.6.0",

@@ -5973,15 +5981,6 @@
       "integrity": "sha512-DWkS49EQKVX//Tbupb9TFa19c7+MK1XmzkrZUR8TAktmE/DizXoaoJV6TZ/tSIPXipqNiRI6CyAe7x69Jb6RSw==",
       "dev": true
     },
-    "typed-rest-client": {
-      "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.5.0.tgz",
-      "integrity": "sha512-DVZRlmsfnTjp6ZJaatcdyvvwYwbWvR4YDNFDqb+qdTxpvaVP99YCpBkA8rxsLtAPjBVoDe4fNsnMIdZTiPuKWg==",
-      "requires": {
-        "tunnel": "0.0.4",
-        "underscore": "1.8.3"
-      }
-    },
     "typescript": {
       "version": "3.7.3",
       "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz",

@@ -5999,11 +5998,6 @@
         "source-map": "~0.6.1"
       }
     },
-    "underscore": {
-      "version": "1.8.3",
-      "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
-      "integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
-    },
     "union-value": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz",
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "1.1.0",
+  "version": "1.1.1",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",

@@ -26,8 +26,8 @@
   "dependencies": {
     "@actions/core": "^1.2.0",
     "@actions/exec": "^1.0.1",
+    "@actions/http-client": "^1.0.6",
     "@actions/io": "^1.0.1",
-    "typed-rest-client": "^1.5.0",
     "uuid": "^3.3.3"
   },
   "devDependencies": {
@@ -1,13 +1,12 @@
 import * as core from "@actions/core";
 import * as fs from "fs";
-import { BearerCredentialHandler } from "typed-rest-client/Handlers";
-import { HttpClient, HttpCodes } from "typed-rest-client/HttpClient";
-import { IHttpClientResponse } from "typed-rest-client/Interfaces";
+import { BearerCredentialHandler } from "@actions/http-client/auth";
+import { HttpClient, HttpCodes } from "@actions/http-client";
 import {
+    IHttpClientResponse,
     IRequestOptions,
-    RestClient,
-    IRestResponse
-} from "typed-rest-client/RestClient";
+    ITypedResponse
+} from "@actions/http-client/interfaces";
 import {
     ArtifactCacheEntry,
     CommitCacheRequest,

@@ -16,11 +15,17 @@ import {
 } from "./contracts";
 import * as utils from "./utils/actionUtils";

-function isSuccessStatusCode(statusCode: number): boolean {
+function isSuccessStatusCode(statusCode?: number): boolean {
+    if (!statusCode) {
+        return false;
+    }
     return statusCode >= 200 && statusCode < 300;
 }

-function isRetryableStatusCode(statusCode: number): boolean {
+function isRetryableStatusCode(statusCode?: number): boolean {
+    if (!statusCode) {
+        return false;
+    }
     const retryableStatusCodes = [
         HttpCodes.BadGateway,
         HttpCodes.ServiceUnavailable,

@@ -29,7 +34,7 @@ function isRetryableStatusCode(statusCode: number): boolean {
     return retryableStatusCodes.includes(statusCode);
 }

-function getCacheApiUrl(): string {
+function getCacheApiUrl(resource: string): string {
     // Ideally we just use ACTIONS_CACHE_URL
     const baseUrl: string = (
         process.env["ACTIONS_CACHE_URL"] ||

@@ -42,8 +47,9 @@ function getCacheApiUrl(): string {
         );
     }

-    core.debug(`Cache Url: ${baseUrl}`);
-    return `${baseUrl}_apis/artifactcache/`;
+    const url = `${baseUrl}_apis/artifactcache/${resource}`;
+    core.debug(`Resource Url: ${url}`);
+    return url;
 }

 function createAcceptHeader(type: string, apiVersion: string): string {

@@ -52,30 +58,33 @@ function createAcceptHeader(type: string, apiVersion: string): string {

 function getRequestOptions(): IRequestOptions {
     const requestOptions: IRequestOptions = {
-        acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
+        headers: {
+            Accept: createAcceptHeader("application/json", "6.0-preview.1")
+        }
     };

     return requestOptions;
 }

-function createRestClient(): RestClient {
+function createHttpClient(): HttpClient {
     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
     const bearerCredentialHandler = new BearerCredentialHandler(token);

-    return new RestClient("actions/cache", getCacheApiUrl(), [
-        bearerCredentialHandler
-    ]);
+    return new HttpClient(
+        "actions/cache",
+        [bearerCredentialHandler],
+        getRequestOptions()
+    );
 }

 export async function getCacheEntry(
     keys: string[]
 ): Promise<ArtifactCacheEntry | null> {
-    const restClient = createRestClient();
+    const httpClient = createHttpClient();
     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;

-    const response = await restClient.get<ArtifactCacheEntry>(
-        resource,
-        getRequestOptions()
-    );
+    const response = await httpClient.getJson<ArtifactCacheEntry>(
+        getCacheApiUrl(resource)
+    );
     if (response.statusCode === 204) {
         return null;

@@ -83,6 +92,7 @@ export async function getCacheEntry(
     if (!isSuccessStatusCode(response.statusCode)) {
         throw new Error(`Cache service responded with ${response.statusCode}`);
     }
+
     const cacheResult = response.result;
     const cacheDownloadUrl = cacheResult?.archiveLocation;
     if (!cacheDownloadUrl) {

@@ -118,17 +128,15 @@ export async function downloadCache(

 // Reserve Cache
 export async function reserveCache(key: string): Promise<number> {
-    const restClient = createRestClient();
+    const httpClient = createHttpClient();

     const reserveCacheRequest: ReserveCacheRequest = {
         key
     };
-    const response = await restClient.create<ReserveCacheResponse>(
-        "caches",
-        reserveCacheRequest,
-        getRequestOptions()
-    );
+    const response = await httpClient.postJson<ReserveCacheResponse>(
+        getCacheApiUrl("caches"),
+        reserveCacheRequest
+    );

     return response?.result?.cacheId ?? -1;
 }

@@ -142,7 +150,7 @@ function getContentRange(start: number, end: number): string {
 }

 async function uploadChunk(
-    restClient: RestClient,
+    httpClient: HttpClient,
     resourceUrl: string,
     data: NodeJS.ReadableStream,
     start: number,

@@ -156,38 +164,37 @@ async function uploadChunk(
             end
         )}`
     );
-    const requestOptions = getRequestOptions();
-    requestOptions.additionalHeaders = {
+    const additionalHeaders = {
         "Content-Type": "application/octet-stream",
         "Content-Range": getContentRange(start, end)
     };

-    const uploadChunkRequest = async (): Promise<IRestResponse<void>> => {
-        return await restClient.uploadStream<void>(
+    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
+        return await httpClient.sendStream(
             "PATCH",
             resourceUrl,
             data,
-            requestOptions
+            additionalHeaders
         );
     };

     const response = await uploadChunkRequest();
-    if (isSuccessStatusCode(response.statusCode)) {
+    if (isSuccessStatusCode(response.message.statusCode)) {
         return;
     }

-    if (isRetryableStatusCode(response.statusCode)) {
+    if (isRetryableStatusCode(response.message.statusCode)) {
         core.debug(
-            `Received ${response.statusCode}, retrying chunk at offset ${start}.`
+            `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
         );
         const retryResponse = await uploadChunkRequest();
-        if (isSuccessStatusCode(retryResponse.statusCode)) {
+        if (isSuccessStatusCode(retryResponse.message.statusCode)) {
             return;
         }
     }

     throw new Error(
-        `Cache service responded with ${response.statusCode} during chunk upload.`
+        `Cache service responded with ${response.message.statusCode} during chunk upload.`
     );
 }

@@ -200,13 +207,13 @@ function parseEnvNumber(key: string): number | undefined {
 }

 async function uploadFile(
-    restClient: RestClient,
+    httpClient: HttpClient,
     cacheId: number,
     archivePath: string
 ): Promise<void> {
     // Upload Chunks
     const fileSize = fs.statSync(archivePath).size;
-    const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
+    const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
     const fd = fs.openSync(archivePath, "r");

     const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel

@@ -237,7 +244,7 @@ async function uploadFile(
                 });

                 await uploadChunk(
-                    restClient,
+                    httpClient,
                     resourceUrl,
                     chunk,
                     start,

@@ -253,16 +260,14 @@
 }

 async function commitCache(
-    restClient: RestClient,
+    httpClient: HttpClient,
     cacheId: number,
     filesize: number
-): Promise<IRestResponse<void>> {
-    const requestOptions = getRequestOptions();
+): Promise<ITypedResponse<null>> {
     const commitCacheRequest: CommitCacheRequest = { size: filesize };
-    return await restClient.create(
-        `caches/${cacheId.toString()}`,
-        commitCacheRequest,
-        requestOptions
-    );
+    return await httpClient.postJson<null>(
+        getCacheApiUrl(`caches/${cacheId.toString()}`),
+        commitCacheRequest
+    );
 }

@@ -270,16 +275,16 @@ export async function saveCache(
     cacheId: number,
     archivePath: string
 ): Promise<void> {
-    const restClient = createRestClient();
+    const httpClient = createHttpClient();

     core.debug("Upload cache");
-    await uploadFile(restClient, cacheId, archivePath);
+    await uploadFile(httpClient, cacheId, archivePath);

     // Commit Cache
     core.debug("Commiting cache");
     const cacheSize = utils.getArchiveFileSize(archivePath);
     const commitCacheResponse = await commitCache(
-        restClient,
+        httpClient,
         cacheId,
         cacheSize
     );
@@ -56,14 +56,14 @@ async function run(): Promise<void> {

         await createTar(archivePath, cachePath);

-        const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
+        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
         core.debug(`File Size: ${archiveFileSize}`);
         if (archiveFileSize > fileSizeLimit) {
             utils.logWarning(
                 `Cache size of ~${Math.round(
                     archiveFileSize / (1024 * 1024)
-                )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
+                )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
             );
             return;
         }