Mirror of https://code.forgejo.org/actions/cache.git (synced 2024-12-22 16:51:47 +01:00)
Commit e12d46a63a
16 changed files with 860 additions and 1003 deletions
BIN .licenses/npm/@actions/cache.dep.yml (generated; binary file not shown)
BIN .licenses/npm/@azure/core-http.dep.yml (generated; binary file not shown)
BIN .licenses/npm/@azure/ms-rest-js.dep.yml (generated; binary file not shown)
BIN .licenses/npm/ip-regex.dep.yml (generated; binary file not shown)
BIN .licenses/npm/psl.dep.yml (generated; binary file not shown)
BIN .licenses/npm/punycode.dep.yml (generated; binary file not shown)
BIN .licenses/npm/semver.dep.yml (generated; binary file not shown)
BIN .licenses/npm/tough-cookie.dep.yml (generated; binary file not shown)
BIN .licenses/npm/xml2js.dep.yml (generated; binary file not shown)
@@ -111,3 +111,8 @@

### 3.3.2

- Fixes bug with Azure SDK causing blob downloads to get stuck.

### 3.3.3

- Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to `getCacheVersion` [actions/toolkit#1378](https://github.com/actions/toolkit/pull/1378)
- Additional audit fixes of npm package(s)
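The 3.3.3 entry above refers to the `getCacheVersion` hunks in the bundled files below. As a rough sketch (not the action's actual source; the names and the hashing step are illustrative), the difference between aliasing and copying the incoming array looks like this:

```js
const crypto = require('crypto');

// Aliasing the argument: pushing onto `components` also mutates the caller's array.
function getCacheVersionUnsafe(paths, compressionMethod) {
  const components = paths;
  if (compressionMethod) {
    components.push(compressionMethod);
  }
  return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}

// Defensive copy, as in the updated bundles: the caller's `paths` stays untouched.
function getCacheVersionSafe(paths, compressionMethod) {
  const components = paths.slice();
  if (compressionMethod) {
    components.push(compressionMethod);
  }
  return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}

const pathsA = ['node_modules'];
getCacheVersionUnsafe(pathsA, 'zstd');
console.log(pathsA); // [ 'node_modules', 'zstd' ]  (caller's input was changed)

const pathsB = ['node_modules'];
getCacheVersionSafe(pathsB, 'zstd');
console.log(pathsB); // [ 'node_modules' ]
```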
176 dist/restore-only/index.js (vendored)

@@ -1136,26 +1136,21 @@ function resolvePaths(patterns) {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
try {
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
finally {
_e = true;
else {
paths.push(`${relativeFile}`);
}
}
}
@@ -3389,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths;
// don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@@ -7276,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@@ -7286,6 +7285,31 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
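The safeRegexReplacements / makeSafeRe lines above are the core of the vendored semver update. A minimal standalone sketch of the idea (assuming semver's MAX_LENGTH of 256; not the full library):

```js
const MAX_LENGTH = 256;                       // assumed value of semver's constant
const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6;
const LETTERDASHNUMBER = '[a-zA-Z0-9-]';

const safeRegexReplacements = [
  ['\\s', 1],
  ['\\d', MAX_LENGTH],
  [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH]
];

// Rewrite unbounded `*` / `+` quantifiers on these tokens into bounded
// `{0,max}` / `{1,max}` forms so crafted input cannot trigger the
// catastrophic backtracking behind the ReDoS advisory.
function makeSafeRe(value) {
  for (const [token, max] of safeRegexReplacements) {
    value = value
      .split(token + '*').join(token + '{0,' + max + '}')
      .split(token + '+').join(token + '{1,' + max + '}');
  }
  return value;
}

console.log(makeSafeRe('(\\s*)\\d+')); // (\s{0,1})\d{1,256}
```

The remaining semver hunks then switch lookups from `re[...]` to `safeRe[...]` so the bounded patterns are the ones actually used at runtime.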
@@ -7295,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'

// ## Main Version
// Three dot-separated numeric identifiers.
@@ -7344,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7424,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7433,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7448,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7469,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7497,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}
@@ -7521,7 +7557,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? re[t.LOOSE] : re[t.FULL]
var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7576,7 +7612,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -8021,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -8037,7 +8074,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8161,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First, split based on boolean or ||
// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
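The Range constructor change above pre-normalizes whitespace so the later splits no longer need patterns like `\s*\|\|\s*`. A small illustration of that normalization step (sketch only, not the library's code):

```js
const raw = '   >=1.2.3    ||   ~2.0.0  ';

// Collapse runs of whitespace once, up front.
const normalized = raw.trim().split(/\s+/).join(' ');
console.log(normalized); // ">=1.2.3 || ~2.0.0"

// A plain split on "||" is now enough; no \s*||\s* regex is needed.
console.log(normalized.split('||').map(function (r) { return r.trim(); }));
// [ '>=1.2.3', '~2.0.0' ]
```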
@@ -8171,7 +8215,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range)
throw new TypeError('Invalid SemVer Range: ' + this.raw)
}

this.format()
@@ -8190,20 +8234,19 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8211,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8311,7 +8354,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8352,7 +8395,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8411,7 +8454,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8486,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '')
return comp.trim().replace(safeRe[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8812,7 +8855,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(safeRe[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8823,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
safeRe[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -50262,14 +50305,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = {};
obj = Object.create(null);
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {};
obj[attrkey] = Object.create(null);
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50319,7 +50362,11 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50343,7 +50390,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = {};
node = Object.create(null);
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50358,7 +50405,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {};
objClone = Object.create(null);
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50375,7 +50422,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = {};
obj = Object.create(null);
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52577,7 +52624,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@@ -52585,12 +52632,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -54988,7 +55035,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString(),
value: headerValue.toString().trim(),
};
}
/**
@@ -55128,7 +55175,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.0",
coreHttpVersion: "3.0.4",
/**
* Specifies HTTP.
*/
@@ -55206,13 +55253,6 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@@ -59893,7 +59933,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) {
if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@@ -59925,7 +59965,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) {
if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@@ -59938,7 +59978,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) {
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@@ -60069,10 +60109,7 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
return options.credentialScopes;
}
if (baseUri) {
return `${baseUri}/.default`;
@@ -60305,6 +60342,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@@ -60344,7 +60385,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;
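In the @azure/core-http hunks above, the bundle drops its local `isNode` constant and instead re-exports the one provided by coreUtil (the @azure/core-util package). The check itself keeps the same shape as the removed constant; shown here only as a standalone snippet, not as the library's exported implementation:

```js
// Same shape as the constant removed in the @@ -55206 hunk above.
const isNode =
  typeof process !== 'undefined' &&
  Boolean(process.version) &&
  Boolean(process.versions) &&
  Boolean(process.versions.node);

if (isNode) {
  // e.g. the pipeline hunks above only register the proxy policy under Node.js
  console.log('running under Node.js', process.versions.node);
}
```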
176 dist/restore/index.js (vendored)

The hunks for this bundle are identical to those shown above for dist/restore-only/index.js: the same vendored @actions/cache, semver, xml2js, and @azure/core-http changes at the same positions.
176 dist/save-only/index.js (vendored)

The hunks for this bundle repeat the same vendored dependency changes as dist/restore-only/index.js above, shifted to this bundle's line numbers (for example @@ -1192,26 +1192,21 @@, @@ -3445,7 +3440,8 @@, @@ -7332,8 +7328,11 @@ and the following semver hunks, and @@ -50269,14 +50312,14 @@ onward for xml2js).
|
||||
} else {
|
||||
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
|
||||
}
|
||||
}
|
||||
if (_this.options.validator != null) {
|
||||
xpath = "/" + ((function() {
|
||||
|
@ -50350,7 +50397,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
|
|||
}
|
||||
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
|
||||
if (!_this.options.preserveChildrenOrder) {
|
||||
node = {};
|
||||
node = Object.create(null);
|
||||
if (_this.options.attrkey in obj) {
|
||||
node[_this.options.attrkey] = obj[_this.options.attrkey];
|
||||
delete obj[_this.options.attrkey];
|
||||
|
@ -50365,7 +50412,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
|
|||
obj = node;
|
||||
} else if (s) {
|
||||
s[_this.options.childkey] = s[_this.options.childkey] || [];
|
||||
objClone = {};
|
||||
objClone = Object.create(null);
|
||||
for (key in obj) {
|
||||
if (!hasProp.call(obj, key)) continue;
|
||||
objClone[key] = obj[key];
|
||||
|
@ -50382,7 +50429,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
|
|||
} else {
|
||||
if (_this.options.explicitRoot) {
|
||||
old = obj;
|
||||
obj = {};
|
||||
obj = Object.create(null);
|
||||
obj[nodeName] = old;
|
||||
}
|
||||
_this.resultObject = obj;
|
||||
|
@ -52584,7 +52631,7 @@ var CacheFilename;
|
|||
(function (CacheFilename) {
|
||||
CacheFilename["Gzip"] = "cache.tgz";
|
||||
CacheFilename["Zstd"] = "cache.tzst";
|
||||
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
|
||||
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
|
||||
var CompressionMethod;
|
||||
(function (CompressionMethod) {
|
||||
CompressionMethod["Gzip"] = "gzip";
|
||||
|
@ -52592,12 +52639,12 @@ var CompressionMethod;
|
|||
// This enum is for earlier version of zstd that does not have --long support
|
||||
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
|
||||
CompressionMethod["Zstd"] = "zstd";
|
||||
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
|
||||
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
|
||||
var ArchiveToolType;
|
||||
(function (ArchiveToolType) {
|
||||
ArchiveToolType["GNU"] = "gnu";
|
||||
ArchiveToolType["BSD"] = "bsd";
|
||||
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
|
||||
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
|
||||
// The default number of retry attempts.
|
||||
exports.DefaultRetryAttempts = 2;
|
||||
// The default delay in milliseconds between retry attempts.
|
||||
|
@ -54995,7 +55042,7 @@ class HttpHeaders {
|
|||
set(headerName, headerValue) {
|
||||
this._headersMap[getHeaderKey(headerName)] = {
|
||||
name: headerName,
|
||||
value: headerValue.toString(),
|
||||
value: headerValue.toString().trim(),
|
||||
};
|
||||
}
|
||||
/**
|
||||
|
@ -55135,7 +55182,7 @@ const Constants = {
|
|||
/**
|
||||
* The core-http version
|
||||
*/
|
||||
coreHttpVersion: "3.0.0",
|
||||
coreHttpVersion: "3.0.4",
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
*/
|
||||
|
@ -55213,13 +55260,6 @@ const XML_CHARKEY = "_";
|
|||
|
||||
// Copyright (c) Microsoft Corporation.
|
||||
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
|
||||
/**
|
||||
* A constant that indicates whether the environment is node.js or browser based.
|
||||
*/
|
||||
const isNode = typeof process !== "undefined" &&
|
||||
!!process.version &&
|
||||
!!process.versions &&
|
||||
!!process.versions.node;
|
||||
/**
|
||||
* Encodes an URI.
|
||||
*
|
||||
|
@ -59900,7 +59940,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
|
|||
factories.push(throttlingRetryPolicy());
|
||||
}
|
||||
factories.push(deserializationPolicy(options.deserializationContentTypes));
|
||||
if (isNode) {
|
||||
if (coreUtil.isNode) {
|
||||
factories.push(proxyPolicy(options.proxySettings));
|
||||
}
|
||||
factories.push(logPolicy({ logger: logger.info }));
|
||||
|
@ -59932,7 +59972,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
|
|||
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
|
||||
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
|
||||
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
|
||||
if (isNode) {
|
||||
if (coreUtil.isNode) {
|
||||
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
|
||||
}
|
||||
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
|
||||
|
@ -59945,7 +59985,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
|
|||
requestPolicyFactories.push(authPolicyFactory);
|
||||
}
|
||||
requestPolicyFactories.push(logPolicy(loggingOptions));
|
||||
if (isNode && pipelineOptions.decompressResponse === false) {
|
||||
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
|
||||
requestPolicyFactories.push(disableResponseDecompressionPolicy());
|
||||
}
|
||||
return {
|
||||
|
@ -60076,10 +60116,7 @@ function flattenResponse(_response, responseSpec) {
|
|||
}
|
||||
function getCredentialScopes(options, baseUri) {
|
||||
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
|
||||
const scopes = options.credentialScopes;
|
||||
return Array.isArray(scopes)
|
||||
? scopes.map((scope) => new URL(scope).toString())
|
||||
: new URL(scopes).toString();
|
||||
return options.credentialScopes;
|
||||
}
|
||||
if (baseUri) {
|
||||
return `${baseUri}/.default`;
|
||||
|
@ -60312,6 +60349,10 @@ Object.defineProperty(exports, 'delay', {
|
|||
enumerable: true,
|
||||
get: function () { return coreUtil.delay; }
|
||||
});
|
||||
Object.defineProperty(exports, 'isNode', {
|
||||
enumerable: true,
|
||||
get: function () { return coreUtil.isNode; }
|
||||
});
|
||||
Object.defineProperty(exports, 'isTokenCredential', {
|
||||
enumerable: true,
|
||||
get: function () { return coreAuth.isTokenCredential; }
|
||||
|
@ -60351,7 +60392,6 @@ exports.generateUuid = generateUuid;
|
|||
exports.getDefaultProxySettings = getDefaultProxySettings;
|
||||
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
|
||||
exports.isDuration = isDuration;
|
||||
exports.isNode = isNode;
|
||||
exports.isValidUuid = isValidUuid;
|
||||
exports.keepAlivePolicy = keepAlivePolicy;
|
||||
exports.logPolicy = logPolicy;
|
||||
|
|
176
dist/save/index.js
vendored
@ -1136,26 +1136,21 @@ function resolvePaths(patterns) {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
try {
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
finally {
_e = true;
else {
paths.push(`${relativeFile}`);
}
}
}
@ -3389,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths;
// don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@ -7276,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@ -7286,6 +7285,31 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

@ -7295,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'

// ## Main Version
// Three dot-separated numeric identifiers.
@ -7344,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@ -7424,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@ -7433,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@ -7448,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@ -7469,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@ -7497,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@ -7521,7 +7557,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? re[t.LOOSE] : re[t.FULL]
var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) {
return null
}
@ -7576,7 +7612,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@ -8021,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@ -8037,7 +8074,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@ -8161,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First, split based on boolean or ||
// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@ -8171,7 +8215,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range)
throw new TypeError('Invalid SemVer Range: ' + this.raw)
}

this.format()
@ -8190,20 +8234,19 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@ -8211,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@ -8311,7 +8354,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@ -8352,7 +8395,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@ -8411,7 +8454,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@ -8486,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '')
return comp.trim().replace(safeRe[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@ -8812,7 +8855,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(safeRe[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@ -8823,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
safeRe[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@ -50242,14 +50285,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = {};
obj = Object.create(null);
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {};
obj[attrkey] = Object.create(null);
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@ -50299,7 +50342,11 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@ -50323,7 +50370,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = {};
node = Object.create(null);
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@ -50338,7 +50385,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {};
objClone = Object.create(null);
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@ -50355,7 +50402,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = {};
obj = Object.create(null);
obj[nodeName] = old;
}
_this.resultObject = obj;
@ -52557,7 +52604,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@ -52565,12 +52612,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@ -54968,7 +55015,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString(),
value: headerValue.toString().trim(),
};
}
/**
@ -55108,7 +55155,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.0",
coreHttpVersion: "3.0.4",
/**
* Specifies HTTP.
*/
@ -55186,13 +55233,6 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@ -59873,7 +59913,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) {
if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@ -59905,7 +59945,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) {
if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@ -59918,7 +59958,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) {
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@ -60049,10 +60089,7 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
return options.credentialScopes;
}
if (baseUri) {
return `${baseUri}/.default`;
@ -60285,6 +60322,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@ -60324,7 +60365,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;
1150
package-lock.json
generated
File diff suppressed because it is too large
@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.3.2",
"version": "3.3.3",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.2.2",
"@actions/cache": "^3.2.3",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"