module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ 	"use strict";
/******/ 	// The module cache
/******/ 	var installedModules = {};
/******/
/******/ 	// The require function
/******/ 	function __webpack_require__(moduleId) {
/******/
/******/ 		// Check if module is in cache
/******/ 		if (installedModules[moduleId]) {
/******/ 			return installedModules[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = installedModules[moduleId] = {
/******/ 			i: moduleId,
/******/ 			l: false,
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ 		// Flag the module as loaded
/******/ 		module.l = true;
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
/******/
/******/
/******/ 	__webpack_require__.ab = __dirname + "/";
/******/
/******/ 	// the startup function
/******/ 	function startup() {
/******/ 		// Load entry module and return exports
/******/ 		return __webpack_require__(778);
/******/ 	};
/******/
/******/ 	// run startup
/******/ 	return startup();
/******/ })
/************************************************************************/
/******/ ({
/***/ 1:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict" ;
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const childProcess = __webpack_require__(129);
const path = __webpack_require__(622);
const util_1 = __webpack_require__(669);
const ioUtil = __webpack_require__(672);
const exec = util_1.promisify(childProcess.exec);
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See CopyOptions.
 */
function cp ( source , dest , options = { } ) {
return __awaiter(this, void 0, void 0, function* () {
const { force , recursive } = readCopyOptions ( options ) ;
const destStat = ( yield ioUtil . exists ( dest ) ) ? yield ioUtil . stat ( dest ) : null ;
// Dest is an existing file, but not forcing
if ( destStat && destStat . isFile ( ) && ! force ) {
return ;
}
// If dest is an existing directory, should copy inside.
const newDest = destStat && destStat . isDirectory ( )
? path . join ( dest , path . basename ( source ) )
: dest ;
if ( ! ( yield ioUtil . exists ( source ) ) ) {
throw new Error(`no such file or directory: ${source}`);
}
const sourceStat = yield ioUtil . stat ( source ) ;
if ( sourceStat . isDirectory ( ) ) {
if ( ! recursive ) {
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
}
else {
yield cpDirRecursive ( source , newDest , 0 , force ) ;
}
}
else {
if ( path . relative ( source , newDest ) === '' ) {
// a file cannot be copied to itself
throw new Error(`'${newDest}' and '${source}' are the same file`);
}
yield copyFile ( source , newDest , force ) ;
}
} ) ;
}
exports . cp = cp ;
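// Illustrative usage (added for clarity, not part of the bundled source), assuming this
// module is consumed through the @actions/io package it was compiled from:
//
//   await cp('./fixtures', './out', { recursive: true, force: true });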
/**
 * Moves a path.
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See MoveOptions.
 */
function mv ( source , dest , options = { } ) {
return __awaiter(this, void 0, void 0, function* () {
if ( yield ioUtil . exists ( dest ) ) {
let destExists = true ;
if ( yield ioUtil . isDirectory ( dest ) ) {
// If dest is directory copy src into dest
dest = path . join ( dest , path . basename ( source ) ) ;
destExists = yield ioUtil . exists ( dest ) ;
}
if ( destExists ) {
if ( options . force == null || options . force ) {
yield rmRF ( dest ) ;
}
else {
throw new Error ( 'Destination already exists' ) ;
}
}
}
yield mkdirP ( path . dirname ( dest ) ) ;
yield ioUtil . rename ( source , dest ) ;
} ) ;
}
exports . mv = mv ;
/**
 * Remove a path recursively with force
 *
 * @param     inputPath path to remove
 */
function rmRF ( inputPath ) {
return __awaiter(this, void 0, void 0, function* () {
if (ioUtil.IS_WINDOWS) {
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
try {
if ( yield ioUtil . isDirectory ( inputPath , true ) ) {
yield exec(`rd /s /q "${inputPath}"`);
}
else {
yield exec(`del /f /a "${inputPath}"`);
}
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that
try {
yield ioUtil . unlink ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
}
else {
let isDir = false ;
try {
isDir = yield ioUtil . isDirectory ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
return ;
}
if ( isDir ) {
yield exec(`rm -rf "${inputPath}"`);
}
else {
yield ioUtil . unlink ( inputPath ) ;
}
}
} ) ;
}
exports . rmRF = rmRF ;
/**
 * Make a directory. Creates the full path with folders in between.
 * Will throw if it fails.
 *
 * @param   fsPath        path to create
 * @returns Promise<void>
 */
function mkdirP ( fsPath ) {
return __awaiter(this, void 0, void 0, function* () {
yield ioUtil . mkdirP ( fsPath ) ;
} ) ;
}
exports . mkdirP = mkdirP ;
/**
 * Returns path of a tool had the tool actually been invoked. Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param     tool              name of the tool
 * @param     check             whether to check if tool exists
 * @returns   Promise<string>   path to tool
 */
function which ( tool , check ) {
return __awaiter(this, void 0, void 0, function* () {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// recursive when check=true
if ( check ) {
const result = yield which ( tool , false ) ;
if ( ! result ) {
if (ioUtil.IS_WINDOWS) {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
}
else {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
}
}
}
try {
// build the list of extensions to try
const extensions = [ ] ;
if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
for ( const extension of process . env . PATHEXT . split ( path . delimiter ) ) {
if ( extension ) {
extensions . push ( extension ) ;
}
}
}
// if it's rooted, return it if exists. otherwise return empty.
if ( ioUtil . isRooted ( tool ) ) {
const filePath = yield ioUtil . tryGetExecutablePath ( tool , extensions ) ;
if ( filePath ) {
return filePath ;
}
return '' ;
}
// if any path separators, return empty
if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
return '' ;
}
// build the list of directories
//
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
// it feels like we should not do this. Checking the current directory seems like more of a use
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
// across platforms.
const directories = [ ] ;
if ( process . env . PATH ) {
for ( const p of process . env . PATH . split ( path . delimiter ) ) {
if ( p ) {
directories . push ( p ) ;
}
}
}
// return the first match
for ( const directory of directories ) {
const filePath = yield ioUtil . tryGetExecutablePath ( directory + path . sep + tool , extensions ) ;
if ( filePath ) {
return filePath ;
}
}
return '' ;
}
catch ( err ) {
throw new Error(`which failed with message ${err.message}`);
}
} ) ;
}
exports . which = which ;
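// Illustrative usage (added for clarity, not part of the bundled source), assuming this
// module is consumed through @actions/io; which() resolves a bare tool name against PATH:
//
//   const gitPath = await which('git', true); // throws if git cannot be located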
function readCopyOptions ( options ) {
const force = options . force == null ? true : options . force ;
const recursive = Boolean ( options . recursive ) ;
return { force , recursive } ;
}
function cpDirRecursive ( sourceDir , destDir , currentDepth , force ) {
return __awaiter(this, void 0, void 0, function* () {
// Ensure there is not a run away recursive copy
if ( currentDepth >= 255 )
return ;
currentDepth ++ ;
yield mkdirP ( destDir ) ;
const files = yield ioUtil . readdir ( sourceDir ) ;
for ( const fileName of files ) {
const srcFile = `${sourceDir}/${fileName}`;
const destFile = `${destDir}/${fileName}`;
const srcFileStat = yield ioUtil . lstat ( srcFile ) ;
if ( srcFileStat . isDirectory ( ) ) {
// Recurse
yield cpDirRecursive ( srcFile , destFile , currentDepth , force ) ;
}
else {
yield copyFile ( srcFile , destFile , force ) ;
}
}
// Change the mode for the newly created directory
yield ioUtil . chmod ( destDir , ( yield ioUtil . stat ( sourceDir ) ) . mode ) ;
} ) ;
}
// Buffered file copy
function copyFile ( srcFile , destFile , force ) {
return __awaiter(this, void 0, void 0, function* () {
if ( ( yield ioUtil . lstat ( srcFile ) ) . isSymbolicLink ( ) ) {
// unlink/re-link it
try {
yield ioUtil . lstat ( destFile ) ;
yield ioUtil . unlink ( destFile ) ;
}
catch ( e ) {
// Try to override file permission
if ( e . code === 'EPERM' ) {
yield ioUtil . chmod ( destFile , '0666' ) ;
yield ioUtil . unlink ( destFile ) ;
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
const symlinkFull = yield ioUtil . readlink ( srcFile ) ;
yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
}
else if ( ! ( yield ioUtil . exists ( destFile ) ) || force ) {
yield ioUtil . copyFile ( srcFile , destFile ) ;
}
} ) ;
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
/***/ 9 :
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict" ;
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(__webpack_require__(87));
const events = __importStar(__webpack_require__(614));
const child = __importStar(__webpack_require__(129));
const path = __importStar(__webpack_require__(622));
const io = __importStar(__webpack_require__(1));
const ioUtil = __importStar(__webpack_require__(672));
/* eslint-disable @typescript-eslint/unbound-method */
const IS_WINDOWS = process.platform === 'win32';
/*
 * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
 */
class ToolRunner extends events . EventEmitter {
constructor ( toolPath , args , options ) {
super ( ) ;
if ( ! toolPath ) {
throw new Error ( "Parameter 'toolPath' cannot be null or empty." ) ;
}
this . toolPath = toolPath ;
this . args = args || [ ] ;
this . options = options || { } ;
}
_debug ( message ) {
if ( this . options . listeners && this . options . listeners . debug ) {
this . options . listeners . debug ( message ) ;
}
}
_getCommandString ( options , noPrefix ) {
const toolPath = this . _getSpawnFileName ( ) ;
const args = this . _getSpawnArgs ( options ) ;
let cmd = noPrefix ? '' : '[command]' ; // omit prefix when piped to a second tool
if (IS_WINDOWS) {
// Windows + cmd file
if ( this . _isCmdFile ( ) ) {
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${a}`;
}
}
// Windows + verbatim
else if ( options . windowsVerbatimArguments ) {
cmd += `"${toolPath}"`;
for (const a of args) {
cmd += ` ${a}`;
}
}
// Windows (regular)
else {
cmd += this . _windowsQuoteCmdArg ( toolPath ) ;
for ( const a of args ) {
cmd += ` ${this._windowsQuoteCmdArg(a)}`;
}
}
}
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different than Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${a}`;
}
}
return cmd ;
}
_processLineBuffer ( data , strBuffer , onLine ) {
try {
let s = strBuffer + data . toString ( ) ;
let n = s . indexOf ( os . EOL ) ;
while ( n > - 1 ) {
const line = s . substring ( 0 , n ) ;
onLine ( line ) ;
// the rest of the string ...
s = s . substring ( n + os . EOL . length ) ;
n = s . indexOf ( os . EOL ) ;
}
strBuffer = s ;
}
catch ( err ) {
// streaming lines to console is best effort. Don't fail a build.
this._debug(`error processing line. Failed with error ${err}`);
}
}
_getSpawnFileName ( ) {
if (IS_WINDOWS) {
if ( this . _isCmdFile ( ) ) {
return process . env [ 'COMSPEC' ] || 'cmd.exe' ;
}
}
return this . toolPath ;
}
_getSpawnArgs ( options ) {
if (IS_WINDOWS) {
if (this._isCmdFile()) {
let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
for ( const a of this . args ) {
argline += ' ' ;
argline += options . windowsVerbatimArguments
? a
: this . _windowsQuoteCmdArg ( a ) ;
}
argline += '"' ;
return [ argline ] ;
}
}
return this . args ;
}
_endsWith ( str , end ) {
return str . endsWith ( end ) ;
}
_isCmdFile ( ) {
const upperToolPath = this . toolPath . toUpperCase ( ) ;
return ( this . _endsWith ( upperToolPath , '.CMD' ) ||
this . _endsWith ( upperToolPath , '.BAT' ) ) ;
}
_windowsQuoteCmdArg ( arg ) {
// for .exe, apply the normal quoting rules that libuv applies
if ( ! this . _isCmdFile ( ) ) {
return this . _uvQuoteCmdArg ( arg ) ;
}
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if ( ! arg ) {
return '""' ;
}
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ' ,
'\t' ,
'&' ,
'(' ,
')' ,
'[' ,
']' ,
'{' ,
'}' ,
'^' ,
'=' ,
';' ,
'!' ,
"'" ,
'+' ,
',' ,
'`' ,
'~' ,
'|' ,
'<' ,
'>' ,
'"'
] ;
let needsQuotes = false ;
for ( const char of arg ) {
if ( cmdSpecialChars . some ( x => x === char ) ) {
needsQuotes = true ;
break ;
}
}
// short-circuit if quotes not needed
if ( ! needsQuotes ) {
return arg ;
}
// the following quoting rules are very similar to the rules that libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
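// Illustrative example (added for clarity, not part of the original source), showing how
// the rules above transform an argument before it is handed to the cmd.exe parser:
//
//   _windowsQuoteCmdArg('hello "world"')  =>  '"hello ""world"""'
//
// i.e. the arg is wrapped in quotes, embedded quotes are doubled, and any backslashes
// immediately preceding a quote would be doubled as well.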
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ; // double the slash
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '"' ; // double the quote
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_uvQuoteCmdArg ( arg ) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if ( ! arg ) {
// Need double quotation for empty argument
return '""' ;
}
if ( ! arg . includes ( ' ' ) && ! arg . includes ( '\t' ) && ! arg . includes ( '"' ) ) {
// No quotation needed
return arg ;
}
if ( ! arg . includes ( '"' ) && ! arg . includes ( '\\' ) ) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return ` " ${ arg } " ` ;
}
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ;
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '\\' ;
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_cloneExecOptions ( options ) {
options = options || { } ;
const result = {
cwd : options . cwd || process . cwd ( ) ,
env : options . env || process . env ,
silent : options . silent || false ,
windowsVerbatimArguments : options . windowsVerbatimArguments || false ,
failOnStdErr : options . failOnStdErr || false ,
ignoreReturnCode : options . ignoreReturnCode || false ,
delay : options . delay || 10000
} ;
result . outStream = options . outStream || process . stdout ;
result . errStream = options . errStream || process . stderr ;
return result ;
}
_getSpawnOptions ( options , toolPath ) {
options = options || { } ;
const result = { } ;
result . cwd = options . cwd ;
result . env = options . env ;
result [ 'windowsVerbatimArguments' ] =
options . windowsVerbatimArguments || this . _isCmdFile ( ) ;
if ( options . windowsVerbatimArguments ) {
result.argv0 = `"${toolPath}"`;
}
return result ;
}
/**
 * Exec a tool.
 * Output will be streamed to the live console.
 * Returns promise with return code
 *
 * @param     tool     path to tool to exec
 * @param     options  optional exec options. See ExecOptions
 * @returns   number
 */
exec ( ) {
return __awaiter(this, void 0, void 0, function* () {
// root the tool path if it is unrooted and contains relative pathing
if (!ioUtil.isRooted(this.toolPath) &&
(this.toolPath.includes('/') ||
(IS_WINDOWS && this.toolPath.includes('\\')))) {
// prefer options.cwd if it is specified, however options.cwd may also need to be rooted
this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
}
// if the tool is only a file name, then resolve it from the PATH
// otherwise verify it exists (add extension on Windows if necessary)
this.toolPath = yield io.which(this.toolPath, true);
return new Promise ( ( resolve , reject ) => {
this._debug(`exec tool: ${this.toolPath}`);
this . _debug ( 'arguments:' ) ;
for ( const arg of this . args ) {
this . _debug ( ` ${ arg } ` ) ;
}
const optionsNonNull = this . _cloneExecOptions ( this . options ) ;
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( this . _getCommandString ( optionsNonNull ) + os . EOL ) ;
}
const state = new ExecState ( optionsNonNull , this . toolPath ) ;
state . on ( 'debug' , ( message ) => {
this . _debug ( message ) ;
} ) ;
const fileName = this . _getSpawnFileName ( ) ;
const cp = child . spawn ( fileName , this . _getSpawnArgs ( optionsNonNull ) , this . _getSpawnOptions ( this . options , fileName ) ) ;
const stdbuffer = '' ;
if ( cp . stdout ) {
cp . stdout . on ( 'data' , ( data ) => {
if ( this . options . listeners && this . options . listeners . stdout ) {
this . options . listeners . stdout ( data ) ;
}
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( data ) ;
}
this . _processLineBuffer ( data , stdbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . stdline ) {
this . options . listeners . stdline ( line ) ;
}
} ) ;
} ) ;
}
const errbuffer = '' ;
if ( cp . stderr ) {
cp . stderr . on ( 'data' , ( data ) => {
state . processStderr = true ;
if ( this . options . listeners && this . options . listeners . stderr ) {
this . options . listeners . stderr ( data ) ;
}
if ( ! optionsNonNull . silent &&
optionsNonNull . errStream &&
optionsNonNull . outStream ) {
const s = optionsNonNull . failOnStdErr
? optionsNonNull . errStream
: optionsNonNull . outStream ;
s . write ( data ) ;
}
this . _processLineBuffer ( data , errbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . errline ) {
this . options . listeners . errline ( line ) ;
}
} ) ;
} ) ;
}
cp . on ( 'error' , ( err ) => {
state . processError = err . message ;
state . processExited = true ;
state . processClosed = true ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'exit' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'close' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
state . processClosed = true ;
this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
state . CheckComplete ( ) ;
} ) ;
state . on ( 'done' , ( error , exitCode ) => {
if ( stdbuffer . length > 0 ) {
this . emit ( 'stdline' , stdbuffer ) ;
}
if ( errbuffer . length > 0 ) {
this . emit ( 'errline' , errbuffer ) ;
}
cp . removeAllListeners ( ) ;
if ( error ) {
reject ( error ) ;
}
else {
resolve ( exitCode ) ;
}
} ) ;
if ( this . options . input ) {
if ( ! cp . stdin ) {
throw new Error ( 'child process missing stdin' ) ;
}
cp . stdin . end ( this . options . input ) ;
}
} ) ;
} ) ;
}
}
exports . ToolRunner = ToolRunner ;
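// Illustrative usage (added for clarity, not part of the bundled source): arguments are
// passed as an array and exec() resolves with the tool's exit code.
//
//   const runner = new ToolRunner('git', ['status'], { silent: true });
//   const exitCode = await runner.exec();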
/**
 * Convert an arg string to an array of args. Handles escaping
 *
 * @param    argString   string of arguments
 * @returns  string[]    array of arguments
 */
function argStringToArray ( argString ) {
const args = [ ] ;
let inQuotes = false ;
let escaped = false ;
let arg = '' ;
function append ( c ) {
// we only escape double quotes.
if ( escaped && c !== '"' ) {
arg += '\\' ;
}
arg += c ;
escaped = false ;
}
for ( let i = 0 ; i < argString . length ; i ++ ) {
const c = argString . charAt ( i ) ;
if ( c === '"' ) {
if ( ! escaped ) {
inQuotes = ! inQuotes ;
}
else {
append ( c ) ;
}
continue ;
}
if ( c === '\\' && escaped ) {
append ( c ) ;
continue ;
}
if ( c === '\\' && inQuotes ) {
escaped = true ;
continue ;
}
if ( c === ' ' && ! inQuotes ) {
if ( arg . length > 0 ) {
args . push ( arg ) ;
arg = '' ;
}
continue ;
}
append ( c ) ;
}
if ( arg . length > 0 ) {
args . push ( arg . trim ( ) ) ;
}
return args ;
}
exports . argStringToArray = argStringToArray ;
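// Illustrative example (added for clarity, not part of the original source): quoted
// segments are kept together and the surrounding quotes are stripped.
//
//   argStringToArray('node "my script.js" --flag')  =>  ['node', 'my script.js', '--flag']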
class ExecState extends events . EventEmitter {
constructor ( options , toolPath ) {
super ( ) ;
this . processClosed = false ; // tracks whether the process has exited and stdio is closed
this . processError = '' ;
this . processExitCode = 0 ;
this . processExited = false ; // tracks whether the process has exited
this . processStderr = false ; // tracks whether stderr was written to
this . delay = 10000 ; // 10 seconds
this . done = false ;
this . timeout = null ;
if ( ! toolPath ) {
throw new Error ( 'toolPath must not be empty' ) ;
}
this . options = options ;
this . toolPath = toolPath ;
if ( options . delay ) {
this . delay = options . delay ;
}
}
CheckComplete ( ) {
if ( this . done ) {
return ;
}
if ( this . processClosed ) {
this . _setResult ( ) ;
}
else if ( this . processExited ) {
this . timeout = setTimeout ( ExecState . HandleTimeout , this . delay , this ) ;
}
}
_debug ( message ) {
this . emit ( 'debug' , message ) ;
}
_setResult ( ) {
// determine whether there is an error
let error ;
if ( this . processExited ) {
if ( this . processError ) {
error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
}
else if ( this . processExitCode !== 0 && ! this . options . ignoreReturnCode ) {
error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
}
else if ( this . processStderr && this . options . failOnStdErr ) {
error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
}
}
// clear the timeout
if ( this . timeout ) {
clearTimeout ( this . timeout ) ;
this . timeout = null ;
}
this . done = true ;
this . emit ( 'done' , error , this . processExitCode ) ;
}
static HandleTimeout ( state ) {
if ( state . done ) {
return ;
}
if ( ! state . processClosed && state . processExited ) {
const message = `The STDIO streams did not close within ${state.delay /
1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
state . _debug ( message ) ;
}
state . _setResult ( ) ;
}
}
//# sourceMappingURL=toolrunner.js.map
/***/ } ) ,
/***/ 15 :
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict" ;
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function (v) { resolve({ value: v, done: d }); }, reject); }
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = __importStar(__webpack_require__(470));
const exec = __importStar(__webpack_require__(986));
const glob = __importStar(__webpack_require__(281));
const io = __importStar(__webpack_require__(1));
const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
const semver = __importStar(__webpack_require__(280));
const util = __importStar(__webpack_require__(669));
const uuid_1 = __webpack_require__(898);
const constants_1 = __webpack_require__(931);
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory ( ) {
return __awaiter(this, void 0, void 0, function* () {
const IS_WINDOWS = process.platform === 'win32';
let tempDirectory = process . env [ 'RUNNER_TEMP' ] || '' ;
if ( ! tempDirectory ) {
let baseLocation ;
if ( IS _WINDOWS ) {
// On Windows use the USERPROFILE env variable
baseLocation = process . env [ 'USERPROFILE' ] || 'C:\\' ;
}
else {
if ( process . platform === 'darwin' ) {
baseLocation = '/Users' ;
}
else {
baseLocation = '/home' ;
}
}
tempDirectory = path . join ( baseLocation , 'actions' , 'temp' ) ;
}
const dest = path.join(tempDirectory, uuid_1.v4());
yield io . mkdirP ( dest ) ;
return dest ;
} ) ;
}
exports . createTempDirectory = createTempDirectory ;
function getArchiveFileSizeIsBytes ( filePath ) {
return fs . statSync ( filePath ) . size ;
}
exports . getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes ;
function resolvePaths ( patterns ) {
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const paths = [ ] ;
const workspace = ( _b = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _b !== void 0 ? _b : process . cwd ( ) ;
const globber = yield glob . create ( patterns . join ( '\n' ) , {
implicitDescendants : false
} ) ;
try {
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d . value ;
const relativeFile = path . relative ( workspace , file ) ;
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if ( _d && ! _d . done && ( _a = _c . return ) ) yield _a . call ( _c ) ;
}
finally { if (e_1) throw e_1.error; }
}
return paths ;
} ) ;
}
exports . resolvePaths = resolvePaths ;
function unlinkFile ( filePath ) {
return __awaiter(this, void 0, void 0, function* () {
return util . promisify ( fs . unlink ) ( filePath ) ;
} ) ;
}
exports . unlinkFile = unlinkFile ;
function getVersion ( app ) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Checking ${app} --version`);
let versionOutput = '' ;
try {
yield exec.exec(`${app} --version`, [], {
ignoreReturnCode : true ,
silent : true ,
listeners : {
stdout : ( data ) => ( versionOutput += data . toString ( ) ) ,
stderr : ( data ) => ( versionOutput += data . toString ( ) )
}
} ) ;
}
catch ( err ) {
core . debug ( err . message ) ;
}
versionOutput = versionOutput . trim ( ) ;
core . debug ( versionOutput ) ;
return versionOutput ;
} ) ;
}
// Use zstandard if possible to maximize cache performance
function getCompressionMethod ( ) {
return __awaiter(this, void 0, void 0, function* () {
if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
// Disable zstd due to bug https://github.com/actions/cache/issues/301
return constants_1.CompressionMethod.Gzip;
}
const versionOutput = yield getVersion('zstd');
const version = semver.clean(versionOutput);
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
// zstd is not installed
return constants_1.CompressionMethod.Gzip;
}
else if (!version || semver.lt(version, 'v1.3.2')) {
// zstd is installed but using a version earlier than v1.3.2
// v1.3.2 is required to use the `--long` options in zstd
return constants_1.CompressionMethod.ZstdWithoutLong;
}
else {
return constants_1.CompressionMethod.Zstd;
}
});
}
exports . getCompressionMethod = getCompressionMethod ;
function getCacheFileName(compressionMethod) {
return compressionMethod === constants_1.CompressionMethod.Gzip
? constants_1.CacheFilename.Gzip
: constants_1.CacheFilename.Zstd;
}
exports.getCacheFileName = getCacheFileName;
function isGnuTarInstalled() {
return __awaiter(this, void 0, void 0, function* () {
const versionOutput = yield getVersion ( 'tar' ) ;
return versionOutput . toLowerCase ( ) . includes ( 'gnu tar' ) ;
} ) ;
}
exports . isGnuTarInstalled = isGnuTarInstalled ;
//# sourceMappingURL=cacheUtils.js.map
/***/ } ) ,
/***/ 16 :
/***/ ( function ( module ) {
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 86 :
/***/ (function(module, __unusedexports, __webpack_require__) {
var rng = __webpack_require__(139);
var bytesToUuid = __webpack_require__(722);
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
var _nodeId ;
var _clockseq ;
// Previous uuid creation time
var _lastMSecs = 0 ;
var _lastNSecs = 0 ;
// See https://github.com/uuidjs/uuid for API details
function v1 ( options , buf , offset ) {
var i = buf && offset || 0 ;
var b = buf || [ ] ;
options = options || { } ;
var node = options . node || _nodeId ;
var clockseq = options . clockseq !== undefined ? options . clockseq : _clockseq ;
// node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if ( node == null || clockseq == null ) {
var seedBytes = rng ( ) ;
if ( node == null ) {
// Per 4.5, create a 48-bit node id, (47 random bits + multicast bit = 1)
node = _nodeId = [
seedBytes [ 0 ] | 0x01 ,
seedBytes [ 1 ] , seedBytes [ 2 ] , seedBytes [ 3 ] , seedBytes [ 4 ] , seedBytes [ 5 ]
] ;
}
if ( clockseq == null ) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = ( seedBytes [ 6 ] << 8 | seedBytes [ 7 ] ) & 0x3fff ;
}
}
// UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
var msecs = options . msecs !== undefined ? options . msecs : new Date ( ) . getTime ( ) ;
// Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
var nsecs = options . nsecs !== undefined ? options . nsecs : _lastNSecs + 1 ;
// Time since last uuid creation (in msecs)
var dt = ( msecs - _lastMSecs ) + ( nsecs - _lastNSecs ) / 10000 ;
// Per 4.2.1.2, Bump clockseq on clock regression
if ( dt < 0 && options . clockseq === undefined ) {
clockseq = clockseq + 1 & 0x3fff ;
}
// Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ( ( dt < 0 || msecs > _lastMSecs ) && options . nsecs === undefined ) {
nsecs = 0 ;
}
// Per 4.2.1.2 Throw error if too many uuids are requested
if ( nsecs >= 10000 ) {
throw new Error ( 'uuid.v1(): Can\'t create more than 10M uuids/sec' ) ;
}
_lastMSecs = msecs ;
_lastNSecs = nsecs ;
_clockseq = clockseq ;
// Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000 ;
// `time_low`
var tl = ( ( msecs & 0xfffffff ) * 10000 + nsecs ) % 0x100000000 ;
b [ i ++ ] = tl >>> 24 & 0xff ;
b [ i ++ ] = tl >>> 16 & 0xff ;
b [ i ++ ] = tl >>> 8 & 0xff ;
b [ i ++ ] = tl & 0xff ;
// `time_mid`
var tmh = ( msecs / 0x100000000 * 10000 ) & 0xfffffff ;
b [ i ++ ] = tmh >>> 8 & 0xff ;
b [ i ++ ] = tmh & 0xff ;
// `time_high_and_version`
b [ i ++ ] = tmh >>> 24 & 0xf | 0x10 ; // include version
b [ i ++ ] = tmh >>> 16 & 0xff ;
// `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b [ i ++ ] = clockseq >>> 8 | 0x80 ;
// `clock_seq_low`
b [ i ++ ] = clockseq & 0xff ;
// `node`
for ( var n = 0 ; n < 6 ; ++ n ) {
b [ i + n ] = node [ n ] ;
}
return buf ? buf : bytesToUuid ( b ) ;
}
module . exports = v1 ;
/***/ } ) ,
/***/ 87 :
/***/ ( function ( module ) {
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 93 :
/***/ (function(module, __unusedexports, __webpack_require__) {
module . exports = minimatch
minimatch . Minimatch = Minimatch
var path = { sep : '/' }
try {
path = __webpack_require__(622)
} catch ( er ) { }
var GLOBSTAR = minimatch . GLOBSTAR = Minimatch . GLOBSTAR = { }
var expand = __webpack_require__(306)
var plTypes = {
'!' : { open : '(?:(?!(?:' , close : '))[^/]*?)' } ,
'?' : { open : '(?:' , close : ')?' } ,
'+' : { open : '(?:' , close : ')+' } ,
'*' : { open : '(?:' , close : ')*' } ,
'@' : { open : '(?:' , close : ')' }
}
// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'
// * => any number of characters
var star = qmark + '*?'
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
// characters that need to be escaped in RegExp.
var reSpecials = charSet ( '().*{}+?[]^$\\!' )
// "abc" -> { a:true, b:true, c:true }
function charSet ( s ) {
return s . split ( '' ) . reduce ( function ( set , c ) {
set [ c ] = true
return set
} , { } )
}
// normalizes slashes.
var slashSplit = /\/+/
minimatch . filter = filter
function filter ( pattern , options ) {
options = options || { }
return function ( p , i , list ) {
return minimatch ( p , pattern , options )
}
}
function ext ( a , b ) {
a = a || { }
b = b || { }
var t = { }
Object . keys ( b ) . forEach ( function ( k ) {
t [ k ] = b [ k ]
} )
Object . keys ( a ) . forEach ( function ( k ) {
t [ k ] = a [ k ]
} )
return t
}
minimatch . defaults = function ( def ) {
if ( ! def || ! Object . keys ( def ) . length ) return minimatch
var orig = minimatch
var m = function minimatch ( p , pattern , options ) {
return orig . minimatch ( p , pattern , ext ( def , options ) )
}
m . Minimatch = function Minimatch ( pattern , options ) {
return new orig . Minimatch ( pattern , ext ( def , options ) )
}
return m
}
Minimatch . defaults = function ( def ) {
if ( ! def || ! Object . keys ( def ) . length ) return Minimatch
return minimatch . defaults ( def ) . Minimatch
}
function minimatch ( p , pattern , options ) {
if ( typeof pattern !== 'string' ) {
throw new TypeError ( 'glob pattern string required' )
}
if ( ! options ) options = { }
// shortcut: comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
return false
}
// "" only matches ""
if ( pattern . trim ( ) === '' ) return p === ''
return new Minimatch ( pattern , options ) . match ( p )
}
function Minimatch ( pattern , options ) {
if ( ! ( this instanceof Minimatch ) ) {
return new Minimatch ( pattern , options )
}
if ( typeof pattern !== 'string' ) {
throw new TypeError ( 'glob pattern string required' )
}
if ( ! options ) options = { }
pattern = pattern . trim ( )
// windows support: need to use /, not \
if ( path . sep !== '/' ) {
pattern = pattern . split ( path . sep ) . join ( '/' )
}
this . options = options
this . set = [ ]
this . pattern = pattern
this . regexp = null
this . negate = false
this . comment = false
this . empty = false
// make the set of regexps etc.
this . make ( )
}
Minimatch . prototype . debug = function ( ) { }
Minimatch . prototype . make = make
function make ( ) {
// don't do it more than once.
if ( this . _made ) return
var pattern = this . pattern
var options = this . options
// empty patterns and comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
this . comment = true
return
}
if ( ! pattern ) {
this . empty = true
return
}
// step 1: figure out negation, etc.
this . parseNegate ( )
// step 2: expand braces
var set = this . globSet = this . braceExpand ( )
if ( options . debug ) this . debug = console . error
this . debug ( this . pattern , set )
// step 3: now we have a set, so turn each one into a series of path-portion
// matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
set = this . globParts = set . map ( function ( s ) {
return s . split ( slashSplit )
} )
this . debug ( this . pattern , set )
// glob --> regexps
set = set . map ( function ( s , si , set ) {
return s . map ( this . parse , this )
} , this )
this . debug ( this . pattern , set )
// filter out everything that didn't compile properly.
set = set . filter ( function ( s ) {
return s . indexOf ( false ) === - 1
} )
this . debug ( this . pattern , set )
this . set = set
}
Minimatch . prototype . parseNegate = parseNegate
function parseNegate ( ) {
var pattern = this . pattern
var negate = false
var options = this . options
var negateOffset = 0
if ( options . nonegate ) return
for ( var i = 0 , l = pattern . length
; i < l && pattern . charAt ( i ) === '!'
; i ++ ) {
negate = ! negate
negateOffset ++
}
if ( negateOffset ) this . pattern = pattern . substr ( negateOffset )
this . negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch . braceExpand = function ( pattern , options ) {
return braceExpand ( pattern , options )
}
Minimatch . prototype . braceExpand = braceExpand
function braceExpand ( pattern , options ) {
if ( ! options ) {
if ( this instanceof Minimatch ) {
options = this . options
} else {
options = { }
}
}
pattern = typeof pattern === 'undefined'
? this . pattern : pattern
if ( typeof pattern === 'undefined' ) {
throw new TypeError ( 'undefined pattern' )
}
if ( options . nobrace ||
! pattern . match ( /\{.*\}/ ) ) {
// shortcut. no need to expand.
return [ pattern ]
}
return expand ( pattern )
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
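// Illustrative examples (added for clarity, not part of the original source):
//   'a/**/c' matches 'a/c', 'a/b/c' and 'a/b/b/c', because '**' is alone in its path portion;
//   'a/x**/c' treats '**' as a plain '*', so it matches 'a/xyz/c' but not 'a/x/y/c'.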
Minimatch . prototype . parse = parse
var SUBPARSE = { }
function parse ( pattern , isSub ) {
if ( pattern . length > 1024 * 64 ) {
throw new TypeError ( 'pattern is too long' )
}
var options = this . options
// shortcuts
if ( ! options . noglobstar && pattern === '**' ) return GLOBSTAR
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
case '/' :
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
}
if ( ! stateChar ) {
re += '\\('
continue
}
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
}
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
}
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
}
clearStateChar ( )
re += '|'
continue
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
if ( inClass ) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if ( inClass ) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '.' :
case '[' :
case '(' : addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
if ( addPatternStart ) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
}
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
regExp . _glob = pattern
regExp . _src = re
return regExp
}
minimatch . makeRe = function ( pattern , options ) {
return new Minimatch ( pattern , options || { } ) . makeRe ( )
}
Minimatch . prototype . makeRe = makeRe
function makeRe ( ) {
if ( this . regexp || this . regexp === false ) return this . regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
var set = this . set
if ( ! set . length ) {
this . regexp = false
return this . regexp
}
var options = this . options
var twoStar = options . noglobstar ? star
: options . dot ? twoStarDot
: twoStarNoDot
var flags = options . nocase ? 'i' : ''
var re = set . map ( function ( pattern ) {
return pattern . map ( function ( p ) {
return ( p === GLOBSTAR ) ? twoStar
: ( typeof p === 'string' ) ? regExpEscape ( p )
: p . _src
} ) . join ( '\\\/' )
} ) . join ( '|' )
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
if ( this . negate ) re = '^(?!' + re + ').*$'
try {
this . regexp = new RegExp ( re , flags )
} catch ( ex ) {
this . regexp = false
}
return this . regexp
}
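// Usage sketch (illustrative only, never invoked by this bundle): makeRe builds
// one anchored RegExp for the whole pattern; '*' never crosses a '/'.
function exampleMakeRe () {
  var re = minimatch.makeRe('*.js', {})
  return re.test('index.js') && !re.test('lib/index.js') // true
}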
minimatch . match = function ( list , pattern , options ) {
options = options || { }
var mm = new Minimatch ( pattern , options )
list = list . filter ( function ( f ) {
return mm . match ( f )
} )
if ( mm . options . nonull && ! list . length ) {
list . push ( pattern )
}
return list
}
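// Usage sketch (illustrative only, never invoked by this bundle): filter a list
// with a glob; with nonull set, an empty result falls back to the raw pattern.
function exampleMatchList () {
  var hits = minimatch.match(['a.js', 'b.txt', 'lib/c.js'], '*.js') // ['a.js']
  var fallback = minimatch.match([], '*.md', { nonull: true })      // ['*.md']
  return { hits: hits, fallback: fallback }
}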
Minimatch . prototype . match = match
function match ( f , partial ) {
this . debug ( 'match' , f , this . pattern )
// short-circuit in the case of busted things.
// comments, etc.
if ( this . comment ) return false
if ( this . empty ) return f === ''
if ( f === '/' && partial ) return true
var options = this . options
// windows: need to use /, not \
if ( path . sep !== '/' ) {
f = f . split ( path . sep ) . join ( '/' )
}
// treat the test path as a set of pathparts.
f = f . split ( slashSplit )
this . debug ( this . pattern , 'split' , f )
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
var set = this . set
this . debug ( this . pattern , 'set' , set )
// Find the basename of the path by looking for the last non-empty segment
var filename
var i
for ( i = f . length - 1 ; i >= 0 ; i -- ) {
filename = f [ i ]
if ( filename ) break
}
for ( i = 0 ; i < set . length ; i ++ ) {
var pattern = set [ i ]
var file = f
if ( options . matchBase && pattern . length === 1 ) {
file = [ filename ]
}
var hit = this . matchOne ( file , pattern , partial )
if ( hit ) {
if ( options . flipNegate ) return true
return ! this . negate
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if ( options . flipNegate ) return false
return this . negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch . prototype . matchOne = function ( file , pattern , partial ) {
var options = this . options
this . debug ( 'matchOne' ,
{ 'this' : this , file : file , pattern : pattern } )
this . debug ( 'matchOne' , file . length , pattern . length )
for ( var fi = 0 ,
pi = 0 ,
fl = file . length ,
pl = pattern . length
; ( fi < fl ) && ( pi < pl )
; fi ++ , pi ++ ) {
this . debug ( 'matchOne loop' )
var p = pattern [ pi ]
var f = file [ fi ]
this . debug ( pattern , p , f )
// should be impossible.
// some invalid regexp stuff in the set.
if ( p === false ) return false
if ( p === GLOBSTAR ) {
this . debug ( 'GLOBSTAR' , [ pattern , p , f ] )
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
var pr = pi + 1
if ( pr === pl ) {
this . debug ( '** at the end' )
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for ( ; fi < fl ; fi ++ ) {
if ( file [ fi ] === '.' || file [ fi ] === '..' ||
( ! options . dot && file [ fi ] . charAt ( 0 ) === '.' ) ) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while ( fr < fl ) {
var swallowee = file [ fr ]
this . debug ( '\nglobstar while' , file , fr , pattern , pr , swallowee )
// XXX remove this slice. Just pass the start index.
if ( this . matchOne ( file . slice ( fr ) , pattern . slice ( pr ) , partial ) ) {
this . debug ( 'globstar found match!' , fr , fl , swallowee )
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if ( swallowee === '.' || swallowee === '..' ||
( ! options . dot && swallowee . charAt ( 0 ) === '.' ) ) {
this . debug ( 'dot detected!' , file , fr , pattern , pr )
break
}
// ** swallows a segment, and continue.
this . debug ( 'globstar swallow a segment, and continue' )
fr ++
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If we ran out of file with pattern still left, the rest could still
// match once more of the path is seen.
if ( partial ) {
// ran out of file
this . debug ( '\n>>> no match, partial?' , file , fr , pattern , pr )
if ( fr === fl ) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
if ( typeof p === 'string' ) {
if ( options . nocase ) {
hit = f . toLowerCase ( ) === p . toLowerCase ( )
} else {
hit = f === p
}
this . debug ( 'string match' , p , f , hit )
} else {
hit = f . match ( p )
this . debug ( 'pattern match' , p , f , hit )
}
if ( ! hit ) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match a
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if ( fi === fl && pi === pl ) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if ( fi === fl ) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else if ( pi === pl ) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
var emptyFileEnd = ( fi === fl - 1 ) && ( file [ fi ] === '' )
return emptyFileEnd
}
// should be unreachable.
throw new Error ( 'wtf?' )
}
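// Sketch (illustrative only, never invoked by this bundle): with partial=true,
// running out of file before running out of pattern still counts as a hit, so
// '/a/b' is accepted as a prefix of '/*/b/*/d' even though it is not a full match.
function examplePartialMatch () {
  var mm = new Minimatch('/*/b/*/d')
  return mm.match('/a/b', true) && !mm.match('/a/b', false) // true
}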
// replace stuff like \* with *
function globUnescape ( s ) {
return s . replace ( /\\(.)/g , '$1' )
}
function regExpEscape ( s ) {
return s . replace ( /[-[\]{}()*+?.,\\^$|#\s]/g , '\\$&' )
}
/***/ } ) ,
/***/ 114 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const http _client _1 = _ _webpack _require _ _ ( 539 ) ;
const auth _1 = _ _webpack _require _ _ ( 226 ) ;
const crypto = _ _importStar ( _ _webpack _require _ _ ( 417 ) ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const stream = _ _importStar ( _ _webpack _require _ _ ( 794 ) ) ;
const util = _ _importStar ( _ _webpack _require _ _ ( 669 ) ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 15 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 931 ) ;
const versionSalt = '1.0' ;
function isSuccessStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
return statusCode >= 200 && statusCode < 300 ;
}
function isServerErrorStatusCode ( statusCode ) {
if ( ! statusCode ) {
return true ;
}
return statusCode >= 500 ;
}
function isRetryableStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
const retryableStatusCodes = [
http _client _1 . HttpCodes . BadGateway ,
http _client _1 . HttpCodes . ServiceUnavailable ,
http _client _1 . HttpCodes . GatewayTimeout
] ;
return retryableStatusCodes . includes ( statusCode ) ;
}
function getCacheApiUrl ( resource ) {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = ( process . env [ 'ACTIONS_CACHE_URL' ] ||
process . env [ 'ACTIONS_RUNTIME_URL' ] ||
'' ) . replace ( 'pipelines' , 'artifactcache' ) ;
if ( ! baseUrl ) {
throw new Error ( 'Cache Service Url not found, unable to restore cache.' ) ;
}
const url = ` ${ baseUrl } _apis/artifactcache/ ${ resource } ` ;
core . debug ( ` Resource Url: ${ url } ` ) ;
return url ;
}
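// Usage sketch (hypothetical URL, never called by the action): with
// ACTIONS_CACHE_URL = 'https://artifactcache.example.com/org/', the 'caches'
// resource resolves to
// 'https://artifactcache.example.com/org/_apis/artifactcache/caches'.
function exampleCacheApiUrl() {
    return getCacheApiUrl('caches');
}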
function createAcceptHeader ( type , apiVersion ) {
return ` ${ type } ;api-version= ${ apiVersion } ` ;
}
function getRequestOptions ( ) {
const requestOptions = {
headers : {
Accept : createAcceptHeader ( 'application/json' , '6.0-preview.1' )
}
} ;
return requestOptions ;
}
function createHttpClient ( ) {
const token = process . env [ 'ACTIONS_RUNTIME_TOKEN' ] || '' ;
const bearerCredentialHandler = new auth _1 . BearerCredentialHandler ( token ) ;
return new http _client _1 . HttpClient ( 'actions/cache' , [ bearerCredentialHandler ] , getRequestOptions ( ) ) ;
}
function getCacheVersion ( paths , compressionMethod ) {
const components = paths . concat ( ! compressionMethod || compressionMethod === constants _1 . CompressionMethod . Gzip
? [ ]
: [ compressionMethod ] ) ;
// Add salt to cache version to support breaking changes in cache entry
components . push ( versionSalt ) ;
return crypto
. createHash ( 'sha256' )
. update ( components . join ( '|' ) )
. digest ( 'hex' ) ;
}
exports . getCacheVersion = getCacheVersion ;
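// Sketch (illustrative only; the Zstd member is assumed to exist on the
// CompressionMethod enum): identical paths hash to different versions for
// different compression methods, while gzip hashes the same as "no compression"
// so pre-existing gzip entries stay reachable.
function exampleCacheVersion() {
    const gzip = getCacheVersion(['node_modules'], constants_1.CompressionMethod.Gzip);
    const zstd = getCacheVersion(['node_modules'], constants_1.CompressionMethod.Zstd);
    return gzip !== zstd; // true
}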
function retry ( name , method , getStatusCode , maxAttempts = 2 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let response = undefined ;
let statusCode = undefined ;
let isRetryable = false ;
let errorMessage = '' ;
let attempt = 1 ;
while ( attempt <= maxAttempts ) {
try {
response = yield method ( ) ;
statusCode = getStatusCode ( response ) ;
if ( ! isServerErrorStatusCode ( statusCode ) ) {
return response ;
}
isRetryable = isRetryableStatusCode ( statusCode ) ;
errorMessage = ` Cache service responded with ${ statusCode } ` ;
}
catch ( error ) {
isRetryable = true ;
errorMessage = error . message ;
}
core . debug ( ` ${ name } - Attempt ${ attempt } of ${ maxAttempts } failed with error: ${ errorMessage } ` ) ;
if ( ! isRetryable ) {
core . debug ( ` ${ name } - Error is not retryable ` ) ;
break ;
}
attempt ++ ;
}
throw Error ( ` ${ name } failed: ${ errorMessage } ` ) ;
} ) ;
}
exports . retry = retry ;
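// Usage sketch (hypothetical resource, never called): retry wraps an HTTP call,
// retrying on 5xx responses and thrown errors, up to maxAttempts (default 2).
function exampleRetry(httpClient) {
    return retry('getCaches', () => httpClient.getJson(getCacheApiUrl('caches')), (response) => response.statusCode);
}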
function retryTypedResponse ( name , method , maxAttempts = 2 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield retry ( name , method , ( response ) => response . statusCode , maxAttempts ) ;
} ) ;
}
exports . retryTypedResponse = retryTypedResponse ;
function retryHttpClientResponse ( name , method , maxAttempts = 2 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield retry ( name , method , ( response ) => response . message . statusCode , maxAttempts ) ;
} ) ;
}
exports . retryHttpClientResponse = retryHttpClientResponse ;
function getCacheEntry ( keys , paths , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpClient = createHttpClient ( ) ;
const version = getCacheVersion ( paths , options === null || options === void 0 ? void 0 : options . compressionMethod ) ;
const resource = ` cache?keys= ${ encodeURIComponent ( keys . join ( ',' ) ) } &version= ${ version } ` ;
const response = yield retryTypedResponse ( 'getCacheEntry' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return httpClient . getJson ( getCacheApiUrl ( resource ) ) ; } ) ) ;
if ( response . statusCode === 204 ) {
return null ;
}
if ( ! isSuccessStatusCode ( response . statusCode ) ) {
throw new Error ( ` Cache service responded with ${ response . statusCode } ` ) ;
}
const cacheResult = response . result ;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult . archiveLocation ;
if ( ! cacheDownloadUrl ) {
throw new Error ( 'Cache not found.' ) ;
}
core . setSecret ( cacheDownloadUrl ) ;
core . debug ( ` Cache Result: ` ) ;
core . debug ( JSON . stringify ( cacheResult ) ) ;
return cacheResult ;
} ) ;
}
exports . getCacheEntry = getCacheEntry ;
function pipeResponseToStream ( response , output ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const pipeline = util . promisify ( stream . pipeline ) ;
yield pipeline ( response . message , output ) ;
} ) ;
}
function downloadCache ( archiveLocation , archivePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const writeStream = fs . createWriteStream ( archivePath ) ;
const httpClient = new http _client _1 . HttpClient ( 'actions/cache' ) ;
const downloadResponse = yield retryHttpClientResponse ( 'downloadCache' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return httpClient . get ( archiveLocation ) ; } ) ) ;
// Abort download if no traffic received over the socket.
downloadResponse . message . socket . setTimeout ( constants _1 . SocketTimeout , ( ) => {
downloadResponse . message . destroy ( ) ;
core . debug ( ` Aborting download, socket timed out after ${ constants _1 . SocketTimeout } ms ` ) ;
} ) ;
yield pipeResponseToStream ( downloadResponse , writeStream ) ;
// Validate download size.
const contentLengthHeader = downloadResponse . message . headers [ 'content-length' ] ;
if ( contentLengthHeader ) {
const expectedLength = parseInt ( contentLengthHeader ) ;
const actualLength = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
if ( actualLength !== expectedLength ) {
throw new Error ( ` Incomplete download. Expected file size: ${ expectedLength } , actual file size: ${ actualLength } ` ) ;
}
}
else {
core . debug ( 'Unable to validate download, no Content-Length header' ) ;
}
} ) ;
}
exports . downloadCache = downloadCache ;
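// Usage sketch (hypothetical paths, never called): download a cache archive to
// disk; a stalled socket aborts the transfer and a short file is rejected by the
// Content-Length check above.
function exampleDownload(archiveLocation) {
    return downloadCache(archiveLocation, '/tmp/cache.tgz');
}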
// Reserve Cache
function reserveCache ( key , paths , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpClient = createHttpClient ( ) ;
const version = getCacheVersion ( paths , options === null || options === void 0 ? void 0 : options . compressionMethod ) ;
const reserveCacheRequest = {
key ,
version
} ;
const response = yield retryTypedResponse ( 'reserveCache' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return httpClient . postJson ( getCacheApiUrl ( 'caches' ) , reserveCacheRequest ) ;
} ) ) ;
return ( _b = ( _a = response === null || response === void 0 ? void 0 : response . result ) === null || _a === void 0 ? void 0 : _a . cacheId ) !== null && _b !== void 0 ? _b : - 1 ;
} ) ;
}
exports . reserveCache = reserveCache ;
function getContentRange ( start , end ) {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return ` bytes ${ start } - ${ end } /* ` ;
}
function uploadChunk ( httpClient , resourceUrl , openStream , start , end ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
core . debug ( ` Uploading chunk of size ${ end - start + 1 } bytes at offset ${ start } with content range: ${ getContentRange ( start , end ) } ` ) ;
const additionalHeaders = {
'Content-Type' : 'application/octet-stream' ,
'Content-Range' : getContentRange ( start , end )
} ;
yield retryHttpClientResponse ( ` uploadChunk (start: ${ start } , end: ${ end } ) ` , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return httpClient . sendStream ( 'PATCH' , resourceUrl , openStream ( ) , additionalHeaders ) ;
} ) ) ;
} ) ;
}
function uploadFile ( httpClient , cacheId , archivePath , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Upload Chunks
const fileSize = fs . statSync ( archivePath ) . size ;
const resourceUrl = getCacheApiUrl ( ` caches/ ${ cacheId . toString ( ) } ` ) ;
const fd = fs . openSync ( archivePath , 'r' ) ;
const concurrency = ( _a = options === null || options === void 0 ? void 0 : options . uploadConcurrency ) !== null && _a !== void 0 ? _a : 4 ; // # of HTTP requests in parallel
const MAX _CHUNK _SIZE = ( _b = options === null || options === void 0 ? void 0 : options . uploadChunkSize ) !== null && _b !== void 0 ? _b : 32 * 1024 * 1024 ; // 32 MB Chunks
core . debug ( ` Concurrency: ${ concurrency } and Chunk Size: ${ MAX _CHUNK _SIZE } ` ) ;
const parallelUploads = [ ... new Array ( concurrency ) . keys ( ) ] ;
core . debug ( 'Awaiting all uploads' ) ;
let offset = 0 ;
try {
yield Promise . all ( parallelUploads . map ( ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
while ( offset < fileSize ) {
const chunkSize = Math . min ( fileSize - offset , MAX _CHUNK _SIZE ) ;
const start = offset ;
const end = offset + chunkSize - 1 ;
offset += MAX _CHUNK _SIZE ;
yield uploadChunk ( httpClient , resourceUrl , ( ) => fs
. createReadStream ( archivePath , {
fd ,
start ,
end ,
autoClose : false
} )
. on ( 'error' , error => {
throw new Error ( ` Cache upload failed because file read failed with ${ error . message } ` ) ;
} ) , start , end ) ;
}
} ) ) ) ;
}
finally {
fs . closeSync ( fd ) ;
}
return ;
} ) ;
}
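// Sketch (not called anywhere): the byte ranges the upload loop above will PATCH
// for a given file size with the default 32 MB chunk size. A 70 MB archive yields
// [0, 33554431], [33554432, 67108863], [67108864, 73400319].
function exampleChunkRanges(fileSize, chunkSize = 32 * 1024 * 1024) {
    const ranges = [];
    for (let offset = 0; offset < fileSize; offset += chunkSize) {
        ranges.push([offset, Math.min(offset + chunkSize, fileSize) - 1]);
    }
    return ranges;
}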
function commitCache ( httpClient , cacheId , filesize ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commitCacheRequest = { size : filesize } ;
return yield retryTypedResponse ( 'commitCache' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return httpClient . postJson ( getCacheApiUrl ( ` caches/ ${ cacheId . toString ( ) } ` ) , commitCacheRequest ) ;
} ) ) ;
} ) ;
}
function saveCache ( cacheId , archivePath , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpClient = createHttpClient ( ) ;
core . debug ( 'Upload cache' ) ;
yield uploadFile ( httpClient , cacheId , archivePath , options ) ;
// Commit Cache
core . debug ( 'Committing cache' ) ;
const cacheSize = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
const commitCacheResponse = yield commitCache ( httpClient , cacheId , cacheSize ) ;
if ( ! isSuccessStatusCode ( commitCacheResponse . statusCode ) ) {
throw new Error ( ` Cache service responded with ${ commitCacheResponse . statusCode } during commit cache. ` ) ;
}
core . info ( 'Cache saved successfully' ) ;
} ) ;
}
exports . saveCache = saveCache ;
//# sourceMappingURL=cacheHttpClient.js.map
/***/ } ) ,
/***/ 129 :
/***/ ( function ( module ) {
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 139 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = _ _webpack _require _ _ ( 417 ) ;
module . exports = function nodeRNG ( ) {
return crypto . randomBytes ( 16 ) ;
} ;
/***/ } ) ,
/***/ 141 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var net = _ _webpack _require _ _ ( 631 ) ;
var tls = _ _webpack _require _ _ ( 16 ) ;
var http = _ _webpack _require _ _ ( 605 ) ;
var https = _ _webpack _require _ _ ( 211 ) ;
var events = _ _webpack _require _ _ ( 614 ) ;
var assert = _ _webpack _require _ _ ( 357 ) ;
var util = _ _webpack _require _ _ ( 669 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
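// Usage sketch (hypothetical proxy, never called): tunnel HTTPS traffic through
// an HTTP proxy by passing the returned agent to https.request as `agent`.
function exampleProxyAgent() {
  var agent = httpsOverHttp({
    proxy: { host: 'proxy.example.com', port: 3128 }
  });
  return agent;
}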
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect requests to connect to the same origin server
// and reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
TunnelingAgent . prototype . createSocket = function createSocket ( options , cb ) {
var self = this ;
var placeholder = { } ;
self . sockets . push ( placeholder ) ;
var connectOptions = mergeOptions ( { } , self . proxyOptions , {
method : 'CONNECT' ,
path : options . host + ':' + options . port ,
agent : false ,
headers : {
host : options . host + ':' + options . port
}
} ) ;
if ( options . localAddress ) {
connectOptions . localAddress = options . localAddress ;
}
if ( connectOptions . proxyAuth ) {
connectOptions . headers = connectOptions . headers || { } ;
connectOptions . headers [ 'Proxy-Authorization' ] = 'Basic ' +
new Buffer ( connectOptions . proxyAuth ) . toString ( 'base64' ) ;
}
debug ( 'making CONNECT request' ) ;
var connectReq = self . request ( connectOptions ) ;
connectReq . useChunkedEncodingByDefault = false ; // for v0.6
connectReq . once ( 'response' , onResponse ) ; // for v0.6
connectReq . once ( 'upgrade' , onUpgrade ) ; // for v0.6
connectReq . once ( 'connect' , onConnect ) ; // for v0.7 or later
connectReq . once ( 'error' , onError ) ;
connectReq . end ( ) ;
function onResponse ( res ) {
// Very hacky. This is necessary to avoid http-parser leaks.
res . upgrade = true ;
}
function onUpgrade ( res , socket , head ) {
// Hacky.
process . nextTick ( function ( ) {
onConnect ( res , socket , head ) ;
} ) ;
}
function onConnect ( res , socket , head ) {
connectReq . removeAllListeners ( ) ;
socket . removeAllListeners ( ) ;
if ( res . statusCode !== 200 ) {
debug ( 'tunneling socket could not be established, statusCode=%d' ,
res . statusCode ) ;
socket . destroy ( ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'statusCode=' + res . statusCode ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
if ( head . length > 0 ) {
debug ( 'got illegal response body from proxy' ) ;
socket . destroy ( ) ;
var error = new Error ( 'got illegal response body from proxy' ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
debug ( 'tunneling connection has been established' ) ;
self . sockets [ self . sockets . indexOf ( placeholder ) ] = socket ;
return cb ( socket ) ;
}
function onError ( cause ) {
connectReq . removeAllListeners ( ) ;
debug ( 'tunneling socket could not be established, cause=%s\n' ,
cause . message , cause . stack ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'cause=' + cause . message ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
}
} ;
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is a dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
function toOptions ( host , port , localAddress ) {
if ( typeof host === 'string' ) { // since v0.10
return {
host : host ,
port : port ,
localAddress : localAddress
} ;
}
return host ; // for v0.11 or later
}
function mergeOptions ( target ) {
for ( var i = 1 , len = arguments . length ; i < len ; ++ i ) {
var overrides = arguments [ i ] ;
if ( typeof overrides === 'object' ) {
var keys = Object . keys ( overrides ) ;
for ( var j = 0 , keyLen = keys . length ; j < keyLen ; ++ j ) {
var k = keys [ j ] ;
if ( overrides [ k ] !== undefined ) {
target [ k ] = overrides [ k ] ;
}
}
}
}
return target ;
}
var debug ;
if ( process . env . NODE _DEBUG && /\btunnel\b/ . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments ) ;
if ( typeof args [ 0 ] === 'string' ) {
args [ 0 ] = 'TUNNEL: ' + args [ 0 ] ;
} else {
args . unshift ( 'TUNNEL:' ) ;
}
console . error . apply ( console , args ) ;
}
} else {
debug = function ( ) { } ;
}
exports . debug = debug ; // for test
/***/ } ) ,
/***/ 211 :
/***/ ( function ( module ) {
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 226 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
class BasicCredentialHandler {
constructor ( username , password ) {
this . username = username ;
this . password = password ;
}
prepareRequest ( options ) {
options . headers [ 'Authorization' ] =
'Basic ' +
Buffer . from ( this . username + ':' + this . password ) . toString ( 'base64' ) ;
}
// This handler cannot handle 401
canHandleAuthentication ( response ) {
return false ;
}
handleAuthentication ( httpClient , requestInfo , objs ) {
return null ;
}
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
class BearerCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
options . headers [ 'Authorization' ] = 'Bearer ' + this . token ;
}
// This handler cannot handle 401
canHandleAuthentication ( response ) {
return false ;
}
handleAuthentication ( httpClient , requestInfo , objs ) {
return null ;
}
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
class PersonalAccessTokenCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
options . headers [ 'Authorization' ] =
'Basic ' + Buffer . from ( 'PAT:' + this . token ) . toString ( 'base64' ) ;
}
// This handler cannot handle 401
canHandleAuthentication ( response ) {
return false ;
}
handleAuthentication ( httpClient , requestInfo , objs ) {
return null ;
}
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
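// Sketch (never called): each handler only injects an Authorization header into
// the outgoing request options before the HttpClient sends the request.
function exampleBearerHeader(token) {
    const options = { headers: {} };
    new BearerCredentialHandler(token).prepareRequest(options);
    return options.headers['Authorization']; // 'Bearer <token>'
}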
/***/ } ) ,
/***/ 280 :
/***/ ( function ( module , exports ) {
exports = module . exports = SemVer
var debug
/* istanbul ignore next */
if ( typeof process === 'object' &&
process . env &&
process . env . NODE _DEBUG &&
/\bsemver\b/i . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments , 0 )
args . unshift ( 'SEMVER' )
console . log . apply ( console , args )
}
} else {
debug = function ( ) { }
}
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
exports . SEMVER _SPEC _VERSION = '2.0.0'
var MAX _LENGTH = 256
var MAX _SAFE _INTEGER = Number . MAX _SAFE _INTEGER ||
/* istanbul ignore next */ 9007199254740991
// Max safe segment length for coercion.
var MAX _SAFE _COMPONENT _LENGTH = 16
// The actual regexps go on exports.re
var re = exports . re = [ ]
var src = exports . src = [ ]
var t = exports . tokens = { }
var R = 0
function tok ( n ) {
t [ n ] = R ++
}
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.
// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.
tok ( 'NUMERICIDENTIFIER' )
src [ t . NUMERICIDENTIFIER ] = '0|[1-9]\\d*'
tok ( 'NUMERICIDENTIFIERLOOSE' )
src [ t . NUMERICIDENTIFIERLOOSE ] = '[0-9]+'
// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.
tok ( 'NONNUMERICIDENTIFIER' )
src [ t . NONNUMERICIDENTIFIER ] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
// ## Main Version
// Three dot-separated numeric identifiers.
tok ( 'MAINVERSION' )
src [ t . MAINVERSION ] = '(' + src [ t . NUMERICIDENTIFIER ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIER ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIER ] + ')'
tok ( 'MAINVERSIONLOOSE' )
src [ t . MAINVERSIONLOOSE ] = '(' + src [ t . NUMERICIDENTIFIERLOOSE ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIERLOOSE ] + ')\\.' +
'(' + src [ t . NUMERICIDENTIFIERLOOSE ] + ')'
// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.
tok ( 'PRERELEASEIDENTIFIER' )
src [ t . PRERELEASEIDENTIFIER ] = '(?:' + src [ t . NUMERICIDENTIFIER ] +
'|' + src [ t . NONNUMERICIDENTIFIER ] + ')'
tok ( 'PRERELEASEIDENTIFIERLOOSE' )
src [ t . PRERELEASEIDENTIFIERLOOSE ] = '(?:' + src [ t . NUMERICIDENTIFIERLOOSE ] +
'|' + src [ t . NONNUMERICIDENTIFIER ] + ')'
// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.
tok ( 'PRERELEASE' )
src [ t . PRERELEASE ] = '(?:-(' + src [ t . PRERELEASEIDENTIFIER ] +
'(?:\\.' + src [ t . PRERELEASEIDENTIFIER ] + ')*))'
tok ( 'PRERELEASELOOSE' )
src [ t . PRERELEASELOOSE ] = '(?:-?(' + src [ t . PRERELEASEIDENTIFIERLOOSE ] +
'(?:\\.' + src [ t . PRERELEASEIDENTIFIERLOOSE ] + ')*))'
// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.
tok ( 'BUILDIDENTIFIER' )
src [ t . BUILDIDENTIFIER ] = '[0-9A-Za-z-]+'
// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.
tok ( 'BUILD' )
src [ t . BUILD ] = '(?:\\+(' + src [ t . BUILDIDENTIFIER ] +
'(?:\\.' + src [ t . BUILDIDENTIFIER ] + ')*))'
// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.
// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.
tok ( 'FULL' )
tok ( 'FULLPLAIN' )
src [ t . FULLPLAIN ] = 'v?' + src [ t . MAINVERSION ] +
src [ t . PRERELEASE ] + '?' +
src [ t . BUILD ] + '?'
src [ t . FULL ] = '^' + src [ t . FULLPLAIN ] + '$'
// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
tok ( 'LOOSEPLAIN' )
src [ t . LOOSEPLAIN ] = '[v=\\s]*' + src [ t . MAINVERSIONLOOSE ] +
src [ t . PRERELEASELOOSE ] + '?' +
src [ t . BUILD ] + '?'
tok ( 'LOOSE' )
src [ t . LOOSE ] = '^' + src [ t . LOOSEPLAIN ] + '$'
tok ( 'GTLT' )
src [ t . GTLT ] = '((?:<|>)?=?)'
// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifier, meaning "any version"
// Only the first item is strictly required.
tok ( 'XRANGEIDENTIFIERLOOSE' )
src [ t . XRANGEIDENTIFIERLOOSE ] = src [ t . NUMERICIDENTIFIERLOOSE ] + '|x|X|\\*'
tok ( 'XRANGEIDENTIFIER' )
src [ t . XRANGEIDENTIFIER ] = src [ t . NUMERICIDENTIFIER ] + '|x|X|\\*'
tok ( 'XRANGEPLAIN' )
src [ t . XRANGEPLAIN ] = '[v=\\s]*(' + src [ t . XRANGEIDENTIFIER ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIER ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIER ] + ')' +
'(?:' + src [ t . PRERELEASE ] + ')?' +
src [ t . BUILD ] + '?' +
')?)?'
tok ( 'XRANGEPLAINLOOSE' )
src [ t . XRANGEPLAINLOOSE ] = '[v=\\s]*(' + src [ t . XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:\\.(' + src [ t . XRANGEIDENTIFIERLOOSE ] + ')' +
'(?:' + src [ t . PRERELEASELOOSE ] + ')?' +
src [ t . BUILD ] + '?' +
')?)?'
tok ( 'XRANGE' )
src [ t . XRANGE ] = '^' + src [ t . GTLT ] + '\\s*' + src [ t . XRANGEPLAIN ] + '$'
tok ( 'XRANGELOOSE' )
src [ t . XRANGELOOSE ] = '^' + src [ t . GTLT ] + '\\s*' + src [ t . XRANGEPLAINLOOSE ] + '$'
// Coercion.
// Extract anything that could conceivably be a part of a valid semver
tok ( 'COERCE' )
src [ t . COERCE ] = '(^|[^\\d])' +
'(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '})' +
'(?:\\.(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '}))?' +
'(?:\\.(\\d{1,' + MAX _SAFE _COMPONENT _LENGTH + '}))?' +
'(?:$|[^\\d])'
tok ( 'COERCERTL' )
re [ t . COERCERTL ] = new RegExp ( src [ t . COERCE ] , 'g' )
// Tilde ranges.
// Meaning is "reasonably at or greater than"
tok ( 'LONETILDE' )
src [ t . LONETILDE ] = '(?:~>?)'
tok ( 'TILDETRIM' )
src [ t . TILDETRIM ] = '(\\s*)' + src [ t . LONETILDE ] + '\\s+'
re [ t . TILDETRIM ] = new RegExp ( src [ t . TILDETRIM ] , 'g' )
var tildeTrimReplace = '$1~'
tok ( 'TILDE' )
src [ t . TILDE ] = '^' + src [ t . LONETILDE ] + src [ t . XRANGEPLAIN ] + '$'
tok ( 'TILDELOOSE' )
src [ t . TILDELOOSE ] = '^' + src [ t . LONETILDE ] + src [ t . XRANGEPLAINLOOSE ] + '$'
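// Sketch (never called): a tilde range permits patch-level changes only, so
// '~1.2.3' behaves like '>=1.2.3 <1.3.0'.
function exampleTildeRange () {
  return [
    satisfies('1.2.9', '~1.2.3'), // true
    satisfies('1.3.0', '~1.2.3')  // false
  ]
}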
// Caret ranges.
// Meaning is "at least and backwards compatible with"
tok ( 'LONECARET' )
src [ t . LONECARET ] = '(?:\\^)'
tok ( 'CARETTRIM' )
src [ t . CARETTRIM ] = '(\\s*)' + src [ t . LONECARET ] + '\\s+'
re [ t . CARETTRIM ] = new RegExp ( src [ t . CARETTRIM ] , 'g' )
var caretTrimReplace = '$1^'
tok ( 'CARET' )
src [ t . CARET ] = '^' + src [ t . LONECARET ] + src [ t . XRANGEPLAIN ] + '$'
tok ( 'CARETLOOSE' )
src [ t . CARETLOOSE ] = '^' + src [ t . LONECARET ] + src [ t . XRANGEPLAINLOOSE ] + '$'
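// Sketch (never called): a caret range keeps the left-most non-zero component
// fixed, so '^1.2.3' behaves like '>=1.2.3 <2.0.0' and '^0.2.3' like '>=0.2.3 <0.3.0'.
function exampleCaretRange () {
  return [
    satisfies('1.9.9', '^1.2.3'), // true
    satisfies('2.0.0', '^1.2.3'), // false
    satisfies('0.3.0', '^0.2.3')  // false
  ]
}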
// A simple gt/lt/eq thing, or just "" to indicate "any version"
tok ( 'COMPARATORLOOSE' )
src [ t . COMPARATORLOOSE ] = '^' + src [ t . GTLT ] + '\\s*(' + src [ t . LOOSEPLAIN ] + ')$|^$'
tok ( 'COMPARATOR' )
src [ t . COMPARATOR ] = '^' + src [ t . GTLT ] + '\\s*(' + src [ t . FULLPLAIN ] + ')$|^$'
// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
tok ( 'COMPARATORTRIM' )
src [ t . COMPARATORTRIM ] = '(\\s*)' + src [ t . GTLT ] +
'\\s*(' + src [ t . LOOSEPLAIN ] + '|' + src [ t . XRANGEPLAIN ] + ')'
// this one has to use the /g flag
re [ t . COMPARATORTRIM ] = new RegExp ( src [ t . COMPARATORTRIM ] , 'g' )
var comparatorTrimReplace = '$1$2$3'
// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
tok ( 'HYPHENRANGE' )
src [ t . HYPHENRANGE ] = '^\\s*(' + src [ t . XRANGEPLAIN ] + ')' +
'\\s+-\\s+' +
'(' + src [ t . XRANGEPLAIN ] + ')' +
'\\s*$'
tok ( 'HYPHENRANGELOOSE' )
src [ t . HYPHENRANGELOOSE ] = '^\\s*(' + src [ t . XRANGEPLAINLOOSE ] + ')' +
'\\s+-\\s+' +
'(' + src [ t . XRANGEPLAINLOOSE ] + ')' +
'\\s*$'
// Star ranges basically just allow anything at all.
tok ( 'STAR' )
src [ t . STAR ] = '(<|>)?=?\\s*\\*'
// Compile to actual regexp objects.
// All are flag-free, unless they were created above with a flag.
for ( var i = 0 ; i < R ; i ++ ) {
debug ( i , src [ i ] )
if ( ! re [ i ] ) {
re [ i ] = new RegExp ( src [ i ] )
}
}
exports . parse = parse
function parse ( version , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( version instanceof SemVer ) {
return version
}
if ( typeof version !== 'string' ) {
return null
}
if ( version . length > MAX _LENGTH ) {
return null
}
var r = options . loose ? re [ t . LOOSE ] : re [ t . FULL ]
if ( ! r . test ( version ) ) {
return null
}
try {
return new SemVer ( version , options )
} catch ( er ) {
return null
}
}
exports . valid = valid
function valid ( version , options ) {
var v = parse ( version , options )
return v ? v . version : null
}
exports . clean = clean
function clean ( version , options ) {
var s = parse ( version . trim ( ) . replace ( /^[=v]+/ , '' ) , options )
return s ? s . version : null
}
exports . SemVer = SemVer
function SemVer ( version , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( version instanceof SemVer ) {
if ( version . loose === options . loose ) {
return version
} else {
version = version . version
}
} else if ( typeof version !== 'string' ) {
throw new TypeError ( 'Invalid Version: ' + version )
}
if ( version . length > MAX _LENGTH ) {
throw new TypeError ( 'version is longer than ' + MAX _LENGTH + ' characters' )
}
if ( ! ( this instanceof SemVer ) ) {
return new SemVer ( version , options )
}
debug ( 'SemVer' , version , options )
this . options = options
this . loose = ! ! options . loose
var m = version . trim ( ) . match ( options . loose ? re [ t . LOOSE ] : re [ t . FULL ] )
if ( ! m ) {
throw new TypeError ( 'Invalid Version: ' + version )
}
this . raw = version
// these are actually numbers
this . major = + m [ 1 ]
this . minor = + m [ 2 ]
this . patch = + m [ 3 ]
if ( this . major > MAX _SAFE _INTEGER || this . major < 0 ) {
throw new TypeError ( 'Invalid major version' )
}
if ( this . minor > MAX _SAFE _INTEGER || this . minor < 0 ) {
throw new TypeError ( 'Invalid minor version' )
}
if ( this . patch > MAX _SAFE _INTEGER || this . patch < 0 ) {
throw new TypeError ( 'Invalid patch version' )
}
// numberify any prerelease numeric ids
if ( ! m [ 4 ] ) {
this . prerelease = [ ]
} else {
this . prerelease = m [ 4 ] . split ( '.' ) . map ( function ( id ) {
if ( /^[0-9]+$/ . test ( id ) ) {
var num = + id
if ( num >= 0 && num < MAX _SAFE _INTEGER ) {
return num
}
}
return id
} )
}
this . build = m [ 5 ] ? m [ 5 ] . split ( '.' ) : [ ]
this . format ( )
}
SemVer . prototype . format = function ( ) {
this . version = this . major + '.' + this . minor + '.' + this . patch
if ( this . prerelease . length ) {
this . version += '-' + this . prerelease . join ( '.' )
}
return this . version
}
SemVer . prototype . toString = function ( ) {
return this . version
}
SemVer . prototype . compare = function ( other ) {
debug ( 'SemVer.compare' , this . version , this . options , other )
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
return this . compareMain ( other ) || this . comparePre ( other )
}
SemVer . prototype . compareMain = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
return compareIdentifiers ( this . major , other . major ) ||
compareIdentifiers ( this . minor , other . minor ) ||
compareIdentifiers ( this . patch , other . patch )
}
SemVer . prototype . comparePre = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
// NOT having a prerelease is > having one
if ( this . prerelease . length && ! other . prerelease . length ) {
return - 1
} else if ( ! this . prerelease . length && other . prerelease . length ) {
return 1
} else if ( ! this . prerelease . length && ! other . prerelease . length ) {
return 0
}
var i = 0
do {
var a = this . prerelease [ i ]
var b = other . prerelease [ i ]
debug ( 'prerelease compare' , i , a , b )
if ( a === undefined && b === undefined ) {
return 0
} else if ( b === undefined ) {
return 1
} else if ( a === undefined ) {
return - 1
} else if ( a === b ) {
continue
} else {
return compareIdentifiers ( a , b )
}
} while ( ++ i )
}
SemVer . prototype . compareBuild = function ( other ) {
if ( ! ( other instanceof SemVer ) ) {
other = new SemVer ( other , this . options )
}
var i = 0
do {
var a = this . build [ i ]
var b = other . build [ i ]
debug ( 'prerelease compare' , i , a , b )
if ( a === undefined && b === undefined ) {
return 0
} else if ( b === undefined ) {
return 1
} else if ( a === undefined ) {
return - 1
} else if ( a === b ) {
continue
} else {
return compareIdentifiers ( a , b )
}
} while ( ++ i )
}
// preminor will bump the version up to the next minor release, and immediately
// down to pre-release. premajor and prepatch work the same way.
SemVer . prototype . inc = function ( release , identifier ) {
switch ( release ) {
case 'premajor' :
this . prerelease . length = 0
this . patch = 0
this . minor = 0
this . major ++
this . inc ( 'pre' , identifier )
break
case 'preminor' :
this . prerelease . length = 0
this . patch = 0
this . minor ++
this . inc ( 'pre' , identifier )
break
case 'prepatch' :
// If this is already a prerelease, bump to the next patch version and
// drop any prerelease identifiers that might already exist, since they
// are not relevant at this point.
this . prerelease . length = 0
this . inc ( 'patch' , identifier )
this . inc ( 'pre' , identifier )
break
// If the input is a non-prerelease version, this acts the same as
// prepatch.
case 'prerelease' :
if ( this . prerelease . length === 0 ) {
this . inc ( 'patch' , identifier )
}
this . inc ( 'pre' , identifier )
break
case 'major' :
// If this is a pre-major version, bump up to the same major version.
// Otherwise increment major.
// 1.0.0-5 bumps to 1.0.0
// 1.1.0 bumps to 2.0.0
if ( this . minor !== 0 ||
this . patch !== 0 ||
this . prerelease . length === 0 ) {
this . major ++
}
this . minor = 0
this . patch = 0
this . prerelease = [ ]
break
case 'minor' :
// If this is a pre-minor version, bump up to the same minor version.
// Otherwise increment minor.
// 1.2.0-5 bumps to 1.2.0
// 1.2.1 bumps to 1.3.0
if ( this . patch !== 0 || this . prerelease . length === 0 ) {
this . minor ++
}
this . patch = 0
this . prerelease = [ ]
break
case 'patch' :
// If this is not a pre-release version, it will increment the patch.
// If it is a pre-release it will bump up to the same patch version.
// 1.2.0-5 patches to 1.2.0
// 1.2.0 patches to 1.2.1
if ( this . prerelease . length === 0 ) {
this . patch ++
}
this . prerelease = [ ]
break
// This probably shouldn't be used publicly.
// 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
case 'pre' :
if ( this . prerelease . length === 0 ) {
this . prerelease = [ 0 ]
} else {
var i = this . prerelease . length
while ( -- i >= 0 ) {
if ( typeof this . prerelease [ i ] === 'number' ) {
this . prerelease [ i ] ++
i = - 2
}
}
if ( i === - 1 ) {
// didn't increment anything
this . prerelease . push ( 0 )
}
}
if ( identifier ) {
// 1.2.0-beta.1 bumps to 1.2.0-beta.2,
// 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
if ( this . prerelease [ 0 ] === identifier ) {
if ( isNaN ( this . prerelease [ 1 ] ) ) {
this . prerelease = [ identifier , 0 ]
}
} else {
this . prerelease = [ identifier , 0 ]
}
}
break
default :
throw new Error ( 'invalid increment argument: ' + release )
}
this . format ( )
this . raw = this . version
return this
}
exports . inc = inc
function inc ( version , release , loose , identifier ) {
if ( typeof ( loose ) === 'string' ) {
identifier = loose
loose = undefined
}
try {
return new SemVer ( version , loose ) . inc ( release , identifier ) . version
} catch ( er ) {
return null
}
}
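// Examples (illustrative, never called): how the release types above move a version.
function exampleInc () {
  return [
    inc('1.2.3', 'preminor'),         // '1.3.0-0'
    inc('1.2.3', 'premajor', 'beta'), // '2.0.0-beta.0'
    inc('1.2.0-5', 'patch')           // '1.2.0'
  ]
}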
exports . diff = diff
function diff ( version1 , version2 ) {
if ( eq ( version1 , version2 ) ) {
return null
} else {
var v1 = parse ( version1 )
var v2 = parse ( version2 )
var prefix = ''
if ( v1 . prerelease . length || v2 . prerelease . length ) {
prefix = 'pre'
var defaultResult = 'prerelease'
}
for ( var key in v1 ) {
if ( key === 'major' || key === 'minor' || key === 'patch' ) {
if ( v1 [ key ] !== v2 [ key ] ) {
return prefix + key
}
}
}
return defaultResult // may be undefined
}
}
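// Examples (illustrative, never called): diff names the most significant part
// that changed, or returns null for identical versions.
function exampleDiff () {
  return [
    diff('1.2.3', '1.2.4'), // 'patch'
    diff('1.2.3', '2.0.0'), // 'major'
    diff('1.2.3', '1.2.3')  // null
  ]
}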
exports . compareIdentifiers = compareIdentifiers
var numeric = /^[0-9]+$/
function compareIdentifiers ( a , b ) {
var anum = numeric . test ( a )
var bnum = numeric . test ( b )
if ( anum && bnum ) {
a = + a
b = + b
}
return a === b ? 0
: ( anum && ! bnum ) ? - 1
: ( bnum && ! anum ) ? 1
: a < b ? - 1
: 1
}
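// Examples (illustrative, never called): numeric identifiers compare as numbers
// and always sort before non-numeric ones, per the SemVer spec.
function exampleCompareIdentifiers () {
  return [
    compareIdentifiers('2', '10'),      // -1
    compareIdentifiers('alpha', '2'),   // 1
    compareIdentifiers('alpha', 'beta') // -1
  ]
}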
exports . rcompareIdentifiers = rcompareIdentifiers
function rcompareIdentifiers ( a , b ) {
return compareIdentifiers ( b , a )
}
exports . major = major
function major ( a , loose ) {
return new SemVer ( a , loose ) . major
}
exports . minor = minor
function minor ( a , loose ) {
return new SemVer ( a , loose ) . minor
}
exports . patch = patch
function patch ( a , loose ) {
return new SemVer ( a , loose ) . patch
}
exports . compare = compare
function compare ( a , b , loose ) {
return new SemVer ( a , loose ) . compare ( new SemVer ( b , loose ) )
}
exports . compareLoose = compareLoose
function compareLoose ( a , b ) {
return compare ( a , b , true )
}
exports . compareBuild = compareBuild
function compareBuild ( a , b , loose ) {
var versionA = new SemVer ( a , loose )
var versionB = new SemVer ( b , loose )
return versionA . compare ( versionB ) || versionA . compareBuild ( versionB )
}
exports . rcompare = rcompare
function rcompare ( a , b , loose ) {
return compare ( b , a , loose )
}
exports . sort = sort
function sort ( list , loose ) {
return list . sort ( function ( a , b ) {
return exports . compareBuild ( a , b , loose )
} )
}
exports . rsort = rsort
function rsort ( list , loose ) {
return list . sort ( function ( a , b ) {
return exports . compareBuild ( b , a , loose )
} )
}
exports . gt = gt
function gt ( a , b , loose ) {
return compare ( a , b , loose ) > 0
}
exports . lt = lt
function lt ( a , b , loose ) {
return compare ( a , b , loose ) < 0
}
exports . eq = eq
function eq ( a , b , loose ) {
return compare ( a , b , loose ) === 0
}
exports . neq = neq
function neq ( a , b , loose ) {
return compare ( a , b , loose ) !== 0
}
exports . gte = gte
function gte ( a , b , loose ) {
return compare ( a , b , loose ) >= 0
}
exports . lte = lte
function lte ( a , b , loose ) {
return compare ( a , b , loose ) <= 0
}
exports . cmp = cmp
function cmp ( a , op , b , loose ) {
switch ( op ) {
case '===' :
if ( typeof a === 'object' )
a = a . version
if ( typeof b === 'object' )
b = b . version
return a === b
case '!==' :
if ( typeof a === 'object' )
a = a . version
if ( typeof b === 'object' )
b = b . version
return a !== b
case '' :
case '=' :
case '==' :
return eq ( a , b , loose )
case '!=' :
return neq ( a , b , loose )
case '>' :
return gt ( a , b , loose )
case '>=' :
return gte ( a , b , loose )
case '<' :
return lt ( a , b , loose )
case '<=' :
return lte ( a , b , loose )
default :
throw new TypeError ( 'Invalid operator: ' + op )
}
}
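// Examples (illustrative, never called): cmp dispatches on the operator string;
// '===' and '!==' compare raw version strings, the rest use SemVer ordering.
function exampleCmp () {
  return [
    cmp('1.2.3', '>=', '1.2.0'),     // true
    cmp('1.2.3', '===', '1.2.3'),    // true
    cmp('1.2.3', '<', '1.2.3-alpha') // false: a release sorts above its prereleases
  ]
}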
exports . Comparator = Comparator
function Comparator ( comp , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( comp instanceof Comparator ) {
if ( comp . loose === ! ! options . loose ) {
return comp
} else {
comp = comp . value
2020-02-13 18:38:56 +01:00
}
2020-05-19 19:46:58 +02:00
}
if ( ! ( this instanceof Comparator ) ) {
return new Comparator ( comp , options )
}
debug ( 'comparator' , comp , options )
this . options = options
this . loose = ! ! options . loose
this . parse ( comp )
if ( this . semver === ANY ) {
this . value = ''
} else {
this . value = this . operator + this . semver . version
}
debug ( 'comp' , this )
}
var ANY = { }
Comparator . prototype . parse = function ( comp ) {
var r = this . options . loose ? re [ t . COMPARATORLOOSE ] : re [ t . COMPARATOR ]
var m = comp . match ( r )
if ( ! m ) {
throw new TypeError ( 'Invalid comparator: ' + comp )
}
this . operator = m [ 1 ] !== undefined ? m [ 1 ] : ''
if ( this . operator === '=' ) {
this . operator = ''
}
// if it literally is just '>' or '' then allow anything.
if ( ! m [ 2 ] ) {
this . semver = ANY
} else {
this . semver = new SemVer ( m [ 2 ] , this . options . loose )
}
}
Comparator . prototype . toString = function ( ) {
return this . value
}
Comparator . prototype . test = function ( version ) {
debug ( 'Comparator.test' , version , this . options . loose )
if ( this . semver === ANY || version === ANY ) {
return true
}
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
return cmp ( version , this . operator , this . semver , this . options )
}
Comparator . prototype . intersects = function ( comp , options ) {
if ( ! ( comp instanceof Comparator ) ) {
throw new TypeError ( 'a Comparator is required' )
}
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
var rangeTmp
if ( this . operator === '' ) {
if ( this . value === '' ) {
return true
}
rangeTmp = new Range ( comp . value , options )
return satisfies ( this . value , rangeTmp , options )
} else if ( comp . operator === '' ) {
if ( comp . value === '' ) {
return true
}
rangeTmp = new Range ( this . value , options )
return satisfies ( comp . semver , rangeTmp , options )
}
var sameDirectionIncreasing =
( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '>=' || comp . operator === '>' )
var sameDirectionDecreasing =
( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '<=' || comp . operator === '<' )
var sameSemVer = this . semver . version === comp . semver . version
var differentDirectionsInclusive =
( this . operator === '>=' || this . operator === '<=' ) &&
( comp . operator === '>=' || comp . operator === '<=' )
var oppositeDirectionsLessThan =
cmp ( this . semver , '<' , comp . semver , options ) &&
( ( this . operator === '>=' || this . operator === '>' ) &&
( comp . operator === '<=' || comp . operator === '<' ) )
var oppositeDirectionsGreaterThan =
cmp ( this . semver , '>' , comp . semver , options ) &&
( ( this . operator === '<=' || this . operator === '<' ) &&
( comp . operator === '>=' || comp . operator === '>' ) )
return sameDirectionIncreasing || sameDirectionDecreasing ||
( sameSemVer && differentDirectionsInclusive ) ||
oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
}
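// Illustrative sketch, not part of the module: two comparators intersect when at
// least one version could satisfy both of them.
//   new Comparator('>=1.2.0').intersects(new Comparator('<2.0.0'))  // true  (e.g. 1.5.0)
//   new Comparator('<1.0.0').intersects(new Comparator('>=1.5.0'))  // false (no overlap)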
exports . Range = Range
function Range ( range , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( range instanceof Range ) {
if ( range . loose === ! ! options . loose &&
range . includePrerelease === ! ! options . includePrerelease ) {
return range
} else {
return new Range ( range . raw , options )
}
}
if ( range instanceof Comparator ) {
return new Range ( range . value , options )
}
if ( ! ( this instanceof Range ) ) {
return new Range ( range , options )
}
this . options = options
this . loose = ! ! options . loose
this . includePrerelease = ! ! options . includePrerelease
// First, split based on boolean or ||
this . raw = range
this . set = range . split ( /\s*\|\|\s*/ ) . map ( function ( range ) {
return this . parseRange ( range . trim ( ) )
} , this ) . filter ( function ( c ) {
// throw out any that are not relevant for whatever reason
return c . length
} )
if ( ! this . set . length ) {
throw new TypeError ( 'Invalid SemVer Range: ' + range )
}
this . format ( )
}
Range . prototype . format = function ( ) {
this . range = this . set . map ( function ( comps ) {
return comps . join ( ' ' ) . trim ( )
} ) . join ( '||' ) . trim ( )
return this . range
}
Range . prototype . toString = function ( ) {
return this . range
}
Range . prototype . parseRange = function ( range ) {
var loose = this . options . loose
range = range . trim ( )
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re [ t . HYPHENRANGELOOSE ] : re [ t . HYPHENRANGE ]
range = range . replace ( hr , hyphenReplace )
debug ( 'hyphen replace' , range )
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range . replace ( re [ t . COMPARATORTRIM ] , comparatorTrimReplace )
debug ( 'comparator trim' , range , re [ t . COMPARATORTRIM ] )
// `~ 1.2.3` => `~1.2.3`
range = range . replace ( re [ t . TILDETRIM ] , tildeTrimReplace )
// `^ 1.2.3` => `^1.2.3`
range = range . replace ( re [ t . CARETTRIM ] , caretTrimReplace )
// normalize spaces
range = range . split ( /\s+/ ) . join ( ' ' )
// At this point, the range is completely trimmed and
// ready to be split into comparators.
var compRe = loose ? re [ t . COMPARATORLOOSE ] : re [ t . COMPARATOR ]
var set = range . split ( ' ' ) . map ( function ( comp ) {
return parseComparator ( comp , this . options )
} , this ) . join ( ' ' ) . split ( /\s+/ )
if ( this . options . loose ) {
// in loose mode, throw out any that are not valid comparators
set = set . filter ( function ( comp ) {
return ! ! comp . match ( compRe )
} )
}
set = set . map ( function ( comp ) {
return new Comparator ( comp , this . options )
} , this )
return set
}
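// Illustrative sketch, not part of the module: parseRange() rewrites the shorthand
// syntaxes (hyphen, tilde, caret, x-ranges, stars) into plain comparators, e.g.
//   new Range('1.2.3 - 2.3.4 || ~3.0.1').range  // '>=1.2.3 <=2.3.4||>=3.0.1 <3.1.0'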
Range . prototype . intersects = function ( range , options ) {
if ( ! ( range instanceof Range ) ) {
throw new TypeError ( 'a Range is required' )
}
return this . set . some ( function ( thisComparators ) {
return (
isSatisfiable ( thisComparators , options ) &&
range . set . some ( function ( rangeComparators ) {
return (
isSatisfiable ( rangeComparators , options ) &&
thisComparators . every ( function ( thisComparator ) {
return rangeComparators . every ( function ( rangeComparator ) {
return thisComparator . intersects ( rangeComparator , options )
} )
} )
)
} )
)
} )
}
// take a set of comparators and determine whether there
// exists a version which can satisfy it
function isSatisfiable ( comparators , options ) {
var result = true
var remainingComparators = comparators . slice ( )
var testComparator = remainingComparators . pop ( )
while ( result && remainingComparators . length ) {
result = remainingComparators . every ( function ( otherComparator ) {
return testComparator . intersects ( otherComparator , options )
} )
testComparator = remainingComparators . pop ( )
}
return result
}
// Mostly just for testing and legacy API reasons
exports . toComparators = toComparators
function toComparators ( range , options ) {
return new Range ( range , options ) . set . map ( function ( comp ) {
return comp . map ( function ( c ) {
return c . value
} ) . join ( ' ' ) . trim ( ) . split ( ' ' )
} )
}
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
function parseComparator ( comp , options ) {
debug ( 'comp' , comp , options )
comp = replaceCarets ( comp , options )
debug ( 'caret' , comp )
comp = replaceTildes ( comp , options )
debug ( 'tildes' , comp )
comp = replaceXRanges ( comp , options )
debug ( 'xrange' , comp )
comp = replaceStars ( comp , options )
debug ( 'stars' , comp )
return comp
}
function isX ( id ) {
return ! id || id . toLowerCase ( ) === 'x' || id === '*'
}
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes ( comp , options ) {
return comp . trim ( ) . split ( /\s+/ ) . map ( function ( comp ) {
return replaceTilde ( comp , options )
} ) . join ( ' ' )
}
function replaceTilde ( comp , options ) {
var r = options . loose ? re [ t . TILDELOOSE ] : re [ t . TILDE ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'tilde' , comp , _ , M , m , p , pr )
var ret
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
// ~1.2 == >=1.2.0 <1.3.0
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else if ( pr ) {
debug ( 'replaceTilde pr' , pr )
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
} else {
// ~1.2.3 == >=1.2.3 <1.3.0
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
debug ( 'tilde return' , ret )
return ret
} )
}
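// Illustrative sketch, not part of the module: the tilde rules above desugar to
//   new Range('~1.2.3').range  // '>=1.2.3 <1.3.0'
//   new Range('~1.2').range    // '>=1.2.0 <1.3.0'
//   new Range('~1').range      // '>=1.0.0 <2.0.0'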
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets ( comp , options ) {
return comp . trim ( ) . split ( /\s+/ ) . map ( function ( comp ) {
return replaceCaret ( comp , options )
} ) . join ( ' ' )
}
function replaceCaret ( comp , options ) {
debug ( 'caret' , comp , options )
var r = options . loose ? re [ t . CARETLOOSE ] : re [ t . CARET ]
return comp . replace ( r , function ( _ , M , m , p , pr ) {
debug ( 'caret' , comp , _ , M , m , p , pr )
var ret
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = '>=' + M + '.0.0 <' + ( + M + 1 ) + '.0.0'
} else if ( isX ( p ) ) {
if ( M === '0' ) {
ret = '>=' + M + '.' + m + '.0 <' + M + '.' + ( + m + 1 ) + '.0'
} else {
ret = '>=' + M + '.' + m + '.0 <' + ( + M + 1 ) + '.0.0'
}
} else if ( pr ) {
debug ( 'replaceCaret pr' , pr )
if ( M === '0' ) {
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
' <' + ( + M + 1 ) + '.0.0'
}
} else {
debug ( 'no pr' )
if ( M === '0' ) {
if ( m === '0' ) {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + m + '.' + ( + p + 1 )
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + M + '.' + ( + m + 1 ) + '.0'
}
} else {
ret = '>=' + M + '.' + m + '.' + p +
' <' + ( + M + 1 ) + '.0.0'
}
}
debug ( 'caret return' , ret )
return ret
} )
}
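// Illustrative sketch, not part of the module: the caret rules above keep the
// left-most non-zero component fixed, e.g.
//   new Range('^1.2.3').range  // '>=1.2.3 <2.0.0'
//   new Range('^0.2.3').range  // '>=0.2.3 <0.3.0'
//   new Range('^0.0.3').range  // '>=0.0.3 <0.0.4'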
function replaceXRanges ( comp , options ) {
debug ( 'replaceXRanges' , comp , options )
return comp . split ( /\s+/ ) . map ( function ( comp ) {
return replaceXRange ( comp , options )
} ) . join ( ' ' )
}
function replaceXRange ( comp , options ) {
comp = comp . trim ( )
var r = options . loose ? re [ t . XRANGELOOSE ] : re [ t . XRANGE ]
return comp . replace ( r , function ( ret , gtlt , M , m , p , pr ) {
debug ( 'xRange' , comp , ret , gtlt , M , m , p , pr )
var xM = isX ( M )
var xm = xM || isX ( m )
var xp = xm || isX ( p )
var anyX = xp
if ( gtlt === '=' && anyX ) {
gtlt = ''
}
// if we're including prereleases in the match, then we need
// to fix this to -0, the lowest possible prerelease value
pr = options . includePrerelease ? '-0' : ''
if ( xM ) {
if ( gtlt === '>' || gtlt === '<' ) {
// nothing is allowed
ret = '<0.0.0-0'
} else {
// nothing is forbidden
ret = '*'
}
} else if ( gtlt && anyX ) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if ( xm ) {
m = 0
}
p = 0
if ( gtlt === '>' ) {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
// >1.2.3 => >= 1.2.4
gtlt = '>='
if ( xm ) {
M = + M + 1
m = 0
p = 0
} else {
m = + m + 1
p = 0
}
} else if ( gtlt === '<=' ) {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if ( xm ) {
M = + M + 1
} else {
m = + m + 1
}
}
ret = gtlt + M + '.' + m + '.' + p + pr
} else if ( xm ) {
ret = '>=' + M + '.0.0' + pr + ' <' + ( + M + 1 ) + '.0.0' + pr
} else if ( xp ) {
ret = '>=' + M + '.' + m + '.0' + pr +
' <' + M + '.' + ( + m + 1 ) + '.0' + pr
}
debug ( 'xRange return' , ret )
return ret
} )
}
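// Illustrative sketch, not part of the module: x-ranges combine an operator with a
// wildcard component, e.g.
//   new Range('1.x').range      // '>=1.0.0 <2.0.0'
//   new Range('>1.2').range     // '>=1.3.0'
//   new Range('<=0.7.x').range  // '<0.8.0'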
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars ( comp , options ) {
debug ( 'replaceStars' , comp , options )
// Looseness is ignored here. star is always as loose as it gets!
return comp . trim ( ) . replace ( re [ t . STAR ] , '' )
}
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace ( $0 ,
from , fM , fm , fp , fpr , fb ,
to , tM , tm , tp , tpr , tb ) {
if ( isX ( fM ) ) {
from = ''
} else if ( isX ( fm ) ) {
from = '>=' + fM + '.0.0'
} else if ( isX ( fp ) ) {
from = '>=' + fM + '.' + fm + '.0'
} else {
from = '>=' + from
}
if ( isX ( tM ) ) {
to = ''
} else if ( isX ( tm ) ) {
to = '<' + ( + tM + 1 ) + '.0.0'
} else if ( isX ( tp ) ) {
to = '<' + tM + '.' + ( + tm + 1 ) + '.0'
} else if ( tpr ) {
to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
} else {
to = '<=' + to
}
return ( from + ' ' + to ) . trim ( )
}
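// Illustrative sketch, not part of the module: hyphen ranges expand to an inclusive
// lower bound and an upper bound that covers any omitted components, e.g.
//   new Range('1.2 - 3.4.5').range  // '>=1.2.0 <=3.4.5'
//   new Range('1.2.3 - 3.4').range  // '>=1.2.3 <3.5.0'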
// if ANY of the sets match ALL of its comparators, then pass
Range . prototype . test = function ( version ) {
if ( ! version ) {
return false
}
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
}
for ( var i = 0 ; i < this . set . length ; i ++ ) {
if ( testSet ( this . set [ i ] , version , this . options ) ) {
return true
}
}
return false
}
function testSet ( set , version , options ) {
for ( var i = 0 ; i < set . length ; i ++ ) {
if ( ! set [ i ] . test ( version ) ) {
return false
}
}
if ( version . prerelease . length && ! options . includePrerelease ) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for ( i = 0 ; i < set . length ; i ++ ) {
debug ( set [ i ] . semver )
if ( set [ i ] . semver === ANY ) {
continue
}
if ( set [ i ] . semver . prerelease . length > 0 ) {
var allowed = set [ i ] . semver
if ( allowed . major === version . major &&
allowed . minor === version . minor &&
allowed . patch === version . patch ) {
return true
}
}
}
// Version has a -pre, but it's not one of the ones we like.
return false
}
return true
}
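// Illustrative sketch, not part of the module: a prerelease version only matches
// when some comparator opts into the same [major, minor, patch] tuple, e.g.
//   new Range('^1.2.3-pr.1').test('1.2.3-pr.2')            // true
//   new Range('^1.2.3-pr.1').test('1.2.4-alpha.notready')  // false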
exports . satisfies = satisfies
function satisfies ( version , range , options ) {
try {
range = new Range ( range , options )
} catch ( er ) {
return false
}
return range . test ( version )
}
exports . maxSatisfying = maxSatisfying
function maxSatisfying ( versions , range , options ) {
var max = null
var maxSV = null
try {
var rangeObj = new Range ( range , options )
} catch ( er ) {
return null
}
versions . forEach ( function ( v ) {
if ( rangeObj . test ( v ) ) {
// satisfies(v, range, options)
if ( ! max || maxSV . compare ( v ) === - 1 ) {
// compare(max, v, true)
max = v
maxSV = new SemVer ( max , options )
}
}
} )
return max
}
exports . minSatisfying = minSatisfying
function minSatisfying ( versions , range , options ) {
var min = null
var minSV = null
try {
var rangeObj = new Range ( range , options )
} catch ( er ) {
return null
}
versions . forEach ( function ( v ) {
if ( rangeObj . test ( v ) ) {
// satisfies(v, range, options)
if ( ! min || minSV . compare ( v ) === 1 ) {
// compare(min, v, true)
min = v
minSV = new SemVer ( min , options )
}
}
} )
return min
}
exports . minVersion = minVersion
function minVersion ( range , loose ) {
range = new Range ( range , loose )
var minver = new SemVer ( '0.0.0' )
if ( range . test ( minver ) ) {
return minver
}
minver = new SemVer ( '0.0.0-0' )
if ( range . test ( minver ) ) {
return minver
}
minver = null
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
comparators . forEach ( function ( comparator ) {
// Clone to avoid manipulating the comparator's semver object.
var compver = new SemVer ( comparator . semver . version )
switch ( comparator . operator ) {
case '>' :
if ( compver . prerelease . length === 0 ) {
compver . patch ++
} else {
compver . prerelease . push ( 0 )
}
compver . raw = compver . format ( )
/* fallthrough */
case '' :
case '>=' :
if ( ! minver || gt ( minver , compver ) ) {
minver = compver
}
break
case '<' :
case '<=' :
/* Ignore maximum versions */
break
/* istanbul ignore next */
default :
throw new Error ( 'Unexpected operation: ' + comparator . operator )
}
} )
}
if ( minver && range . test ( minver ) ) {
return minver
}
return null
}
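// Illustrative sketch, not part of the module: minVersion() returns the lowest
// version that can satisfy the range, e.g.
//   minVersion('>=1.2.3').version  // '1.2.3'
//   minVersion('>1.2.3').version   // '1.2.4'
//   minVersion('<2.0.0').version   // '0.0.0'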
exports . validRange = validRange
function validRange ( range , options ) {
try {
// Return '*' instead of '' so that truthiness works.
// This will throw if it's invalid anyway
return new Range ( range , options ) . range || '*'
} catch ( er ) {
return null
}
}
// Determine if version is less than all the versions possible in the range
exports . ltr = ltr
function ltr ( version , range , options ) {
return outside ( version , range , '<' , options )
}
// Determine if version is greater than all the versions possible in the range.
exports . gtr = gtr
function gtr ( version , range , options ) {
return outside ( version , range , '>' , options )
}
exports . outside = outside
function outside ( version , range , hilo , options ) {
version = new SemVer ( version , options )
range = new Range ( range , options )
var gtfn , ltefn , ltfn , comp , ecomp
switch ( hilo ) {
case '>' :
gtfn = gt
ltefn = lte
ltfn = lt
comp = '>'
ecomp = '>='
break
case '<' :
gtfn = lt
ltefn = gte
ltfn = gt
comp = '<'
ecomp = '<='
break
default :
throw new TypeError ( 'Must provide a hilo val of "<" or ">"' )
}
// If it satisfies the range it is not outside
if ( satisfies ( version , range , options ) ) {
return false
}
// From now on, variable terms are as if we're in "gtr" mode.
// but note that everything is flipped for the "ltr" function.
for ( var i = 0 ; i < range . set . length ; ++ i ) {
var comparators = range . set [ i ]
var high = null
var low = null
comparators . forEach ( function ( comparator ) {
if ( comparator . semver === ANY ) {
comparator = new Comparator ( '>=0.0.0' )
}
high = high || comparator
low = low || comparator
if ( gtfn ( comparator . semver , high . semver , options ) ) {
high = comparator
} else if ( ltfn ( comparator . semver , low . semver , options ) ) {
low = comparator
}
} )
// If the edge version comparator has an operator then our version
// isn't outside it
if ( high . operator === comp || high . operator === ecomp ) {
return false
}
// If the lowest version comparator has an operator and our version
// is less than it then it isn't higher than the range
if ( ( ! low . operator || low . operator === comp ) &&
ltefn ( version , low . semver ) ) {
return false
} else if ( low . operator === ecomp && ltfn ( version , low . semver ) ) {
return false
}
}
return true
}
exports . prerelease = prerelease
function prerelease ( version , options ) {
var parsed = parse ( version , options )
return ( parsed && parsed . prerelease . length ) ? parsed . prerelease : null
}
exports . intersects = intersects
function intersects ( r1 , r2 , options ) {
r1 = new Range ( r1 , options )
r2 = new Range ( r2 , options )
return r1 . intersects ( r2 )
}
exports . coerce = coerce
function coerce ( version , options ) {
if ( version instanceof SemVer ) {
return version
}
if ( typeof version === 'number' ) {
version = String ( version )
}
if ( typeof version !== 'string' ) {
return null
}
options = options || { }
var match = null
if ( ! options . rtl ) {
match = version . match ( re [ t . COERCE ] )
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ( ( next = re [ t . COERCERTL ] . exec ( version ) ) &&
( ! match || match . index + match [ 0 ] . length !== version . length )
) {
if ( ! match ||
next . index + next [ 0 ] . length !== match . index + match [ 0 ] . length ) {
match = next
}
re [ t . COERCERTL ] . lastIndex = next . index + next [ 1 ] . length + next [ 2 ] . length
}
// leave it in a clean state
re [ t . COERCERTL ] . lastIndex = - 1
}
if ( match === null ) {
return null
}
return parse ( match [ 2 ] +
'.' + ( match [ 3 ] || '0' ) +
'.' + ( match [ 4 ] || '0' ) , options )
}
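// Illustrative sketch, not part of the module: coerce() extracts a semver triple
// from a longer string; with `rtl` it prefers the right-most candidate, e.g.
//   coerce('v1.2').version                    // '1.2.0'
//   coerce('1.2.3.4').version                 // '1.2.3'
//   coerce('1.2.3.4', { rtl: true }).version  // '2.3.4'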
/***/ } ) ,
/***/ 281 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const internal _globber _1 = _ _webpack _require _ _ ( 297 ) ;
/ * *
* Constructs a globber
*
* @ param patterns Patterns separated by newlines
* @ param options Glob options
* /
function create ( patterns , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield internal _globber _1 . DefaultGlobber . create ( patterns , options ) ;
} ) ;
}
exports . create = create ;
//# sourceMappingURL=glob.js.map
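// Illustrative sketch, not part of the module: patterns are supplied as one
// newline-separated string; blank lines and lines starting with '#' are skipped.
//   const globber = await create('**/*.js\n# comment lines are ignored\n')
//   const files = await globber.glob()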
/***/ } ) ,
/***/ 297 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
var _ _await = ( this && this . _ _await ) || function ( v ) { return this instanceof _ _await ? ( this . v = v , this ) : new _ _await ( v ) ; }
var _ _asyncGenerator = ( this && this . _ _asyncGenerator ) || function ( thisArg , _arguments , generator ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var g = generator . apply ( thisArg , _arguments || [ ] ) , i , q = [ ] ;
return i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ;
function verb ( n ) { if ( g [ n ] ) i [ n ] = function ( v ) { return new Promise ( function ( a , b ) { q . push ( [ n , v , a , b ] ) > 1 || resume ( n , v ) ; } ) ; } ; }
function resume ( n , v ) { try { step ( g [ n ] ( v ) ) ; } catch ( e ) { settle ( q [ 0 ] [ 3 ] , e ) ; } }
function step ( r ) { r . value instanceof _ _await ? Promise . resolve ( r . value . v ) . then ( fulfill , reject ) : settle ( q [ 0 ] [ 2 ] , r ) ; }
function fulfill ( value ) { resume ( "next" , value ) ; }
function reject ( value ) { resume ( "throw" , value ) ; }
function settle ( f , v ) { if ( f ( v ) , q . shift ( ) , q . length ) resume ( q [ 0 ] [ 0 ] , q [ 0 ] [ 1 ] ) ; }
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _webpack _require _ _ ( 470 ) ;
const fs = _ _webpack _require _ _ ( 747 ) ;
const globOptionsHelper = _ _webpack _require _ _ ( 601 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const patternHelper = _ _webpack _require _ _ ( 597 ) ;
const internal _match _kind _1 = _ _webpack _require _ _ ( 327 ) ;
const internal _pattern _1 = _ _webpack _require _ _ ( 923 ) ;
const internal _search _state _1 = _ _webpack _require _ _ ( 728 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class DefaultGlobber {
constructor ( options ) {
this . patterns = [ ] ;
this . searchPaths = [ ] ;
this . options = globOptionsHelper . getOptions ( options ) ;
}
getSearchPaths ( ) {
// Return a copy
return this . searchPaths . slice ( ) ;
}
glob ( ) {
var e _1 , _a ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = [ ] ;
try {
for ( var _b = _ _asyncValues ( this . globGenerator ( ) ) , _c ; _c = yield _b . next ( ) , ! _c . done ; ) {
const itemPath = _c . value ;
result . push ( itemPath ) ;
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( _c && ! _c . done && ( _a = _b . return ) ) yield _a . call ( _b ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
return result ;
} ) ;
}
globGenerator ( ) {
return _ _asyncGenerator ( this , arguments , function * globGenerator _1 ( ) {
// Fill in default options
const options = globOptionsHelper . getOptions ( this . options ) ;
// Implicit descendants?
const patterns = [ ] ;
for ( const pattern of this . patterns ) {
patterns . push ( pattern ) ;
if ( options . implicitDescendants &&
( pattern . trailingSeparator ||
pattern . segments [ pattern . segments . length - 1 ] !== '**' ) ) {
patterns . push ( new internal _pattern _1 . Pattern ( pattern . negate , pattern . segments . concat ( '**' ) ) ) ;
}
}
// Push the search paths
const stack = [ ] ;
for ( const searchPath of patternHelper . getSearchPaths ( patterns ) ) {
core . debug ( ` Search path ' ${ searchPath } ' ` ) ;
// Exists?
try {
// Intentionally using lstat. Detection for broken symlink
// will be performed later (if following symlinks).
yield _ _await ( fs . promises . lstat ( searchPath ) ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
continue ;
}
throw err ;
}
stack . unshift ( new internal _search _state _1 . SearchState ( searchPath , 1 ) ) ;
}
// Search
const traversalChain = [ ] ; // used to detect cycles
while ( stack . length ) {
// Pop
const item = stack . pop ( ) ;
// Match?
const match = patternHelper . match ( patterns , item . path ) ;
const partialMatch = ! ! match || patternHelper . partialMatch ( patterns , item . path ) ;
if ( ! match && ! partialMatch ) {
continue ;
}
// Stat
const stats = yield _ _await ( DefaultGlobber . stat ( item , options , traversalChain )
// Broken symlink, or symlink cycle detected, or no longer exists
) ;
// Broken symlink, or symlink cycle detected, or no longer exists
if ( ! stats ) {
continue ;
}
// Directory
if ( stats . isDirectory ( ) ) {
// Matched
if ( match & internal _match _kind _1 . MatchKind . Directory ) {
yield yield _ _await ( item . path ) ;
}
// Descend?
else if ( ! partialMatch ) {
continue ;
}
// Push the child items in reverse
const childLevel = item . level + 1 ;
const childItems = ( yield _ _await ( fs . promises . readdir ( item . path ) ) ) . map ( x => new internal _search _state _1 . SearchState ( path . join ( item . path , x ) , childLevel ) ) ;
stack . push ( ... childItems . reverse ( ) ) ;
}
// File
else if ( match & internal _match _kind _1 . MatchKind . File ) {
yield yield _ _await ( item . path ) ;
}
}
} ) ;
}
/ * *
* Constructs a DefaultGlobber
* /
static create ( patterns , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = new DefaultGlobber ( options ) ;
if ( IS _WINDOWS ) {
patterns = patterns . replace ( /\r\n/g , '\n' ) ;
patterns = patterns . replace ( /\r/g , '\n' ) ;
}
const lines = patterns . split ( '\n' ) . map ( x => x . trim ( ) ) ;
for ( const line of lines ) {
// Empty or comment
if ( ! line || line . startsWith ( '#' ) ) {
continue ;
}
// Pattern
else {
result . patterns . push ( new internal _pattern _1 . Pattern ( line ) ) ;
}
}
result . searchPaths . push ( ... patternHelper . getSearchPaths ( result . patterns ) ) ;
return result ;
} ) ;
}
static stat ( item , options , traversalChain ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Note:
// `stat` returns info about the target of a symlink (or symlink chain)
// `lstat` returns info about a symlink itself
let stats ;
if ( options . followSymbolicLinks ) {
try {
// Use `stat` (following symlinks)
stats = yield fs . promises . stat ( item . path ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
if ( options . omitBrokenSymbolicLinks ) {
core . debug ( ` Broken symlink ' ${ item . path } ' ` ) ;
return undefined ;
}
throw new Error ( ` No information found for the path ' ${ item . path } '. This may indicate a broken symbolic link. ` ) ;
}
throw err ;
}
}
else {
// Use `lstat` (not following symlinks)
stats = yield fs . promises . lstat ( item . path ) ;
}
// Note, isDirectory() returns false for the lstat of a symlink
if ( stats . isDirectory ( ) && options . followSymbolicLinks ) {
// Get the realpath
const realPath = yield fs . promises . realpath ( item . path ) ;
// Fixup the traversal chain to match the item level
while ( traversalChain . length >= item . level ) {
traversalChain . pop ( ) ;
}
// Test for a cycle
if ( traversalChain . some ( ( x ) => x === realPath ) ) {
core . debug ( ` Symlink cycle detected for path ' ${ item . path } ' and realpath ' ${ realPath } ' ` ) ;
return undefined ;
}
// Update the traversal chain
traversalChain . push ( realPath ) ;
}
return stats ;
} ) ;
}
}
exports . DefaultGlobber = DefaultGlobber ;
//# sourceMappingURL=internal-globber.js.map
/***/ } ) ,
/***/ 306 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var concatMap = _ _webpack _require _ _ ( 896 ) ;
var balanced = _ _webpack _require _ _ ( 621 ) ;
module . exports = expandTop ;
var escSlash = '\0SLASH' + Math . random ( ) + '\0' ;
var escOpen = '\0OPEN' + Math . random ( ) + '\0' ;
var escClose = '\0CLOSE' + Math . random ( ) + '\0' ;
var escComma = '\0COMMA' + Math . random ( ) + '\0' ;
var escPeriod = '\0PERIOD' + Math . random ( ) + '\0' ;
function numeric ( str ) {
return parseInt ( str , 10 ) == str
? parseInt ( str , 10 )
: str . charCodeAt ( 0 ) ;
}
function escapeBraces ( str ) {
return str . split ( '\\\\' ) . join ( escSlash )
. split ( '\\{' ) . join ( escOpen )
. split ( '\\}' ) . join ( escClose )
. split ( '\\,' ) . join ( escComma )
. split ( '\\.' ) . join ( escPeriod ) ;
}
function unescapeBraces ( str ) {
return str . split ( escSlash ) . join ( '\\' )
. split ( escOpen ) . join ( '{' )
. split ( escClose ) . join ( '}' )
. split ( escComma ) . join ( ',' )
. split ( escPeriod ) . join ( '.' ) ;
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts ( str ) {
if ( ! str )
return [ '' ] ;
var parts = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m )
return str . split ( ',' ) ;
var pre = m . pre ;
var body = m . body ;
var post = m . post ;
var p = pre . split ( ',' ) ;
p [ p . length - 1 ] += '{' + body + '}' ;
var postParts = parseCommaParts ( post ) ;
if ( post . length ) {
p [ p . length - 1 ] += postParts . shift ( ) ;
p . push . apply ( p , postParts ) ;
}
parts . push . apply ( parts , p ) ;
return parts ;
}
function expandTop ( str ) {
if ( ! str )
return [ ] ;
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if ( str . substr ( 0 , 2 ) === '{}' ) {
str = '\\{\\}' + str . substr ( 2 ) ;
}
return expand ( escapeBraces ( str ) , true ) . map ( unescapeBraces ) ;
}
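// Illustrative sketch, not part of the module: expandTop() performs Bash-style
// brace expansion on a single string, e.g.
//   expandTop('a{b,c}d')         // ['abd', 'acd']
//   expandTop('file{1..3}.txt')  // ['file1.txt', 'file2.txt', 'file3.txt']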
function identity ( e ) {
return e ;
}
function embrace ( str ) {
return '{' + str + '}' ;
}
function isPadded ( el ) {
return /^-?0\d/ . test ( el ) ;
}
function lte ( i , y ) {
return i <= y ;
}
function gte ( i , y ) {
return i >= y ;
}
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 327 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
/ * *
* Indicates whether a pattern matches a path
* /
var MatchKind ;
( function ( MatchKind ) {
/** Not matched */
MatchKind [ MatchKind [ "None" ] = 0 ] = "None" ;
/** Matched if the path is a directory */
MatchKind [ MatchKind [ "Directory" ] = 1 ] = "Directory" ;
/** Matched if the path is a regular file */
MatchKind [ MatchKind [ "File" ] = 2 ] = "File" ;
/** Matched */
MatchKind [ MatchKind [ "All" ] = 3 ] = "All" ;
} ) ( MatchKind = exports . MatchKind || ( exports . MatchKind = { } ) ) ;
//# sourceMappingURL=internal-match-kind.js.map
/***/ } ) ,
/***/ 357 :
/***/ ( function ( module ) {
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 383 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert = _ _webpack _require _ _ ( 357 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const pathHelper = _ _webpack _require _ _ ( 972 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Helper class for parsing paths into segments
* /
class Path {
/ * *
* Constructs a Path
* @ param itemPath Path or array of segments
* /
constructor ( itemPath ) {
this . segments = [ ] ;
// String
if ( typeof itemPath === 'string' ) {
assert ( itemPath , ` Parameter 'itemPath' must not be empty ` ) ;
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// Not rooted
if ( ! pathHelper . hasRoot ( itemPath ) ) {
this . segments = itemPath . split ( path . sep ) ;
}
// Rooted
else {
// Add all segments, while not at the root
let remaining = itemPath ;
let dir = pathHelper . dirname ( remaining ) ;
while ( dir !== remaining ) {
// Add the segment
const basename = path . basename ( remaining ) ;
this . segments . unshift ( basename ) ;
// Truncate the last segment
remaining = dir ;
dir = pathHelper . dirname ( remaining ) ;
}
// Remainder is the root
this . segments . unshift ( remaining ) ;
}
}
// Array
else {
// Must not be empty
assert ( itemPath . length > 0 , ` Parameter 'itemPath' must not be an empty array ` ) ;
// Each segment
for ( let i = 0 ; i < itemPath . length ; i ++ ) {
let segment = itemPath [ i ] ;
// Must not be empty
assert ( segment , ` Parameter 'itemPath' must not contain any empty segments ` ) ;
// Normalize slashes
segment = pathHelper . normalizeSeparators ( itemPath [ i ] ) ;
// Root segment
if ( i === 0 && pathHelper . hasRoot ( segment ) ) {
segment = pathHelper . safeTrimTrailingSeparator ( segment ) ;
assert ( segment === pathHelper . dirname ( segment ) , ` Parameter 'itemPath' root segment contains information for multiple segments ` ) ;
this . segments . push ( segment ) ;
}
// All other segments
else {
// Must not contain slash
assert ( ! segment . includes ( path . sep ) , ` Parameter 'itemPath' contains unexpected path separators ` ) ;
this . segments . push ( segment ) ;
}
}
}
}
/ * *
* Converts the path to its string representation
* /
toString ( ) {
// First segment
let result = this . segments [ 0 ] ;
// All others
let skipSlash = result . endsWith ( path . sep ) || ( IS _WINDOWS && /^[A-Z]:$/i . test ( result ) ) ;
for ( let i = 1 ; i < this . segments . length ; i ++ ) {
if ( skipSlash ) {
skipSlash = false ;
}
else {
result += path . sep ;
}
result += this . segments [ i ] ;
}
return result ;
}
}
exports . Path = Path ;
//# sourceMappingURL=internal-path.js.map
/***/ } ) ,
/***/ 413 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = _ _webpack _require _ _ ( 141 ) ;
/***/ } ) ,
/***/ 417 :
/***/ ( function ( module ) {
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 431 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
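// Illustrative sketch, not part of the module (the property names are made up),
// showing the '::name key=value,key=value::message' format described above:
//   issueCommand('warning', { file: 'app.js', line: '10' }, 'Something looks off')
//   // writes "::warning file=app.js,line=10::Something looks off" to stdout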
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue ( input ) {
if ( input === null || input === undefined ) {
return '' ;
}
else if ( typeof input === 'string' || input instanceof String ) {
return input ;
}
return JSON . stringify ( input ) ;
}
exports . toCommandValue = toCommandValue ;
function escapeData ( s ) {
return toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
return toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 434 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const exec _1 = _ _webpack _require _ _ ( 986 ) ;
const io = _ _importStar ( _ _webpack _require _ _ ( 1 ) ) ;
const fs _1 = _ _webpack _require _ _ ( 747 ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 15 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 931 ) ;
function getTarPath ( args , compressionMethod ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const IS _WINDOWS = process . platform === 'win32' ;
if ( IS _WINDOWS ) {
const systemTar = ` ${ process . env [ 'windir' ] } \\ System32 \\ tar.exe ` ;
if ( compressionMethod !== constants _1 . CompressionMethod . Gzip ) {
// We only use zstandard compression on windows when gnu tar is installed due to
// a bug with compressing large files with bsdtar + zstd
args . push ( '--force-local' ) ;
}
else if ( fs _1 . existsSync ( systemTar ) ) {
return systemTar ;
}
else if ( yield utils . isGnuTarInstalled ( ) ) {
args . push ( '--force-local' ) ;
}
}
return yield io . which ( 'tar' , true ) ;
} ) ;
}
function execTar ( args , compressionMethod , cwd ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exec _1 . exec ( ` " ${ yield getTarPath ( args , compressionMethod ) } " ` , args , { cwd } ) ;
}
catch ( error ) {
throw new Error ( ` Tar failed with error: ${ error === null || error === void 0 ? void 0 : error . message } ` ) ;
}
} ) ;
}
function getWorkingDirectory ( ) {
var _a ;
return ( _a = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _a !== void 0 ? _a : process . cwd ( ) ;
}
function extractTar ( archivePath , compressionMethod ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory ( ) ;
yield io . mkdirP ( workingDirectory ) ;
// -d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram ( ) {
switch ( compressionMethod ) {
case constants _1 . CompressionMethod . Zstd :
return [ '--use-compress-program' , 'zstd -d --long=30' ] ;
case constants _1 . CompressionMethod . ZstdWithoutLong :
return [ '--use-compress-program' , 'zstd -d' ] ;
default :
return [ '-z' ] ;
}
}
const args = [
... getCompressionProgram ( ) ,
'-xf' ,
archivePath . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ,
'-P' ,
'-C' ,
workingDirectory . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' )
] ;
yield execTar ( args , compressionMethod ) ;
} ) ;
}
exports . extractTar = extractTar ;
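// Illustrative sketch, not part of the module: with the Zstd compression method on
// Linux the spawned command is roughly
//   tar --use-compress-program "zstd -d --long=30" -xf <archivePath> -P -C <workspace>
// where <archivePath> and the workspace path have their separators normalized to '/'.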
function createTar ( archiveFolder , sourceDirectories , compressionMethod ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = 'manifest.txt' ;
const cacheFileName = utils . getCacheFileName ( compressionMethod ) ;
fs _1 . writeFileSync ( path . join ( archiveFolder , manifestFilename ) , sourceDirectories . join ( '\n' ) ) ;
const workingDirectory = getWorkingDirectory ( ) ;
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram ( ) {
switch ( compressionMethod ) {
case constants _1 . CompressionMethod . Zstd :
return [ '--use-compress-program' , 'zstd -T0 --long=30' ] ;
case constants _1 . CompressionMethod . ZstdWithoutLong :
return [ '--use-compress-program' , 'zstd -T0' ] ;
default :
return [ '-z' ] ;
}
}
const args = [
... getCompressionProgram ( ) ,
'-cf' ,
cacheFileName . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ,
'-P' ,
'-C' ,
workingDirectory . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ,
'--files-from' ,
manifestFilename
] ;
yield execTar ( args , compressionMethod , archiveFolder ) ;
} ) ;
}
exports . createTar = createTar ;
//# sourceMappingURL=tar.js.map
/***/ } ) ,
/***/ 443 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . getInputAsArray = exports . isValidEvent = exports . logWarning = exports . getCacheState = exports . setOutputAndState = exports . setCacheHitOutput = exports . setCacheState = exports . isExactKeyMatch = void 0 ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 694 ) ;
function isExactKeyMatch ( key , cacheKey ) {
return ! ! ( cacheKey &&
cacheKey . localeCompare ( key , undefined , {
sensitivity : "accent"
} ) === 0 ) ;
}
exports . isExactKeyMatch = isExactKeyMatch ;
function setCacheState ( state ) {
core . saveState ( constants _1 . State . CacheMatchedKey , state ) ;
}
exports . setCacheState = setCacheState ;
function setCacheHitOutput ( isCacheHit ) {
core . setOutput ( constants _1 . Outputs . CacheHit , isCacheHit . toString ( ) ) ;
}
exports . setCacheHitOutput = setCacheHitOutput ;
function setOutputAndState ( key , cacheKey ) {
setCacheHitOutput ( isExactKeyMatch ( key , cacheKey ) ) ;
// Store the matched cache key if it exists
cacheKey && setCacheState ( cacheKey ) ;
}
exports . setOutputAndState = setOutputAndState ;
function getCacheState ( ) {
const cacheKey = core . getState ( constants _1 . State . CacheMatchedKey ) ;
if ( cacheKey ) {
core . debug ( ` Cache state/key: ${ cacheKey } ` ) ;
return cacheKey ;
}
return undefined ;
}
exports . getCacheState = getCacheState ;
function logWarning ( message ) {
const warningPrefix = "[warning]" ;
core . info ( ` ${ warningPrefix } ${ message } ` ) ;
}
exports . logWarning = logWarning ;
// Cache token authorized for all events that are tied to a ref
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
function isValidEvent ( ) {
return constants _1 . RefKey in process . env && Boolean ( process . env [ constants _1 . RefKey ] ) ;
}
exports . isValidEvent = isValidEvent ;
function getInputAsArray ( name , options ) {
return core
. getInput ( name , options )
. split ( "\n" )
. map ( s => s . trim ( ) )
. filter ( x => x !== "" ) ;
}
exports . getInputAsArray = getInputAsArray ;
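// Illustrative sketch, not part of the module: a multi-line input value such as
//   "node_modules\n  dist\n"
// is parsed by getInputAsArray into ['node_modules', 'dist'].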
/***/ } ) ,
/***/ 470 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const command _1 = _ _webpack _require _ _ ( 431 ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = command _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
}
exports . exportVariable = exportVariable ;
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input . The value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
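// Illustrative sketch, not part of the module: the runner exposes inputs as
// environment variables, so getInput('key') reads process.env['INPUT_KEY'] and
// trims it; with { required: true } it throws when the value is empty.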
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
command _1 . issueCommand ( 'set-output' , { name } , value ) ;
}
exports . setOutput = setOutput ;
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* /
function error ( message ) {
command _1 . issue ( 'error' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
* Adds a warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* /
function warning ( message ) {
command _1 . issue ( 'warning' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
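// Illustrative sketch (editor's addition): group() wraps an async callback in
// 'group'/'endgroup' workflow commands so its log output is foldable. The step
// name and callback below are placeholders.
async function exampleGroupUsage() {
    const value = await group('Install dependencies', async () => {
        info('running npm ci...'); // collapsed under the "Install dependencies" group
        return 42;                 // group() resolves with the callback's return value
    });
    return value; // endGroup() runs even if the callback throws
}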
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
* Saves state for the current action . The state can only be retrieved by this action ' s post job execution .
*
* @ param name name of the state to store
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState ( name , value ) {
command _1 . issueCommand ( 'save-state' , { name } , value ) ;
}
exports . saveState = saveState ;
/ * *
* Gets the value of a state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
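// Illustrative sketch (editor's addition): saveState() is intended for the main
// step; the runner then exposes the value to the post step via a STATE_<name>
// environment variable, which getState() reads back. 'CACHE_KEY' is a
// placeholder state name.
function exampleStateRoundTrip() {
    saveState('CACHE_KEY', 'npm-deps-abc123');      // main step
    // In the post step (a separate process) the runner sets
    // process.env['STATE_CACHE_KEY'] = 'npm-deps-abc123'.
    return getState('CACHE_KEY'); // '' here, since only the runner sets STATE_* vars
}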
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 539 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
const http = _ _webpack _require _ _ ( 605 ) ;
const https = _ _webpack _require _ _ ( 211 ) ;
const pm = _ _webpack _require _ _ ( 950 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
* Returns the proxy URL , depending upon the supplied url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
function getProxyUrl ( serverUrl ) {
let proxyUrl = pm . getProxyUrl ( url . parse ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return new Promise ( async ( resolve , reject ) => {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
let parsedUrl = url . parse ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns a null . Other errors ( 4 xx , 5 xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
let parsedUrl = url . parse ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1 &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = url . parse ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' &&
parsedUrl . protocol != parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning it;
// otherwise we would leak the open socket.
await response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( let header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
}
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
}
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
let socket ;
if ( typeof data === 'string' ) {
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
let handleResult = ( err , res ) => {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
} ;
let req = info . httpModule . request ( info . options , ( msg ) => {
let res = new HttpClientResponse ( msg ) ;
handleResult ( null , res ) ;
} ) ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( 'Request timeout: ' + info . options . path ) , null ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err , null ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = url . parse ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( handler => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
let proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
let useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( ! ! agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( ! ! this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
if ( useProxy ) {
// If using proxy, need tunnel
if ( ! tunnel ) {
tunnel = _ _webpack _require _ _ ( 413 ) ;
}
const agentOptions = {
maxSockets : maxSockets ,
keepAlive : this . _keepAlive ,
proxy : {
proxyAuth : proxyUrl . auth ,
host : proxyUrl . hostname ,
port : proxyUrl . port
}
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets : maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
async _processResponse ( res , options ) {
return new Promise ( async ( resolve , reject ) => {
const statusCode = res . message . statusCode ;
const response = {
statusCode : statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode == HttpCodes . NotFound ) {
resolve ( response ) ;
}
let obj ;
let contents ;
// get the result from the body
try {
contents = await res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , HttpClient . dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = 'Failed request: (' + statusCode + ')' ;
}
let err = new Error ( msg ) ;
// attach statusCode and body obj (if available) to the error object
err [ 'statusCode' ] = statusCode ;
if ( response . result ) {
err [ 'result' ] = response . result ;
}
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ;
}
}
exports . HttpClient = HttpClient ;
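// Illustrative sketch (editor's addition): a minimal JSON GET with the HttpClient
// above. The user agent, URL, and retry options are placeholders; retries only
// apply to OPTIONS/GET/DELETE/HEAD requests, per RetryableHttpVerbs.
async function exampleHttpClientUsage() {
    const client = new HttpClient('example-agent', [], {
        allowRetries: true,
        maxRetries: 2
    });
    // getJson resolves { statusCode, result, headers }; a 404 yields result: null,
    // while other 4xx/5xx responses reject the promise.
    const response = await client.getJson('https://api.github.com/rate_limit');
    return response.result;
}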
/***/ } ) ,
/***/ 597 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const pathHelper = _ _webpack _require _ _ ( 972 ) ;
const internal _match _kind _1 = _ _webpack _require _ _ ( 327 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Given an array of patterns , returns an array of paths to search .
* Duplicates and paths under other included paths are filtered out .
* /
function getSearchPaths ( patterns ) {
// Ignore negate patterns
patterns = patterns . filter ( x => ! x . negate ) ;
// Create a map of all search paths
const searchPathMap = { } ;
for ( const pattern of patterns ) {
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
searchPathMap [ key ] = 'candidate' ;
}
const result = [ ] ;
for ( const pattern of patterns ) {
// Check if already included
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
if ( searchPathMap [ key ] === 'included' ) {
continue ;
}
// Check for an ancestor search path
let foundAncestor = false ;
let tempKey = key ;
let parent = pathHelper . dirname ( tempKey ) ;
while ( parent !== tempKey ) {
if ( searchPathMap [ parent ] ) {
foundAncestor = true ;
break ;
}
tempKey = parent ;
parent = pathHelper . dirname ( tempKey ) ;
}
// Include the search pattern in the result
if ( ! foundAncestor ) {
result . push ( pattern . searchPath ) ;
searchPathMap [ key ] = 'included' ;
}
}
return result ;
}
exports . getSearchPaths = getSearchPaths ;
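// Illustrative sketch (editor's addition): getSearchPaths only reads the `negate`
// and `searchPath` fields, so plain objects stand in for Pattern instances here
// (POSIX paths assumed).
function exampleGetSearchPaths() {
    const patterns = [
        { negate: false, searchPath: '/repo/dist' },
        { negate: false, searchPath: '/repo/dist/assets' }, // under /repo/dist -> dropped
        { negate: true, searchPath: '/repo/dist/tmp' },     // negate patterns are ignored
        { negate: false, searchPath: '/repo/docs' }
    ];
    return getSearchPaths(patterns); // ['/repo/dist', '/repo/docs']
}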
/ * *
* Matches the patterns against the path
* /
function match ( patterns , itemPath ) {
let result = internal _match _kind _1 . MatchKind . None ;
for ( const pattern of patterns ) {
if ( pattern . negate ) {
result &= ~ pattern . match ( itemPath ) ;
}
else {
result |= pattern . match ( itemPath ) ;
}
}
return result ;
}
exports . match = match ;
/ * *
* Checks whether to descend further into the directory
* /
function partialMatch ( patterns , itemPath ) {
return patterns . some ( x => ! x . negate && x . partialMatch ( itemPath ) ) ;
}
exports . partialMatch = partialMatch ;
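// Illustrative sketch (editor's addition): match() ORs the MatchKind flags of
// ordinary patterns and clears them for negate patterns; partialMatch() asks
// whether any non-negate pattern could still match deeper paths. Plain objects
// stand in for Pattern instances here.
function exampleMatchUsage() {
    const all = internal_match_kind_1.MatchKind.All;
    const include = { negate: false, match: () => all, partialMatch: () => true };
    const exclude = { negate: true, match: () => all, partialMatch: () => false };
    const kind = match([include, exclude], '/repo/dist/app.js'); // MatchKind.None (0)
    const descend = partialMatch([include, exclude], '/repo/dist'); // true
    return { kind, descend };
}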
//# sourceMappingURL=internal-pattern-helper.js.map
/***/ } ) ,
/***/ 601 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _webpack _require _ _ ( 470 ) ;
/ * *
* Returns a copy with defaults filled in .
* /
function getOptions ( copy ) {
const result = {
followSymbolicLinks : true ,
implicitDescendants : true ,
omitBrokenSymbolicLinks : true
} ;
if ( copy ) {
if ( typeof copy . followSymbolicLinks === 'boolean' ) {
result . followSymbolicLinks = copy . followSymbolicLinks ;
core . debug ( ` followSymbolicLinks ' ${ result . followSymbolicLinks } ' ` ) ;
}
if ( typeof copy . implicitDescendants === 'boolean' ) {
result . implicitDescendants = copy . implicitDescendants ;
core . debug ( ` implicitDescendants ' ${ result . implicitDescendants } ' ` ) ;
}
if ( typeof copy . omitBrokenSymbolicLinks === 'boolean' ) {
result . omitBrokenSymbolicLinks = copy . omitBrokenSymbolicLinks ;
core . debug ( ` omitBrokenSymbolicLinks ' ${ result . omitBrokenSymbolicLinks } ' ` ) ;
}
}
return result ;
}
exports . getOptions = getOptions ;
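// Illustrative sketch (editor's addition): fields missing from the supplied copy
// keep their defaults; only followSymbolicLinks is overridden below.
function exampleGetOptions() {
    return getOptions({ followSymbolicLinks: false });
    // -> { followSymbolicLinks: false, implicitDescendants: true, omitBrokenSymbolicLinks: true }
}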
//# sourceMappingURL=internal-glob-options-helper.js.map
/***/ } ) ,
/***/ 605 :
/***/ ( function ( module ) {
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 614 :
/***/ ( function ( module ) {
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 621 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = balanced ;
function balanced ( a , b , str ) {
if ( a instanceof RegExp ) a = maybeMatch ( a , str ) ;
if ( b instanceof RegExp ) b = maybeMatch ( b , str ) ;
var r = range ( a , b , str ) ;
return r && {
start : r [ 0 ] ,
end : r [ 1 ] ,
pre : str . slice ( 0 , r [ 0 ] ) ,
body : str . slice ( r [ 0 ] + a . length , r [ 1 ] ) ,
post : str . slice ( r [ 1 ] + b . length )
} ;
}
function maybeMatch ( reg , str ) {
var m = str . match ( reg ) ;
return m ? m [ 0 ] : null ;
}
balanced . range = range ;
function range ( a , b , str ) {
var begs , beg , left , right , result ;
var ai = str . indexOf ( a ) ;
var bi = str . indexOf ( b , ai + 1 ) ;
var i = ai ;
if ( ai >= 0 && bi > 0 ) {
begs = [ ] ;
left = str . length ;
while ( i >= 0 && ! result ) {
if ( i == ai ) {
begs . push ( i ) ;
ai = str . indexOf ( a , i + 1 ) ;
} else if ( begs . length == 1 ) {
result = [ begs . pop ( ) , bi ] ;
} else {
beg = begs . pop ( ) ;
if ( beg < left ) {
left = beg ;
right = bi ;
}
bi = str . indexOf ( b , i + 1 ) ;
}
i = ai < bi && ai >= 0 ? ai : bi ;
}
if ( begs . length ) {
result = [ left , right ] ;
}
}
return result ;
}
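// Illustrative sketch (editor's addition): balanced() finds the first balanced
// delimiter pair and splits the string around it. The input is an arbitrary example.
function exampleBalanced() {
    return balanced('{', '}', 'pre{in{ner}}post');
    // -> { start: 3, end: 11, pre: 'pre', body: 'in{ner}', post: 'post' }
}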
/***/ } ) ,
/***/ 622 :
/***/ ( function ( module ) {
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 631 :
/***/ ( function ( module ) {
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 669 :
/***/ ( function ( module ) {
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 672 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert _1 = _ _webpack _require _ _ ( 357 ) ;
const fs = _ _webpack _require _ _ ( 747 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
_a = fs . promises , exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
function exists ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exports . stat ( fsPath ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
return false ;
}
throw err ;
}
return true ;
} ) ;
}
exports . exists = exists ;
function isDirectory ( fsPath , useStat = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = useStat ? yield exports . stat ( fsPath ) : yield exports . lstat ( fsPath ) ;
return stats . isDirectory ( ) ;
} ) ;
}
exports . isDirectory = isDirectory ;
/ * *
* On OSX / Linux , true if path starts with '/' . On Windows , true for paths like :
* \ , \ hello , \ \ hello \ share , C : , and C : \ hello ( and corresponding alternate separator cases ) .
* /
function isRooted ( p ) {
p = normalizeSeparators ( p ) ;
if ( ! p ) {
throw new Error ( 'isRooted() parameter "p" cannot be empty' ) ;
}
if ( exports . IS _WINDOWS ) {
return ( p . startsWith ( '\\' ) || /^[A-Z]:/i . test ( p ) // e.g. \ or \hello or \\hello
) ; // e.g. C: or C:\hello
}
return p . startsWith ( '/' ) ;
}
exports . isRooted = isRooted ;
/ * *
* Recursively create a directory at ` fsPath ` .
*
* This implementation is optimistic , meaning it attempts to create the full
* path first , and backs up the path stack from there .
*
* @ param fsPath The path to create
* @ param maxDepth The maximum recursion depth
* @ param depth The current recursion depth
* /
function mkdirP ( fsPath , maxDepth = 1000 , depth = 1 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( fsPath , 'a path argument must be provided' ) ;
fsPath = path . resolve ( fsPath ) ;
if ( depth >= maxDepth )
return exports . mkdir ( fsPath ) ;
try {
yield exports . mkdir ( fsPath ) ;
return ;
}
catch ( err ) {
switch ( err . code ) {
case 'ENOENT' : {
yield mkdirP ( path . dirname ( fsPath ) , maxDepth , depth + 1 ) ;
yield exports . mkdir ( fsPath ) ;
return ;
}
default : {
let stats ;
try {
stats = yield exports . stat ( fsPath ) ;
}
catch ( err2 ) {
throw err ;
}
if ( ! stats . isDirectory ( ) )
throw err ;
}
}
}
} ) ;
}
exports . mkdirP = mkdirP ;
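// Illustrative sketch (editor's addition): mkdirP attempts the full path first
// and only recurses into parents on ENOENT. The target path is a placeholder.
function exampleMkdirP() {
    // Resolves once every missing segment exists; it is a no-op for an existing
    // directory and rejects if a non-directory blocks the path.
    return mkdirP(path.join('tmp', 'nested', 'dirs'));
}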
/ * *
* Best effort attempt to determine whether a file exists and is executable .
* @ param filePath file path to check
* @ param extensions additional file extensions to try
* @ return if file exists and is executable , returns the file path . otherwise empty string .
* /
function tryGetExecutablePath ( filePath , extensions ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stats = undefined ;
try {
// test file exists
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// on Windows, test for valid extension
const upperExt = path . extname ( filePath ) . toUpperCase ( ) ;
if ( extensions . some ( validExt => validExt . toUpperCase ( ) === upperExt ) ) {
return filePath ;
}
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
// try each extension
const originalFilePath = filePath ;
for ( const extension of extensions ) {
filePath = originalFilePath + extension ;
stats = undefined ;
try {
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path . dirname ( filePath ) ;
const upperName = path . basename ( filePath ) . toUpperCase ( ) ;
for ( const actualName of yield exports . readdir ( directory ) ) {
if ( upperName === actualName . toUpperCase ( ) ) {
filePath = path . join ( directory , actualName ) ;
break ;
}
}
}
catch ( err ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine the actual case of the file ' ${ filePath } ': ${ err } ` ) ;
}
return filePath ;
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
}
return '' ;
} ) ;
}
exports . tryGetExecutablePath = tryGetExecutablePath ;
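// Illustrative sketch (editor's addition): on Windows the extension list decides
// (e.g. PATHEXT-style entries), on Linux/macOS the execute bits do. The file
// path and extensions below are placeholders.
function exampleTryGetExecutablePath() {
    const extensions = exports.IS_WINDOWS ? ['.EXE', '.CMD', '.BAT'] : [];
    // Resolves with the matching file path, or '' when nothing executable is found.
    return tryGetExecutablePath(path.join('/usr/local/bin', 'node'), extensions);
}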
function normalizeSeparators ( p ) {
p = p || '' ;
if ( exports . IS _WINDOWS ) {
// convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// remove redundant slashes
return p . replace ( /\\\\+/g , '\\' ) ;
}
// remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable ( stats ) {
return ( ( stats . mode & 1 ) > 0 ||
( ( stats . mode & 8 ) > 0 && stats . gid === process . getgid ( ) ) ||
( ( stats . mode & 64 ) > 0 && stats . uid === process . getuid ( ) ) ) ;
}
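// Illustrative worked example (editor's addition): mode 0o755 (rwxr-xr-x) has the
// "other" execute bit (1) set, so isUnixExecutable() is true for any caller;
// mode 0o644 (rw-r--r--) has no execute bit set and is always false.
function exampleIsUnixExecutable() {
    const fakeStats = { mode: 0o755, uid: 0, gid: 0 }; // hypothetical fs.Stats-like object
    return isUnixExecutable(fakeStats); // true: (0o755 & 1) > 0 short-circuits the check
}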
//# sourceMappingURL=io-util.js.map
/***/ } ) ,
/***/ 692 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 15 ) ) ;
const cacheHttpClient = _ _importStar ( _ _webpack _require _ _ ( 114 ) ) ;
const tar _1 = _ _webpack _require _ _ ( 434 ) ;
class ValidationError extends Error {
constructor ( message ) {
super ( message ) ;
this . name = 'ValidationError' ;
Object . setPrototypeOf ( this , ValidationError . prototype ) ;
}
}
exports . ValidationError = ValidationError ;
class ReserveCacheError extends Error {
constructor ( message ) {
super ( message ) ;
this . name = 'ReserveCacheError' ;
Object . setPrototypeOf ( this , ReserveCacheError . prototype ) ;
}
}
exports . ReserveCacheError = ReserveCacheError ;
function checkPaths ( paths ) {
if ( ! paths || paths . length === 0 ) {
throw new ValidationError ( ` Path Validation Error: At least one directory or file path is required ` ) ;
}
}
function checkKey ( key ) {
if ( key . length > 512 ) {
throw new ValidationError ( ` Key Validation Error: ${ key } cannot be larger than 512 characters. ` ) ;
}
const regex = /^[^,]*$/ ;
if ( ! regex . test ( key ) ) {
throw new ValidationError ( ` Key Validation Error: ${ key } cannot contain commas. ` ) ;
}
}
/ * *
* Restores cache from keys
*
* @ param paths a list of file paths to restore from the cache
* @ param primaryKey an explicit key for restoring the cache
* @ param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @ returns string returns the key for the cache hit , otherwise returns undefined
* /
function restoreCache ( paths , primaryKey , restoreKeys ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
checkPaths ( paths ) ;
restoreKeys = restoreKeys || [ ] ;
const keys = [ primaryKey , ... restoreKeys ] ;
core . debug ( 'Resolved Keys:' ) ;
core . debug ( JSON . stringify ( keys ) ) ;
if ( keys . length > 10 ) {
throw new ValidationError ( ` Key Validation Error: Keys are limited to a maximum of 10. ` ) ;
}
for ( const key of keys ) {
checkKey ( key ) ;
}
const compressionMethod = yield utils . getCompressionMethod ( ) ;
// paths are needed to compute the version
const cacheEntry = yield cacheHttpClient . getCacheEntry ( keys , paths , {
compressionMethod
} ) ;
if ( ! ( cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry . archiveLocation ) ) {
// Cache not found
return undefined ;
}
const archivePath = path . join ( yield utils . createTempDirectory ( ) , utils . getCacheFileName ( compressionMethod ) ) ;
core . debug ( ` Archive Path: ${ archivePath } ` ) ;
try {
// Download the cache from the cache entry
yield cacheHttpClient . downloadCache ( cacheEntry . archiveLocation , archivePath ) ;
const archiveFileSize = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
core . info ( ` Cache Size: ~ ${ Math . round ( archiveFileSize / ( 1024 * 1024 ) ) } MB ( ${ archiveFileSize } B) ` ) ;
yield tar _1 . extractTar ( archivePath , compressionMethod ) ;
}
finally {
// Try to delete the archive to save space
try {
yield utils . unlinkFile ( archivePath ) ;
}
catch ( error ) {
core . debug ( ` Failed to delete archive: ${ error } ` ) ;
}
}
return cacheEntry . cacheKey ;
} ) ;
}
exports . restoreCache = restoreCache ;
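// Illustrative sketch (editor's addition): restoring a node_modules cache. The
// paths and keys are examples only, and a real call needs the cache service
// environment (runtime URL and token) that the Actions runner provides.
function exampleRestoreCache() {
    return restoreCache(
        ['node_modules'],       // paths to restore into
        'npm-deps-abc123',      // primary key
        ['npm-deps-', 'npm-']   // optional ordered fallback key prefixes
    ); // resolves with the matched key, or undefined on a cache miss
}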
/ * *
* Saves a list of files with the specified key
*
* @ param paths a list of file paths to be cached
* @ param key an explicit key for restoring the cache
* @ param options cache upload options
* @ returns number returns cacheId if the cache was saved successfully and throws an error if save fails
* /
function saveCache ( paths , key , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
checkPaths ( paths ) ;
checkKey ( key ) ;
const compressionMethod = yield utils . getCompressionMethod ( ) ;
core . debug ( 'Reserving Cache' ) ;
const cacheId = yield cacheHttpClient . reserveCache ( key , paths , {
compressionMethod
} ) ;
if ( cacheId === - 1 ) {
throw new ReserveCacheError ( ` Unable to reserve cache with key ${ key } , another job may be creating this cache. ` ) ;
}
core . debug ( ` Cache ID: ${ cacheId } ` ) ;
const cachePaths = yield utils . resolvePaths ( paths ) ;
core . debug ( 'Cache Paths:' ) ;
core . debug ( ` ${ JSON . stringify ( cachePaths ) } ` ) ;
const archiveFolder = yield utils . createTempDirectory ( ) ;
const archivePath = path . join ( archiveFolder , utils . getCacheFileName ( compressionMethod ) ) ;
core . debug ( ` Archive Path: ${ archivePath } ` ) ;
yield tar _1 . createTar ( archiveFolder , cachePaths , compressionMethod ) ;
const fileSizeLimit = 5 * 1024 * 1024 * 1024 ; // 5GB per repo limit
const archiveFileSize = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
core . debug ( ` File Size: ${ archiveFileSize } ` ) ;
if ( archiveFileSize > fileSizeLimit ) {
throw new Error ( ` Cache size of ~ ${ Math . round ( archiveFileSize / ( 1024 * 1024 ) ) } MB ( ${ archiveFileSize } B) is over the 5GB limit, not saving cache. ` ) ;
}
core . debug ( ` Saving Cache (ID: ${ cacheId } ) ` ) ;
yield cacheHttpClient . saveCache ( cacheId , archivePath , options ) ;
return cacheId ;
} ) ;
}
exports . saveCache = saveCache ;
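// Illustrative sketch (editor's addition): saving the same paths under an explicit
// key. Keys are validated above (at most 512 characters, no commas) and the
// archive must stay under the 5GB per-repo limit.
function exampleSaveCache() {
    return saveCache(['node_modules'], 'npm-deps-abc123');
    // resolves with the numeric cacheId; throws ReserveCacheError when another
    // job is already creating a cache for this key
}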
//# sourceMappingURL=cache.js.map
/***/ } ) ,
/***/ 694 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . RefKey = exports . Events = exports . State = exports . Outputs = exports . Inputs = void 0 ;
var Inputs ;
( function ( Inputs ) {
Inputs [ "Key" ] = "key" ;
Inputs [ "Path" ] = "path" ;
Inputs [ "RestoreKeys" ] = "restore-keys" ;
} ) ( Inputs = exports . Inputs || ( exports . Inputs = { } ) ) ;
var Outputs ;
( function ( Outputs ) {
Outputs [ "CacheHit" ] = "cache-hit" ;
} ) ( Outputs = exports . Outputs || ( exports . Outputs = { } ) ) ;
var State ;
( function ( State ) {
State [ "CachePrimaryKey" ] = "CACHE_KEY" ;
State [ "CacheMatchedKey" ] = "CACHE_RESULT" ;
} ) ( State = exports . State || ( exports . State = { } ) ) ;
var Events ;
( function ( Events ) {
Events [ "Key" ] = "GITHUB_EVENT_NAME" ;
Events [ "Push" ] = "push" ;
Events [ "PullRequest" ] = "pull_request" ;
} ) ( Events = exports . Events || ( exports . Events = { } ) ) ;
exports . RefKey = "GITHUB_REF" ;
/***/ } ) ,
/***/ 722 :
/***/ ( function ( module ) {
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
var byteToHex = [ ] ;
for ( var i = 0 ; i < 256 ; ++ i ) {
byteToHex [ i ] = ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ;
}
function bytesToUuid ( buf , offset ) {
var i = offset || 0 ;
var bth = byteToHex ;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
return ( [
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ]
] ) . join ( '' ) ;
2020-02-13 18:38:56 +01:00
}
module . exports = bytesToUuid ;
/***/ } ) ,
/***/ 728 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
class SearchState {
constructor ( path , level ) {
this . path = path ;
this . level = level ;
}
}
exports . SearchState = SearchState ;
//# sourceMappingURL=internal-search-state.js.map
/***/ } ) ,
/***/ 747 :
/***/ ( function ( module ) {
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 778 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const cache = _ _importStar ( _ _webpack _require _ _ ( 692 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 694 ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 443 ) ) ;
function run ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
// Validate inputs, this can cause task failure
if ( ! utils . isValidEvent ( ) ) {
utils . logWarning ( ` Event Validation Error: The event type ${ process . env [ constants _1 . Events . Key ] } is not supported because it's not tied to a branch or tag ref. ` ) ;
return ;
}
const primaryKey = core . getInput ( constants _1 . Inputs . Key , { required : true } ) ;
core . saveState ( constants _1 . State . CachePrimaryKey , primaryKey ) ;
const restoreKeys = utils . getInputAsArray ( constants _1 . Inputs . RestoreKeys ) ;
const cachePaths = utils . getInputAsArray ( constants _1 . Inputs . Path , {
required : true
} ) ;
try {
const cacheKey = yield cache . restoreCache ( cachePaths , primaryKey , restoreKeys ) ;
if ( ! cacheKey ) {
core . info ( ` Cache not found for input keys: ${ [
primaryKey ,
... restoreKeys
] . join ( ", " ) } ` );
return ;
}
// Store the matched cache key
utils . setCacheState ( cacheKey ) ;
const isExactKeyMatch = utils . isExactKeyMatch ( primaryKey , cacheKey ) ;
utils . setCacheHitOutput ( isExactKeyMatch ) ;
core . info ( ` Cache restored from key: ${ cacheKey } ` ) ;
}
catch ( error ) {
if ( error . name === cache . ValidationError . name ) {
throw error ;
}
else {
utils . logWarning ( error . message ) ;
utils . setCacheHitOutput ( false ) ;
}
}
}
catch ( error ) {
core . setFailed ( error . message ) ;
}
} ) ;
}
run ( ) ;
exports . default = run ;
/***/ } ) ,
/***/ 794 :
/***/ ( function ( module ) {
module . exports = require ( "stream" ) ;
/***/ } ) ,
/***/ 826 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var rng = _ _webpack _require _ _ ( 139 ) ;
var bytesToUuid = _ _webpack _require _ _ ( 722 ) ;
function v4 ( options , buf , offset ) {
var i = buf && offset || 0 ;
if ( typeof ( options ) == 'string' ) {
buf = options === 'binary' ? new Array ( 16 ) : null ;
options = null ;
}
options = options || { } ;
var rnds = options . random || ( options . rng || rng ) ( ) ;
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = ( rnds [ 6 ] & 0x0f ) | 0x40 ;
rnds [ 8 ] = ( rnds [ 8 ] & 0x3f ) | 0x80 ;
// Copy bytes to buffer, if provided
if ( buf ) {
for ( var ii = 0 ; ii < 16 ; ++ ii ) {
buf [ i + ii ] = rnds [ ii ] ;
}
}
return buf || bytesToUuid ( rnds ) ;
}
module . exports = v4 ;
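// Illustrative sketch (editor's addition): v4() fills 16 random bytes, stamps the
// version (0x40) and variant (0x80) bits, and formats them via bytesToUuid.
function exampleUuidV4() {
    return v4(); // e.g. '110ec58a-a0f2-4ac4-8393-c866d813b8d1' (random each call)
}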
/***/ } ) ,
/***/ 835 :
/***/ ( function ( module ) {
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 896 :
/***/ ( function ( module ) {
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
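// Illustrative sketch (editor's addition): this module is a concat-map helper;
// array results from fn are flattened one level into the output.
function exampleConcatMap() {
    return module.exports(['a b', 'c'], function (s) { return s.split(' '); });
    // -> ['a', 'b', 'c']
}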
/***/ } ) ,
/***/ 898 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var v1 = _ _webpack _require _ _ ( 86 ) ;
var v4 = _ _webpack _require _ _ ( 826 ) ;
var uuid = v4 ;
uuid . v1 = v1 ;
uuid . v4 = v4 ;
module . exports = uuid ;
2020-03-20 21:02:11 +01:00
/***/ } ) ,
/***/ 923 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert = _ _webpack _require _ _ ( 357 ) ;
const os = _ _webpack _require _ _ ( 87 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const pathHelper = _ _webpack _require _ _ ( 972 ) ;
const minimatch _1 = _ _webpack _require _ _ ( 93 ) ;
const internal _match _kind _1 = _ _webpack _require _ _ ( 327 ) ;
const internal _path _1 = _ _webpack _require _ _ ( 383 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class Pattern {
constructor ( patternOrNegate , segments ) {
/ * *
* Indicates whether matches should be excluded from the result set
* /
this . negate = false ;
// Pattern overload
let pattern ;
if ( typeof patternOrNegate === 'string' ) {
pattern = patternOrNegate . trim ( ) ;
}
// Segments overload
else {
// Convert to pattern
segments = segments || [ ] ;
assert ( segments . length , ` Parameter 'segments' must not be empty ` ) ;
const root = Pattern . getLiteral ( segments [ 0 ] ) ;
assert ( root && pathHelper . hasAbsoluteRoot ( root ) , ` Parameter 'segments' first element must be a root path ` ) ;
pattern = new internal _path _1 . Path ( segments ) . toString ( ) . trim ( ) ;
if ( patternOrNegate ) {
pattern = ` ! ${ pattern } ` ;
}
}
// Negate
while ( pattern . startsWith ( '!' ) ) {
this . negate = ! this . negate ;
pattern = pattern . substr ( 1 ) . trim ( ) ;
}
// Normalize slashes and ensures absolute root
pattern = Pattern . fixupPattern ( pattern ) ;
// Segments
this . segments = new internal _path _1 . Path ( pattern ) . segments ;
// Trailing slash indicates the pattern should only match directories, not regular files
this . trailingSeparator = pathHelper
. normalizeSeparators ( pattern )
. endsWith ( path . sep ) ;
pattern = pathHelper . safeTrimTrailingSeparator ( pattern ) ;
// Search path (literal path prior to the first glob segment)
let foundGlob = false ;
const searchSegments = this . segments
. map ( x => Pattern . getLiteral ( x ) )
. filter ( x => ! foundGlob && ! ( foundGlob = x === '' ) ) ;
this . searchPath = new internal _path _1 . Path ( searchSegments ) . toString ( ) ;
// Root RegExp (required when determining partial match)
this . rootRegExp = new RegExp ( Pattern . regExpEscape ( searchSegments [ 0 ] ) , IS _WINDOWS ? 'i' : '' ) ;
// Create minimatch
const minimatchOptions = {
dot : true ,
nobrace : true ,
nocase : IS _WINDOWS ,
nocomment : true ,
noext : true ,
nonegate : true
} ;
pattern = IS _WINDOWS ? pattern . replace ( /\\/g , '/' ) : pattern ;
this . minimatch = new minimatch _1 . Minimatch ( pattern , minimatchOptions ) ;
}
/ * *
* Matches the pattern against the specified path
* /
match ( itemPath ) {
// Last segment is globstar?
if ( this . segments [ this . segments . length - 1 ] === '**' ) {
// Normalize slashes
itemPath = pathHelper . normalizeSeparators ( itemPath ) ;
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
if ( ! itemPath . endsWith ( path . sep ) ) {
// Note, this is safe because the constructor ensures the pattern has an absolute root.
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
itemPath = ` ${ itemPath } ${ path . sep } ` ;
}
}
else {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
}
// Match
if ( this . minimatch . match ( itemPath ) ) {
return this . trailingSeparator ? internal _match _kind _1 . MatchKind . Directory : internal _match _kind _1 . MatchKind . All ;
}
return internal _match _kind _1 . MatchKind . None ;
}
/ * *
* Indicates whether the pattern may match descendants of the specified path
* /
partialMatch ( itemPath ) {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// matchOne does not handle root path correctly
if ( pathHelper . dirname ( itemPath ) === itemPath ) {
return this . rootRegExp . test ( itemPath ) ;
}
return this . minimatch . matchOne ( itemPath . split ( IS _WINDOWS ? /\\+/ : /\/+/ ) , this . minimatch . set [ 0 ] , true ) ;
}
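/*
 * partialMatch sketch (illustrative; the paths are assumptions): answers "could a descendant
 * of this directory still match?", which lets a traversal prune subtrees early.
 *
 *   const p = new Pattern('/foo/bar/**');
 *   p.partialMatch('/foo');     // true, something under '/foo/bar' may still match
 *   p.partialMatch('/other');   // false, nothing under '/other' can match
 */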
/ * *
* Escapes glob patterns within a path
* /
static globEscape ( s ) {
return ( IS _WINDOWS ? s : s . replace ( /\\/g , '\\\\' ) ) // escape '\' on Linux/macOS
. replace ( /(\[)(?=[^/]+\])/g , '[[]' ) // escape '[' when ']' follows within the path segment
. replace ( /\?/g , '[?]' ) // escape '?'
. replace ( /\*/g , '[*]' ) ; // escape '*'
}
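/*
 * globEscape sketch (illustrative, Linux/macOS): glob special characters become
 * single-item character sets so minimatch treats them literally.
 *
 *   Pattern.globEscape('/foo/[bar]/b*z?');   // => '/foo/[[]bar]/b[*]z[?]'
 */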
/ * *
* Normalizes slashes and ensures absolute root
* /
static fixupPattern ( pattern ) {
// Empty
assert ( pattern , 'pattern cannot be empty' ) ;
// Must not contain `.` segment, unless first segment
// Must not contain `..` segment
const literalSegments = new internal _path _1 . Path ( pattern ) . segments . map ( x => Pattern . getLiteral ( x ) ) ;
assert ( literalSegments . every ( ( x , i ) => ( x !== '.' || i === 0 ) && x !== '..' ) , ` Invalid pattern ' ${ pattern } '. Relative pathing '.' and '..' is not allowed. ` ) ;
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
assert ( ! pathHelper . hasRoot ( pattern ) || literalSegments [ 0 ] , ` Invalid pattern ' ${ pattern } '. Root segment must not contain globs. ` ) ;
// Normalize slashes
pattern = pathHelper . normalizeSeparators ( pattern ) ;
// Replace leading `.` segment
if ( pattern === '.' || pattern . startsWith ( ` . ${ path . sep } ` ) ) {
pattern = Pattern . globEscape ( process . cwd ( ) ) + pattern . substr ( 1 ) ;
}
// Replace leading `~` segment
else if ( pattern === '~' || pattern . startsWith ( ` ~ ${ path . sep } ` ) ) {
const homedir = os . homedir ( ) ;
assert ( homedir , 'Unable to determine HOME directory' ) ;
assert ( pathHelper . hasAbsoluteRoot ( homedir ) , ` Expected HOME directory to be a rooted path. Actual ' ${ homedir } ' ` ) ;
pattern = Pattern . globEscape ( homedir ) + pattern . substr ( 1 ) ;
}
// Replace relative drive root, e.g. pattern is C: or C:foo
else if ( IS _WINDOWS &&
( pattern . match ( /^[A-Z]:$/i ) || pattern . match ( /^[A-Z]:[^\\]/i ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , pattern . substr ( 0 , 2 ) ) ;
if ( pattern . length > 2 && ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 2 ) ;
}
// Replace relative root, e.g. pattern is \ or \foo
else if ( IS _WINDOWS && ( pattern === '\\' || pattern . match ( /^\\[^\\]/ ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , '\\' ) ;
if ( ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 1 ) ;
}
// Otherwise ensure absolute root
else {
pattern = pathHelper . ensureAbsoluteRoot ( Pattern . globEscape ( process . cwd ( ) ) , pattern ) ;
}
return pathHelper . normalizeSeparators ( pattern ) ;
}
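/*
 * fixupPattern sketch (illustrative; results depend on the current environment):
 *
 *   Pattern.fixupPattern('./hello/**');   // leading '.' replaced with the glob-escaped cwd
 *   Pattern.fixupPattern('~/hello');      // leading '~' replaced with the HOME directory
 *   Pattern.fixupPattern('hello');        // rooted against the cwd
 */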
/ * *
* Attempts to unescape a pattern segment to create a literal path segment .
* Otherwise returns empty string .
* /
static getLiteral ( segment ) {
let literal = '' ;
for ( let i = 0 ; i < segment . length ; i ++ ) {
const c = segment [ i ] ;
// Escape
if ( c === '\\' && ! IS _WINDOWS && i + 1 < segment . length ) {
literal += segment [ ++ i ] ;
continue ;
}
// Wildcard
else if ( c === '*' || c === '?' ) {
return '' ;
}
// Character set
else if ( c === '[' && i + 1 < segment . length ) {
let set = '' ;
let closed = - 1 ;
for ( let i2 = i + 1 ; i2 < segment . length ; i2 ++ ) {
const c2 = segment [ i2 ] ;
// Escape
if ( c2 === '\\' && ! IS _WINDOWS && i2 + 1 < segment . length ) {
set += segment [ ++ i2 ] ;
continue ;
}
// Closed
else if ( c2 === ']' ) {
closed = i2 ;
break ;
}
// Otherwise
else {
set += c2 ;
}
}
// Closed?
if ( closed >= 0 ) {
// Cannot convert
if ( set . length > 1 ) {
return '' ;
}
// Convert to literal
if ( set ) {
literal += set ;
i = closed ;
continue ;
}
}
// Otherwise fall through (unclosed or empty set) and append '[' as a literal
}
// Append
literal += c ;
}
return literal ;
}
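/*
 * getLiteral sketch (illustrative): returns '' when the segment cannot be treated as a
 * literal path component.
 *
 *   Pattern.getLiteral('hello');   // => 'hello'
 *   Pattern.getLiteral('h*llo');   // => ''     (wildcard)
 *   Pattern.getLiteral('[a]bc');   // => 'abc'  (single-character set unwraps)
 *   Pattern.getLiteral('[ab]c');   // => ''     (multi-character set cannot convert)
 */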
/ * *
* Escapes regexp special characters
* https : //javascript.info/regexp-escaping
* /
static regExpEscape ( s ) {
return s . replace ( /[[\\^$.|?*+()]/g , '\\$&' ) ;
}
}
exports . Pattern = Pattern ;
//# sourceMappingURL=internal-pattern.js.map
/***/ } ) ,
/***/ 931 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
var CacheFilename ;
( function ( CacheFilename ) {
CacheFilename [ "Gzip" ] = "cache.tgz" ;
CacheFilename [ "Zstd" ] = "cache.tzst" ;
} ) ( CacheFilename = exports . CacheFilename || ( exports . CacheFilename = { } ) ) ;
var CompressionMethod ;
( function ( CompressionMethod ) {
CompressionMethod [ "Gzip" ] = "gzip" ;
// Long range mode was added to zstd in v1.3.2.
// This value is for earlier versions of zstd that do not support --long.
CompressionMethod [ "ZstdWithoutLong" ] = "zstd-without-long" ;
CompressionMethod [ "Zstd" ] = "zstd" ;
} ) ( CompressionMethod = exports . CompressionMethod || ( exports . CompressionMethod = { } ) ) ;
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports . SocketTimeout = 5000 ;
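/*
 * Usage sketch (illustrative, not part of the original module): a consumer would pick the
 * archive name from the chosen compression method roughly like this.
 *
 *   const filename =
 *       method === CompressionMethod.Gzip ? CacheFilename.Gzip : CacheFilename.Zstd;
 */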
//# sourceMappingURL=constants.js.map
/***/ } ) ,
/***/ 950 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
function getProxyUrl ( reqUrl ) {
let usingSsl = reqUrl . protocol === 'https:' ;
let proxyUrl ;
if ( checkBypass ( reqUrl ) ) {
return proxyUrl ;
}
let proxyVar ;
if ( usingSsl ) {
proxyVar = process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
proxyVar = process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
if ( proxyVar ) {
proxyUrl = url . parse ( proxyVar ) ;
}
return proxyUrl ;
}
exports . getProxyUrl = getProxyUrl ;
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
let noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
let upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( let upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
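/*
 * Usage sketch (illustrative; the environment values and URLs are assumptions):
 *
 *   process.env['https_proxy'] = 'http://127.0.0.1:8080';
 *   process.env['no_proxy'] = 'example.com';
 *   getProxyUrl(url.parse('https://github.com'));    // => parsed URL of the proxy
 *   getProxyUrl(url.parse('https://example.com'));   // => undefined (bypassed via no_proxy)
 *   checkBypass(url.parse('https://example.com'));   // => true
 */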
/***/ } ) ,
/***/ 972 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert = _ _webpack _require _ _ ( 357 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Similar to path.dirname, except it normalizes the path separators and handles Windows UNC paths slightly better.
*
* For example , on Linux / macOS :
* - `/       => /`
* - `/hello  => /`
*
* For example , on Windows :
* - `C:\            => C:\`
* - `C:\hello       => C:\`
* - `C:             => C:`
* - `C:hello        => C:`
* - `\              => \`
* - `\hello         => \`
* - `\\hello        => \\hello`
* - `\\hello\world  => \\hello\world`
* /
function dirname ( p ) {
// Normalize slashes and trim unnecessary trailing slash
p = safeTrimTrailingSeparator ( p ) ;
// Windows UNC root, e.g. \\hello or \\hello\world
if ( IS _WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/ . test ( p ) ) {
return p ;
}
// Get dirname
let result = path . dirname ( p ) ;
// Trim trailing slash for Windows UNC root, e.g. \\hello\world\
if ( IS _WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/ . test ( result ) ) {
result = safeTrimTrailingSeparator ( result ) ;
}
return result ;
}
exports . dirname = dirname ;
/ * *
* Roots the path if not already rooted. On Windows, relative roots like `\`
* or `C:` are expanded based on the current working directory.
* /
function ensureAbsoluteRoot ( root , itemPath ) {
assert ( root , ` ensureAbsoluteRoot parameter 'root' must not be empty ` ) ;
assert ( itemPath , ` ensureAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Already rooted
if ( hasAbsoluteRoot ( itemPath ) ) {
return itemPath ;
}
// Windows
if ( IS _WINDOWS ) {
// Check for itemPath like C: or C:foo
if ( itemPath . match ( /^[A-Z]:[^\\/]|^[A-Z]:$/i ) ) {
let cwd = process . cwd ( ) ;
assert ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
// Drive letter matches cwd? Expand to cwd
if ( itemPath [ 0 ] . toUpperCase ( ) === cwd [ 0 ] . toUpperCase ( ) ) {
// Drive only, e.g. C:
if ( itemPath . length === 2 ) {
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ` ;
}
// Drive + path, e.g. C:foo
else {
if ( ! cwd . endsWith ( '\\' ) ) {
cwd += '\\' ;
}
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ${ itemPath . substr ( 2 ) } ` ;
}
}
// Different drive
else {
return ` ${ itemPath [ 0 ] } : \\ ${ itemPath . substr ( 2 ) } ` ;
}
}
// Check for itemPath like \ or \foo
else if ( normalizeSeparators ( itemPath ) . match ( /^\\$|^\\[^\\]/ ) ) {
const cwd = process . cwd ( ) ;
assert ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
return ` ${ cwd [ 0 ] } : \\ ${ itemPath . substr ( 1 ) } ` ;
}
}
assert ( hasAbsoluteRoot ( root ) , ` ensureAbsoluteRoot parameter 'root' must have an absolute root ` ) ;
// Otherwise ensure root ends with a separator
if ( root . endsWith ( '/' ) || ( IS _WINDOWS && root . endsWith ( '\\' ) ) ) {
// Intentionally empty
}
else {
// Append separator
root += path . sep ;
}
return root + itemPath ;
}
exports . ensureAbsoluteRoot = ensureAbsoluteRoot ;
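/*
 * ensureAbsoluteRoot sketch (illustrative; the Windows examples assume the current working
 * directory is C:\work, which is an assumption):
 *
 *   ensureAbsoluteRoot('/root', 'hello');       // => '/root/hello'      (Linux/macOS)
 *   ensureAbsoluteRoot('C:\\work', 'hello');    // => 'C:\\work\\hello'  (Windows)
 *   ensureAbsoluteRoot('C:\\work', 'D:stuff');  // => 'D:\\stuff'        (different drive)
 *   ensureAbsoluteRoot('C:\\work', '\\temp');   // => 'C:\\temp'         (relative root expanded)
 */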
/ * *
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
* `\\hello\share` and `C:\hello` (and using alternate separator).
* /
function hasAbsoluteRoot ( itemPath ) {
assert ( itemPath , ` hasAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \\hello\share or C:\hello
return itemPath . startsWith ( '\\\\' ) || /^[A-Z]:\\/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasAbsoluteRoot = hasAbsoluteRoot ;
/ * *
* On Linux/macOS, true if path starts with `/`. On Windows, true for paths like:
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
* /
function hasRoot ( itemPath ) {
assert ( itemPath , ` hasRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \ or \hello or \\hello
// E.g. C: or C:\hello
return itemPath . startsWith ( '\\' ) || /^[A-Z]:/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasRoot = hasRoot ;
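/*
 * Root-check sketch (illustrative): hasAbsoluteRoot requires a fully qualified root;
 * hasRoot also accepts relative roots such as 'C:' or a leading backslash on Windows.
 *
 *   hasAbsoluteRoot('/hello');      // true on Linux/macOS
 *   hasAbsoluteRoot('C:\\hello');   // true on Windows
 *   hasAbsoluteRoot('C:');          // false (relative drive root)
 *   hasRoot('C:');                  // true on Windows
 *   hasRoot('\\hello');             // true on Windows
 */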
/ * *
* Removes redundant slashes and converts `/` to `\` on Windows
* /
function normalizeSeparators ( p ) {
p = p || '' ;
// Windows
if ( IS _WINDOWS ) {
// Convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// Remove redundant slashes
const isUnc = /^\\\\+[^\\]/ . test ( p ) ; // e.g. \\hello
return ( isUnc ? '\\' : '' ) + p . replace ( /\\\\+/g , '\\' ) ; // preserve leading \\ for UNC
}
// Remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
exports . normalizeSeparators = normalizeSeparators ;
/ * *
* Normalizes the path separators and trims the trailing separator (when safe).
* For example, `/foo/ => /foo` but `/ => /`
* /
function safeTrimTrailingSeparator ( p ) {
// Short-circuit if empty
if ( ! p ) {
return '' ;
}
// Normalize separators
p = normalizeSeparators ( p ) ;
// No trailing slash
if ( ! p . endsWith ( path . sep ) ) {
return p ;
}
// Check '/' on Linux/macOS and '\' on Windows
if ( p === path . sep ) {
return p ;
}
// On Windows check if drive root. E.g. C:\
if ( IS _WINDOWS && /^[A-Z]:\\$/i . test ( p ) ) {
return p ;
}
// Otherwise trim trailing slash
return p . substr ( 0 , p . length - 1 ) ;
}
exports . safeTrimTrailingSeparator = safeTrimTrailingSeparator ;
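/*
 * Separator-helper sketch (illustrative; the example paths are assumptions):
 *
 *   normalizeSeparators('//hello///world/');   // => '/hello/world/'     (Linux/macOS)
 *   normalizeSeparators('C:/hello//world');    // => 'C:\\hello\\world'  (Windows)
 *   safeTrimTrailingSeparator('/hello/');      // => '/hello'   (Linux/macOS)
 *   safeTrimTrailingSeparator('/');            // => '/'        (root is preserved)
 *   safeTrimTrailingSeparator('C:\\');         // => 'C:\\'     (Windows drive root is preserved)
 */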
//# sourceMappingURL=internal-path-helper.js.map
/***/ } ) ,
/***/ 986 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const tr = _ _importStar ( _ _webpack _require _ _ ( 9 ) ) ;
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
function exec ( commandLine , args , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commandArgs = tr . argStringToArray ( commandLine ) ;
if ( commandArgs . length === 0 ) {
throw new Error ( ` Parameter 'commandLine' cannot be null or empty. ` ) ;
}
// Path to tool to execute should be first arg
const toolPath = commandArgs [ 0 ] ;
args = commandArgs . slice ( 1 ) . concat ( args || [ ] ) ;
const runner = new tr . ToolRunner ( toolPath , args , options ) ;
return runner . exec ( ) ;
} ) ;
}
exports . exec = exec ;
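/*
 * Usage sketch (illustrative; the command, arguments, and options are assumptions and must
 * run inside an async function):
 *
 *   const exitCode = await exec('git', ['status', '--short'], { cwd: process.cwd() });
 */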
//# sourceMappingURL=exec.js.map
/***/ } )
/******/ } ) ;