module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ "use strict";
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ __webpack_require__.ab = __dirname + "/";
/******/
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(778);
/******/ };
/******/
/******/ // run startup
/******/ return startup();
/******/ })
/************************************************************************/
/******/ ({
/***/ 1:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict" ;
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const childProcess = __webpack_require__(129);
const path = __webpack_require__(622);
const util_1 = __webpack_require__(669);
const ioUtil = __webpack_require__(672);
const exec = util_1.promisify(childProcess.exec);
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See CopyOptions.
 */
function cp ( source , dest , options = { } ) {
return __awaiter(this, void 0, void 0, function* () {
const { force , recursive } = readCopyOptions ( options ) ;
const destStat = ( yield ioUtil . exists ( dest ) ) ? yield ioUtil . stat ( dest ) : null ;
// Dest is an existing file, but not forcing
if ( destStat && destStat . isFile ( ) && ! force ) {
return ;
}
// If dest is an existing directory, should copy inside.
const newDest = destStat && destStat . isDirectory ( )
? path . join ( dest , path . basename ( source ) )
: dest ;
if ( ! ( yield ioUtil . exists ( source ) ) ) {
throw new Error(`no such file or directory: ${source}`);
}
const sourceStat = yield ioUtil . stat ( source ) ;
if ( sourceStat . isDirectory ( ) ) {
if ( ! recursive ) {
throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
}
else {
yield cpDirRecursive ( source , newDest , 0 , force ) ;
}
}
else {
if ( path . relative ( source , newDest ) === '' ) {
// a file cannot be copied to itself
throw new Error(`'${newDest}' and '${source}' are the same file`);
}
yield copyFile ( source , newDest , force ) ;
}
} ) ;
}
exports . cp = cp ;
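// Example (illustrative sketch, not part of the bundled code): cp above is the @actions/io
// cp API, so a consumer of the un-bundled package could write something like:
//   const io = require('@actions/io');
//   await io.cp('./config', './build/config', { recursive: true, force: false });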
/**
 * Moves a path.
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See MoveOptions.
 */
function mv(source, dest, options = {}) {
return __awaiter(this, void 0, void 0, function* () {
if ( yield ioUtil . exists ( dest ) ) {
let destExists = true ;
if ( yield ioUtil . isDirectory ( dest ) ) {
// If dest is directory copy src into dest
dest = path . join ( dest , path . basename ( source ) ) ;
destExists = yield ioUtil . exists ( dest ) ;
}
if ( destExists ) {
if ( options . force == null || options . force ) {
yield rmRF ( dest ) ;
}
else {
throw new Error ( 'Destination already exists' ) ;
}
}
}
yield mkdirP ( path . dirname ( dest ) ) ;
yield ioUtil . rename ( source , dest ) ;
} ) ;
}
exports . mv = mv ;
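// Example (illustrative sketch): mv follows the same @actions/io surface as cp.
//   await io.mv('./artifact.tgz', './archive/artifact.tgz', { force: true });
// With force omitted (or true), an existing destination is removed first via rmRF.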
/**
 * Remove a path recursively with force
 *
 * @param inputPath path to remove
 */
function rmRF(inputPath) {
return __awaiter(this, void 0, void 0, function* () {
if (ioUtil.IS_WINDOWS) {
// Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
// program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
try {
if ( yield ioUtil . isDirectory ( inputPath , true ) ) {
yield exec(`rd /s /q "${inputPath}"`);
}
else {
yield exec(`del /f /a "${inputPath}"`);
}
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
// Shelling out fails to remove a symlink folder with missing source, this unlink catches that
try {
yield ioUtil . unlink ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
}
}
else {
let isDir = false ;
try {
isDir = yield ioUtil . isDirectory ( inputPath ) ;
}
catch ( err ) {
// if you try to delete a file that doesn't exist, desired result is achieved
// other errors are valid
if ( err . code !== 'ENOENT' )
throw err ;
return ;
}
if ( isDir ) {
yield exec(`rm -rf "${inputPath}"`);
}
else {
yield ioUtil . unlink ( inputPath ) ;
}
}
} ) ;
}
exports . rmRF = rmRF ;
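// Example (illustrative sketch): rmRF shells out to rd/del on Windows and rm -rf elsewhere,
// and tolerates paths that are already gone.
//   await io.rmRF('./node_modules/.cache');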
/**
 * Make a directory.  Creates the full path with folders in between
 * Will throw if it fails
 *
 * @param   fsPath        path to create
 * @returns Promise<void>
 */
function mkdirP(fsPath) {
return __awaiter(this, void 0, void 0, function* () {
yield ioUtil . mkdirP ( fsPath ) ;
} ) ;
}
exports . mkdirP = mkdirP ;
/**
 * Returns path of a tool had the tool actually been invoked.  Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param     tool              name of the tool
 * @param     check             whether to check if tool exists
 * @returns   Promise<string>   path to tool
 */
function which(tool, check) {
return __awaiter(this, void 0, void 0, function* () {
if ( ! tool ) {
throw new Error ( "parameter 'tool' is required" ) ;
}
// recursive when check=true
if ( check ) {
const result = yield which ( tool , false ) ;
if ( ! result ) {
if (ioUtil.IS_WINDOWS) {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
}
else {
throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
}
}
}
try {
// build the list of extensions to try
const extensions = [ ] ;
if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
for ( const extension of process . env . PATHEXT . split ( path . delimiter ) ) {
if ( extension ) {
extensions . push ( extension ) ;
}
}
}
// if it's rooted, return it if exists. otherwise return empty.
if ( ioUtil . isRooted ( tool ) ) {
const filePath = yield ioUtil . tryGetExecutablePath ( tool , extensions ) ;
if ( filePath ) {
return filePath ;
}
return '' ;
}
// if any path separators, return empty
if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
return '' ;
}
// build the list of directories
//
// Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
// it feels like we should not do this. Checking the current directory seems like more of a use
// case of a shell, and the which() function exposed by the toolkit should strive for consistency
// across platforms.
const directories = [ ] ;
if ( process . env . PATH ) {
for ( const p of process . env . PATH . split ( path . delimiter ) ) {
if ( p ) {
directories . push ( p ) ;
}
}
}
// return the first match
for ( const directory of directories ) {
const filePath = yield ioUtil . tryGetExecutablePath ( directory + path . sep + tool , extensions ) ;
if ( filePath ) {
return filePath ;
}
}
return '' ;
}
catch ( err ) {
throw new Error(`which failed with message ${err.message}`);
}
} ) ;
}
exports . which = which ;
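// Example (illustrative sketch): which resolves a tool against PATH (and PATHEXT on Windows).
//   const gitPath = await io.which('git', true); // throws if git cannot be located
//   const maybeNode = await io.which('node');    // returns '' instead of throwing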
function readCopyOptions ( options ) {
const force = options . force == null ? true : options . force ;
const recursive = Boolean ( options . recursive ) ;
return { force , recursive } ;
}
function cpDirRecursive ( sourceDir , destDir , currentDepth , force ) {
return __awaiter(this, void 0, void 0, function* () {
// Ensure there is not a run away recursive copy
if ( currentDepth >= 255 )
return ;
currentDepth ++ ;
yield mkdirP ( destDir ) ;
const files = yield ioUtil . readdir ( sourceDir ) ;
for ( const fileName of files ) {
const srcFile = `${sourceDir}/${fileName}`;
const destFile = `${destDir}/${fileName}`;
const srcFileStat = yield ioUtil . lstat ( srcFile ) ;
if ( srcFileStat . isDirectory ( ) ) {
// Recurse
yield cpDirRecursive ( srcFile , destFile , currentDepth , force ) ;
}
else {
yield copyFile ( srcFile , destFile , force ) ;
}
}
// Change the mode for the newly created directory
yield ioUtil . chmod ( destDir , ( yield ioUtil . stat ( sourceDir ) ) . mode ) ;
} ) ;
}
// Buffered file copy
function copyFile ( srcFile , destFile , force ) {
return __awaiter(this, void 0, void 0, function* () {
if ( ( yield ioUtil . lstat ( srcFile ) ) . isSymbolicLink ( ) ) {
// unlink/re-link it
try {
yield ioUtil . lstat ( destFile ) ;
yield ioUtil . unlink ( destFile ) ;
}
catch ( e ) {
// Try to override file permission
if ( e . code === 'EPERM' ) {
yield ioUtil . chmod ( destFile , '0666' ) ;
yield ioUtil . unlink ( destFile ) ;
}
// other errors = it doesn't exist, no work to do
}
// Copy over symlink
const symlinkFull = yield ioUtil . readlink ( srcFile ) ;
yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
}
else if ( ! ( yield ioUtil . exists ( destFile ) ) || force ) {
yield ioUtil . copyFile ( srcFile , destFile ) ;
}
} ) ;
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
/***/ 9 :
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __webpack_require__(87);
const events = __webpack_require__(614);
const child = __webpack_require__(129);
/* eslint-disable @typescript-eslint/unbound-method */
const IS_WINDOWS = process.platform === 'win32';
/*
 * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way.
 */
class ToolRunner extends events . EventEmitter {
constructor ( toolPath , args , options ) {
super ( ) ;
if ( ! toolPath ) {
throw new Error ( "Parameter 'toolPath' cannot be null or empty." ) ;
}
this . toolPath = toolPath ;
this . args = args || [ ] ;
this . options = options || { } ;
}
_debug ( message ) {
if ( this . options . listeners && this . options . listeners . debug ) {
this . options . listeners . debug ( message ) ;
}
}
_getCommandString ( options , noPrefix ) {
const toolPath = this . _getSpawnFileName ( ) ;
const args = this . _getSpawnArgs ( options ) ;
let cmd = noPrefix ? '' : '[command]' ; // omit prefix when piped to a second tool
if (IS_WINDOWS) {
// Windows + cmd file
if ( this . _isCmdFile ( ) ) {
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows + verbatim
else if ( options . windowsVerbatimArguments ) {
cmd += `"${toolPath}"`;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows (regular)
else {
cmd += this . _windowsQuoteCmdArg ( toolPath ) ;
for ( const a of args ) {
cmd += ` ${ this . _windowsQuoteCmdArg ( a ) } ` ;
}
}
}
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different than Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
return cmd ;
}
_processLineBuffer ( data , strBuffer , onLine ) {
try {
let s = strBuffer + data . toString ( ) ;
let n = s . indexOf ( os . EOL ) ;
while ( n > - 1 ) {
const line = s . substring ( 0 , n ) ;
onLine ( line ) ;
// the rest of the string ...
s = s . substring ( n + os . EOL . length ) ;
n = s . indexOf ( os . EOL ) ;
}
strBuffer = s ;
}
catch ( err ) {
// streaming lines to console is best effort. Don't fail a build.
this . _debug ( ` error processing line. Failed with error ${ err } ` ) ;
}
}
_getSpawnFileName ( ) {
if (IS_WINDOWS) {
if ( this . _isCmdFile ( ) ) {
return process . env [ 'COMSPEC' ] || 'cmd.exe' ;
}
}
return this . toolPath ;
}
_getSpawnArgs ( options ) {
if (IS_WINDOWS) {
if ( this . _isCmdFile ( ) ) {
let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`;
for ( const a of this . args ) {
argline += ' ' ;
argline += options . windowsVerbatimArguments
? a
: this . _windowsQuoteCmdArg ( a ) ;
}
argline += '"' ;
return [ argline ] ;
}
}
return this . args ;
}
_endsWith ( str , end ) {
return str . endsWith ( end ) ;
}
_isCmdFile ( ) {
const upperToolPath = this . toolPath . toUpperCase ( ) ;
return ( this . _endsWith ( upperToolPath , '.CMD' ) ||
this . _endsWith ( upperToolPath , '.BAT' ) ) ;
}
_windowsQuoteCmdArg ( arg ) {
// for .exe, apply the normal quoting rules that libuv applies
if ( ! this . _isCmdFile ( ) ) {
return this . _uvQuoteCmdArg ( arg ) ;
}
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if ( ! arg ) {
return '""' ;
}
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ' ,
'\t' ,
'&' ,
'(' ,
')' ,
'[' ,
']' ,
'{' ,
'}' ,
'^' ,
'=' ,
';' ,
'!' ,
"'" ,
'+' ,
',' ,
'`' ,
'~' ,
'|' ,
'<' ,
'>' ,
'"'
] ;
let needsQuotes = false ;
for ( const char of arg ) {
if ( cmdSpecialChars . some ( x => x === char ) ) {
needsQuotes = true ;
break ;
}
}
// short-circuit if quotes not needed
if ( ! needsQuotes ) {
return arg ;
}
// the following quoting rules are very similar to the rules that libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ; // double the slash
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '"' ; // double the quote
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_uvQuoteCmdArg ( arg ) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if ( ! arg ) {
// Need double quotation for empty argument
return '""' ;
}
if ( ! arg . includes ( ' ' ) && ! arg . includes ( '\t' ) && ! arg . includes ( '"' ) ) {
// No quotation needed
return arg ;
}
if ( ! arg . includes ( '"' ) && ! arg . includes ( '\\' ) ) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return ` " ${ arg } " ` ;
}
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ;
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '\\' ;
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_cloneExecOptions ( options ) {
options = options || { } ;
const result = {
cwd : options . cwd || process . cwd ( ) ,
env : options . env || process . env ,
silent : options . silent || false ,
windowsVerbatimArguments : options . windowsVerbatimArguments || false ,
failOnStdErr : options . failOnStdErr || false ,
ignoreReturnCode : options . ignoreReturnCode || false ,
delay : options . delay || 10000
} ;
result . outStream = options . outStream || process . stdout ;
result . errStream = options . errStream || process . stderr ;
return result ;
}
_getSpawnOptions ( options , toolPath ) {
options = options || { } ;
const result = { } ;
result . cwd = options . cwd ;
result . env = options . env ;
result [ 'windowsVerbatimArguments' ] =
options . windowsVerbatimArguments || this . _isCmdFile ( ) ;
if ( options . windowsVerbatimArguments ) {
result.argv0 = `"${toolPath}"`;
}
return result ;
}
/**
 * Exec a tool.
 * Output will be streamed to the live console.
 * Returns promise with return code
 *
 * @param     tool     path to tool to exec
 * @param     options  optional exec options.  See ExecOptions
 * @returns   number
 */
exec() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise ( ( resolve , reject ) => {
this . _debug ( ` exec tool: ${ this . toolPath } ` ) ;
this . _debug ( 'arguments:' ) ;
for ( const arg of this . args ) {
this . _debug ( ` ${ arg } ` ) ;
}
const optionsNonNull = this . _cloneExecOptions ( this . options ) ;
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( this . _getCommandString ( optionsNonNull ) + os . EOL ) ;
}
const state = new ExecState ( optionsNonNull , this . toolPath ) ;
state . on ( 'debug' , ( message ) => {
this . _debug ( message ) ;
} ) ;
const fileName = this . _getSpawnFileName ( ) ;
const cp = child . spawn ( fileName , this . _getSpawnArgs ( optionsNonNull ) , this . _getSpawnOptions ( this . options , fileName ) ) ;
const stdbuffer = '' ;
if ( cp . stdout ) {
cp . stdout . on ( 'data' , ( data ) => {
if ( this . options . listeners && this . options . listeners . stdout ) {
this . options . listeners . stdout ( data ) ;
}
if ( ! optionsNonNull . silent && optionsNonNull . outStream ) {
optionsNonNull . outStream . write ( data ) ;
}
this . _processLineBuffer ( data , stdbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . stdline ) {
this . options . listeners . stdline ( line ) ;
}
} ) ;
} ) ;
}
const errbuffer = '' ;
if ( cp . stderr ) {
cp . stderr . on ( 'data' , ( data ) => {
state . processStderr = true ;
if ( this . options . listeners && this . options . listeners . stderr ) {
this . options . listeners . stderr ( data ) ;
}
if ( ! optionsNonNull . silent &&
optionsNonNull . errStream &&
optionsNonNull . outStream ) {
const s = optionsNonNull . failOnStdErr
? optionsNonNull . errStream
: optionsNonNull . outStream ;
s . write ( data ) ;
}
this . _processLineBuffer ( data , errbuffer , ( line ) => {
if ( this . options . listeners && this . options . listeners . errline ) {
this . options . listeners . errline ( line ) ;
}
} ) ;
} ) ;
}
cp . on ( 'error' , ( err ) => {
state . processError = err . message ;
state . processExited = true ;
state . processClosed = true ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'exit' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
this . _debug ( ` Exit code ${ code } received from tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
cp . on ( 'close' , ( code ) => {
state . processExitCode = code ;
state . processExited = true ;
state . processClosed = true ;
this . _debug ( ` STDIO streams have closed for tool ' ${ this . toolPath } ' ` ) ;
state . CheckComplete ( ) ;
} ) ;
state . on ( 'done' , ( error , exitCode ) => {
if ( stdbuffer . length > 0 ) {
this . emit ( 'stdline' , stdbuffer ) ;
}
if ( errbuffer . length > 0 ) {
this . emit ( 'errline' , errbuffer ) ;
}
cp . removeAllListeners ( ) ;
if ( error ) {
reject ( error ) ;
}
else {
resolve ( exitCode ) ;
}
} ) ;
} ) ;
} ) ;
}
}
exports . ToolRunner = ToolRunner ;
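// Example (illustrative sketch): ToolRunner takes a tool path, an args array and options,
// and exec() resolves with the exit code once the process and its stdio have closed.
//   const runner = new ToolRunner('git', ['status', '--porcelain'], { silent: true });
//   const exitCode = await runner.exec();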
/**
 * Convert an arg string to an array of args. Handles escaping
 *
 * @param    argString   string of arguments
 * @returns  string[]    array of arguments
 */
function argStringToArray ( argString ) {
const args = [ ] ;
let inQuotes = false ;
let escaped = false ;
let arg = '' ;
function append ( c ) {
// we only escape double quotes.
if ( escaped && c !== '"' ) {
arg += '\\' ;
}
arg += c ;
escaped = false ;
}
for ( let i = 0 ; i < argString . length ; i ++ ) {
const c = argString . charAt ( i ) ;
if ( c === '"' ) {
if ( ! escaped ) {
inQuotes = ! inQuotes ;
}
else {
append ( c ) ;
}
continue ;
}
if ( c === '\\' && escaped ) {
append ( c ) ;
continue ;
}
if ( c === '\\' && inQuotes ) {
escaped = true ;
continue ;
}
if ( c === ' ' && ! inQuotes ) {
if ( arg . length > 0 ) {
args . push ( arg ) ;
arg = '' ;
}
continue ;
}
append ( c ) ;
}
if ( arg . length > 0 ) {
args . push ( arg . trim ( ) ) ;
}
return args ;
}
exports . argStringToArray = argStringToArray ;
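// Example (illustrative): double quotes group words and backslashes only escape quotes
// inside a quoted region, so:
//   argStringToArray('node "/path with spaces/app.js" --flag')
//     => ['node', '/path with spaces/app.js', '--flag']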
class ExecState extends events . EventEmitter {
constructor ( options , toolPath ) {
super ( ) ;
this . processClosed = false ; // tracks whether the process has exited and stdio is closed
this . processError = '' ;
this . processExitCode = 0 ;
this . processExited = false ; // tracks whether the process has exited
this . processStderr = false ; // tracks whether stderr was written to
this . delay = 10000 ; // 10 seconds
this . done = false ;
this . timeout = null ;
if ( ! toolPath ) {
throw new Error ( 'toolPath must not be empty' ) ;
}
this . options = options ;
this . toolPath = toolPath ;
if ( options . delay ) {
this . delay = options . delay ;
}
}
CheckComplete ( ) {
if ( this . done ) {
return ;
}
if ( this . processClosed ) {
this . _setResult ( ) ;
}
else if ( this . processExited ) {
this . timeout = setTimeout ( ExecState . HandleTimeout , this . delay , this ) ;
}
}
_debug ( message ) {
this . emit ( 'debug' , message ) ;
}
_setResult ( ) {
// determine whether there is an error
let error ;
if ( this . processExited ) {
if ( this . processError ) {
error = new Error ( ` There was an error when attempting to execute the process ' ${ this . toolPath } '. This may indicate the process failed to start. Error: ${ this . processError } ` ) ;
}
else if ( this . processExitCode !== 0 && ! this . options . ignoreReturnCode ) {
error = new Error ( ` The process ' ${ this . toolPath } ' failed with exit code ${ this . processExitCode } ` ) ;
}
else if ( this . processStderr && this . options . failOnStdErr ) {
error = new Error ( ` The process ' ${ this . toolPath } ' failed because one or more lines were written to the STDERR stream ` ) ;
}
}
// clear the timeout
if ( this . timeout ) {
clearTimeout ( this . timeout ) ;
this . timeout = null ;
}
this . done = true ;
this . emit ( 'done' , error , this . processExitCode ) ;
}
static HandleTimeout ( state ) {
if ( state . done ) {
return ;
}
if ( ! state . processClosed && state . processExited ) {
const message = `The STDIO streams did not close within ${state.delay /
1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
state . _debug ( message ) ;
}
state . _setResult ( ) ;
}
}
//# sourceMappingURL=toolrunner.js.map
/***/ } ) ,
/***/ 15 :
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict" ;
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var __asyncValues = (this && this.__asyncValues) || function (o) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = __importStar(__webpack_require__(633));
const exec = __importStar(__webpack_require__(986));
const glob = __importStar(__webpack_require__(281));
const io = __importStar(__webpack_require__(1));
const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
const util = __importStar(__webpack_require__(669));
const uuid_1 = __webpack_require__(898);
const constants_1 = __webpack_require__(931);
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory ( ) {
return __awaiter(this, void 0, void 0, function* () {
const IS_WINDOWS = process.platform === 'win32';
let tempDirectory = process . env [ 'RUNNER_TEMP' ] || '' ;
if ( ! tempDirectory ) {
let baseLocation ;
if (IS_WINDOWS) {
// On Windows use the USERPROFILE env variable
baseLocation = process . env [ 'USERPROFILE' ] || 'C:\\' ;
}
else {
if ( process . platform === 'darwin' ) {
baseLocation = '/Users' ;
}
else {
baseLocation = '/home' ;
}
}
tempDirectory = path . join ( baseLocation , 'actions' , 'temp' ) ;
}
const dest = path.join(tempDirectory, uuid_1.v4());
yield io . mkdirP ( dest ) ;
return dest ;
} ) ;
}
exports . createTempDirectory = createTempDirectory ;
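// Example (illustrative sketch): on a GitHub-hosted runner RUNNER_TEMP is set, so this
// typically creates and returns a path like `${process.env.RUNNER_TEMP}/<uuid>`.
//   const scratch = await createTempDirectory();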
function getArchiveFileSizeIsBytes ( filePath ) {
return fs . statSync ( filePath ) . size ;
}
exports . getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes ;
function resolvePaths ( patterns ) {
var e_1, _a;
var _b ;
return __awaiter(this, void 0, void 0, function* () {
const paths = [ ] ;
const workspace = ( _b = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _b !== void 0 ? _b : process . cwd ( ) ;
const globber = yield glob . create ( patterns . join ( '\n' ) , {
implicitDescendants : false
} ) ;
try {
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d . value ;
const relativeFile = path . relative ( workspace , file ) ;
core . debug ( ` Matched: ${ relativeFile } ` ) ;
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if ( _d && ! _d . done && ( _a = _c . return ) ) yield _a . call ( _c ) ;
}
finally { if (e_1) throw e_1.error; }
}
return paths ;
} ) ;
}
exports . resolvePaths = resolvePaths ;
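// Example (illustrative sketch): patterns are globbed against GITHUB_WORKSPACE (or the cwd)
// and the matches come back as workspace-relative paths suitable for tar.
//   const paths = await resolvePaths(['node_modules', '**/*.lock']);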
function unlinkFile ( filePath ) {
return __awaiter(this, void 0, void 0, function* () {
return util . promisify ( fs . unlink ) ( filePath ) ;
} ) ;
}
exports . unlinkFile = unlinkFile ;
function getVersion ( app ) {
return __awaiter(this, void 0, void 0, function* () {
core . debug ( ` Checking ${ app } --version ` ) ;
let versionOutput = '' ;
try {
yield exec.exec(`${app} --version`, [], {
ignoreReturnCode : true ,
silent : true ,
listeners : {
stdout : ( data ) => ( versionOutput += data . toString ( ) ) ,
stderr : ( data ) => ( versionOutput += data . toString ( ) )
}
} ) ;
}
catch ( err ) {
core . debug ( err . message ) ;
}
versionOutput = versionOutput . trim ( ) ;
core . debug ( versionOutput ) ;
return versionOutput ;
} ) ;
}
// Use zstandard if possible to maximize cache performance
function getCompressionMethod ( ) {
return __awaiter(this, void 0, void 0, function* () {
const versionOutput = yield getVersion('zstd');
return versionOutput.toLowerCase().includes('zstd command line interface')
? constants_1.CompressionMethod.Zstd
: constants_1.CompressionMethod.Gzip;
} ) ;
}
exports . getCompressionMethod = getCompressionMethod ;
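// Example (illustrative sketch): callers pick the archive file name from the detected method.
//   const method = await getCompressionMethod();   // zstd when the zstd CLI is installed
//   const archive = getCacheFileName(method);      // e.g. a .tzst vs .tgz cache file name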
function getCacheFileName ( compressionMethod ) {
return compressionMethod === constants_1.CompressionMethod.Zstd
? constants_1.CacheFilename.Zstd
: constants_1.CacheFilename.Gzip;
}
exports . getCacheFileName = getCacheFileName ;
function useGnuTar ( ) {
return __awaiter(this, void 0, void 0, function* () {
const versionOutput = yield getVersion ( 'tar' ) ;
return versionOutput . toLowerCase ( ) . includes ( 'gnu tar' ) ;
} ) ;
}
exports . useGnuTar = useGnuTar ;
//# sourceMappingURL=cacheUtils.js.map
/***/ } ) ,
/***/ 16 :
/***/ ( function ( module ) {
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 86 :
/***/ (function(module, __unusedexports, __webpack_require__) {
var rng = __webpack_require__(139);
var bytesToUuid = __webpack_require__(722);
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
var _nodeId ;
var _clockseq ;
// Previous uuid creation time
var _lastMSecs = 0 ;
var _lastNSecs = 0 ;
// See https://github.com/broofa/node-uuid for API details
function v1 ( options , buf , offset ) {
var i = buf && offset || 0 ;
var b = buf || [ ] ;
options = options || { } ;
var node = options . node || _nodeId ;
var clockseq = options . clockseq !== undefined ? options . clockseq : _clockseq ;
// node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if ( node == null || clockseq == null ) {
var seedBytes = rng ( ) ;
if ( node == null ) {
// Per 4.5, create a 48-bit node id, (47 random bits + multicast bit = 1)
node = _nodeId = [
seedBytes [ 0 ] | 0x01 ,
seedBytes [ 1 ] , seedBytes [ 2 ] , seedBytes [ 3 ] , seedBytes [ 4 ] , seedBytes [ 5 ]
] ;
}
if ( clockseq == null ) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = ( seedBytes [ 6 ] << 8 | seedBytes [ 7 ] ) & 0x3fff ;
}
}
// UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
var msecs = options . msecs !== undefined ? options . msecs : new Date ( ) . getTime ( ) ;
// Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
var nsecs = options . nsecs !== undefined ? options . nsecs : _lastNSecs + 1 ;
// Time since last uuid creation (in msecs)
var dt = ( msecs - _lastMSecs ) + ( nsecs - _lastNSecs ) / 10000 ;
// Per 4.2.1.2, Bump clockseq on clock regression
if ( dt < 0 && options . clockseq === undefined ) {
clockseq = clockseq + 1 & 0x3fff ;
}
// Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ( ( dt < 0 || msecs > _lastMSecs ) && options . nsecs === undefined ) {
nsecs = 0 ;
}
// Per 4.2.1.2 Throw error if too many uuids are requested
if ( nsecs >= 10000 ) {
throw new Error ( 'uuid.v1(): Can\'t create more than 10M uuids/sec' ) ;
}
_lastMSecs = msecs ;
_lastNSecs = nsecs ;
_clockseq = clockseq ;
// Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000 ;
// `time_low`
var tl = ( ( msecs & 0xfffffff ) * 10000 + nsecs ) % 0x100000000 ;
b [ i ++ ] = tl >>> 24 & 0xff ;
b [ i ++ ] = tl >>> 16 & 0xff ;
b [ i ++ ] = tl >>> 8 & 0xff ;
b [ i ++ ] = tl & 0xff ;
// `time_mid`
var tmh = ( msecs / 0x100000000 * 10000 ) & 0xfffffff ;
b [ i ++ ] = tmh >>> 8 & 0xff ;
b [ i ++ ] = tmh & 0xff ;
// `time_high_and_version`
b [ i ++ ] = tmh >>> 24 & 0xf | 0x10 ; // include version
b [ i ++ ] = tmh >>> 16 & 0xff ;
// `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b [ i ++ ] = clockseq >>> 8 | 0x80 ;
// `clock_seq_low`
b [ i ++ ] = clockseq & 0xff ;
// `node`
for ( var n = 0 ; n < 6 ; ++ n ) {
b [ i + n ] = node [ n ] ;
}
return buf ? buf : bytesToUuid ( b ) ;
}
module . exports = v1 ;
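// Example (illustrative sketch): v1() returns a time-based UUID string unless a buffer is
// supplied, in which case the 16 bytes are written into it.
//   v1()   // => e.g. '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' (illustrative value; version nibble is 1)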
/***/ } ) ,
/***/ 87 :
/***/ ( function ( module ) {
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 93 :
/***/ (function(module, __unusedexports, __webpack_require__) {
module . exports = minimatch
minimatch . Minimatch = Minimatch
var path = { sep : '/' }
try {
path = __webpack_require__(622)
} catch ( er ) { }
var GLOBSTAR = minimatch . GLOBSTAR = Minimatch . GLOBSTAR = { }
var expand = __webpack_require__(306)
var plTypes = {
'!' : { open : '(?:(?!(?:' , close : '))[^/]*?)' } ,
'?' : { open : '(?:' , close : ')?' } ,
'+' : { open : '(?:' , close : ')+' } ,
'*' : { open : '(?:' , close : ')*' } ,
'@' : { open : '(?:' , close : ')' }
}
// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'
// * => any number of characters
var star = qmark + '*?'
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
// characters that need to be escaped in RegExp.
var reSpecials = charSet ( '().*{}+?[]^$\\!' )
// "abc" -> { a:true, b:true, c:true }
function charSet ( s ) {
return s . split ( '' ) . reduce ( function ( set , c ) {
set [ c ] = true
return set
} , { } )
}
// normalizes slashes.
var slashSplit = /\/+/
minimatch . filter = filter
function filter ( pattern , options ) {
options = options || { }
return function ( p , i , list ) {
return minimatch ( p , pattern , options )
}
}
function ext ( a , b ) {
a = a || { }
b = b || { }
var t = { }
Object . keys ( b ) . forEach ( function ( k ) {
t [ k ] = b [ k ]
} )
Object . keys ( a ) . forEach ( function ( k ) {
t [ k ] = a [ k ]
} )
return t
}
minimatch . defaults = function ( def ) {
if ( ! def || ! Object . keys ( def ) . length ) return minimatch
var orig = minimatch
var m = function minimatch ( p , pattern , options ) {
return orig . minimatch ( p , pattern , ext ( def , options ) )
}
m . Minimatch = function Minimatch ( pattern , options ) {
return new orig . Minimatch ( pattern , ext ( def , options ) )
}
return m
}
Minimatch . defaults = function ( def ) {
if ( ! def || ! Object . keys ( def ) . length ) return Minimatch
return minimatch . defaults ( def ) . Minimatch
}
function minimatch ( p , pattern , options ) {
if ( typeof pattern !== 'string' ) {
throw new TypeError ( 'glob pattern string required' )
}
if ( ! options ) options = { }
// shortcut: comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
return false
}
// "" only matches ""
if ( pattern . trim ( ) === '' ) return p === ''
return new Minimatch ( pattern , options ) . match ( p )
}
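// Example (illustrative sketch):
//   minimatch('bar.foo', '*.foo')               // => true
//   minimatch('bar.foo', '*.bar')               // => false
//   minimatch('.hidden', '*.*', { dot: true })  // leading-dot files only match when options.dot is set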
function Minimatch ( pattern , options ) {
if ( ! ( this instanceof Minimatch ) ) {
return new Minimatch ( pattern , options )
}
if ( typeof pattern !== 'string' ) {
throw new TypeError ( 'glob pattern string required' )
}
if ( ! options ) options = { }
pattern = pattern . trim ( )
// windows support: need to use /, not \
if ( path . sep !== '/' ) {
pattern = pattern . split ( path . sep ) . join ( '/' )
}
this . options = options
this . set = [ ]
this . pattern = pattern
this . regexp = null
this . negate = false
this . comment = false
this . empty = false
// make the set of regexps etc.
this . make ( )
}
Minimatch . prototype . debug = function ( ) { }
Minimatch . prototype . make = make
function make ( ) {
// don't do it more than once.
if ( this . _made ) return
var pattern = this . pattern
var options = this . options
// empty patterns and comments match nothing.
if ( ! options . nocomment && pattern . charAt ( 0 ) === '#' ) {
this . comment = true
return
}
if ( ! pattern ) {
this . empty = true
return
}
// step 1: figure out negation, etc.
this . parseNegate ( )
// step 2: expand braces
var set = this . globSet = this . braceExpand ( )
if ( options . debug ) this . debug = console . error
this . debug ( this . pattern , set )
// step 3: now we have a set, so turn each one into a series of path-portion
// matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
set = this . globParts = set . map ( function ( s ) {
return s . split ( slashSplit )
} )
this . debug ( this . pattern , set )
// glob --> regexps
set = set . map ( function ( s , si , set ) {
return s . map ( this . parse , this )
} , this )
this . debug ( this . pattern , set )
// filter out everything that didn't compile properly.
set = set . filter ( function ( s ) {
return s . indexOf ( false ) === - 1
} )
this . debug ( this . pattern , set )
this . set = set
}
Minimatch . prototype . parseNegate = parseNegate
function parseNegate ( ) {
var pattern = this . pattern
var negate = false
var options = this . options
var negateOffset = 0
if ( options . nonegate ) return
for ( var i = 0 , l = pattern . length
; i < l && pattern . charAt ( i ) === '!'
; i ++ ) {
negate = ! negate
negateOffset ++
}
if ( negateOffset ) this . pattern = pattern . substr ( negateOffset )
this . negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch . braceExpand = function ( pattern , options ) {
return braceExpand ( pattern , options )
}
Minimatch . prototype . braceExpand = braceExpand
function braceExpand ( pattern , options ) {
if ( ! options ) {
if ( this instanceof Minimatch ) {
options = this . options
} else {
options = { }
}
}
pattern = typeof pattern === 'undefined'
? this . pattern : pattern
if ( typeof pattern === 'undefined' ) {
throw new TypeError ( 'undefined pattern' )
}
if ( options . nobrace ||
! pattern . match ( /\{.*\}/ ) ) {
// shortcut. no need to expand.
return [ pattern ]
}
return expand ( pattern )
}
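// Example (illustrative sketch): brace expansion happens before any glob parsing, e.g.
//   minimatch.braceExpand('a{b,c}d')   // => ['abd', 'acd']
// Patterns without braces (or with nobrace set) are returned untouched as a one-element array.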
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch . prototype . parse = parse
var SUBPARSE = { }
function parse ( pattern , isSub ) {
if ( pattern . length > 1024 * 64 ) {
throw new TypeError ( 'pattern is too long' )
}
var options = this . options
// shortcuts
if ( ! options . noglobstar && pattern === '**' ) return GLOBSTAR
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
case '/' :
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
}
if ( ! stateChar ) {
re += '\\('
continue
}
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
}
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
}
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
}
clearStateChar ( )
re += '|'
continue
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
if ( inClass ) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if ( inClass ) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '.' :
case '[' :
case '(' : addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
if ( addPatternStart ) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
}
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
regExp . _glob = pattern
regExp . _src = re
return regExp
}
minimatch . makeRe = function ( pattern , options ) {
return new Minimatch ( pattern , options || { } ) . makeRe ( )
}
Minimatch . prototype . makeRe = makeRe
function makeRe ( ) {
if ( this . regexp || this . regexp === false ) return this . regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
var set = this . set
if ( ! set . length ) {
this . regexp = false
return this . regexp
}
var options = this . options
var twoStar = options . noglobstar ? star
: options . dot ? twoStarDot
: twoStarNoDot
var flags = options . nocase ? 'i' : ''
var re = set . map ( function ( pattern ) {
return pattern . map ( function ( p ) {
return ( p === GLOBSTAR ) ? twoStar
: ( typeof p === 'string' ) ? regExpEscape ( p )
: p . _src
} ) . join ( '\\\/' )
} ) . join ( '|' )
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
if ( this . negate ) re = '^(?!' + re + ').*$'
try {
this . regexp = new RegExp ( re , flags )
} catch ( ex ) {
this . regexp = false
}
return this . regexp
}
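// Example (illustrative sketch): makeRe compiles the whole pattern into a single RegExp,
// or returns false when the pattern cannot compile.
//   new Minimatch('*.js').makeRe()   // => a RegExp anchored to the full name, e.g. /^(?:...)$/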
minimatch . match = function ( list , pattern , options ) {
options = options || { }
var mm = new Minimatch ( pattern , options )
list = list . filter ( function ( f ) {
return mm . match ( f )
} )
if ( mm . options . nonull && ! list . length ) {
list . push ( pattern )
}
return list
}
Minimatch . prototype . match = match
function match ( f , partial ) {
this . debug ( 'match' , f , this . pattern )
// short-circuit in the case of busted things.
// comments, etc.
if ( this . comment ) return false
if ( this . empty ) return f === ''
if ( f === '/' && partial ) return true
var options = this . options
// windows: need to use /, not \
if ( path . sep !== '/' ) {
f = f . split ( path . sep ) . join ( '/' )
}
// treat the test path as a set of pathparts.
f = f . split ( slashSplit )
this . debug ( this . pattern , 'split' , f )
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
var set = this . set
this . debug ( this . pattern , 'set' , set )
// Find the basename of the path by looking for the last non-empty segment
var filename
var i
for ( i = f . length - 1 ; i >= 0 ; i -- ) {
filename = f [ i ]
if ( filename ) break
}
for ( i = 0 ; i < set . length ; i ++ ) {
var pattern = set [ i ]
var file = f
if ( options . matchBase && pattern . length === 1 ) {
file = [ filename ]
}
var hit = this . matchOne ( file , pattern , partial )
if ( hit ) {
if ( options . flipNegate ) return true
return ! this . negate
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if ( options . flipNegate ) return false
return this . negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch . prototype . matchOne = function ( file , pattern , partial ) {
var options = this . options
this . debug ( 'matchOne' ,
{ 'this' : this , file : file , pattern : pattern } )
this . debug ( 'matchOne' , file . length , pattern . length )
for ( var fi = 0 ,
pi = 0 ,
fl = file . length ,
pl = pattern . length
; ( fi < fl ) && ( pi < pl )
; fi ++ , pi ++ ) {
this . debug ( 'matchOne loop' )
var p = pattern [ pi ]
var f = file [ fi ]
this . debug ( pattern , p , f )
// should be impossible.
// some invalid regexp stuff in the set.
if ( p === false ) return false
if ( p === GLOBSTAR ) {
this . debug ( 'GLOBSTAR' , [ pattern , p , f ] )
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
var pr = pi + 1
if ( pr === pl ) {
this . debug ( '** at the end' )
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for ( ; fi < fl ; fi ++ ) {
if ( file [ fi ] === '.' || file [ fi ] === '..' ||
( ! options . dot && file [ fi ] . charAt ( 0 ) === '.' ) ) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while ( fr < fl ) {
var swallowee = file [ fr ]
this . debug ( '\nglobstar while' , file , fr , pattern , pr , swallowee )
// XXX remove this slice. Just pass the start index.
if ( this . matchOne ( file . slice ( fr ) , pattern . slice ( pr ) , partial ) ) {
this . debug ( 'globstar found match!' , fr , fl , swallowee )
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if ( swallowee === '.' || swallowee === '..' ||
( ! options . dot && swallowee . charAt ( 0 ) === '.' ) ) {
this . debug ( 'dot detected!' , file , fr , pattern , pr )
break
}
// ** swallows a segment, and continue.
this . debug ( 'globstar swallow a segment, and continue' )
fr ++
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
if ( partial ) {
// ran out of file
this . debug ( '\n>>> no match, partial?' , file , fr , pattern , pr )
if ( fr === fl ) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
if ( typeof p === 'string' ) {
if ( options . nocase ) {
hit = f . toLowerCase ( ) === p . toLowerCase ( )
} else {
hit = f === p
}
this . debug ( 'string match' , p , f , hit )
} else {
hit = f . match ( p )
this . debug ( 'pattern match' , p , f , hit )
}
if ( ! hit ) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match a
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if ( fi === fl && pi === pl ) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if ( fi === fl ) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else if ( pi === pl ) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
var emptyFileEnd = ( fi === fl - 1 ) && ( file [ fi ] === '' )
return emptyFileEnd
}
// should be unreachable.
throw new Error ( 'wtf?' )
}
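// Illustrative sketch (not invoked by the bundle): with partial=true, match
// only asks whether the path prefix could still grow into a full match, e.g.
// '/a/b' against the start of '/*/b/*/d'.
function examplePartialMatch () {
  var mm = new Minimatch('/*/b/*/d', {})
  return mm.match('/a/b', true) // => true; mm.match('/a/b') without partial => false
}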
// replace stuff like \* with *
function globUnescape ( s ) {
return s . replace ( /\\(.)/g , '$1' )
}
function regExpEscape ( s ) {
return s . replace ( /[-[\]{}()*+?.,\\^$|#\s]/g , '\\$&' )
}
/***/ } ) ,
/***/ 114 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 633 ) ) ;
const http _client _1 = _ _webpack _require _ _ ( 539 ) ;
const auth _1 = _ _webpack _require _ _ ( 226 ) ;
const crypto = _ _importStar ( _ _webpack _require _ _ ( 417 ) ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const stream = _ _importStar ( _ _webpack _require _ _ ( 794 ) ) ;
const util = _ _importStar ( _ _webpack _require _ _ ( 669 ) ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 15 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 931 ) ;
const versionSalt = '1.0' ;
function isSuccessStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
return statusCode >= 200 && statusCode < 300 ;
}
function isServerErrorStatusCode ( statusCode ) {
if ( ! statusCode ) {
return true ;
}
return statusCode >= 500 ;
}
function isRetryableStatusCode ( statusCode ) {
if ( ! statusCode ) {
return false ;
}
const retryableStatusCodes = [
http _client _1 . HttpCodes . BadGateway ,
http _client _1 . HttpCodes . ServiceUnavailable ,
http _client _1 . HttpCodes . GatewayTimeout
] ;
return retryableStatusCodes . includes ( statusCode ) ;
}
function getCacheApiUrl ( resource ) {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = ( process . env [ 'ACTIONS_CACHE_URL' ] ||
process . env [ 'ACTIONS_RUNTIME_URL' ] ||
'' ) . replace ( 'pipelines' , 'artifactcache' ) ;
if ( ! baseUrl ) {
throw new Error ( 'Cache Service Url not found, unable to restore cache.' ) ;
}
const url = ` ${ baseUrl } _apis/artifactcache/ ${ resource } ` ;
core . debug ( ` Resource Url: ${ url } ` ) ;
return url ;
}
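// Worked example (hypothetical runtime URL, sketch only): with
// ACTIONS_RUNTIME_URL = 'https://pipelines.actions.example.com/Abc/', baseUrl
// becomes 'https://artifactcache.actions.example.com/Abc/' and the call below
// resolves to 'https://artifactcache.actions.example.com/Abc/_apis/artifactcache/caches'.
function exampleCacheApiUrl () {
  return getCacheApiUrl('caches') // requires ACTIONS_CACHE_URL or ACTIONS_RUNTIME_URL to be set
}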
function createAcceptHeader ( type , apiVersion ) {
return ` ${ type } ;api-version= ${ apiVersion } ` ;
}
function getRequestOptions ( ) {
const requestOptions = {
headers : {
Accept : createAcceptHeader ( 'application/json' , '6.0-preview.1' )
}
} ;
return requestOptions ;
}
function createHttpClient ( ) {
const token = process . env [ 'ACTIONS_RUNTIME_TOKEN' ] || '' ;
const bearerCredentialHandler = new auth _1 . BearerCredentialHandler ( token ) ;
return new http _client _1 . HttpClient ( 'actions/cache' , [ bearerCredentialHandler ] , getRequestOptions ( ) ) ;
}
function getCacheVersion ( paths , compressionMethod ) {
const components = paths . concat ( compressionMethod === constants _1 . CompressionMethod . Zstd ? [ compressionMethod ] : [ ] ) ;
// Add salt to cache version to support breaking changes in cache entry
components . push ( versionSalt ) ;
return crypto
. createHash ( 'sha256' )
. update ( components . join ( '|' ) )
. digest ( 'hex' ) ;
}
exports . getCacheVersion = getCacheVersion ;
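// Hedged usage sketch (hypothetical path): the version is a SHA-256 over the
// '|'-joined components, e.g. 'node_modules|zstd|1.0' when zstd is used and
// 'node_modules|1.0' otherwise.
function exampleCacheVersion () {
  return getCacheVersion(['node_modules'], constants_1.CompressionMethod.Zstd)
}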
function retry ( name , method , getStatusCode , maxAttempts = 2 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let response = undefined ;
let statusCode = undefined ;
let isRetryable = false ;
let errorMessage = '' ;
let attempt = 1 ;
while ( attempt <= maxAttempts ) {
try {
response = yield method ( ) ;
statusCode = getStatusCode ( response ) ;
if ( ! isServerErrorStatusCode ( statusCode ) ) {
return response ;
}
isRetryable = isRetryableStatusCode ( statusCode ) ;
errorMessage = ` Cache service responded with ${ statusCode } ` ;
}
catch ( error ) {
isRetryable = true ;
errorMessage = error . message ;
}
core . debug ( ` ${ name } - Attempt ${ attempt } of ${ maxAttempts } failed with error: ${ errorMessage } ` ) ;
if ( ! isRetryable ) {
core . debug ( ` ${ name } - Error is not retryable ` ) ;
break ;
}
attempt ++ ;
}
throw Error ( ` ${ name } failed: ${ errorMessage } ` ) ;
} ) ;
}
exports . retry = retry ;
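// Minimal usage sketch (illustrative; mirrors the helpers above): retry a
// cache API call up to the default two attempts, retrying on thrown errors
// and 5xx responses.
function exampleRetry () {
  const httpClient = createHttpClient()
  return retry(
    'listCaches', // label used only in debug output
    () => httpClient.getJson(getCacheApiUrl('caches')),
    (response) => response.statusCode
  )
}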
function retryTypedResponse ( name , method , maxAttempts = 2 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield retry ( name , method , ( response ) => response . statusCode , maxAttempts ) ;
} ) ;
}
exports . retryTypedResponse = retryTypedResponse ;
function retryHttpClientResponse ( name , method , maxAttempts = 2 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield retry ( name , method , ( response ) => response . message . statusCode , maxAttempts ) ;
} ) ;
}
exports . retryHttpClientResponse = retryHttpClientResponse ;
function getCacheEntry ( keys , paths , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpClient = createHttpClient ( ) ;
const version = getCacheVersion ( paths , options === null || options === void 0 ? void 0 : options . compressionMethod ) ;
const resource = ` cache?keys= ${ encodeURIComponent ( keys . join ( ',' ) ) } &version= ${ version } ` ;
const response = yield retryTypedResponse ( 'getCacheEntry' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return httpClient . getJson ( getCacheApiUrl ( resource ) ) ; } ) ) ;
if ( response . statusCode === 204 ) {
return null ;
}
if ( ! isSuccessStatusCode ( response . statusCode ) ) {
throw new Error ( ` Cache service responded with ${ response . statusCode } ` ) ;
}
const cacheResult = response . result ;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult . archiveLocation ;
if ( ! cacheDownloadUrl ) {
throw new Error ( 'Cache not found.' ) ;
}
core . setSecret ( cacheDownloadUrl ) ;
core . debug ( ` Cache Result: ` ) ;
core . debug ( JSON . stringify ( cacheResult ) ) ;
return cacheResult ;
} ) ;
}
exports . getCacheEntry = getCacheEntry ;
function pipeResponseToStream ( response , output ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const pipeline = util . promisify ( stream . pipeline ) ;
yield pipeline ( response . message , output ) ;
} ) ;
}
function downloadCache ( archiveLocation , archivePath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const writeStream = fs . createWriteStream ( archivePath ) ;
const httpClient = new http _client _1 . HttpClient ( 'actions/cache' ) ;
const downloadResponse = yield retryHttpClientResponse ( 'downloadCache' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) { return httpClient . get ( archiveLocation ) ; } ) ) ;
// Abort download if no traffic received over the socket.
downloadResponse . message . socket . setTimeout ( constants _1 . SocketTimeout , ( ) => {
downloadResponse . message . destroy ( ) ;
core . debug ( ` Aborting download, socket timed out after ${ constants _1 . SocketTimeout } ms ` ) ;
} ) ;
yield pipeResponseToStream ( downloadResponse , writeStream ) ;
// Validate download size.
const contentLengthHeader = downloadResponse . message . headers [ 'content-length' ] ;
if ( contentLengthHeader ) {
const expectedLength = parseInt ( contentLengthHeader ) ;
const actualLength = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
if ( actualLength !== expectedLength ) {
throw new Error ( ` Incomplete download. Expected file size: ${ expectedLength } , actual file size: ${ actualLength } ` ) ;
}
}
else {
core . debug ( 'Unable to validate download, no Content-Length header' ) ;
}
} ) ;
}
exports . downloadCache = downloadCache ;
// Reserve Cache
function reserveCache ( key , paths , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpClient = createHttpClient ( ) ;
const version = getCacheVersion ( paths , options === null || options === void 0 ? void 0 : options . compressionMethod ) ;
const reserveCacheRequest = {
key ,
version
} ;
const response = yield retryTypedResponse ( 'reserveCache' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return httpClient . postJson ( getCacheApiUrl ( 'caches' ) , reserveCacheRequest ) ;
} ) ) ;
return ( _b = ( _a = response === null || response === void 0 ? void 0 : response . result ) === null || _a === void 0 ? void 0 : _a . cacheId ) !== null && _b !== void 0 ? _b : - 1 ;
} ) ;
}
exports . reserveCache = reserveCache ;
function getContentRange ( start , end ) {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return ` bytes ${ start } - ${ end } /* ` ;
}
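// Quick illustration: a 1 MiB chunk starting at byte 0 produces the header
// value 'bytes 0-1048575/*'.
function exampleContentRange () {
  return getContentRange(0, 1024 * 1024 - 1) // => 'bytes 0-1048575/*'
}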
function uploadChunk ( httpClient , resourceUrl , openStream , start , end ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
core . debug ( ` Uploading chunk of size ${ end - start + 1 } bytes at offset ${ start } with content range: ${ getContentRange ( start , end ) } ` ) ;
const additionalHeaders = {
'Content-Type' : 'application/octet-stream' ,
'Content-Range' : getContentRange ( start , end )
} ;
yield retryHttpClientResponse ( ` uploadChunk (start: ${ start } , end: ${ end } ) ` , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return httpClient . sendStream ( 'PATCH' , resourceUrl , openStream ( ) , additionalHeaders ) ;
} ) ) ;
} ) ;
}
function uploadFile ( httpClient , cacheId , archivePath , options ) {
var _a , _b ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Upload Chunks
const fileSize = fs . statSync ( archivePath ) . size ;
const resourceUrl = getCacheApiUrl ( ` caches/ ${ cacheId . toString ( ) } ` ) ;
const fd = fs . openSync ( archivePath , 'r' ) ;
const concurrency = ( _a = options === null || options === void 0 ? void 0 : options . uploadConcurrency ) !== null && _a !== void 0 ? _a : 4 ; // # of HTTP requests in parallel
const MAX _CHUNK _SIZE = ( _b = options === null || options === void 0 ? void 0 : options . uploadChunkSize ) !== null && _b !== void 0 ? _b : 32 * 1024 * 1024 ; // 32 MB Chunks
core . debug ( ` Concurrency: ${ concurrency } and Chunk Size: ${ MAX _CHUNK _SIZE } ` ) ;
const parallelUploads = [ ... new Array ( concurrency ) . keys ( ) ] ;
core . debug ( 'Awaiting all uploads' ) ;
let offset = 0 ;
try {
yield Promise . all ( parallelUploads . map ( ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
while ( offset < fileSize ) {
const chunkSize = Math . min ( fileSize - offset , MAX _CHUNK _SIZE ) ;
const start = offset ;
const end = offset + chunkSize - 1 ;
offset += MAX _CHUNK _SIZE ;
yield uploadChunk ( httpClient , resourceUrl , ( ) => fs
. createReadStream ( archivePath , {
fd ,
start ,
end ,
autoClose : false
} )
. on ( 'error' , error => {
throw new Error ( ` Cache upload failed because file read failed with ${ error . message } ` ) ;
} ) , start , end ) ;
}
} ) ) ) ;
}
finally {
fs . closeSync ( fd ) ;
}
return ;
} ) ;
}
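// Illustrative sketch of the chunking arithmetic above (not invoked): each
// concurrent worker claims the next MAX_CHUNK_SIZE window of the file until
// every byte is covered, e.g. a 70 MiB file with 32 MiB chunks yields the
// ranges 0-33554431, 33554432-67108863 and 67108864-73400319.
function exampleChunkRanges (fileSize, chunkSize) {
  const ranges = []
  for (let offset = 0; offset < fileSize; offset += chunkSize) {
    ranges.push({ start: offset, end: Math.min(offset + chunkSize, fileSize) - 1 })
  }
  return ranges
}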
function commitCache ( httpClient , cacheId , filesize ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commitCacheRequest = { size : filesize } ;
return yield retryTypedResponse ( 'commitCache' , ( ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return httpClient . postJson ( getCacheApiUrl ( ` caches/ ${ cacheId . toString ( ) } ` ) , commitCacheRequest ) ;
} ) ) ;
} ) ;
}
function saveCache ( cacheId , archivePath , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const httpClient = createHttpClient ( ) ;
core . debug ( 'Upload cache' ) ;
yield uploadFile ( httpClient , cacheId , archivePath , options ) ;
// Commit Cache
core . debug ( 'Committing cache' ) ;
const cacheSize = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
const commitCacheResponse = yield commitCache ( httpClient , cacheId , cacheSize ) ;
if ( ! isSuccessStatusCode ( commitCacheResponse . statusCode ) ) {
throw new Error ( ` Cache service responded with ${ commitCacheResponse . statusCode } during commit cache. ` ) ;
}
core . info ( 'Cache saved successfully' ) ;
} ) ;
}
exports . saveCache = saveCache ;
//# sourceMappingURL=cacheHttpClient.js.map
/***/ } ) ,
/***/ 129 :
/***/ ( function ( module ) {
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 139 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = _ _webpack _require _ _ ( 417 ) ;
module . exports = function nodeRNG ( ) {
return crypto . randomBytes ( 16 ) ;
} ;
/***/ } ) ,
/***/ 141 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var net = _ _webpack _require _ _ ( 631 ) ;
var tls = _ _webpack _require _ _ ( 16 ) ;
var http = _ _webpack _require _ _ ( 605 ) ;
var https = _ _webpack _require _ _ ( 211 ) ;
var events = _ _webpack _require _ _ ( 614 ) ;
var assert = _ _webpack _require _ _ ( 357 ) ;
var util = _ _webpack _require _ _ ( 669 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
TunnelingAgent . prototype . createSocket = function createSocket ( options , cb ) {
var self = this ;
var placeholder = { } ;
self . sockets . push ( placeholder ) ;
var connectOptions = mergeOptions ( { } , self . proxyOptions , {
method : 'CONNECT' ,
path : options . host + ':' + options . port ,
agent : false ,
headers : {
host : options . host + ':' + options . port
}
} ) ;
if ( options . localAddress ) {
connectOptions . localAddress = options . localAddress ;
}
if ( connectOptions . proxyAuth ) {
connectOptions . headers = connectOptions . headers || { } ;
connectOptions . headers [ 'Proxy-Authorization' ] = 'Basic ' +
new Buffer ( connectOptions . proxyAuth ) . toString ( 'base64' ) ;
}
debug ( 'making CONNECT request' ) ;
var connectReq = self . request ( connectOptions ) ;
connectReq . useChunkedEncodingByDefault = false ; // for v0.6
connectReq . once ( 'response' , onResponse ) ; // for v0.6
connectReq . once ( 'upgrade' , onUpgrade ) ; // for v0.6
connectReq . once ( 'connect' , onConnect ) ; // for v0.7 or later
connectReq . once ( 'error' , onError ) ;
connectReq . end ( ) ;
function onResponse ( res ) {
// Very hacky. This is necessary to avoid http-parser leaks.
res . upgrade = true ;
}
function onUpgrade ( res , socket , head ) {
// Hacky.
process . nextTick ( function ( ) {
onConnect ( res , socket , head ) ;
} ) ;
}
function onConnect ( res , socket , head ) {
connectReq . removeAllListeners ( ) ;
socket . removeAllListeners ( ) ;
if ( res . statusCode !== 200 ) {
debug ( 'tunneling socket could not be established, statusCode=%d' ,
res . statusCode ) ;
socket . destroy ( ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'statusCode=' + res . statusCode ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
if ( head . length > 0 ) {
debug ( 'got illegal response body from proxy' ) ;
socket . destroy ( ) ;
var error = new Error ( 'got illegal response body from proxy' ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
return ;
}
debug ( 'tunneling connection has established' ) ;
self . sockets [ self . sockets . indexOf ( placeholder ) ] = socket ;
return cb ( socket ) ;
}
function onError ( cause ) {
connectReq . removeAllListeners ( ) ;
debug ( 'tunneling socket could not be established, cause=%s\n' ,
cause . message , cause . stack ) ;
var error = new Error ( 'tunneling socket could not be established, ' +
'cause=' + cause . message ) ;
error . code = 'ECONNRESET' ;
options . request . emit ( 'error' , error ) ;
self . removeSocket ( placeholder ) ;
}
} ;
TunnelingAgent . prototype . removeSocket = function removeSocket ( socket ) {
var pos = this . sockets . indexOf ( socket )
if ( pos === - 1 ) {
return ;
}
this . sockets . splice ( pos , 1 ) ;
var pending = this . requests . shift ( ) ;
if ( pending ) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this . createSocket ( pending , function ( socket ) {
pending . request . onSocket ( socket ) ;
} ) ;
}
} ;
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is a dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
function toOptions ( host , port , localAddress ) {
if ( typeof host === 'string' ) { // since v0.10
return {
host : host ,
port : port ,
localAddress : localAddress
} ;
}
return host ; // for v0.11 or later
}
function mergeOptions ( target ) {
for ( var i = 1 , len = arguments . length ; i < len ; ++ i ) {
var overrides = arguments [ i ] ;
if ( typeof overrides === 'object' ) {
var keys = Object . keys ( overrides ) ;
for ( var j = 0 , keyLen = keys . length ; j < keyLen ; ++ j ) {
var k = keys [ j ] ;
if ( overrides [ k ] !== undefined ) {
target [ k ] = overrides [ k ] ;
}
}
}
}
return target ;
}
var debug ;
if ( process . env . NODE _DEBUG && /\btunnel\b/ . test ( process . env . NODE _DEBUG ) ) {
debug = function ( ) {
var args = Array . prototype . slice . call ( arguments ) ;
if ( typeof args [ 0 ] === 'string' ) {
args [ 0 ] = 'TUNNEL: ' + args [ 0 ] ;
} else {
args . unshift ( 'TUNNEL:' ) ;
}
console . error . apply ( console , args ) ;
}
} else {
debug = function ( ) { } ;
}
exports . debug = debug ; // for test
/***/ } ) ,
/***/ 211 :
/***/ ( function ( module ) {
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 226 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
class BasicCredentialHandler {
constructor ( username , password ) {
this . username = username ;
this . password = password ;
}
prepareRequest ( options ) {
options . headers [ 'Authorization' ] =
'Basic ' +
Buffer . from ( this . username + ':' + this . password ) . toString ( 'base64' ) ;
}
// This handler cannot handle 401
canHandleAuthentication ( response ) {
return false ;
}
handleAuthentication ( httpClient , requestInfo , objs ) {
return null ;
}
}
exports . BasicCredentialHandler = BasicCredentialHandler ;
class BearerCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
options . headers [ 'Authorization' ] = 'Bearer ' + this . token ;
}
// This handler cannot handle 401
canHandleAuthentication ( response ) {
return false ;
}
handleAuthentication ( httpClient , requestInfo , objs ) {
return null ;
}
}
exports . BearerCredentialHandler = BearerCredentialHandler ;
class PersonalAccessTokenCredentialHandler {
constructor ( token ) {
this . token = token ;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest ( options ) {
options . headers [ 'Authorization' ] =
'Basic ' + Buffer . from ( 'PAT:' + this . token ) . toString ( 'base64' ) ;
}
// This handler cannot handle 401
canHandleAuthentication ( response ) {
return false ;
}
handleAuthentication ( httpClient , requestInfo , objs ) {
return null ;
}
}
exports . PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler ;
/***/ } ) ,
/***/ 281 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const internal _globber _1 = _ _webpack _require _ _ ( 297 ) ;
/**
 * Constructs a globber
 *
 * @param patterns Patterns separated by newlines
 * @param options Glob options
 */
function create ( patterns , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield internal _globber _1 . DefaultGlobber . create ( patterns , options ) ;
} ) ;
}
exports . create = create ;
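// Minimal usage sketch (hypothetical patterns, not invoked): build a globber
// and collect the matched paths.
async function exampleGlob () {
  const globber = await create('**/*.js\n!node_modules/**')
  return await globber.glob()
}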
//# sourceMappingURL=glob.js.map
/***/ } ) ,
/***/ 297 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _asyncValues = ( this && this . _ _asyncValues ) || function ( o ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var m = o [ Symbol . asyncIterator ] , i ;
return m ? m . call ( o ) : ( o = typeof _ _values === "function" ? _ _values ( o ) : o [ Symbol . iterator ] ( ) , i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ) ;
function verb ( n ) { i [ n ] = o [ n ] && function ( v ) { return new Promise ( function ( resolve , reject ) { v = o [ n ] ( v ) , settle ( resolve , reject , v . done , v . value ) ; } ) ; } ; }
function settle ( resolve , reject , d , v ) { Promise . resolve ( v ) . then ( function ( v ) { resolve ( { value : v , done : d } ) ; } , reject ) ; }
} ;
var _ _await = ( this && this . _ _await ) || function ( v ) { return this instanceof _ _await ? ( this . v = v , this ) : new _ _await ( v ) ; }
var _ _asyncGenerator = ( this && this . _ _asyncGenerator ) || function ( thisArg , _arguments , generator ) {
if ( ! Symbol . asyncIterator ) throw new TypeError ( "Symbol.asyncIterator is not defined." ) ;
var g = generator . apply ( thisArg , _arguments || [ ] ) , i , q = [ ] ;
return i = { } , verb ( "next" ) , verb ( "throw" ) , verb ( "return" ) , i [ Symbol . asyncIterator ] = function ( ) { return this ; } , i ;
function verb ( n ) { if ( g [ n ] ) i [ n ] = function ( v ) { return new Promise ( function ( a , b ) { q . push ( [ n , v , a , b ] ) > 1 || resume ( n , v ) ; } ) ; } ; }
function resume ( n , v ) { try { step ( g [ n ] ( v ) ) ; } catch ( e ) { settle ( q [ 0 ] [ 3 ] , e ) ; } }
function step ( r ) { r . value instanceof _ _await ? Promise . resolve ( r . value . v ) . then ( fulfill , reject ) : settle ( q [ 0 ] [ 2 ] , r ) ; }
function fulfill ( value ) { resume ( "next" , value ) ; }
function reject ( value ) { resume ( "throw" , value ) ; }
function settle ( f , v ) { if ( f ( v ) , q . shift ( ) , q . length ) resume ( q [ 0 ] [ 0 ] , q [ 0 ] [ 1 ] ) ; }
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _webpack _require _ _ ( 470 ) ;
const fs = _ _webpack _require _ _ ( 747 ) ;
const globOptionsHelper = _ _webpack _require _ _ ( 601 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const patternHelper = _ _webpack _require _ _ ( 597 ) ;
const internal _match _kind _1 = _ _webpack _require _ _ ( 327 ) ;
const internal _pattern _1 = _ _webpack _require _ _ ( 923 ) ;
const internal _search _state _1 = _ _webpack _require _ _ ( 728 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class DefaultGlobber {
constructor ( options ) {
this . patterns = [ ] ;
this . searchPaths = [ ] ;
this . options = globOptionsHelper . getOptions ( options ) ;
}
getSearchPaths ( ) {
// Return a copy
return this . searchPaths . slice ( ) ;
}
glob ( ) {
var e _1 , _a ;
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = [ ] ;
try {
for ( var _b = _ _asyncValues ( this . globGenerator ( ) ) , _c ; _c = yield _b . next ( ) , ! _c . done ; ) {
const itemPath = _c . value ;
result . push ( itemPath ) ;
}
}
catch ( e _1 _1 ) { e _1 = { error : e _1 _1 } ; }
finally {
try {
if ( _c && ! _c . done && ( _a = _b . return ) ) yield _a . call ( _b ) ;
}
finally { if ( e _1 ) throw e _1 . error ; }
}
return result ;
} ) ;
}
globGenerator ( ) {
return _ _asyncGenerator ( this , arguments , function * globGenerator _1 ( ) {
// Fill in default options
const options = globOptionsHelper . getOptions ( this . options ) ;
// Implicit descendants?
const patterns = [ ] ;
for ( const pattern of this . patterns ) {
patterns . push ( pattern ) ;
if ( options . implicitDescendants &&
( pattern . trailingSeparator ||
pattern . segments [ pattern . segments . length - 1 ] !== '**' ) ) {
patterns . push ( new internal _pattern _1 . Pattern ( pattern . negate , pattern . segments . concat ( '**' ) ) ) ;
}
}
// Push the search paths
const stack = [ ] ;
for ( const searchPath of patternHelper . getSearchPaths ( patterns ) ) {
core . debug ( ` Search path ' ${ searchPath } ' ` ) ;
// Exists?
try {
// Intentionally using lstat. Detection for broken symlink
// will be performed later (if following symlinks).
yield _ _await ( fs . promises . lstat ( searchPath ) ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
continue ;
}
throw err ;
}
stack . unshift ( new internal _search _state _1 . SearchState ( searchPath , 1 ) ) ;
}
// Search
const traversalChain = [ ] ; // used to detect cycles
while ( stack . length ) {
// Pop
const item = stack . pop ( ) ;
// Match?
const match = patternHelper . match ( patterns , item . path ) ;
const partialMatch = ! ! match || patternHelper . partialMatch ( patterns , item . path ) ;
if ( ! match && ! partialMatch ) {
continue ;
}
// Stat
const stats = yield _ _await ( DefaultGlobber . stat ( item , options , traversalChain )
// Broken symlink, or symlink cycle detected, or no longer exists
) ;
// Broken symlink, or symlink cycle detected, or no longer exists
if ( ! stats ) {
continue ;
}
// Directory
if ( stats . isDirectory ( ) ) {
// Matched
if ( match & internal _match _kind _1 . MatchKind . Directory ) {
yield yield _ _await ( item . path ) ;
}
// Descend?
else if ( ! partialMatch ) {
continue ;
}
// Push the child items in reverse
const childLevel = item . level + 1 ;
const childItems = ( yield _ _await ( fs . promises . readdir ( item . path ) ) ) . map ( x => new internal _search _state _1 . SearchState ( path . join ( item . path , x ) , childLevel ) ) ;
stack . push ( ... childItems . reverse ( ) ) ;
}
// File
else if ( match & internal _match _kind _1 . MatchKind . File ) {
yield yield _ _await ( item . path ) ;
}
}
} ) ;
}
/**
 * Constructs a DefaultGlobber
 */
static create ( patterns , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const result = new DefaultGlobber ( options ) ;
if ( IS _WINDOWS ) {
patterns = patterns . replace ( /\r\n/g , '\n' ) ;
patterns = patterns . replace ( /\r/g , '\n' ) ;
}
const lines = patterns . split ( '\n' ) . map ( x => x . trim ( ) ) ;
for ( const line of lines ) {
// Empty or comment
if ( ! line || line . startsWith ( '#' ) ) {
continue ;
}
// Pattern
else {
result . patterns . push ( new internal _pattern _1 . Pattern ( line ) ) ;
}
}
result . searchPaths . push ( ... patternHelper . getSearchPaths ( result . patterns ) ) ;
return result ;
} ) ;
}
static stat ( item , options , traversalChain ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Note:
// `stat` returns info about the target of a symlink (or symlink chain)
// `lstat` returns info about a symlink itself
let stats ;
if ( options . followSymbolicLinks ) {
try {
// Use `stat` (following symlinks)
stats = yield fs . promises . stat ( item . path ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
if ( options . omitBrokenSymbolicLinks ) {
core . debug ( ` Broken symlink ' ${ item . path } ' ` ) ;
return undefined ;
}
throw new Error ( ` No information found for the path ' ${ item . path } '. This may indicate a broken symbolic link. ` ) ;
}
throw err ;
}
}
else {
// Use `lstat` (not following symlinks)
stats = yield fs . promises . lstat ( item . path ) ;
}
// Note, isDirectory() returns false for the lstat of a symlink
if ( stats . isDirectory ( ) && options . followSymbolicLinks ) {
// Get the realpath
const realPath = yield fs . promises . realpath ( item . path ) ;
// Fixup the traversal chain to match the item level
while ( traversalChain . length >= item . level ) {
traversalChain . pop ( ) ;
}
// Test for a cycle
if ( traversalChain . some ( ( x ) => x === realPath ) ) {
core . debug ( ` Symlink cycle detected for path ' ${ item . path } ' and realpath ' ${ realPath } ' ` ) ;
return undefined ;
}
// Update the traversal chain
traversalChain . push ( realPath ) ;
}
return stats ;
} ) ;
}
}
exports . DefaultGlobber = DefaultGlobber ;
//# sourceMappingURL=internal-globber.js.map
/***/ } ) ,
/***/ 306 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var concatMap = _ _webpack _require _ _ ( 896 ) ;
var balanced = _ _webpack _require _ _ ( 621 ) ;
module . exports = expandTop ;
var escSlash = '\0SLASH' + Math . random ( ) + '\0' ;
var escOpen = '\0OPEN' + Math . random ( ) + '\0' ;
var escClose = '\0CLOSE' + Math . random ( ) + '\0' ;
var escComma = '\0COMMA' + Math . random ( ) + '\0' ;
var escPeriod = '\0PERIOD' + Math . random ( ) + '\0' ;
function numeric ( str ) {
return parseInt ( str , 10 ) == str
? parseInt ( str , 10 )
: str . charCodeAt ( 0 ) ;
}
function escapeBraces ( str ) {
return str . split ( '\\\\' ) . join ( escSlash )
. split ( '\\{' ) . join ( escOpen )
. split ( '\\}' ) . join ( escClose )
. split ( '\\,' ) . join ( escComma )
. split ( '\\.' ) . join ( escPeriod ) ;
}
function unescapeBraces ( str ) {
return str . split ( escSlash ) . join ( '\\' )
. split ( escOpen ) . join ( '{' )
. split ( escClose ) . join ( '}' )
. split ( escComma ) . join ( ',' )
. split ( escPeriod ) . join ( '.' ) ;
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts ( str ) {
if ( ! str )
return [ '' ] ;
var parts = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m )
return str . split ( ',' ) ;
var pre = m . pre ;
var body = m . body ;
var post = m . post ;
var p = pre . split ( ',' ) ;
p [ p . length - 1 ] += '{' + body + '}' ;
var postParts = parseCommaParts ( post ) ;
if ( post . length ) {
p [ p . length - 1 ] += postParts . shift ( ) ;
p . push . apply ( p , postParts ) ;
}
parts . push . apply ( parts , p ) ;
return parts ;
}
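// Quick illustration: nested braced sections stay intact instead of being
// split on every comma.
function exampleParseCommaParts () {
  return parseCommaParts('a,{b,c},d') // => ['a', '{b,c}', 'd']
}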
function expandTop ( str ) {
if ( ! str )
return [ ] ;
// I don't know why Bash 4.3 does this, but it does.
// Anything starting with {} will have the first two bytes preserved
// but *only* at the top level, so {},a}b will not expand to anything,
// but a{},b}c will be expanded to [a}c,abc].
// One could argue that this is a bug in Bash, but since the goal of
// this module is to match Bash's rules, we escape a leading {}
if ( str . substr ( 0 , 2 ) === '{}' ) {
str = '\\{\\}' + str . substr ( 2 ) ;
}
return expand ( escapeBraces ( str ) , true ) . map ( unescapeBraces ) ;
}
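// Usage sketch (not invoked): numeric sequences and comma sets both expand.
function exampleExpandTop () {
  expandTop('file{1..3}.txt') // => ['file1.txt', 'file2.txt', 'file3.txt']
  return expandTop('a{b,c}d') // => ['abd', 'acd']
}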
function identity ( e ) {
return e ;
}
function embrace ( str ) {
return '{' + str + '}' ;
}
function isPadded ( el ) {
return /^-?0\d/ . test ( el ) ;
}
function lte ( i , y ) {
return i <= y ;
}
function gte ( i , y ) {
return i >= y ;
}
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 327 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
/**
 * Indicates whether a pattern matches a path
 */
var MatchKind ;
( function ( MatchKind ) {
/** Not matched */
MatchKind [ MatchKind [ "None" ] = 0 ] = "None" ;
/** Matched if the path is a directory */
MatchKind [ MatchKind [ "Directory" ] = 1 ] = "Directory" ;
/** Matched if the path is a regular file */
MatchKind [ MatchKind [ "File" ] = 2 ] = "File" ;
/** Matched */
MatchKind [ MatchKind [ "All" ] = 3 ] = "All" ;
} ) ( MatchKind = exports . MatchKind || ( exports . MatchKind = { } ) ) ;
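// Note (illustrative): the values are bit flags, so callers test a result
// with a bitwise AND, as in the globber's `match & MatchKind.Directory` check.
function exampleMatchKindFlags () {
  return (MatchKind.All & MatchKind.Directory) !== 0 // => true
}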
//# sourceMappingURL=internal-match-kind.js.map
/***/ } ) ,
/***/ 357 :
/***/ ( function ( module ) {
module . exports = require ( "assert" ) ;
/***/ } ) ,
/***/ 383 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert = _ _webpack _require _ _ ( 357 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const pathHelper = _ _webpack _require _ _ ( 972 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/**
 * Helper class for parsing paths into segments
 */
class Path {
/**
 * Constructs a Path
 * @param itemPath Path or array of segments
 */
constructor ( itemPath ) {
this . segments = [ ] ;
// String
if ( typeof itemPath === 'string' ) {
assert ( itemPath , ` Parameter 'itemPath' must not be empty ` ) ;
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// Not rooted
if ( ! pathHelper . hasRoot ( itemPath ) ) {
this . segments = itemPath . split ( path . sep ) ;
}
// Rooted
else {
// Add all segments, while not at the root
let remaining = itemPath ;
let dir = pathHelper . dirname ( remaining ) ;
while ( dir !== remaining ) {
// Add the segment
const basename = path . basename ( remaining ) ;
this . segments . unshift ( basename ) ;
// Truncate the last segment
remaining = dir ;
dir = pathHelper . dirname ( remaining ) ;
}
// Remainder is the root
this . segments . unshift ( remaining ) ;
}
}
// Array
else {
// Must not be empty
assert ( itemPath . length > 0 , ` Parameter 'itemPath' must not be an empty array ` ) ;
// Each segment
for ( let i = 0 ; i < itemPath . length ; i ++ ) {
let segment = itemPath [ i ] ;
// Must not be empty
assert ( segment , ` Parameter 'itemPath' must not contain any empty segments ` ) ;
// Normalize slashes
segment = pathHelper . normalizeSeparators ( itemPath [ i ] ) ;
// Root segment
if ( i === 0 && pathHelper . hasRoot ( segment ) ) {
segment = pathHelper . safeTrimTrailingSeparator ( segment ) ;
assert ( segment === pathHelper . dirname ( segment ) , ` Parameter 'itemPath' root segment contains information for multiple segments ` ) ;
this . segments . push ( segment ) ;
}
// All other segments
else {
// Must not contain slash
assert ( ! segment . includes ( path . sep ) , ` Parameter 'itemPath' contains unexpected path separators ` ) ;
this . segments . push ( segment ) ;
}
}
}
}
/**
 * Converts the path to its string representation
 */
toString ( ) {
// First segment
let result = this . segments [ 0 ] ;
// All others
let skipSlash = result . endsWith ( path . sep ) || ( IS _WINDOWS && /^[A-Z]:$/i . test ( result ) ) ;
for ( let i = 1 ; i < this . segments . length ; i ++ ) {
if ( skipSlash ) {
skipSlash = false ;
}
else {
result += path . sep ;
}
result += this . segments [ i ] ;
}
return result ;
}
}
exports . Path = Path ;
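// Hedged usage sketch (hypothetical path, POSIX separators, not invoked):
// round-trip a rooted path through the helper.
function examplePath () {
  const p = new Path('/tmp/cache/key')
  // p.segments => ['/', 'tmp', 'cache', 'key']
  return p.toString() // => '/tmp/cache/key'
}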
//# sourceMappingURL=internal-path.js.map
/***/ } ) ,
/***/ 413 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
module . exports = _ _webpack _require _ _ ( 141 ) ;
/***/ } ) ,
/***/ 417 :
/***/ ( function ( module ) {
module . exports = require ( "crypto" ) ;
/***/ } ) ,
/***/ 431 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const os = _ _webpack _require _ _ ( 87 ) ;
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-secret name=mypassword::definitelyNotAPassword!
 */
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
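// Illustration (not invoked): issue('warning', 'Disk space is low') writes
// the workflow command '::warning::Disk space is low' to stdout.
function exampleIssueWarning () {
  issue('warning', 'Disk space is low')
}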
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
// safely append the val - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
cmdStr += ` ${ key } = ${ escape ( ` ${ val || '' } ` ) } , ` ;
}
}
}
}
cmdStr += CMD _STRING ;
// safely append the message - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
const message = ` ${ this . message || '' } ` ;
cmdStr += escapeData ( message ) ;
return cmdStr ;
}
}
function escapeData ( s ) {
return s . replace ( /\r/g , '%0D' ) . replace ( /\n/g , '%0A' ) ;
}
function escape ( s ) {
return s
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /]/g , '%5D' )
. replace ( /;/g , '%3B' ) ;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 434 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const exec _1 = _ _webpack _require _ _ ( 986 ) ;
const io = _ _importStar ( _ _webpack _require _ _ ( 1 ) ) ;
const fs _1 = _ _webpack _require _ _ ( 747 ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 15 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 931 ) ;
function getTarPath ( args ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Explicitly use BSD Tar on Windows
const IS _WINDOWS = process . platform === 'win32' ;
if ( IS _WINDOWS ) {
const systemTar = ` ${ process . env [ 'windir' ] } \\ System32 \\ tar.exe ` ;
if ( fs _1 . existsSync ( systemTar ) ) {
return systemTar ;
}
else if ( yield utils . useGnuTar ( ) ) {
args . push ( '--force-local' ) ;
}
}
return yield io . which ( 'tar' , true ) ;
} ) ;
}
function execTar ( args , cwd ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exec _1 . exec ( ` " ${ yield getTarPath ( args ) } " ` , args , { cwd } ) ;
}
catch ( error ) {
throw new Error ( ` Tar failed with error: ${ error === null || error === void 0 ? void 0 : error . message } ` ) ;
}
} ) ;
}
function getWorkingDirectory ( ) {
var _a ;
return ( _a = process . env [ 'GITHUB_WORKSPACE' ] ) !== null && _a !== void 0 ? _a : process . cwd ( ) ;
}
function extractTar ( archivePath , compressionMethod ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory ( ) ;
yield io . mkdirP ( workingDirectory ) ;
// -d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const args = [
... ( compressionMethod === constants _1 . CompressionMethod . Zstd
? [ '--use-compress-program' , 'zstd -d --long=30' ]
: [ '-z' ] ) ,
'-xf' ,
archivePath . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ,
'-P' ,
'-C' ,
workingDirectory . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' )
] ;
yield execTar ( args ) ;
} ) ;
}
exports . extractTar = extractTar ;
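// Illustrative command line (hypothetical paths): for a zstd archive the args
// built above amount to
//   tar --use-compress-program "zstd -d --long=30" -xf <archive> -P -C <workspace>
// and for gzip the compression flag is simply -z.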
function createTar ( archiveFolder , sourceDirectories , compressionMethod ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = 'manifest.txt' ;
const cacheFileName = utils . getCacheFileName ( compressionMethod ) ;
fs _1 . writeFileSync ( path . join ( archiveFolder , manifestFilename ) , sourceDirectories . join ( '\n' ) ) ;
// -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const workingDirectory = getWorkingDirectory ( ) ;
const args = [
... ( compressionMethod === constants _1 . CompressionMethod . Zstd
? [ '--use-compress-program' , 'zstd -T0 --long=30' ]
: [ '-z' ] ) ,
'-cf' ,
cacheFileName . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ,
'-P' ,
'-C' ,
workingDirectory . replace ( new RegExp ( ` \\ ${ path . sep } ` , 'g' ) , '/' ) ,
'--files-from' ,
manifestFilename
] ;
yield execTar ( args , archiveFolder ) ;
} ) ;
}
exports . createTar = createTar ;
//# sourceMappingURL=tar.js.map
/***/ } ) ,
/***/ 443 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 694 ) ;
function isExactKeyMatch ( key , cacheKey ) {
return ! ! ( cacheKey &&
cacheKey . localeCompare ( key , undefined , {
sensitivity : "accent"
} ) === 0 ) ;
}
exports . isExactKeyMatch = isExactKeyMatch ;
function setCacheState ( state ) {
core . saveState ( constants _1 . State . CacheResult , state ) ;
2020-02-13 12:38:56 -05:00
}
exports . setCacheState = setCacheState ;
function setCacheHitOutput ( isCacheHit ) {
core . setOutput ( constants _1 . Outputs . CacheHit , isCacheHit . toString ( ) ) ;
}
exports . setCacheHitOutput = setCacheHitOutput ;
function setOutputAndState ( key , cacheKey ) {
setCacheHitOutput ( isExactKeyMatch ( key , cacheKey ) ) ;
// Store the cache result if it exists
cacheKey && setCacheState ( cacheKey ) ;
}
exports . setOutputAndState = setOutputAndState ;
function getCacheState ( ) {
const cacheKey = core . getState ( constants _1 . State . CacheResult ) ;
if ( cacheKey ) {
core . debug ( ` Cache state/key: ${ cacheKey } ` ) ;
return cacheKey ;
}
return undefined ;
}
exports . getCacheState = getCacheState ;
function logWarning ( message ) {
const warningPrefix = "[warning]" ;
core . info ( ` ${ warningPrefix } ${ message } ` ) ;
}
exports . logWarning = logWarning ;
// Cache token authorized for all events that are tied to a ref
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
function isValidEvent ( ) {
return constants _1 . RefKey in process . env && Boolean ( process . env [ constants _1 . RefKey ] ) ;
}
exports . isValidEvent = isValidEvent ;
/***/ } ) ,
/***/ 470 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const command _1 = _ _webpack _require _ _ ( 431 ) ;
const os = _ _webpack _require _ _ ( 87 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable
* /
function exportVariable ( name , val ) {
process . env [ name ] = val ;
command _1 . issueCommand ( 'set-env' , { name } , val ) ;
}
exports . exportVariable = exportVariable ;
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input . The value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
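// Usage sketch (not executed): an input named `key` is read from the INPUT_KEY environment
// variable that the runner sets, with surrounding whitespace trimmed.
//
//   const key = getInput('key', { required: true });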
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store
* /
function setOutput ( name , value ) {
command _1 . issueCommand ( 'set-output' , { name } , value ) ;
}
exports . setOutput = setOutput ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message
* /
function error ( message ) {
command _1 . issue ( 'error' , message ) ;
}
exports . error = error ;
/ * *
* Adds a warning issue
* @ param message warning issue message
* /
function warning ( message ) {
command _1 . issue ( 'warning' , message ) ;
}
exports . warning = warning ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
* Saves state for current action , the state can only be retrieved by this action ' s post job execution .
*
* @ param name name of the state to store
* @ param value value to store
* /
function saveState ( name , value ) {
command _1 . issueCommand ( 'save-state' , { name } , value ) ;
}
exports . saveState = saveState ;
/ * *
* Gets the value of a state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 539 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
const http = _ _webpack _require _ _ ( 605 ) ;
const https = _ _webpack _require _ _ ( 211 ) ;
const pm = _ _webpack _require _ _ ( 950 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
/ * *
* Returns the proxy URL , depending upon the supplied url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
function getProxyUrl ( serverUrl ) {
let proxyUrl = pm . getProxyUrl ( url . parse ( serverUrl ) ) ;
return proxyUrl ? proxyUrl . href : '' ;
}
exports . getProxyUrl = getProxyUrl ;
const HttpRedirectCodes = [
HttpCodes . MovedPermanently ,
HttpCodes . ResourceMoved ,
HttpCodes . SeeOther ,
HttpCodes . TemporaryRedirect ,
HttpCodes . PermanentRedirect
] ;
const HttpResponseRetryCodes = [
HttpCodes . BadGateway ,
HttpCodes . ServiceUnavailable ,
HttpCodes . GatewayTimeout
] ;
const RetryableHttpVerbs = [ 'OPTIONS' , 'GET' , 'DELETE' , 'HEAD' ] ;
const ExponentialBackoffCeiling = 10 ;
const ExponentialBackoffTimeSlice = 5 ;
class HttpClientResponse {
constructor ( message ) {
this . message = message ;
}
readBody ( ) {
return new Promise ( async ( resolve , reject ) => {
let output = Buffer . alloc ( 0 ) ;
this . message . on ( 'data' , ( chunk ) => {
output = Buffer . concat ( [ output , chunk ] ) ;
} ) ;
this . message . on ( 'end' , ( ) => {
resolve ( output . toString ( ) ) ;
} ) ;
} ) ;
}
}
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
let parsedUrl = url . parse ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
class HttpClient {
constructor ( userAgent , handlers , requestOptions ) {
this . _ignoreSslError = false ;
this . _allowRedirects = true ;
this . _allowRedirectDowngrade = false ;
this . _maxRedirects = 50 ;
this . _allowRetries = false ;
this . _maxRetries = 1 ;
this . _keepAlive = false ;
this . _disposed = false ;
this . userAgent = userAgent ;
this . handlers = handlers || [ ] ;
this . requestOptions = requestOptions ;
if ( requestOptions ) {
if ( requestOptions . ignoreSslError != null ) {
this . _ignoreSslError = requestOptions . ignoreSslError ;
}
this . _socketTimeout = requestOptions . socketTimeout ;
if ( requestOptions . allowRedirects != null ) {
this . _allowRedirects = requestOptions . allowRedirects ;
}
if ( requestOptions . allowRedirectDowngrade != null ) {
this . _allowRedirectDowngrade = requestOptions . allowRedirectDowngrade ;
}
if ( requestOptions . maxRedirects != null ) {
this . _maxRedirects = Math . max ( requestOptions . maxRedirects , 0 ) ;
}
if ( requestOptions . keepAlive != null ) {
this . _keepAlive = requestOptions . keepAlive ;
}
if ( requestOptions . allowRetries != null ) {
this . _allowRetries = requestOptions . allowRetries ;
}
if ( requestOptions . maxRetries != null ) {
this . _maxRetries = requestOptions . maxRetries ;
}
}
}
options ( requestUrl , additionalHeaders ) {
return this . request ( 'OPTIONS' , requestUrl , null , additionalHeaders || { } ) ;
}
get ( requestUrl , additionalHeaders ) {
return this . request ( 'GET' , requestUrl , null , additionalHeaders || { } ) ;
}
del ( requestUrl , additionalHeaders ) {
return this . request ( 'DELETE' , requestUrl , null , additionalHeaders || { } ) ;
}
post ( requestUrl , data , additionalHeaders ) {
return this . request ( 'POST' , requestUrl , data , additionalHeaders || { } ) ;
}
patch ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PATCH' , requestUrl , data , additionalHeaders || { } ) ;
}
put ( requestUrl , data , additionalHeaders ) {
return this . request ( 'PUT' , requestUrl , data , additionalHeaders || { } ) ;
}
head ( requestUrl , additionalHeaders ) {
return this . request ( 'HEAD' , requestUrl , null , additionalHeaders || { } ) ;
}
sendStream ( verb , requestUrl , stream , additionalHeaders ) {
return this . request ( verb , requestUrl , stream , additionalHeaders ) ;
}
/ * *
* Gets a typed object from an endpoint
* Be aware that not found returns null . Other errors ( 4xx , 5xx ) reject the promise
* /
async getJson ( requestUrl , additionalHeaders = { } ) {
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
let res = await this . get ( requestUrl , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async postJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . post ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async putJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . put ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
async patchJson ( requestUrl , obj , additionalHeaders = { } ) {
let data = JSON . stringify ( obj , null , 2 ) ;
additionalHeaders [ Headers . Accept ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . Accept , MediaTypes . ApplicationJson ) ;
additionalHeaders [ Headers . ContentType ] = this . _getExistingOrDefaultHeader ( additionalHeaders , Headers . ContentType , MediaTypes . ApplicationJson ) ;
let res = await this . patch ( requestUrl , data , additionalHeaders ) ;
return this . _processResponse ( res , this . requestOptions ) ;
}
/ * *
* Makes a raw http request .
* All other methods such as get , post , patch , and request ultimately call this .
* Prefer get , del , post and patch
* /
async request ( verb , requestUrl , data , headers ) {
if ( this . _disposed ) {
throw new Error ( 'Client has already been disposed.' ) ;
}
let parsedUrl = url . parse ( requestUrl ) ;
let info = this . _prepareRequest ( verb , parsedUrl , headers ) ;
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this . _allowRetries && RetryableHttpVerbs . indexOf ( verb ) != - 1
? this . _maxRetries + 1
: 1 ;
let numTries = 0 ;
let response ;
while ( numTries < maxTries ) {
response = await this . requestRaw ( info , data ) ;
// Check if it's an authentication challenge
if ( response &&
response . message &&
response . message . statusCode === HttpCodes . Unauthorized ) {
let authenticationHandler ;
for ( let i = 0 ; i < this . handlers . length ; i ++ ) {
if ( this . handlers [ i ] . canHandleAuthentication ( response ) ) {
authenticationHandler = this . handlers [ i ] ;
break ;
}
}
if ( authenticationHandler ) {
return authenticationHandler . handleAuthentication ( this , info , data ) ;
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response ;
}
}
let redirectsRemaining = this . _maxRedirects ;
while ( HttpRedirectCodes . indexOf ( response . message . statusCode ) != - 1 &&
this . _allowRedirects &&
redirectsRemaining > 0 ) {
const redirectUrl = response . message . headers [ 'location' ] ;
if ( ! redirectUrl ) {
// if there's no location to redirect to, we won't
break ;
}
let parsedRedirectUrl = url . parse ( redirectUrl ) ;
if ( parsedUrl . protocol == 'https:' &&
parsedUrl . protocol != parsedRedirectUrl . protocol &&
! this . _allowRedirectDowngrade ) {
throw new Error ( 'Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.' ) ;
}
// we need to finish reading the response before reassigning it;
// otherwise the open socket will leak.
await response . readBody ( ) ;
// strip authorization header if redirected to a different hostname
if ( parsedRedirectUrl . hostname !== parsedUrl . hostname ) {
for ( let header in headers ) {
// header names are case insensitive
if ( header . toLowerCase ( ) === 'authorization' ) {
delete headers [ header ] ;
}
}
}
// let's make the request with the new redirectUrl
info = this . _prepareRequest ( verb , parsedRedirectUrl , headers ) ;
response = await this . requestRaw ( info , data ) ;
redirectsRemaining -- ;
}
if ( HttpResponseRetryCodes . indexOf ( response . message . statusCode ) == - 1 ) {
// If not a retry code, return immediately instead of retrying
return response ;
}
numTries += 1 ;
if ( numTries < maxTries ) {
await response . readBody ( ) ;
await this . _performExponentialBackoff ( numTries ) ;
}
}
return response ;
}
/ * *
* Needs to be called if keepAlive is set to true in request options .
* /
dispose ( ) {
if ( this . _agent ) {
this . _agent . destroy ( ) ;
}
this . _disposed = true ;
}
/ * *
* Raw request .
* @ param info
* @ param data
* /
requestRaw ( info , data ) {
return new Promise ( ( resolve , reject ) => {
let callbackForResult = function ( err , res ) {
if ( err ) {
reject ( err ) ;
}
resolve ( res ) ;
} ;
this . requestRawWithCallback ( info , data , callbackForResult ) ;
} ) ;
}
/ * *
* Raw request with callback .
* @ param info
* @ param data
* @ param onResult
* /
requestRawWithCallback ( info , data , onResult ) {
let socket ;
if ( typeof data === 'string' ) {
info . options . headers [ 'Content-Length' ] = Buffer . byteLength ( data , 'utf8' ) ;
}
let callbackCalled = false ;
let handleResult = ( err , res ) => {
if ( ! callbackCalled ) {
callbackCalled = true ;
onResult ( err , res ) ;
}
} ;
let req = info . httpModule . request ( info . options , ( msg ) => {
let res = new HttpClientResponse ( msg ) ;
handleResult ( null , res ) ;
} ) ;
req . on ( 'socket' , sock => {
socket = sock ;
} ) ;
// If we ever get disconnected, we want the socket to timeout eventually
req . setTimeout ( this . _socketTimeout || 3 * 60000 , ( ) => {
if ( socket ) {
socket . end ( ) ;
}
handleResult ( new Error ( 'Request timeout: ' + info . options . path ) , null ) ;
} ) ;
req . on ( 'error' , function ( err ) {
// err has statusCode property
// res should have headers
handleResult ( err , null ) ;
} ) ;
if ( data && typeof data === 'string' ) {
req . write ( data , 'utf8' ) ;
}
if ( data && typeof data !== 'string' ) {
data . on ( 'close' , function ( ) {
req . end ( ) ;
} ) ;
data . pipe ( req ) ;
}
else {
req . end ( ) ;
}
}
/ * *
* Gets an http agent . This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables .
* @ param serverUrl The server URL where the request will be sent . For example , https : //api.github.com
* /
getAgent ( serverUrl ) {
let parsedUrl = url . parse ( serverUrl ) ;
return this . _getAgent ( parsedUrl ) ;
}
_prepareRequest ( method , requestUrl , headers ) {
const info = { } ;
info . parsedUrl = requestUrl ;
const usingSsl = info . parsedUrl . protocol === 'https:' ;
info . httpModule = usingSsl ? https : http ;
const defaultPort = usingSsl ? 443 : 80 ;
info . options = { } ;
info . options . host = info . parsedUrl . hostname ;
info . options . port = info . parsedUrl . port
? parseInt ( info . parsedUrl . port )
: defaultPort ;
info . options . path =
( info . parsedUrl . pathname || '' ) + ( info . parsedUrl . search || '' ) ;
info . options . method = method ;
info . options . headers = this . _mergeHeaders ( headers ) ;
if ( this . userAgent != null ) {
info . options . headers [ 'user-agent' ] = this . userAgent ;
}
info . options . agent = this . _getAgent ( info . parsedUrl ) ;
// gives handlers an opportunity to participate
if ( this . handlers ) {
this . handlers . forEach ( handler => {
handler . prepareRequest ( info . options ) ;
} ) ;
}
return info ;
}
_mergeHeaders ( headers ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
if ( this . requestOptions && this . requestOptions . headers ) {
return Object . assign ( { } , lowercaseKeys ( this . requestOptions . headers ) , lowercaseKeys ( headers ) ) ;
}
return lowercaseKeys ( headers || { } ) ;
}
_getExistingOrDefaultHeader ( additionalHeaders , header , _default ) {
const lowercaseKeys = obj => Object . keys ( obj ) . reduce ( ( c , k ) => ( ( c [ k . toLowerCase ( ) ] = obj [ k ] ) , c ) , { } ) ;
let clientHeader ;
if ( this . requestOptions && this . requestOptions . headers ) {
clientHeader = lowercaseKeys ( this . requestOptions . headers ) [ header ] ;
}
return additionalHeaders [ header ] || clientHeader || _default ;
}
_getAgent ( parsedUrl ) {
let agent ;
let proxyUrl = pm . getProxyUrl ( parsedUrl ) ;
let useProxy = proxyUrl && proxyUrl . hostname ;
if ( this . _keepAlive && useProxy ) {
agent = this . _proxyAgent ;
}
if ( this . _keepAlive && ! useProxy ) {
agent = this . _agent ;
}
// if agent is already assigned use that agent.
if ( ! ! agent ) {
return agent ;
}
const usingSsl = parsedUrl . protocol === 'https:' ;
let maxSockets = 100 ;
if ( ! ! this . requestOptions ) {
maxSockets = this . requestOptions . maxSockets || http . globalAgent . maxSockets ;
}
if ( useProxy ) {
// If using proxy, need tunnel
if ( ! tunnel ) {
tunnel = _ _webpack _require _ _ ( 413 ) ;
}
const agentOptions = {
maxSockets : maxSockets ,
keepAlive : this . _keepAlive ,
proxy : {
proxyAuth : proxyUrl . auth ,
host : proxyUrl . hostname ,
port : proxyUrl . port
}
} ;
let tunnelAgent ;
const overHttps = proxyUrl . protocol === 'https:' ;
if ( usingSsl ) {
tunnelAgent = overHttps ? tunnel . httpsOverHttps : tunnel . httpsOverHttp ;
}
else {
tunnelAgent = overHttps ? tunnel . httpOverHttps : tunnel . httpOverHttp ;
}
agent = tunnelAgent ( agentOptions ) ;
this . _proxyAgent = agent ;
}
// if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
if ( this . _keepAlive && ! agent ) {
const options = { keepAlive : this . _keepAlive , maxSockets : maxSockets } ;
agent = usingSsl ? new https . Agent ( options ) : new http . Agent ( options ) ;
this . _agent = agent ;
}
// if not using a private agent and a tunnel agent isn't set up, use the global agent
if ( ! agent ) {
agent = usingSsl ? https . globalAgent : http . globalAgent ;
}
if ( usingSsl && this . _ignoreSslError ) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent . options = Object . assign ( agent . options || { } , {
rejectUnauthorized : false
} ) ;
}
return agent ;
}
_performExponentialBackoff ( retryNumber ) {
retryNumber = Math . min ( ExponentialBackoffCeiling , retryNumber ) ;
const ms = ExponentialBackoffTimeSlice * Math . pow ( 2 , retryNumber ) ;
return new Promise ( resolve => setTimeout ( ( ) => resolve ( ) , ms ) ) ;
}
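// Rough timing sketch: with ExponentialBackoffTimeSlice = 5 and a ceiling of 10, the waits grow as
// 10ms after the first retry, 20ms after the second, 40ms after the third, capping at 5120ms.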
static dateTimeDeserializer ( key , value ) {
if ( typeof value === 'string' ) {
let a = new Date ( value ) ;
if ( ! isNaN ( a . valueOf ( ) ) ) {
return a ;
}
}
return value ;
}
async _processResponse ( res , options ) {
return new Promise ( async ( resolve , reject ) => {
const statusCode = res . message . statusCode ;
const response = {
statusCode : statusCode ,
result : null ,
headers : { }
} ;
// not found leads to null obj returned
if ( statusCode == HttpCodes . NotFound ) {
resolve ( response ) ;
}
let obj ;
let contents ;
// get the result from the body
try {
contents = await res . readBody ( ) ;
if ( contents && contents . length > 0 ) {
if ( options && options . deserializeDates ) {
obj = JSON . parse ( contents , HttpClient . dateTimeDeserializer ) ;
}
else {
obj = JSON . parse ( contents ) ;
}
response . result = obj ;
}
response . headers = res . message . headers ;
}
catch ( err ) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if ( statusCode > 299 ) {
let msg ;
// if exception/error in body, attempt to get better error
if ( obj && obj . message ) {
msg = obj . message ;
}
else if ( contents && contents . length > 0 ) {
// it may be the case that the exception is in the body message as string
msg = contents ;
}
else {
msg = 'Failed request: (' + statusCode + ')' ;
}
let err = new Error ( msg ) ;
// attach statusCode and body obj (if available) to the error object
err [ 'statusCode' ] = statusCode ;
if ( response . result ) {
err [ 'result' ] = response . result ;
}
reject ( err ) ;
}
else {
resolve ( response ) ;
}
} ) ;
}
}
exports . HttpClient = HttpClient ;
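// Usage sketch (not executed; the URL and user agent are hypothetical): a client that retries
// failed GET requests twice before giving up.
//
//   const client = new HttpClient('my-agent', [], { allowRetries: true, maxRetries: 2 });
//   const res = await client.getJson('https://api.example.com/item');
//   // res.result holds the parsed JSON body, or null when the server returned 404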
/***/ } ) ,
/***/ 597 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const pathHelper = _ _webpack _require _ _ ( 972 ) ;
const internal _match _kind _1 = _ _webpack _require _ _ ( 327 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Given an array of patterns , returns an array of paths to search .
* Duplicates and paths under other included paths are filtered out .
* /
function getSearchPaths ( patterns ) {
// Ignore negate patterns
patterns = patterns . filter ( x => ! x . negate ) ;
// Create a map of all search paths
const searchPathMap = { } ;
for ( const pattern of patterns ) {
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
searchPathMap [ key ] = 'candidate' ;
}
const result = [ ] ;
for ( const pattern of patterns ) {
// Check if already included
const key = IS _WINDOWS
? pattern . searchPath . toUpperCase ( )
: pattern . searchPath ;
if ( searchPathMap [ key ] === 'included' ) {
continue ;
}
// Check for an ancestor search path
let foundAncestor = false ;
let tempKey = key ;
let parent = pathHelper . dirname ( tempKey ) ;
while ( parent !== tempKey ) {
if ( searchPathMap [ parent ] ) {
foundAncestor = true ;
break ;
}
tempKey = parent ;
parent = pathHelper . dirname ( tempKey ) ;
}
// Include the search pattern in the result
if ( ! foundAncestor ) {
result . push ( pattern . searchPath ) ;
searchPathMap [ key ] = 'included' ;
}
}
return result ;
}
exports . getSearchPaths = getSearchPaths ;
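// Behaviour sketch (hypothetical search paths): if one pattern's search path is an ancestor of
// another's, only the ancestor survives, e.g. search paths '/tmp/a' and '/tmp/a/b' reduce to ['/tmp/a'].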
/ * *
* Matches the patterns against the path
* /
function match ( patterns , itemPath ) {
let result = internal _match _kind _1 . MatchKind . None ;
for ( const pattern of patterns ) {
if ( pattern . negate ) {
result &= ~ pattern . match ( itemPath ) ;
}
else {
result |= pattern . match ( itemPath ) ;
}
}
return result ;
}
exports . match = match ;
/ * *
* Checks whether to descend further into the directory
* /
function partialMatch ( patterns , itemPath ) {
return patterns . some ( x => ! x . negate && x . partialMatch ( itemPath ) ) ;
}
exports . partialMatch = partialMatch ;
//# sourceMappingURL=internal-pattern-helper.js.map
/***/ } ) ,
/***/ 601 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _webpack _require _ _ ( 470 ) ;
/ * *
* Returns a copy with defaults filled in .
* /
function getOptions ( copy ) {
const result = {
followSymbolicLinks : true ,
implicitDescendants : true ,
omitBrokenSymbolicLinks : true
} ;
if ( copy ) {
if ( typeof copy . followSymbolicLinks === 'boolean' ) {
result . followSymbolicLinks = copy . followSymbolicLinks ;
core . debug ( ` followSymbolicLinks ' ${ result . followSymbolicLinks } ' ` ) ;
}
if ( typeof copy . implicitDescendants === 'boolean' ) {
result . implicitDescendants = copy . implicitDescendants ;
core . debug ( ` implicitDescendants ' ${ result . implicitDescendants } ' ` ) ;
}
if ( typeof copy . omitBrokenSymbolicLinks === 'boolean' ) {
result . omitBrokenSymbolicLinks = copy . omitBrokenSymbolicLinks ;
core . debug ( ` omitBrokenSymbolicLinks ' ${ result . omitBrokenSymbolicLinks } ' ` ) ;
}
}
return result ;
}
exports . getOptions = getOptions ;
//# sourceMappingURL=internal-glob-options-helper.js.map
/***/ } ) ,
/***/ 605 :
/***/ ( function ( module ) {
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 614 :
/***/ ( function ( module ) {
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 621 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = balanced ;
function balanced ( a , b , str ) {
if ( a instanceof RegExp ) a = maybeMatch ( a , str ) ;
if ( b instanceof RegExp ) b = maybeMatch ( b , str ) ;
var r = range ( a , b , str ) ;
return r && {
start : r [ 0 ] ,
end : r [ 1 ] ,
pre : str . slice ( 0 , r [ 0 ] ) ,
body : str . slice ( r [ 0 ] + a . length , r [ 1 ] ) ,
post : str . slice ( r [ 1 ] + b . length )
} ;
}
function maybeMatch ( reg , str ) {
var m = str . match ( reg ) ;
return m ? m [ 0 ] : null ;
}
balanced . range = range ;
function range ( a , b , str ) {
var begs , beg , left , right , result ;
var ai = str . indexOf ( a ) ;
var bi = str . indexOf ( b , ai + 1 ) ;
var i = ai ;
if ( ai >= 0 && bi > 0 ) {
begs = [ ] ;
left = str . length ;
while ( i >= 0 && ! result ) {
if ( i == ai ) {
begs . push ( i ) ;
ai = str . indexOf ( a , i + 1 ) ;
} else if ( begs . length == 1 ) {
result = [ begs . pop ( ) , bi ] ;
} else {
beg = begs . pop ( ) ;
if ( beg < left ) {
left = beg ;
right = bi ;
}
bi = str . indexOf ( b , i + 1 ) ;
}
i = ai < bi && ai >= 0 ? ai : bi ;
}
if ( begs . length ) {
result = [ left , right ] ;
}
}
return result ;
}
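// Usage sketch (not executed):
//
//   balanced('{', '}', 'pre{body}post')
//   // => { start: 3, end: 8, pre: 'pre', body: 'body', post: 'post' }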
/***/ } ) ,
/***/ 622 :
/***/ ( function ( module ) {
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 631 :
/***/ ( function ( module ) {
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 633 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const command _1 = _ _webpack _require _ _ ( 734 ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/ * *
* The code to exit an action
* /
var ExitCode ;
( function ( ExitCode ) {
/ * *
* A code indicating that the action was successful
* /
ExitCode [ ExitCode [ "Success" ] = 0 ] = "Success" ;
/ * *
* A code indicating that the action was a failure
* /
ExitCode [ ExitCode [ "Failure" ] = 1 ] = "Failure" ;
} ) ( ExitCode = exports . ExitCode || ( exports . ExitCode = { } ) ) ;
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/ * *
* Sets env variable for this action and future actions in the job
* @ param name the name of the variable to set
* @ param val the value of the variable . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable ( name , val ) {
const convertedVal = command _1 . toCommandValue ( val ) ;
process . env [ name ] = convertedVal ;
command _1 . issueCommand ( 'set-env' , { name } , convertedVal ) ;
}
exports . exportVariable = exportVariable ;
/ * *
* Registers a secret which will get masked from logs
* @ param secret value of the secret
* /
function setSecret ( secret ) {
command _1 . issueCommand ( 'add-mask' , { } , secret ) ;
}
exports . setSecret = setSecret ;
/ * *
* Prepends inputPath to the PATH ( for this action and future actions )
* @ param inputPath
* /
function addPath ( inputPath ) {
command _1 . issueCommand ( 'add-path' , { } , inputPath ) ;
process . env [ 'PATH' ] = ` ${ inputPath } ${ path . delimiter } ${ process . env [ 'PATH' ] } ` ;
}
exports . addPath = addPath ;
/ * *
* Gets the value of an input . The value is also trimmed .
*
* @ param name name of the input to get
* @ param options optional . See InputOptions .
* @ returns string
* /
function getInput ( name , options ) {
const val = process . env [ ` INPUT_ ${ name . replace ( / /g , '_' ) . toUpperCase ( ) } ` ] || '' ;
if ( options && options . required && ! val ) {
throw new Error ( ` Input required and not supplied: ${ name } ` ) ;
}
return val . trim ( ) ;
}
exports . getInput = getInput ;
/ * *
* Sets the value of an output .
*
* @ param name name of the output to set
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput ( name , value ) {
command _1 . issueCommand ( 'set-output' , { name } , value ) ;
}
exports . setOutput = setOutput ;
/ * *
* Enables or disables the echoing of commands into stdout for the rest of the step .
* Echoing is disabled by default if ACTIONS _STEP _DEBUG is not set .
*
* /
function setCommandEcho ( enabled ) {
command _1 . issue ( 'echo' , enabled ? 'on' : 'off' ) ;
}
exports . setCommandEcho = setCommandEcho ;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/ * *
* Sets the action status to failed .
* When the action exits it will be with an exit code of 1
* @ param message add error issue message
* /
function setFailed ( message ) {
process . exitCode = ExitCode . Failure ;
error ( message ) ;
}
exports . setFailed = setFailed ;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/ * *
* Gets whether Actions Step Debug is on or not
* /
function isDebug ( ) {
return process . env [ 'RUNNER_DEBUG' ] === '1' ;
}
exports . isDebug = isDebug ;
/ * *
* Writes debug message to user log
* @ param message debug message
* /
function debug ( message ) {
command _1 . issueCommand ( 'debug' , { } , message ) ;
}
exports . debug = debug ;
/ * *
* Adds an error issue
* @ param message error issue message . Errors will be converted to string via toString ( )
* /
function error ( message ) {
command _1 . issue ( 'error' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . error = error ;
/ * *
* Adds a warning issue
* @ param message warning issue message . Errors will be converted to string via toString ( )
* /
function warning ( message ) {
command _1 . issue ( 'warning' , message instanceof Error ? message . toString ( ) : message ) ;
}
exports . warning = warning ;
/ * *
* Writes info to log with console . log .
* @ param message info message
* /
function info ( message ) {
process . stdout . write ( message + os . EOL ) ;
}
exports . info = info ;
/ * *
* Begin an output group .
*
* Output until the next ` groupEnd ` will be foldable in this group
*
* @ param name The name of the output group
* /
function startGroup ( name ) {
command _1 . issue ( 'group' , name ) ;
}
exports . startGroup = startGroup ;
/ * *
* End an output group .
* /
function endGroup ( ) {
command _1 . issue ( 'endgroup' ) ;
}
exports . endGroup = endGroup ;
/ * *
* Wrap an asynchronous function call in a group .
*
* Returns the same type as the function itself .
*
* @ param name The name of the group
* @ param fn The function to wrap in the group
* /
function group ( name , fn ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
startGroup ( name ) ;
let result ;
try {
result = yield fn ( ) ;
}
finally {
endGroup ( ) ;
}
return result ;
} ) ;
}
exports . group = group ;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/ * *
* Saves state for current action , the state can only be retrieved by this action ' s post job execution .
*
* @ param name name of the state to store
* @ param value value to store . Non - string values will be converted to a string via JSON . stringify
* /
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState ( name , value ) {
command _1 . issueCommand ( 'save-state' , { name } , value ) ;
}
exports . saveState = saveState ;
/ * *
* Gets the value of a state set by this action ' s main execution .
*
* @ param name name of the state to get
* @ returns string
* /
function getState ( name ) {
return process . env [ ` STATE_ ${ name } ` ] || '' ;
}
exports . getState = getState ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 669 :
/***/ ( function ( module ) {
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 672 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert _1 = _ _webpack _require _ _ ( 357 ) ;
const fs = _ _webpack _require _ _ ( 747 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
_a = fs . promises , exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
function exists ( fsPath ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
yield exports . stat ( fsPath ) ;
}
catch ( err ) {
if ( err . code === 'ENOENT' ) {
return false ;
}
throw err ;
}
return true ;
} ) ;
}
exports . exists = exists ;
function isDirectory ( fsPath , useStat = false ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const stats = useStat ? yield exports . stat ( fsPath ) : yield exports . lstat ( fsPath ) ;
return stats . isDirectory ( ) ;
} ) ;
}
exports . isDirectory = isDirectory ;
/ * *
* On OSX / Linux , true if path starts with '/' . On Windows , true for paths like :
* \ , \ hello , \ \ hello \ share , C : , and C : \ hello ( and corresponding alternate separator cases ) .
* /
function isRooted ( p ) {
p = normalizeSeparators ( p ) ;
if ( ! p ) {
throw new Error ( 'isRooted() parameter "p" cannot be empty' ) ;
}
if ( exports . IS _WINDOWS ) {
return ( p . startsWith ( '\\' ) || /^[A-Z]:/i . test ( p ) // e.g. \ or \hello or \\hello
) ; // e.g. C: or C:\hello
}
return p . startsWith ( '/' ) ;
}
exports . isRooted = isRooted ;
/ * *
* Recursively create a directory at ` fsPath ` .
*
* This implementation is optimistic , meaning it attempts to create the full
* path first , and backs up the path stack from there .
*
* @ param fsPath The path to create
* @ param maxDepth The maximum recursion depth
* @ param depth The current recursion depth
* /
function mkdirP ( fsPath , maxDepth = 1000 , depth = 1 ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
assert _1 . ok ( fsPath , 'a path argument must be provided' ) ;
fsPath = path . resolve ( fsPath ) ;
if ( depth >= maxDepth )
return exports . mkdir ( fsPath ) ;
try {
yield exports . mkdir ( fsPath ) ;
return ;
}
catch ( err ) {
switch ( err . code ) {
case 'ENOENT' : {
yield mkdirP ( path . dirname ( fsPath ) , maxDepth , depth + 1 ) ;
yield exports . mkdir ( fsPath ) ;
return ;
}
default : {
let stats ;
try {
stats = yield exports . stat ( fsPath ) ;
}
catch ( err2 ) {
throw err ;
}
if ( ! stats . isDirectory ( ) )
throw err ;
}
}
}
} ) ;
}
exports . mkdirP = mkdirP ;
/ * *
* Best effort attempt to determine whether a file exists and is executable .
* @ param filePath file path to check
* @ param extensions additional file extensions to try
* @ return if file exists and is executable , returns the file path . otherwise empty string .
* /
function tryGetExecutablePath ( filePath , extensions ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
let stats = undefined ;
try {
// test file exists
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// on Windows, test for valid extension
const upperExt = path . extname ( filePath ) . toUpperCase ( ) ;
if ( extensions . some ( validExt => validExt . toUpperCase ( ) === upperExt ) ) {
return filePath ;
}
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
// try each extension
const originalFilePath = filePath ;
for ( const extension of extensions ) {
filePath = originalFilePath + extension ;
stats = undefined ;
try {
stats = yield exports . stat ( filePath ) ;
}
catch ( err ) {
if ( err . code !== 'ENOENT' ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine if executable file exists ' ${ filePath } ': ${ err } ` ) ;
}
}
if ( stats && stats . isFile ( ) ) {
if ( exports . IS _WINDOWS ) {
// preserve the case of the actual file (since an extension was appended)
try {
const directory = path . dirname ( filePath ) ;
const upperName = path . basename ( filePath ) . toUpperCase ( ) ;
for ( const actualName of yield exports . readdir ( directory ) ) {
if ( upperName === actualName . toUpperCase ( ) ) {
filePath = path . join ( directory , actualName ) ;
break ;
}
}
}
catch ( err ) {
// eslint-disable-next-line no-console
console . log ( ` Unexpected error attempting to determine the actual case of the file ' ${ filePath } ': ${ err } ` ) ;
}
return filePath ;
}
else {
if ( isUnixExecutable ( stats ) ) {
return filePath ;
}
}
}
}
return '' ;
} ) ;
}
exports . tryGetExecutablePath = tryGetExecutablePath ;
function normalizeSeparators ( p ) {
p = p || '' ;
if ( exports . IS _WINDOWS ) {
// convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// remove redundant slashes
return p . replace ( /\\\\+/g , '\\' ) ;
}
// remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
// on Mac/Linux, test the execute bit
// R W X R W X R W X
// 256 128 64 32 16 8 4 2 1
function isUnixExecutable ( stats ) {
return ( ( stats . mode & 1 ) > 0 ||
( ( stats . mode & 8 ) > 0 && stats . gid === process . getgid ( ) ) ||
( ( stats . mode & 64 ) > 0 && stats . uid === process . getuid ( ) ) ) ;
}
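// Example: a file with mode 0o755 owned by the current user satisfies (stats.mode & 64) > 0
// (owner execute bit), so it is reported as executable; a 0o644 file matches none of the masks.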
//# sourceMappingURL=io-util.js.map
/***/ } ) ,
/***/ 692 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 633 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 15 ) ) ;
const cacheHttpClient = _ _importStar ( _ _webpack _require _ _ ( 114 ) ) ;
const tar _1 = _ _webpack _require _ _ ( 434 ) ;
class ValidationError extends Error {
constructor ( message ) {
super ( message ) ;
this . name = 'ValidationError' ;
Object . setPrototypeOf ( this , ValidationError . prototype ) ;
}
}
exports . ValidationError = ValidationError ;
class ReserveCacheError extends Error {
constructor ( message ) {
super ( message ) ;
this . name = 'ReserveCacheError' ;
Object . setPrototypeOf ( this , ReserveCacheError . prototype ) ;
}
}
exports . ReserveCacheError = ReserveCacheError ;
function checkPaths ( paths ) {
if ( ! paths || paths . length === 0 ) {
throw new ValidationError ( ` Path Validation Error: At least one directory or file path is required ` ) ;
}
}
function checkKey ( key ) {
if ( key . length > 512 ) {
throw new ValidationError ( ` Key Validation Error: ${ key } cannot be larger than 512 characters. ` ) ;
}
const regex = /^[^,]*$/ ;
if ( ! regex . test ( key ) ) {
throw new ValidationError ( ` Key Validation Error: ${ key } cannot contain commas. ` ) ;
}
}
/ * *
* Restores cache from keys
*
* @ param paths a list of file paths to restore from the cache
* @ param primaryKey an explicit key for restoring the cache
* @ param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
* @ returns string returns the key for the cache hit , otherwise returns undefined
* /
function restoreCache ( paths , primaryKey , restoreKeys ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
checkPaths ( paths ) ;
restoreKeys = restoreKeys || [ ] ;
const keys = [ primaryKey , ... restoreKeys ] ;
core . debug ( 'Resolved Keys:' ) ;
core . debug ( JSON . stringify ( keys ) ) ;
if ( keys . length > 10 ) {
throw new ValidationError ( ` Key Validation Error: Keys are limited to a maximum of 10. ` ) ;
}
for ( const key of keys ) {
checkKey ( key ) ;
}
const compressionMethod = yield utils . getCompressionMethod ( ) ;
// paths are needed to compute version
const cacheEntry = yield cacheHttpClient . getCacheEntry ( keys , paths , {
compressionMethod
} ) ;
if ( ! ( cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry . archiveLocation ) ) {
// Cache not found
return undefined ;
}
const archivePath = path . join ( yield utils . createTempDirectory ( ) , utils . getCacheFileName ( compressionMethod ) ) ;
core . debug ( ` Archive Path: ${ archivePath } ` ) ;
try {
// Download the cache from the cache entry
yield cacheHttpClient . downloadCache ( cacheEntry . archiveLocation , archivePath ) ;
const archiveFileSize = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
core . info ( ` Cache Size: ~ ${ Math . round ( archiveFileSize / ( 1024 * 1024 ) ) } MB ( ${ archiveFileSize } B) ` ) ;
yield tar _1 . extractTar ( archivePath , compressionMethod ) ;
}
finally {
// Try to delete the archive to save space
try {
yield utils . unlinkFile ( archivePath ) ;
}
catch ( error ) {
core . debug ( ` Failed to delete archive: ${ error } ` ) ;
}
}
return cacheEntry . cacheKey ;
} ) ;
}
exports . restoreCache = restoreCache ;
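// Usage sketch (not executed; paths and keys are hypothetical):
//
//   const hitKey = await restoreCache(['node_modules'], 'npm-linux-abc123', ['npm-linux-']);
//   // hitKey is the matched cache key on a hit, or undefined when nothing was restored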
/ * *
* Saves a list of files with the specified key
*
* @ param paths a list of file paths to be cached
* @ param key an explicit key for restoring the cache
* @ param options cache upload options
* @ returns number returns cacheId if the cache was saved successfully and throws an error if save fails
* /
function saveCache ( paths , key , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
checkPaths ( paths ) ;
checkKey ( key ) ;
const compressionMethod = yield utils . getCompressionMethod ( ) ;
core . debug ( 'Reserving Cache' ) ;
const cacheId = yield cacheHttpClient . reserveCache ( key , paths , {
compressionMethod
} ) ;
if ( cacheId === - 1 ) {
throw new ReserveCacheError ( ` Unable to reserve cache with key ${ key } , another job may be creating this cache. ` ) ;
}
core . debug ( ` Cache ID: ${ cacheId } ` ) ;
const cachePaths = yield utils . resolvePaths ( paths ) ;
core . debug ( 'Cache Paths:' ) ;
core . debug ( ` ${ JSON . stringify ( cachePaths ) } ` ) ;
const archiveFolder = yield utils . createTempDirectory ( ) ;
const archivePath = path . join ( archiveFolder , utils . getCacheFileName ( compressionMethod ) ) ;
core . debug ( ` Archive Path: ${ archivePath } ` ) ;
yield tar _1 . createTar ( archiveFolder , cachePaths , compressionMethod ) ;
const fileSizeLimit = 5 * 1024 * 1024 * 1024 ; // 5GB per repo limit
const archiveFileSize = utils . getArchiveFileSizeIsBytes ( archivePath ) ;
core . debug ( ` File Size: ${ archiveFileSize } ` ) ;
if ( archiveFileSize > fileSizeLimit ) {
throw new Error ( ` Cache size of ~ ${ Math . round ( archiveFileSize / ( 1024 * 1024 ) ) } MB ( ${ archiveFileSize } B) is over the 5GB limit, not saving cache. ` ) ;
}
core . debug ( ` Saving Cache (ID: ${ cacheId } ) ` ) ;
yield cacheHttpClient . saveCache ( cacheId , archivePath , options ) ;
return cacheId ;
} ) ;
}
exports . saveCache = saveCache ;
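// Usage sketch (not executed; path and key are hypothetical):
//
//   const cacheId = await saveCache(['node_modules'], 'npm-linux-abc123');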
//# sourceMappingURL=cache.js.map
/***/ } ) ,
/***/ 694 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
var Inputs ;
( function ( Inputs ) {
Inputs [ "Key" ] = "key" ;
Inputs [ "Path" ] = "path" ;
Inputs [ "RestoreKeys" ] = "restore-keys" ;
} ) ( Inputs = exports . Inputs || ( exports . Inputs = { } ) ) ;
var Outputs ;
( function ( Outputs ) {
Outputs [ "CacheHit" ] = "cache-hit" ;
} ) ( Outputs = exports . Outputs || ( exports . Outputs = { } ) ) ;
var State ;
( function ( State ) {
State [ "CacheKey" ] = "CACHE_KEY" ;
State [ "CacheResult" ] = "CACHE_RESULT" ;
} ) ( State = exports . State || ( exports . State = { } ) ) ;
var Events ;
( function ( Events ) {
Events [ "Key" ] = "GITHUB_EVENT_NAME" ;
Events [ "Push" ] = "push" ;
Events [ "PullRequest" ] = "pull_request" ;
} ) ( Events = exports . Events || ( exports . Events = { } ) ) ;
exports . RefKey = "GITHUB_REF" ;
/***/ } ) ,
/***/ 722 :
/***/ ( function ( module ) {
/ * *
* Convert array of 16 byte values to UUID string format of the form :
* XXXXXXXX - XXXX - XXXX - XXXX - XXXXXXXXXXXX
* /
var byteToHex = [ ] ;
for ( var i = 0 ; i < 256 ; ++ i ) {
byteToHex [ i ] = ( i + 0x100 ) . toString ( 16 ) . substr ( 1 ) ;
}
function bytesToUuid ( buf , offset ) {
var i = offset || 0 ;
var bth = byteToHex ;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
return ( [ bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] , '-' ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ,
bth [ buf [ i ++ ] ] , bth [ buf [ i ++ ] ] ] ) . join ( '' ) ;
}
module . exports = bytesToUuid ;
/***/ } ) ,
/***/ 728 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
class SearchState {
constructor ( path , level ) {
this . path = path ;
this . level = level ;
}
}
exports . SearchState = SearchState ;
//# sourceMappingURL=internal-search-state.js.map
/***/ } ) ,
/***/ 734 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
/ * *
* Commands
*
* Command Format :
* : : name key = value , key = value : : message
*
* Examples :
* : : warning : : This is the message
* : : set - env name = MY _VAR : : some value
* /
function issueCommand ( command , properties , message ) {
const cmd = new Command ( command , properties , message ) ;
process . stdout . write ( cmd . toString ( ) + os . EOL ) ;
}
exports . issueCommand = issueCommand ;
function issue ( name , message = '' ) {
issueCommand ( name , { } , message ) ;
}
exports . issue = issue ;
const CMD _STRING = '::' ;
class Command {
constructor ( command , properties , message ) {
if ( ! command ) {
command = 'missing.command' ;
}
this . command = command ;
this . properties = properties ;
this . message = message ;
}
toString ( ) {
let cmdStr = CMD _STRING + this . command ;
if ( this . properties && Object . keys ( this . properties ) . length > 0 ) {
cmdStr += ' ' ;
let first = true ;
for ( const key in this . properties ) {
if ( this . properties . hasOwnProperty ( key ) ) {
const val = this . properties [ key ] ;
if ( val ) {
if ( first ) {
first = false ;
}
else {
cmdStr += ',' ;
}
cmdStr += ` ${ key } = ${ escapeProperty ( val ) } ` ;
}
}
}
}
cmdStr += ` ${ CMD _STRING } ${ escapeData ( this . message ) } ` ;
return cmdStr ;
}
}
/ * *
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @ param input input to sanitize into a string
* /
function toCommandValue ( input ) {
if ( input === null || input === undefined ) {
return '' ;
}
else if ( typeof input === 'string' || input instanceof String ) {
return input ;
}
return JSON . stringify ( input ) ;
}
exports . toCommandValue = toCommandValue ;
function escapeData ( s ) {
return toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' ) ;
}
function escapeProperty ( s ) {
return toCommandValue ( s )
. replace ( /%/g , '%25' )
. replace ( /\r/g , '%0D' )
. replace ( /\n/g , '%0A' )
. replace ( /:/g , '%3A' )
. replace ( /,/g , '%2C' ) ;
}
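// Worked example of the wire format (a sketch, not executed here):
//   issueCommand('warning', { file: 'app.js' }, 'Line 1\nLine 2')
// writes "::warning file=app.js::Line 1%0ALine 2" plus os.EOL to stdout. escapeData
// percent-encodes '%', '\r' and '\n' in the message, and escapeProperty additionally
// encodes ':' and ',' so property values cannot break the `::name k=v::msg` framing.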
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 747 :
/***/ ( function ( module ) {
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 778 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) result [ k ] = mod [ k ] ;
result [ "default" ] = mod ;
return result ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const cache = _ _importStar ( _ _webpack _require _ _ ( 692 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 470 ) ) ;
const constants _1 = _ _webpack _require _ _ ( 694 ) ;
const utils = _ _importStar ( _ _webpack _require _ _ ( 443 ) ) ;
function run ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
try {
// Validate inputs, this can cause task failure
if ( ! utils . isValidEvent ( ) ) {
utils . logWarning ( ` Event Validation Error: The event type ${ process . env [ constants _1 . Events . Key ] } is not supported because it's not tied to a branch or tag ref. ` ) ;
return ;
}
const primaryKey = core . getInput ( constants _1 . Inputs . Key , { required : true } ) ;
core . saveState ( constants _1 . State . CacheKey , primaryKey ) ;
const restoreKeys = core
. getInput ( constants _1 . Inputs . RestoreKeys )
. split ( "\n" )
. filter ( x => x !== "" ) ;
const cachePaths = core
. getInput ( constants _1 . Inputs . Path , { required : true } )
. split ( "\n" )
. filter ( x => x !== "" ) ;
try {
const cacheKey = yield cache . restoreCache ( cachePaths , primaryKey , restoreKeys ) ;
if ( ! cacheKey ) {
core . info ( ` Cache not found for input keys: ${ [
primaryKey ,
... restoreKeys
] . join ( ", " ) } ` );
return ;
}
// Store the cache result
utils . setCacheState ( cacheKey ) ;
const isExactKeyMatch = utils . isExactKeyMatch ( primaryKey , cacheKey ) ;
utils . setCacheHitOutput ( isExactKeyMatch ) ;
core . info ( ` Cache restored from key: ${ cacheKey } ` ) ;
}
catch ( error ) {
if ( error . name === cache . ValidationError . name ) {
throw error ;
}
else {
utils . logWarning ( error . message ) ;
utils . setCacheHitOutput ( false ) ;
}
}
}
catch ( error ) {
core . setFailed ( error . message ) ;
}
} ) ;
}
run ( ) ;
exports . default = run ;
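// Entry point for the restore step: validate the triggering event, save the primary
// key as state for the later save step, call cache.restoreCache(cachePaths, primaryKey,
// restoreKeys), and record the cache-hit output. A ValidationError is rethrown and
// fails the step via the outer catch; any other restore error is downgraded to a
// warning so an unavailable cache never fails the job.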
/***/ } ) ,
/***/ 794 :
/***/ ( function ( module ) {
module . exports = require ( "stream" ) ;
/***/ } ) ,
/***/ 826 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var rng = _ _webpack _require _ _ ( 139 ) ;
var bytesToUuid = _ _webpack _require _ _ ( 722 ) ;
function v4 ( options , buf , offset ) {
var i = buf && offset || 0 ;
if ( typeof ( options ) == 'string' ) {
buf = options === 'binary' ? new Array ( 16 ) : null ;
options = null ;
}
options = options || { } ;
var rnds = options . random || ( options . rng || rng ) ( ) ;
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds [ 6 ] = ( rnds [ 6 ] & 0x0f ) | 0x40 ;
rnds [ 8 ] = ( rnds [ 8 ] & 0x3f ) | 0x80 ;
// Copy bytes to buffer, if provided
if ( buf ) {
for ( var ii = 0 ; ii < 16 ; ++ ii ) {
buf [ i + ii ] = rnds [ ii ] ;
}
}
return buf || bytesToUuid ( rnds ) ;
}
module . exports = v4 ;
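// Illustrative usage (a sketch):
//   v4()                        // e.g. '110ec58a-a0f2-4ac4-8393-c866d813b8d1' (random)
//   v4(null, Buffer.alloc(16))  // fills the buffer with the raw 16 bytes and returns it
// Per RFC 4122 section 4.4, rnds[6] keeps only its low nibble and ORs in 0x40 (version 4),
// and rnds[8] is forced into the 0x80..0xbf range (the variant bits).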
/***/ } ) ,
/***/ 835 :
/***/ ( function ( module ) {
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 896 :
/***/ ( function ( module ) {
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
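// Illustrative: this module is a flat-map helper (concat-map style). For example,
//   module.exports(['a,b', 'c'], s => s.split(','))   // -> ['a', 'b', 'c']
// arrays returned by fn are spliced into the result; non-array values are pushed as-is.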
/***/ } ) ,
/***/ 898 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var v1 = _ _webpack _require _ _ ( 86 ) ;
var v4 = _ _webpack _require _ _ ( 826 ) ;
var uuid = v4 ;
uuid . v1 = v1 ;
uuid . v4 = v4 ;
module . exports = uuid ;
/***/ } ) ,
/***/ 923 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert = _ _webpack _require _ _ ( 357 ) ;
const os = _ _webpack _require _ _ ( 87 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const pathHelper = _ _webpack _require _ _ ( 972 ) ;
const minimatch _1 = _ _webpack _require _ _ ( 93 ) ;
const internal _match _kind _1 = _ _webpack _require _ _ ( 327 ) ;
const internal _path _1 = _ _webpack _require _ _ ( 383 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
class Pattern {
constructor ( patternOrNegate , segments ) {
/ * *
* Indicates whether matches should be excluded from the result set
* /
this . negate = false ;
// Pattern overload
let pattern ;
if ( typeof patternOrNegate === 'string' ) {
pattern = patternOrNegate . trim ( ) ;
}
// Segments overload
else {
// Convert to pattern
segments = segments || [ ] ;
assert ( segments . length , ` Parameter 'segments' must not be empty ` ) ;
const root = Pattern . getLiteral ( segments [ 0 ] ) ;
assert ( root && pathHelper . hasAbsoluteRoot ( root ) , ` Parameter 'segments' first element must be a root path ` ) ;
pattern = new internal _path _1 . Path ( segments ) . toString ( ) . trim ( ) ;
if ( patternOrNegate ) {
pattern = ` ! ${ pattern } ` ;
}
}
// Negate
while ( pattern . startsWith ( '!' ) ) {
this . negate = ! this . negate ;
pattern = pattern . substr ( 1 ) . trim ( ) ;
}
// Normalize slashes and ensures absolute root
pattern = Pattern . fixupPattern ( pattern ) ;
// Segments
this . segments = new internal _path _1 . Path ( pattern ) . segments ;
// Trailing slash indicates the pattern should only match directories, not regular files
this . trailingSeparator = pathHelper
. normalizeSeparators ( pattern )
. endsWith ( path . sep ) ;
pattern = pathHelper . safeTrimTrailingSeparator ( pattern ) ;
// Search path (literal path prior to the first glob segment)
let foundGlob = false ;
const searchSegments = this . segments
. map ( x => Pattern . getLiteral ( x ) )
. filter ( x => ! foundGlob && ! ( foundGlob = x === '' ) ) ;
this . searchPath = new internal _path _1 . Path ( searchSegments ) . toString ( ) ;
// Root RegExp (required when determining partial match)
this . rootRegExp = new RegExp ( Pattern . regExpEscape ( searchSegments [ 0 ] ) , IS _WINDOWS ? 'i' : '' ) ;
// Create minimatch
const minimatchOptions = {
dot : true ,
nobrace : true ,
nocase : IS _WINDOWS ,
nocomment : true ,
noext : true ,
nonegate : true
} ;
pattern = IS _WINDOWS ? pattern . replace ( /\\/g , '/' ) : pattern ;
this . minimatch = new minimatch _1 . Minimatch ( pattern , minimatchOptions ) ;
}
/ * *
* Matches the pattern against the specified path
* /
match ( itemPath ) {
// Last segment is globstar?
if ( this . segments [ this . segments . length - 1 ] === '**' ) {
// Normalize slashes
itemPath = pathHelper . normalizeSeparators ( itemPath ) ;
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
if ( ! itemPath . endsWith ( path . sep ) ) {
// Note, this is safe because the constructor ensures the pattern has an absolute root.
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
itemPath = ` ${ itemPath } ${ path . sep } ` ;
}
}
else {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
}
// Match
if ( this . minimatch . match ( itemPath ) ) {
return this . trailingSeparator ? internal _match _kind _1 . MatchKind . Directory : internal _match _kind _1 . MatchKind . All ;
}
return internal _match _kind _1 . MatchKind . None ;
}
/ * *
* Indicates whether the pattern may match descendants of the specified path
* /
partialMatch ( itemPath ) {
// Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper . safeTrimTrailingSeparator ( itemPath ) ;
// matchOne does not handle root path correctly
if ( pathHelper . dirname ( itemPath ) === itemPath ) {
return this . rootRegExp . test ( itemPath ) ;
}
return this . minimatch . matchOne ( itemPath . split ( IS _WINDOWS ? /\\+/ : /\/+/ ) , this . minimatch . set [ 0 ] , true ) ;
}
/ * *
* Escapes glob patterns within a path
* /
static globEscape ( s ) {
return ( IS _WINDOWS ? s : s . replace ( /\\/g , '\\\\' ) ) // escape '\' on Linux/macOS
. replace ( /(\[)(?=[^/]+\])/g , '[[]' ) // escape '[' when ']' follows within the path segment
. replace ( /\?/g , '[?]' ) // escape '?'
. replace ( /\*/g , '[*]' ) ; // escape '*'
}
/ * *
* Normalizes slashes and ensures absolute root
* /
static fixupPattern ( pattern ) {
// Empty
assert ( pattern , 'pattern cannot be empty' ) ;
// Must not contain `.` segment, unless first segment
// Must not contain `..` segment
const literalSegments = new internal _path _1 . Path ( pattern ) . segments . map ( x => Pattern . getLiteral ( x ) ) ;
assert ( literalSegments . every ( ( x , i ) => ( x !== '.' || i === 0 ) && x !== '..' ) , ` Invalid pattern ' ${ pattern } '. Relative pathing '.' and '..' is not allowed. ` ) ;
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
assert ( ! pathHelper . hasRoot ( pattern ) || literalSegments [ 0 ] , ` Invalid pattern ' ${ pattern } '. Root segment must not contain globs. ` ) ;
// Normalize slashes
pattern = pathHelper . normalizeSeparators ( pattern ) ;
// Replace leading `.` segment
if ( pattern === '.' || pattern . startsWith ( ` . ${ path . sep } ` ) ) {
pattern = Pattern . globEscape ( process . cwd ( ) ) + pattern . substr ( 1 ) ;
}
// Replace leading `~` segment
else if ( pattern === '~' || pattern . startsWith ( ` ~ ${ path . sep } ` ) ) {
const homedir = os . homedir ( ) ;
assert ( homedir , 'Unable to determine HOME directory' ) ;
assert ( pathHelper . hasAbsoluteRoot ( homedir ) , ` Expected HOME directory to be a rooted path. Actual ' ${ homedir } ' ` ) ;
pattern = Pattern . globEscape ( homedir ) + pattern . substr ( 1 ) ;
}
// Replace relative drive root, e.g. pattern is C: or C:foo
else if ( IS _WINDOWS &&
( pattern . match ( /^[A-Z]:$/i ) || pattern . match ( /^[A-Z]:[^\\]/i ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , pattern . substr ( 0 , 2 ) ) ;
if ( pattern . length > 2 && ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 2 ) ;
}
// Replace relative root, e.g. pattern is \ or \foo
else if ( IS _WINDOWS && ( pattern === '\\' || pattern . match ( /^\\[^\\]/ ) ) ) {
let root = pathHelper . ensureAbsoluteRoot ( 'C:\\dummy-root' , '\\' ) ;
if ( ! root . endsWith ( '\\' ) ) {
root += '\\' ;
}
pattern = Pattern . globEscape ( root ) + pattern . substr ( 1 ) ;
}
// Otherwise ensure absolute root
else {
pattern = pathHelper . ensureAbsoluteRoot ( Pattern . globEscape ( process . cwd ( ) ) , pattern ) ;
}
return pathHelper . normalizeSeparators ( pattern ) ;
}
/ * *
* Attempts to unescape a pattern segment to create a literal path segment .
* Otherwise returns empty string .
* /
static getLiteral ( segment ) {
let literal = '' ;
for ( let i = 0 ; i < segment . length ; i ++ ) {
const c = segment [ i ] ;
// Escape
if ( c === '\\' && ! IS _WINDOWS && i + 1 < segment . length ) {
literal += segment [ ++ i ] ;
continue ;
}
// Wildcard
else if ( c === '*' || c === '?' ) {
return '' ;
}
// Character set
else if ( c === '[' && i + 1 < segment . length ) {
let set = '' ;
let closed = - 1 ;
for ( let i2 = i + 1 ; i2 < segment . length ; i2 ++ ) {
const c2 = segment [ i2 ] ;
// Escape
if ( c2 === '\\' && ! IS _WINDOWS && i2 + 1 < segment . length ) {
set += segment [ ++ i2 ] ;
continue ;
}
// Closed
else if ( c2 === ']' ) {
closed = i2 ;
break ;
}
// Otherwise
else {
set += c2 ;
}
}
// Closed?
if ( closed >= 0 ) {
// Cannot convert
if ( set . length > 1 ) {
return '' ;
}
// Convert to literal
if ( set ) {
literal += set ;
i = closed ;
continue ;
}
}
// Otherwise fall thru
}
// Append
literal += c ;
}
return literal ;
}
/ * *
* Escapes regexp special characters
* https : //javascript.info/regexp-escaping
* /
static regExpEscape ( s ) {
return s . replace ( /[[\\^$.|?*+()]/g , '\\$&' ) ;
}
}
exports . Pattern = Pattern ;
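// A sketch of how Pattern is consumed by the globber (Linux/macOS paths, chosen for
// illustration only):
//   const p = new Pattern('/tmp/**/*.txt');
//   p.searchPath                  // '/tmp', the literal prefix that is walked on disk
//   p.match('/tmp/a/b.txt')       // MatchKind.All
//   p.partialMatch('/tmp/a')      // true, descendants may still match
// A leading '!' negates the pattern and a trailing '/' restricts matches to directories.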
//# sourceMappingURL=internal-pattern.js.map
/***/ } ) ,
/***/ 931 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
var CacheFilename ;
( function ( CacheFilename ) {
CacheFilename [ "Gzip" ] = "cache.tgz" ;
CacheFilename [ "Zstd" ] = "cache.tzst" ;
} ) ( CacheFilename = exports . CacheFilename || ( exports . CacheFilename = { } ) ) ;
var CompressionMethod ;
( function ( CompressionMethod ) {
CompressionMethod [ "Gzip" ] = "gzip" ;
CompressionMethod [ "Zstd" ] = "zstd" ;
} ) ( CompressionMethod = exports . CompressionMethod || ( exports . CompressionMethod = { } ) ) ;
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports . SocketTimeout = 5000 ;
//# sourceMappingURL=constants.js.map
/***/ } ) ,
/***/ 950 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
function getProxyUrl ( reqUrl ) {
let usingSsl = reqUrl . protocol === 'https:' ;
let proxyUrl ;
if ( checkBypass ( reqUrl ) ) {
return proxyUrl ;
}
let proxyVar ;
if ( usingSsl ) {
proxyVar = process . env [ 'https_proxy' ] || process . env [ 'HTTPS_PROXY' ] ;
}
else {
proxyVar = process . env [ 'http_proxy' ] || process . env [ 'HTTP_PROXY' ] ;
}
if ( proxyVar ) {
proxyUrl = url . parse ( proxyVar ) ;
}
return proxyUrl ;
}
exports . getProxyUrl = getProxyUrl ;
function checkBypass ( reqUrl ) {
if ( ! reqUrl . hostname ) {
return false ;
}
let noProxy = process . env [ 'no_proxy' ] || process . env [ 'NO_PROXY' ] || '' ;
if ( ! noProxy ) {
return false ;
}
// Determine the request port
let reqPort ;
if ( reqUrl . port ) {
reqPort = Number ( reqUrl . port ) ;
}
else if ( reqUrl . protocol === 'http:' ) {
reqPort = 80 ;
}
else if ( reqUrl . protocol === 'https:' ) {
reqPort = 443 ;
}
// Format the request hostname and hostname with port
let upperReqHosts = [ reqUrl . hostname . toUpperCase ( ) ] ;
if ( typeof reqPort === 'number' ) {
upperReqHosts . push ( ` ${ upperReqHosts [ 0 ] } : ${ reqPort } ` ) ;
}
// Compare request host against noproxy
for ( let upperNoProxyItem of noProxy
. split ( ',' )
. map ( x => x . trim ( ) . toUpperCase ( ) )
. filter ( x => x ) ) {
if ( upperReqHosts . some ( x => x === upperNoProxyItem ) ) {
return true ;
}
}
return false ;
}
exports . checkBypass = checkBypass ;
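// Illustrative behaviour, assuming the legacy url.parse objects used above and
// hypothetical proxy settings:
//   process.env.HTTPS_PROXY = 'https://proxy.local:8443';
//   getProxyUrl(url.parse('https://example.com'))       // -> parsed proxy URL
//   process.env.NO_PROXY = 'example.com';
//   checkBypass(url.parse('https://example.com:443'))   // -> true, host matches with or without port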
/***/ } ) ,
/***/ 972 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert = _ _webpack _require _ _ ( 357 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
const IS _WINDOWS = process . platform === 'win32' ;
/ * *
* Similar to path . dirname except normalizes the path separators and slightly better handling for Windows UNC paths .
*
* For example , on Linux / macOS :
* - ` / => / `
* - ` /hello => / `
*
* For example , on Windows :
* - ` C:\ => C:\ `
* - ` C:\hello => C:\ `
* - ` C: => C: `
* - ` C:hello => C: `
* - ` \ => \ `
* - ` \hello => \ `
* - ` \\hello => \\hello `
* - ` \\hello\world => \\hello\world `
* /
function dirname ( p ) {
// Normalize slashes and trim unnecessary trailing slash
p = safeTrimTrailingSeparator ( p ) ;
// Windows UNC root, e.g. \\hello or \\hello\world
if ( IS _WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/ . test ( p ) ) {
return p ;
}
// Get dirname
let result = path . dirname ( p ) ;
// Trim trailing slash for Windows UNC root, e.g. \\hello\world\
if ( IS _WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/ . test ( result ) ) {
result = safeTrimTrailingSeparator ( result ) ;
}
return result ;
}
exports . dirname = dirname ;
/ * *
* Roots the path if not already rooted . On Windows , relative roots like ` \ `
* or ` C: ` are expanded based on the current working directory .
* /
function ensureAbsoluteRoot ( root , itemPath ) {
assert ( root , ` ensureAbsoluteRoot parameter 'root' must not be empty ` ) ;
assert ( itemPath , ` ensureAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Already rooted
if ( hasAbsoluteRoot ( itemPath ) ) {
return itemPath ;
}
// Windows
if ( IS _WINDOWS ) {
// Check for itemPath like C: or C:foo
if ( itemPath . match ( /^[A-Z]:[^\\/]|^[A-Z]:$/i ) ) {
let cwd = process . cwd ( ) ;
assert ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
// Drive letter matches cwd? Expand to cwd
if ( itemPath [ 0 ] . toUpperCase ( ) === cwd [ 0 ] . toUpperCase ( ) ) {
// Drive only, e.g. C:
if ( itemPath . length === 2 ) {
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ` ;
}
// Drive + path, e.g. C:foo
else {
if ( ! cwd . endsWith ( '\\' ) ) {
cwd += '\\' ;
}
// Preserve specified drive letter case (upper or lower)
return ` ${ itemPath [ 0 ] } : \\ ${ cwd . substr ( 3 ) } ${ itemPath . substr ( 2 ) } ` ;
}
}
// Different drive
else {
return ` ${ itemPath [ 0 ] } : \\ ${ itemPath . substr ( 2 ) } ` ;
}
}
// Check for itemPath like \ or \foo
else if ( normalizeSeparators ( itemPath ) . match ( /^\\$|^\\[^\\]/ ) ) {
const cwd = process . cwd ( ) ;
assert ( cwd . match ( /^[A-Z]:\\/i ) , ` Expected current directory to start with an absolute drive root. Actual ' ${ cwd } ' ` ) ;
return ` ${ cwd [ 0 ] } : \\ ${ itemPath . substr ( 1 ) } ` ;
}
}
assert ( hasAbsoluteRoot ( root ) , ` ensureAbsoluteRoot parameter 'root' must have an absolute root ` ) ;
// Otherwise ensure root ends with a separator
if ( root . endsWith ( '/' ) || ( IS _WINDOWS && root . endsWith ( '\\' ) ) ) {
// Intentionally empty
}
else {
// Append separator
root += path . sep ;
}
return root + itemPath ;
}
exports . ensureAbsoluteRoot = ensureAbsoluteRoot ;
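// Illustrative results for the Windows-only branches above (the cwd is assumed to be
// 'D:\work', which is hypothetical):
//   ensureAbsoluteRoot('C:\\base', 'foo')    // -> 'C:\\base\\foo'  (separator appended to root)
//   ensureAbsoluteRoot('C:\\base', 'E:hi')   // -> 'E:\\hi'         (relative root on a different drive)
//   ensureAbsoluteRoot('C:\\base', '\\hi')   // -> 'D:\\hi'         (drive taken from the assumed cwd)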
/ * *
* On Linux / macOS , true if path starts with ` / ` . On Windows , true for paths like :
* ` \\hello\share ` and ` C:\hello ` ( and using alternate separator ) .
* /
function hasAbsoluteRoot ( itemPath ) {
assert ( itemPath , ` hasAbsoluteRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \\hello\share or C:\hello
return itemPath . startsWith ( '\\\\' ) || /^[A-Z]:\\/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasAbsoluteRoot = hasAbsoluteRoot ;
/ * *
* On Linux / macOS , true if path starts with ` / ` . On Windows , true for paths like :
* ` \` , ` \ hello ` , ` \ \ hello \ share ` , ` C : ` , and ` C : \ hello ` (and using alternate separator).
* /
function hasRoot ( itemPath ) {
assert ( itemPath , ` hasRoot parameter 'itemPath' must not be empty ` ) ;
// Normalize separators
itemPath = normalizeSeparators ( itemPath ) ;
// Windows
if ( IS _WINDOWS ) {
// E.g. \ or \hello or \\hello
// E.g. C: or C:\hello
return itemPath . startsWith ( '\\' ) || /^[A-Z]:/i . test ( itemPath ) ;
}
// E.g. /hello
return itemPath . startsWith ( '/' ) ;
}
exports . hasRoot = hasRoot ;
/ * *
* Removes redundant slashes and converts ` / ` to ` \ ` on Windows
* /
function normalizeSeparators ( p ) {
p = p || '' ;
// Windows
if ( IS _WINDOWS ) {
// Convert slashes on Windows
p = p . replace ( /\//g , '\\' ) ;
// Remove redundant slashes
const isUnc = /^\\\\+[^\\]/ . test ( p ) ; // e.g. \\hello
return ( isUnc ? '\\' : '' ) + p . replace ( /\\\\+/g , '\\' ) ; // preserve leading \\ for UNC
}
// Remove redundant slashes
return p . replace ( /\/\/+/g , '/' ) ;
}
exports . normalizeSeparators = normalizeSeparators ;
/ * *
* Normalizes the path separators and trims the trailing separator ( when safe ) .
* For example , ` /foo/ => /foo ` but ` / => / `
* /
function safeTrimTrailingSeparator ( p ) {
// Short-circuit if empty
if ( ! p ) {
return '' ;
}
// Normalize separators
p = normalizeSeparators ( p ) ;
// No trailing slash
if ( ! p . endsWith ( path . sep ) ) {
return p ;
}
// Check '/' on Linux/macOS and '\' on Windows
if ( p === path . sep ) {
return p ;
}
// On Windows check if drive root. E.g. C:\
if ( IS _WINDOWS && /^[A-Z]:\\$/i . test ( p ) ) {
return p ;
}
// Otherwise trim trailing slash
return p . substr ( 0 , p . length - 1 ) ;
}
exports . safeTrimTrailingSeparator = safeTrimTrailingSeparator ;
//# sourceMappingURL=internal-path-helper.js.map
/***/ } ) ,
/***/ 986 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const tr = _ _webpack _require _ _ ( 9 ) ;
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
function exec ( commandLine , args , options ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const commandArgs = tr . argStringToArray ( commandLine ) ;
if ( commandArgs . length === 0 ) {
throw new Error ( ` Parameter 'commandLine' cannot be null or empty. ` ) ;
}
// Path to tool to execute should be first arg
const toolPath = commandArgs [ 0 ] ;
args = commandArgs . slice ( 1 ) . concat ( args || [ ] ) ;
const runner = new tr . ToolRunner ( toolPath , args , options ) ;
return runner . exec ( ) ;
} ) ;
}
exports . exec = exec ;
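// Illustrative call (a sketch; the tar arguments are hypothetical):
//   const code = await exec('tar', ['-czf', archive, '-C', cwd, '.']);
// The command line is tokenized with argStringToArray, the first token is treated as
// the tool path, and the promise resolves with the exit code; by default a non-zero
// exit rejects unless the caller opts out via options (e.g. ignoreReturnCode).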
//# sourceMappingURL=exec.js.map
/***/ } )
/******/ } ) ;