Mirror of https://github.com/actions/setup-java.git
Synced 2024-12-02 21:30:58 -05:00

Merge branch 'actions:main' into main

This commit is contained in commit 3653181378.
6 changed files with 525 additions and 94 deletions.

BIN  .licenses/npm/undici.dep.yml (generated)
Binary file not shown.
@@ -115,7 +115,8 @@ describe('findPackageForDownload', () => {
        const expectedUrl = `https://download.oracle.com/java/18/archive/jdk-18_${osType}-${distroArch}_bin.${archiveType}`;

        expect(result.url).toBe(expectedUrl);
      }
    },
    10000
  );

  it('should throw an error', async () => {
297  dist/cleanup/index.js (vendored)
@@ -66943,6 +66943,132 @@ function onConnectTimeout (socket) {
module.exports = buildConnector


/***/ }),

/***/ 4462:
/***/ ((module) => {

"use strict";


/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}

// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
  'Accept',
  'Accept-Encoding',
  'Accept-Language',
  'Accept-Ranges',
  'Access-Control-Allow-Credentials',
  'Access-Control-Allow-Headers',
  'Access-Control-Allow-Methods',
  'Access-Control-Allow-Origin',
  'Access-Control-Expose-Headers',
  'Access-Control-Max-Age',
  'Access-Control-Request-Headers',
  'Access-Control-Request-Method',
  'Age',
  'Allow',
  'Alt-Svc',
  'Alt-Used',
  'Authorization',
  'Cache-Control',
  'Clear-Site-Data',
  'Connection',
  'Content-Disposition',
  'Content-Encoding',
  'Content-Language',
  'Content-Length',
  'Content-Location',
  'Content-Range',
  'Content-Security-Policy',
  'Content-Security-Policy-Report-Only',
  'Content-Type',
  'Cookie',
  'Cross-Origin-Embedder-Policy',
  'Cross-Origin-Opener-Policy',
  'Cross-Origin-Resource-Policy',
  'Date',
  'Device-Memory',
  'Downlink',
  'ECT',
  'ETag',
  'Expect',
  'Expect-CT',
  'Expires',
  'Forwarded',
  'From',
  'Host',
  'If-Match',
  'If-Modified-Since',
  'If-None-Match',
  'If-Range',
  'If-Unmodified-Since',
  'Keep-Alive',
  'Last-Modified',
  'Link',
  'Location',
  'Max-Forwards',
  'Origin',
  'Permissions-Policy',
  'Pragma',
  'Proxy-Authenticate',
  'Proxy-Authorization',
  'RTT',
  'Range',
  'Referer',
  'Referrer-Policy',
  'Refresh',
  'Retry-After',
  'Sec-WebSocket-Accept',
  'Sec-WebSocket-Extensions',
  'Sec-WebSocket-Key',
  'Sec-WebSocket-Protocol',
  'Sec-WebSocket-Version',
  'Server',
  'Server-Timing',
  'Service-Worker-Allowed',
  'Service-Worker-Navigation-Preload',
  'Set-Cookie',
  'SourceMap',
  'Strict-Transport-Security',
  'Supports-Loading-Mode',
  'TE',
  'Timing-Allow-Origin',
  'Trailer',
  'Transfer-Encoding',
  'Upgrade',
  'Upgrade-Insecure-Requests',
  'User-Agent',
  'Vary',
  'Via',
  'WWW-Authenticate',
  'X-Content-Type-Options',
  'X-DNS-Prefetch-Control',
  'X-Frame-Options',
  'X-Permitted-Cross-Domain-Policies',
  'X-Powered-By',
  'X-Requested-With',
  'X-XSS-Protection'
]

for (let i = 0; i < wellknownHeaderNames.length; ++i) {
  const key = wellknownHeaderNames[i]
  const lowerCasedKey = key.toLowerCase()
  headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
    lowerCasedKey
}

// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)

module.exports = {
  wellknownHeaderNames,
  headerNameLowerCasedRecord
}


/***/ }),

/***/ 8045:
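The module registered here as 4462 is undici's new header-name table: it maps both the canonical and the lowercased spelling of every well-known header name to its lowercase form, and it detaches the table from Object.prototype so a lookup can never hit an inherited member. A minimal standalone sketch of that pattern (illustrative only, not the bundled module):

// Sketch: a prototype-less lowercase lookup table for a couple of header names.
const record = Object.create(null)
for (const name of ['Content-Type', 'Authorization']) {
  record[name] = record[name.toLowerCase()] = name.toLowerCase()
}

record['Content-Type']      // 'content-type'
record['content-type']      // 'content-type'
record['hasOwnProperty']    // undefined, nothing is inherited from Object.prototype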
@@ -67775,6 +67901,7 @@ const { InvalidArgumentError } = __nccwpck_require__(8045)
const { Blob } = __nccwpck_require__(4300)
const nodeUtil = __nccwpck_require__(3837)
const { stringify } = __nccwpck_require__(3477)
const { headerNameLowerCasedRecord } = __nccwpck_require__(4462)

const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@@ -67984,6 +68111,15 @@ function parseKeepAliveTimeout (val) {
  return m ? parseInt(m[1], 10) * 1000 : null
}

/**
 * Retrieves a header name and returns its lowercase value.
 * @param {string | Buffer} value Header name
 * @returns {string}
 */
function headerNameToString (value) {
  return headerNameLowerCasedRecord[value] || value.toLowerCase()
}

function parseHeaders (headers, obj = {}) {
  // For H2 support
  if (!Array.isArray(headers)) return headers
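headerNameToString gives callers such as shouldRemoveHeader (further down in this diff) a cheap way to normalize header names: known spellings resolve through the precomputed record, and anything else falls back to toLowerCase(). A small usage sketch, assuming the record and function defined above are in scope:

// Sketch: normalizing a list of raw header names with the table-backed helper.
const rawNames = ['Content-Length', 'content-length', 'X-My-Custom-Header']
const normalized = rawNames.map((name) => headerNameToString(name))
console.log(normalized)
// [ 'content-length', 'content-length', 'x-my-custom-header' ]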
@@ -68255,6 +68391,7 @@ module.exports = {
  isIterable,
  isAsyncIterable,
  isDestroyed,
  headerNameToString,
  parseRawHeaders,
  parseHeaders,
  parseKeepAliveTimeout,
@@ -74902,14 +75039,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983
const assert = __nccwpck_require__(9491)
const { isUint8Array } = __nccwpck_require__(9830)

let supportedHashes = []

// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto

try {
  crypto = __nccwpck_require__(6113)
  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch {

}

function responseURL (response) {
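Wrapping the crypto require in try/catch means integrity checking degrades gracefully on Node builds without crypto support: supportedHashes stays empty and bytesMatch (below) simply skips validation. A standalone sketch of the same detection pattern, assuming Node's built-in crypto module:

// Sketch: detect which SRI-relevant hash algorithms this Node build supports.
let supportedHashes = []
let crypto
try {
  crypto = require('node:crypto')
  const relevant = ['sha256', 'sha384', 'sha512']
  supportedHashes = crypto.getHashes().filter((hash) => relevant.includes(hash))
} catch {
  // crypto support is unavailable; leave supportedHashes empty
}
console.log(supportedHashes)   // typically [ 'sha256', 'sha384', 'sha512' ]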
@@ -75437,66 +75578,56 @@ function bytesMatch (bytes, metadataList) {
    return true
  }

  // 3. If parsedMetadata is the empty set, return true.
  // 3. If response is not eligible for integrity validation, return false.
  // TODO

  // 4. If parsedMetadata is the empty set, return true.
  if (parsedMetadata.length === 0) {
    return true
  }

  // 4. Let metadata be the result of getting the strongest
  // 5. Let metadata be the result of getting the strongest
  // metadata from parsedMetadata.
  const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
  // get the strongest algorithm
  const strongest = list[0].algo
  // get all entries that use the strongest algorithm; ignore weaker
  const metadata = list.filter((item) => item.algo === strongest)
  const strongest = getStrongestMetadata(parsedMetadata)
  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)

  // 5. For each item in metadata:
  // 6. For each item in metadata:
  for (const item of metadata) {
    // 1. Let algorithm be the alg component of item.
    const algorithm = item.algo

    // 2. Let expectedValue be the val component of item.
    let expectedValue = item.hash
    const expectedValue = item.hash

    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
    // "be liberal with padding". This is annoying, and it's not even in the spec.

    if (expectedValue.endsWith('==')) {
      expectedValue = expectedValue.slice(0, -2)
    }

    // 3. Let actualValue be the result of applying algorithm to bytes.
    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')

    if (actualValue.endsWith('==')) {
      actualValue = actualValue.slice(0, -2)
    if (actualValue[actualValue.length - 1] === '=') {
      if (actualValue[actualValue.length - 2] === '=') {
        actualValue = actualValue.slice(0, -2)
      } else {
        actualValue = actualValue.slice(0, -1)
      }
    }

    // 4. If actualValue is a case-sensitive match for expectedValue,
    // return true.
    if (actualValue === expectedValue) {
      return true
    }

    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')

    if (actualBase64URL.endsWith('==')) {
      actualBase64URL = actualBase64URL.slice(0, -2)
    }

    if (actualBase64URL === expectedValue) {
    if (compareBase64Mixed(actualValue, expectedValue)) {
      return true
    }
  }

  // 6. Return false.
  // 7. Return false.
  return false
}

// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i

/**
 * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
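The rewritten comparison trims at most the genuine base64 padding from the computed digest and then, via the compareBase64Mixed helper added later in this diff, compares character by character while treating '+'/'-' and '/'/'_' as equivalent, so a base64url-encoded integrity value still matches without re-hashing. A standalone sketch of that idea, assuming node:crypto is available (not the bundled code itself):

const { createHash } = require('node:crypto')

// Sketch: compare a computed base64 digest against an expected base64 or base64url value.
function digestMatches (algorithm, bytes, expectedValue) {
  // strip only genuine '=' padding from the computed value
  const actualValue = createHash(algorithm).update(bytes).digest('base64').replace(/=+$/, '')
  if (actualValue.length !== expectedValue.length) return false
  for (let i = 0; i < actualValue.length; ++i) {
    const a = actualValue[i]
    const e = expectedValue[i]
    const equivalent = (a === '+' && e === '-') || (a === '/' && e === '_')
    if (a !== e && !equivalent) return false
  }
  return true
}

const body = Buffer.from('hello world')
const expected = createHash('sha256').update(body).digest('base64url')   // unpadded base64url
console.log(digestMatches('sha256', body, expected))   // true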
@@ -75510,8 +75641,6 @@ function parseMetadata (metadata) {
  // 2. Let empty be equal to true.
  let empty = true

  const supportedHashes = crypto.getHashes()

  // 3. For each token returned by splitting metadata on spaces:
  for (const token of metadata.split(' ')) {
    // 1. Set empty to false.
@@ -75521,7 +75650,11 @@ function parseMetadata (metadata) {
    const parsedToken = parseHashWithOptions.exec(token)

    // 3. If token does not parse, continue to the next token.
    if (parsedToken === null || parsedToken.groups === undefined) {
    if (
      parsedToken === null ||
      parsedToken.groups === undefined ||
      parsedToken.groups.algo === undefined
    ) {
      // Note: Chromium blocks the request at this point, but Firefox
      // gives a warning that an invalid integrity was given. The
      // correct behavior is to ignore these, and subsequently not
@@ -75530,11 +75663,11 @@ function parseMetadata (metadata) {
    }

    // 4. Let algorithm be the hash-algo component of token.
    const algorithm = parsedToken.groups.algo
    const algorithm = parsedToken.groups.algo.toLowerCase()

    // 5. If algorithm is a hash function recognized by the user
    // agent, add the parsed token to result.
    if (supportedHashes.includes(algorithm.toLowerCase())) {
    if (supportedHashes.includes(algorithm)) {
      result.push(parsedToken.groups)
    }
  }
@@ -75547,6 +75680,82 @@ function parseMetadata (metadata) {
  return result
}

/**
 * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
 */
function getStrongestMetadata (metadataList) {
  // Let algorithm be the algo component of the first item in metadataList.
  // Can be sha256
  let algorithm = metadataList[0].algo
  // If the algorithm is sha512, then it is the strongest
  // and we can return immediately
  if (algorithm[3] === '5') {
    return algorithm
  }

  for (let i = 1; i < metadataList.length; ++i) {
    const metadata = metadataList[i]
    // If the algorithm is sha512, then it is the strongest
    // and we can break the loop immediately
    if (metadata.algo[3] === '5') {
      algorithm = 'sha512'
      break
    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
    } else if (algorithm[3] === '3') {
      continue
    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
    // the strongest
    } else if (metadata.algo[3] === '3') {
      algorithm = 'sha384'
    }
  }
  return algorithm
}

function filterMetadataListByAlgorithm (metadataList, algorithm) {
  if (metadataList.length === 1) {
    return metadataList
  }

  let pos = 0
  for (let i = 0; i < metadataList.length; ++i) {
    if (metadataList[i].algo === algorithm) {
      metadataList[pos++] = metadataList[i]
    }
  }

  metadataList.length = pos

  return metadataList
}

/**
 * Compares two base64 strings, allowing for base64url
 * in the second string.
 *
 * @param {string} actualValue always base64
 * @param {string} expectedValue base64 or base64url
 * @returns {boolean}
 */
function compareBase64Mixed (actualValue, expectedValue) {
  if (actualValue.length !== expectedValue.length) {
    return false
  }
  for (let i = 0; i < actualValue.length; ++i) {
    if (actualValue[i] !== expectedValue[i]) {
      if (
        (actualValue[i] === '+' && expectedValue[i] === '-') ||
        (actualValue[i] === '/' && expectedValue[i] === '_')
      ) {
        continue
      }
      return false
    }
  }

  return true
}

// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
  // TODO
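Taken together, parseMetadata, getStrongestMetadata and filterMetadataListByAlgorithm reduce an integrity attribute to the entries of the strongest algorithm present, which is what bytesMatch then checks. A rough illustration of that behaviour with hypothetical hash values, reusing the regular expression introduced above:

// Sketch: tokenize an SRI attribute and keep only the strongest algorithm's entries.
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i

const integrity = 'sha256-aaaa sha512-bbbb sha512-cccc'   // hypothetical hashes
const parsed = []
for (const token of integrity.split(' ')) {
  const match = parseHashWithOptions.exec(token)
  if (match && match.groups && match.groups.algo) {
    parsed.push({ algo: match.groups.algo.toLowerCase(), hash: match.groups.hash })
  }
}

// sha512 is the strongest algorithm present, so the sha256 entry is ignored.
const strongest = parsed.some((m) => m.algo === 'sha512')
  ? 'sha512'
  : parsed.some((m) => m.algo === 'sha384') ? 'sha384' : 'sha256'
console.log(parsed.filter((m) => m.algo === strongest))
// [ { algo: 'sha512', hash: 'bbbb' }, { algo: 'sha512', hash: 'cccc' } ]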
@@ -75962,7 +76171,8 @@ module.exports = {
  urlHasHttpsScheme,
  urlIsHttpHttpsScheme,
  readAllBytes,
  normalizeMethodRecord
  normalizeMethodRecord,
  parseMetadata
}
@@ -78049,12 +78259,17 @@ function parseLocation (statusCode, headers) {

// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  return (
    (header.length === 4 && header.toString().toLowerCase() === 'host') ||
    (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
    (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
    (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
  )
  if (header.length === 4) {
    return util.headerNameToString(header) === 'host'
  }
  if (removeContent && util.headerNameToString(header).startsWith('content-')) {
    return true
  }
  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
    const name = util.headerNameToString(header)
    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
  }
  return false
}

// https://tools.ietf.org/html/rfc7231#section-6.4
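The reworked shouldRemoveHeader keeps dropping host and content-* headers on redirects, and when the redirect crosses to an unknown origin it now also strips proxy-authorization (length 19) alongside authorization and cookie, using the cheap length check before the lowercased comparison. A condensed sketch of the decision, with a stand-in for util.headerNameToString:

// Sketch: which request headers are dropped when following a redirect.
const toLower = (h) => h.toString().toLowerCase()   // stand-in for util.headerNameToString

function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  if (header.length === 4) return toLower(header) === 'host'
  if (removeContent && toLower(header).startsWith('content-')) return true
  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
    const name = toLower(header)
    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
  }
  return false
}

console.log(shouldRemoveHeader('Proxy-Authorization', false, true))   // true on cross-origin redirects
console.log(shouldRemoveHeader('Accept', false, true))                // false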
297  dist/setup/index.js (vendored)
@@ -91797,6 +91797,132 @@ function onConnectTimeout (socket) {
module.exports = buildConnector


/***/ }),

/***/ 14462:
/***/ ((module) => {

"use strict";


/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}

// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
  'Accept',
  'Accept-Encoding',
  'Accept-Language',
  'Accept-Ranges',
  'Access-Control-Allow-Credentials',
  'Access-Control-Allow-Headers',
  'Access-Control-Allow-Methods',
  'Access-Control-Allow-Origin',
  'Access-Control-Expose-Headers',
  'Access-Control-Max-Age',
  'Access-Control-Request-Headers',
  'Access-Control-Request-Method',
  'Age',
  'Allow',
  'Alt-Svc',
  'Alt-Used',
  'Authorization',
  'Cache-Control',
  'Clear-Site-Data',
  'Connection',
  'Content-Disposition',
  'Content-Encoding',
  'Content-Language',
  'Content-Length',
  'Content-Location',
  'Content-Range',
  'Content-Security-Policy',
  'Content-Security-Policy-Report-Only',
  'Content-Type',
  'Cookie',
  'Cross-Origin-Embedder-Policy',
  'Cross-Origin-Opener-Policy',
  'Cross-Origin-Resource-Policy',
  'Date',
  'Device-Memory',
  'Downlink',
  'ECT',
  'ETag',
  'Expect',
  'Expect-CT',
  'Expires',
  'Forwarded',
  'From',
  'Host',
  'If-Match',
  'If-Modified-Since',
  'If-None-Match',
  'If-Range',
  'If-Unmodified-Since',
  'Keep-Alive',
  'Last-Modified',
  'Link',
  'Location',
  'Max-Forwards',
  'Origin',
  'Permissions-Policy',
  'Pragma',
  'Proxy-Authenticate',
  'Proxy-Authorization',
  'RTT',
  'Range',
  'Referer',
  'Referrer-Policy',
  'Refresh',
  'Retry-After',
  'Sec-WebSocket-Accept',
  'Sec-WebSocket-Extensions',
  'Sec-WebSocket-Key',
  'Sec-WebSocket-Protocol',
  'Sec-WebSocket-Version',
  'Server',
  'Server-Timing',
  'Service-Worker-Allowed',
  'Service-Worker-Navigation-Preload',
  'Set-Cookie',
  'SourceMap',
  'Strict-Transport-Security',
  'Supports-Loading-Mode',
  'TE',
  'Timing-Allow-Origin',
  'Trailer',
  'Transfer-Encoding',
  'Upgrade',
  'Upgrade-Insecure-Requests',
  'User-Agent',
  'Vary',
  'Via',
  'WWW-Authenticate',
  'X-Content-Type-Options',
  'X-DNS-Prefetch-Control',
  'X-Frame-Options',
  'X-Permitted-Cross-Domain-Policies',
  'X-Powered-By',
  'X-Requested-With',
  'X-XSS-Protection'
]

for (let i = 0; i < wellknownHeaderNames.length; ++i) {
  const key = wellknownHeaderNames[i]
  const lowerCasedKey = key.toLowerCase()
  headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
    lowerCasedKey
}

// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)

module.exports = {
  wellknownHeaderNames,
  headerNameLowerCasedRecord
}


/***/ }),

/***/ 48045:
@@ -92629,6 +92755,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
const { Blob } = __nccwpck_require__(14300)
const nodeUtil = __nccwpck_require__(73837)
const { stringify } = __nccwpck_require__(63477)
const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)

const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@@ -92838,6 +92965,15 @@ function parseKeepAliveTimeout (val) {
  return m ? parseInt(m[1], 10) * 1000 : null
}

/**
 * Retrieves a header name and returns its lowercase value.
 * @param {string | Buffer} value Header name
 * @returns {string}
 */
function headerNameToString (value) {
  return headerNameLowerCasedRecord[value] || value.toLowerCase()
}

function parseHeaders (headers, obj = {}) {
  // For H2 support
  if (!Array.isArray(headers)) return headers
@@ -93109,6 +93245,7 @@ module.exports = {
  isIterable,
  isAsyncIterable,
  isDestroyed,
  headerNameToString,
  parseRawHeaders,
  parseHeaders,
  parseKeepAliveTimeout,
@@ -99756,14 +99893,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
const assert = __nccwpck_require__(39491)
const { isUint8Array } = __nccwpck_require__(29830)

let supportedHashes = []

// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto

try {
  crypto = __nccwpck_require__(6113)
  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch {

}

function responseURL (response) {
@@ -100291,66 +100432,56 @@ function bytesMatch (bytes, metadataList) {
    return true
  }

  // 3. If parsedMetadata is the empty set, return true.
  // 3. If response is not eligible for integrity validation, return false.
  // TODO

  // 4. If parsedMetadata is the empty set, return true.
  if (parsedMetadata.length === 0) {
    return true
  }

  // 4. Let metadata be the result of getting the strongest
  // 5. Let metadata be the result of getting the strongest
  // metadata from parsedMetadata.
  const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
  // get the strongest algorithm
  const strongest = list[0].algo
  // get all entries that use the strongest algorithm; ignore weaker
  const metadata = list.filter((item) => item.algo === strongest)
  const strongest = getStrongestMetadata(parsedMetadata)
  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)

  // 5. For each item in metadata:
  // 6. For each item in metadata:
  for (const item of metadata) {
    // 1. Let algorithm be the alg component of item.
    const algorithm = item.algo

    // 2. Let expectedValue be the val component of item.
    let expectedValue = item.hash
    const expectedValue = item.hash

    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
    // "be liberal with padding". This is annoying, and it's not even in the spec.

    if (expectedValue.endsWith('==')) {
      expectedValue = expectedValue.slice(0, -2)
    }

    // 3. Let actualValue be the result of applying algorithm to bytes.
    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')

    if (actualValue.endsWith('==')) {
      actualValue = actualValue.slice(0, -2)
    if (actualValue[actualValue.length - 1] === '=') {
      if (actualValue[actualValue.length - 2] === '=') {
        actualValue = actualValue.slice(0, -2)
      } else {
        actualValue = actualValue.slice(0, -1)
      }
    }

    // 4. If actualValue is a case-sensitive match for expectedValue,
    // return true.
    if (actualValue === expectedValue) {
      return true
    }

    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')

    if (actualBase64URL.endsWith('==')) {
      actualBase64URL = actualBase64URL.slice(0, -2)
    }

    if (actualBase64URL === expectedValue) {
    if (compareBase64Mixed(actualValue, expectedValue)) {
      return true
    }
  }

  // 6. Return false.
  // 7. Return false.
  return false
}

// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i

/**
 * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -100364,8 +100495,6 @@ function parseMetadata (metadata) {
  // 2. Let empty be equal to true.
  let empty = true

  const supportedHashes = crypto.getHashes()

  // 3. For each token returned by splitting metadata on spaces:
  for (const token of metadata.split(' ')) {
    // 1. Set empty to false.
@@ -100375,7 +100504,11 @@ function parseMetadata (metadata) {
    const parsedToken = parseHashWithOptions.exec(token)

    // 3. If token does not parse, continue to the next token.
    if (parsedToken === null || parsedToken.groups === undefined) {
    if (
      parsedToken === null ||
      parsedToken.groups === undefined ||
      parsedToken.groups.algo === undefined
    ) {
      // Note: Chromium blocks the request at this point, but Firefox
      // gives a warning that an invalid integrity was given. The
      // correct behavior is to ignore these, and subsequently not
@@ -100384,11 +100517,11 @@ function parseMetadata (metadata) {
    }

    // 4. Let algorithm be the hash-algo component of token.
    const algorithm = parsedToken.groups.algo
    const algorithm = parsedToken.groups.algo.toLowerCase()

    // 5. If algorithm is a hash function recognized by the user
    // agent, add the parsed token to result.
    if (supportedHashes.includes(algorithm.toLowerCase())) {
    if (supportedHashes.includes(algorithm)) {
      result.push(parsedToken.groups)
    }
  }
@@ -100401,6 +100534,82 @@ function parseMetadata (metadata) {
  return result
}

/**
 * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
 */
function getStrongestMetadata (metadataList) {
  // Let algorithm be the algo component of the first item in metadataList.
  // Can be sha256
  let algorithm = metadataList[0].algo
  // If the algorithm is sha512, then it is the strongest
  // and we can return immediately
  if (algorithm[3] === '5') {
    return algorithm
  }

  for (let i = 1; i < metadataList.length; ++i) {
    const metadata = metadataList[i]
    // If the algorithm is sha512, then it is the strongest
    // and we can break the loop immediately
    if (metadata.algo[3] === '5') {
      algorithm = 'sha512'
      break
    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
    } else if (algorithm[3] === '3') {
      continue
    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
    // the strongest
    } else if (metadata.algo[3] === '3') {
      algorithm = 'sha384'
    }
  }
  return algorithm
}

function filterMetadataListByAlgorithm (metadataList, algorithm) {
  if (metadataList.length === 1) {
    return metadataList
  }

  let pos = 0
  for (let i = 0; i < metadataList.length; ++i) {
    if (metadataList[i].algo === algorithm) {
      metadataList[pos++] = metadataList[i]
    }
  }

  metadataList.length = pos

  return metadataList
}

/**
 * Compares two base64 strings, allowing for base64url
 * in the second string.
 *
 * @param {string} actualValue always base64
 * @param {string} expectedValue base64 or base64url
 * @returns {boolean}
 */
function compareBase64Mixed (actualValue, expectedValue) {
  if (actualValue.length !== expectedValue.length) {
    return false
  }
  for (let i = 0; i < actualValue.length; ++i) {
    if (actualValue[i] !== expectedValue[i]) {
      if (
        (actualValue[i] === '+' && expectedValue[i] === '-') ||
        (actualValue[i] === '/' && expectedValue[i] === '_')
      ) {
        continue
      }
      return false
    }
  }

  return true
}

// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
  // TODO
@@ -100816,7 +101025,8 @@ module.exports = {
  urlHasHttpsScheme,
  urlIsHttpHttpsScheme,
  readAllBytes,
  normalizeMethodRecord
  normalizeMethodRecord,
  parseMetadata
}
@@ -102903,12 +103113,17 @@ function parseLocation (statusCode, headers) {

// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  return (
    (header.length === 4 && header.toString().toLowerCase() === 'host') ||
    (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
    (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
    (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
  )
  if (header.length === 4) {
    return util.headerNameToString(header) === 'host'
  }
  if (removeContent && util.headerNameToString(header).startsWith('content-')) {
    return true
  }
  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
    const name = util.headerNameToString(header)
    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
  }
  return false
}

// https://tools.ietf.org/html/rfc7231#section-6.4
@@ -545,7 +545,7 @@ steps:
Supported files are .java-version and .tool-versions.
In .java-version file, only the version should be specified (e.g., 17.0.7).
In .tool-versions file, java version should be preceded by the java keyword (e.g., java 17.0.7).
The `.java-version` file recognizes all variants of the version description according to [jenv](https://github.com/jenv/jenv). Similarly, the `.tool-versions` file supports version specifications in accordance with [asdf](https://github.com/asdf-vm/asdf) standards, adhering to Semantic Versioning (semver).
The `.java-version` file recognizes all variants of the version description according to [jenv](https://github.com/jenv/jenv). Similarly, the `.tool-versions` file supports version specifications in accordance with [asdf](https://github.com/asdf-vm/asdf) standards, adhering to Semantic Versioning ([semver](https://semver.org/)).

If both java-version and java-version-file inputs are provided, the java-version input will be used.
20  package-lock.json (generated)
@@ -2249,12 +2249,12 @@
      }
    },
    "node_modules/braces": {
      "version": "3.0.2",
      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
      "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
      "version": "3.0.3",
      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
      "dev": true,
      "dependencies": {
        "fill-range": "^7.0.1"
        "fill-range": "^7.1.1"
      },
      "engines": {
        "node": ">=8"
@@ -3102,9 +3102,9 @@
      }
    },
    "node_modules/fill-range": {
      "version": "7.0.1",
      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
      "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
      "version": "7.1.1",
      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
      "dev": true,
      "dependencies": {
        "to-regex-range": "^5.0.1"
@@ -5338,9 +5338,9 @@
      }
    },
    "node_modules/undici": {
      "version": "5.28.3",
      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz",
      "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==",
      "version": "5.28.4",
      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
      "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
      "dependencies": {
        "@fastify/busboy": "^2.0.0"
      },