diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 829b406..0d93a1a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -69,106 +69,3 @@ jobs: - name: Repository Integrity Check run: | git diff --quiet dist - test: - if: > - !failure() && - !cancelled() - needs: - - integrity - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.ref }} - - - name: Request Postman Echo GET - uses: ./ - with: - url: 'https://postman-echo.com/get' - method: 'GET' - - - name: Request Postman Echo POST - uses: ./ - with: - url: 'https://postman-echo.com/post' - method: 'POST' - data: '{ "key": "value" }' - - - name: Request Postman Echo POST with Unescaped Newline - uses: ./ - with: - url: 'https://postman-echo.com/post' - method: 'POST' - escapeData: 'true' - data: >- - { - "key":"multi line\ntest - text" - } - - - name: Request Postman Echo BasicAuth - uses: ./ - with: - url: 'https://postman-echo.com/basic-auth' - method: 'GET' - username: 'postman' - password: 'password' - - - name: Request Postman Echo with 404 Response and ignore failure code - uses: ./ - with: - url: 'https://postman-echo.com/status/404' - method: 'GET' - ignoreStatusCodes: '404' - - - name: Create Test File - run: | - echo "test" > testfile.txt - - name: Request Postman Echo POST Multipart - uses: ./ - with: - url: 'https://postman-echo.com/post' - method: 'POST' - data: '{ "key": "value" }' - files: '{ "file": "${{ github.workspace }}/testfile.txt" }' - - - name: Request Postman Echo POST and persist response - uses: ./ - with: - url: 'https://postman-echo.com/post' - method: 'POST' - file: "${{ github.workspace }}/testfile.txt" - responseFile: "${{ github.workspace }}/response.json" - - name: Output responseFile - run: | - cat "${{ github.workspace }}/response.json" - - - name: Request Postman Echo POST Multipart without data - uses: ./ - with: - url: 'https://postman-echo.com/post' - method: 'POST' - files: '{ "file": "${{ github.workspace }}/testfile.txt" }' - - - name: Request Postman Echo POST single file - uses: ./ - with: - url: 'https://postman-echo.com/post' - method: 'POST' - file: "${{ github.workspace }}/testfile.txt" - - - name: Request Postman Echo POST URLEncoded string data - uses: ./ - with: - url: 'https://postman-echo.com/post' - contentType : 'application/x-www-form-urlencoded' - method: 'POST' - data: 'key=value' - - - name: Request Postman Echo POST URLEncoded json data - uses: ./ - with: - url: 'https://postman-echo.com/post' - contentType : 'application/x-www-form-urlencoded' - method: 'POST' - data: '{"key":"value"}' diff --git a/.github/workflows/it-tests.yml b/.github/workflows/it-tests.yml new file mode 100644 index 0000000..9055ac2 --- /dev/null +++ b/.github/workflows/it-tests.yml @@ -0,0 +1,337 @@ +on: + pull_request: + push: + branches: [main] + +jobs: + + test-method-get-on-existing-url: + name: "IT Test - Request Postman Echo GET" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with GET and a valid URL + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/get' + method: 'GET' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-method-post-on-existing-url: + name: "IT Test - Request Postman Echo POST" + runs-on: ubuntu-latest + steps: + - 
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with POST using valid data + id: execution + uses: ./ + with: + url: "https://postman-echo.com/post" + method: 'POST' + data: '{ "key": "value" }' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-method-post-on-existing-url-with-unescaped-newline: + name: "IT Test - Request Postman Echo POST with Unescaped Newline" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with POST using an unescaped newline within data + id: execution + uses: ./ + with: + url: "https://postman-echo.com/post" + method: 'POST' + escapeData: 'true' + data: >- + { + "key":"multi line\ntest + text" + } + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-basic-auth: + name: "IT Test - Request Postman Echo BasicAuth" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with valid BasicAuth parameters + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/basic-auth' + method: 'GET' + username: 'postman' + password: 'password' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 as this URL exists + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-input-ignore-failure-code-404: + name: "IT Test - Request Postman Echo with 404 Response and ignore failure code" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with a URL that returns 404 and ignoreStatusCodes 404 + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/status/404' + method: 'GET' + ignoreStatusCodes: '404' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 404 + with: + expected: '404' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then the outcome value must be success as the error 404 is ignored + with: + expected: 'success' + actual: ${{ steps.execution.outcome }} + comparison: exact + + test-input-ignore-failure-multiple-codes-401-404: + name: "IT Test - Request Postman Echo with 404 Response and ignore multiple failure codes" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with a URL that returns 404 and ignoreStatusCodes 401,404 + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/status/404' + method: 'GET' + ignoreStatusCodes: '401,404' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 404 + with: + expected: '404' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then the outcome value must
be success as the error 404 is ignored + with: + expected: 'success' + actual: ${{ steps.execution.outcome }} + comparison: exact + + test-input-files: + name: "IT Test - Request Postman Echo POST Multipart" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Create Test File + run: | + echo "test" > testfile.txt + - name: Given the gh-action is used with file and data + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/post' + method: 'POST' + data: '{ "key": "value" }' + files: '{ "file": "${{ github.workspace }}/testfile.txt" }' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-input-responseFile: + name: "IT Test - Request Postman Echo POST and persist response" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Create Test File + run: | + echo "test" > testfile2.txt + - name: Given the gh-action is used with file and responseFile + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/post' + method: 'POST' + file: "${{ github.workspace }}/testfile2.txt" + responseFile: "${{ github.workspace }}/response.json" + + - name: Output responseFile + id: execution-response-file + run: | + echo "response_content=$(cat ${{ github.workspace }}/response.json)" >> $GITHUB_OUTPUT + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.response_content value must include + with: + expected: '{"args":{},"data":"test\n","files":{},"form":{},"headers":{"host":"postman-echo.com",' + actual: ${{ steps.execution-response-file.outputs.response_content }} + comparison: contains + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.response_content value must include + with: + expected: '"accept":"application/json, text/plain, */*","content-type":"application/json","user-agent"' + actual: ${{ steps.execution-response-file.outputs.response_content }} + comparison: contains + + test-input-files-without-data: + name: "IT Test - Request Postman Echo POST Multipart without data" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Create Test File + run: | + echo "test" > testfile3.txt + - name: Given the gh-action is used with file + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/post' + method: 'POST' + files: '{ "file": "${{ github.workspace }}/testfile3.txt" }' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-input-file-with-single-file: + name: "IT Test - Request Postman Echo POST single file" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Create Test File + run: | + echo "test" > testfile4.txt + - name: Given the gh-action is used with file + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/post' + method: 'POST' + file: "${{ github.workspace }}/testfile4.txt" + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' 
+ actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-input-data-with-url-encoded-string: + name: "IT Test - Request Postman Echo POST URLEncoded string data" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with form URL encoded string data + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/post' + contentType: 'application/x-www-form-urlencoded' + method: 'POST' + data: 'key=value' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-input-data-with-url-encoded-json-data: + name: "IT Test - Request Postman Echo POST URLEncoded json data" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with JSON data and a form URL encoded content type + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/post' + contentType: 'application/x-www-form-urlencoded' + method: 'POST' + data: '{"key":"value"}' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + test-delete-http-method: + name: "IT Test - Request Postman Echo DELETE" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Given the gh-action is used with method DELETE + id: execution + uses: ./ + with: + url: 'https://postman-echo.com/delete' + contentType: 'application/json' + method: 'DELETE' + data: '{"key":"value"}' + + - uses: nick-fields/assert-action@aa0067e01f0f6545c31755d6ca128c5a3a14f6bf # v2 + name: Then outputs.status value must be 200 + with: + expected: '200' + actual: ${{ steps.execution.outputs.status }} + comparison: exact + + it-tests: + name: "All IT Tests have to pass" + runs-on: ubuntu-latest + if: always() + needs: + # Add your tests here so that they prevent the merge of broken changes + - test-method-get-on-existing-url + - test-method-post-on-existing-url + - test-method-post-on-existing-url-with-unescaped-newline + - test-basic-auth + - test-input-ignore-failure-code-404 + - test-input-ignore-failure-multiple-codes-401-404 + - test-input-files + - test-input-responseFile + - test-input-files-without-data + - test-input-file-with-single-file + - test-input-data-with-url-encoded-string + - test-input-data-with-url-encoded-json-data + - test-delete-http-method + steps: + - uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 + with: + jobs: ${{ toJSON(needs) }} diff --git a/README.md b/README.md index 7e1e8fc..43f326b 100644 --- a/README.md +++ b/README.md @@ -45,9 +45,10 @@ jobs: |httpsCert| Client Certificate as string || |httpsKey| Client Certificate Key as string || |responseFile| Persist the response data to the specified file path || -|markResponse| If set to true, the response will be masked in the logs of the action |'false'| +|maskResponse| If set to true, the response will be masked in the logs of the action |'false'| |retry| optional amount of retries if the request is failing, does not retry if the status code is ignored || |retryWait| time between each retry in millseconds | 3000 | +|ignoreSsl| Ignore SSL verification (sets rejectUnauthorized: false) |'false'| ### Response
@@ -55,6 +56,7 @@ jobs: |---|---| `response` | Response as JSON String `headers` | Headers +`status` | HTTP status message To display HTTP response data in the GitHub Actions log give the request an `id` and access its `outputs`. You can also access specific field from the response data using [fromJson()](https://docs.github.com/en/actions/learn-github-actions/expressions#fromjson) expression. @@ -69,6 +71,7 @@ steps: run: | echo ${{ steps.myRequest.outputs.response }} echo ${{ steps.myRequest.outputs.headers }} + echo ${{ steps.myRequest.outputs.status }} echo ${{ fromJson(steps.myRequest.outputs.response).field_you_want_to_access }} ``` @@ -100,4 +103,4 @@ Optionen: --bearerToken bearer token without Bearer prefix, added as Authorization header [string] --timeout request timeout [number] [default: 5000] -``` \ No newline at end of file +``` diff --git a/action.yml b/action.yml index 9bb1b59..409db61 100644 --- a/action.yml +++ b/action.yml @@ -29,7 +29,7 @@ inputs: description: 'Auth Password' required: false timeout: - description: 'Request Timeout in Sec' + description: 'Request Timeout in milliseconds' required: false default: '5000' bearerToken: @@ -69,11 +69,16 @@ inputs: retryWait: description: 'wait time between retries in milliseconds' required: false + ignoreSsl: + description: 'ignore ssl verify (rejectUnauthorized: false)' + default: 'false' outputs: response: description: 'HTTP Response Content' headers: description: 'HTTP Response Headers' + status: + description: 'HTTP status message' runs: - using: 'node16' + using: 'node20' main: 'dist/index.js' diff --git a/dist/index.js b/dist/index.js index 7a54fba..5f02e3a 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1590,7 +1590,7 @@ class HttpClient { if (this._keepAlive && useProxy) { agent = this._proxyAgent; } - if (this._keepAlive && !useProxy) { + if (!useProxy) { agent = this._agent; } // if agent is already assigned use that agent. @@ -1622,16 +1622,12 @@ class HttpClient { agent = tunnelAgent(agentOptions); this._proxyAgent = agent; } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { + // if tunneling agent isn't assigned create a new agent + if (!agent) { const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; - } if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options @@ -1653,7 +1649,7 @@ class HttpClient { } const usingSsl = parsedUrl.protocol === 'https:'; proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { - token: `${proxyUrl.username}:${proxyUrl.password}` + token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}` }))); this._proxyAgentDispatcher = proxyAgent; if (usingSsl && this._ignoreSslError) { @@ -1767,11 +1763,11 @@ function getProxyUrl(reqUrl) { })(); if (proxyVar) { try { - return new URL(proxyVar); + return new DecodedURL(proxyVar); } catch (_a) { if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) - return new URL(`http://${proxyVar}`); + return new DecodedURL(`http://${proxyVar}`); } } else { @@ -1830,6 +1826,19 @@ function isLoopbackAddress(host) { hostLower.startsWith('[::1]') || hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } +class DecodedURL extends URL { + constructor(url, base) { + super(url, base); + this._decodedUsername = decodeURIComponent(super.username); + this._decodedPassword = decodeURIComponent(super.password); + } + get username() { + return this._decodedUsername; + } + get password() { + return this._decodedPassword; + } +} //# sourceMappingURL=proxy.js.map /***/ }), @@ -2637,6 +2646,41 @@ var Writable = (__nccwpck_require__(2781).Writable); var assert = __nccwpck_require__(9491); var debug = __nccwpck_require__(1133); +// Preventive platform detection +// istanbul ignore next +(function detectUnsupportedEnvironment() { + var looksLikeNode = typeof process !== "undefined"; + var looksLikeBrowser = typeof window !== "undefined" && typeof document !== "undefined"; + var looksLikeV8 = isFunction(Error.captureStackTrace); + if (!looksLikeNode && (looksLikeBrowser || !looksLikeV8)) { + console.warn("The follow-redirects package should be excluded from browser builds."); + } +}()); + +// Whether to use the native URL object or the legacy url module +var useNativeURL = false; +try { + assert(new URL("")); +} +catch (error) { + useNativeURL = error.code === "ERR_INVALID_URL"; +} + +// URL fields to preserve in copy operations +var preservedUrlFields = [ + "auth", + "host", + "hostname", + "href", + "path", + "pathname", + "port", + "protocol", + "query", + "search", + "hash", +]; + // Create handlers that pass events from native requests var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; var eventHandlers = Object.create(null); @@ -2646,19 +2690,20 @@ events.forEach(function (event) { }; }); +// Error types with codes var InvalidUrlError = createErrorType( "ERR_INVALID_URL", "Invalid URL", TypeError ); -// Error types with codes var RedirectionError = createErrorType( "ERR_FR_REDIRECTION_FAILURE", "Redirected request failed" ); var TooManyRedirectsError = createErrorType( "ERR_FR_TOO_MANY_REDIRECTS", - "Maximum number of redirects exceeded" + "Maximum number of redirects exceeded", + RedirectionError ); var MaxBodyLengthExceededError = createErrorType( "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", @@ -2693,7 +2738,13 @@ function RedirectableRequest(options, responseCallback) { // React to responses of native requests var self = this; this._onNativeResponse = function (response) { - self._processResponse(response); + try { + self._processResponse(response); + } + catch (cause) { + self.emit("error", cause instanceof RedirectionError ? 
+ cause : new RedirectionError({ cause: cause })); + } }; // Perform the first request @@ -2911,8 +2962,7 @@ RedirectableRequest.prototype._performRequest = function () { var protocol = this._options.protocol; var nativeProtocol = this._options.nativeProtocols[protocol]; if (!nativeProtocol) { - this.emit("error", new TypeError("Unsupported protocol " + protocol)); - return; + throw new TypeError("Unsupported protocol " + protocol); } // If specified, use the agent corresponding to the protocol @@ -2947,17 +2997,17 @@ RedirectableRequest.prototype._performRequest = function () { var buffers = this._requestBodyBuffers; (function writeNext(error) { // Only write if this request has not been redirected yet - /* istanbul ignore else */ + // istanbul ignore else if (request === self._currentRequest) { // Report any write errors - /* istanbul ignore if */ + // istanbul ignore if if (error) { self.emit("error", error); } // Write the next buffer if there are still left else if (i < buffers.length) { var buffer = buffers[i++]; - /* istanbul ignore else */ + // istanbul ignore else if (!request.finished) { request.write(buffer.data, buffer.encoding, writeNext); } @@ -3011,8 +3061,7 @@ RedirectableRequest.prototype._processResponse = function (response) { // RFC7231§6.4: A client SHOULD detect and intervene // in cyclical redirections (i.e., "infinite" redirection loops). if (++this._redirectCount > this._options.maxRedirects) { - this.emit("error", new TooManyRedirectsError()); - return; + throw new TooManyRedirectsError(); } // Store the request headers if applicable @@ -3046,34 +3095,24 @@ RedirectableRequest.prototype._processResponse = function (response) { var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); // If the redirect is relative, carry over the host of the last request - var currentUrlParts = url.parse(this._currentUrl); + var currentUrlParts = parseUrl(this._currentUrl); var currentHost = currentHostHeader || currentUrlParts.host; var currentUrl = /^\w+:/.test(location) ? 
this._currentUrl : url.format(Object.assign(currentUrlParts, { host: currentHost })); - // Determine the URL of the redirection - var redirectUrl; - try { - redirectUrl = url.resolve(currentUrl, location); - } - catch (cause) { - this.emit("error", new RedirectionError({ cause: cause })); - return; - } - // Create the redirected request - debug("redirecting to", redirectUrl); + var redirectUrl = resolveUrl(location, currentUrl); + debug("redirecting to", redirectUrl.href); this._isRedirect = true; - var redirectUrlParts = url.parse(redirectUrl); - Object.assign(this._options, redirectUrlParts); + spreadUrlObject(redirectUrl, this._options); // Drop confidential headers when redirecting to a less secure protocol // or to a different domain that is not a superdomain - if (redirectUrlParts.protocol !== currentUrlParts.protocol && - redirectUrlParts.protocol !== "https:" || - redirectUrlParts.host !== currentHost && - !isSubdomain(redirectUrlParts.host, currentHost)) { - removeMatchingHeaders(/^(?:authorization|cookie)$/i, this._options.headers); + if (redirectUrl.protocol !== currentUrlParts.protocol && + redirectUrl.protocol !== "https:" || + redirectUrl.host !== currentHost && + !isSubdomain(redirectUrl.host, currentHost)) { + removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); } // Evaluate the beforeRedirect callback @@ -3087,23 +3126,12 @@ RedirectableRequest.prototype._processResponse = function (response) { method: method, headers: requestHeaders, }; - try { - beforeRedirect(this._options, responseDetails, requestDetails); - } - catch (err) { - this.emit("error", err); - return; - } + beforeRedirect(this._options, responseDetails, requestDetails); this._sanitizeOptions(this._options); } // Perform the redirected request - try { - this._performRequest(); - } - catch (cause) { - this.emit("error", new RedirectionError({ cause: cause })); - } + this._performRequest(); }; // Wraps the key/value object of protocols with redirect functionality @@ -3123,27 +3151,16 @@ function wrap(protocols) { // Executes a request, following redirects function request(input, options, callback) { - // Parse parameters - if (isString(input)) { - var parsed; - try { - parsed = urlToOptions(new URL(input)); - } - catch (err) { - /* istanbul ignore next */ - parsed = url.parse(input); - } - if (!isString(parsed.protocol)) { - throw new InvalidUrlError({ input }); - } - input = parsed; + // Parse parameters, ensuring that input is an object + if (isURL(input)) { + input = spreadUrlObject(input); } - else if (URL && (input instanceof URL)) { - input = urlToOptions(input); + else if (isString(input)) { + input = spreadUrlObject(parseUrl(input)); } else { callback = options; - options = input; + options = validateUrl(input); input = { protocol: protocol }; } if (isFunction(options)) { @@ -3182,27 +3199,57 @@ function wrap(protocols) { return exports; } -/* istanbul ignore next */ function noop() { /* empty */ } -// from https://github.com/nodejs/node/blob/master/lib/internal/url.js -function urlToOptions(urlObject) { - var options = { - protocol: urlObject.protocol, - hostname: urlObject.hostname.startsWith("[") ? 
- /* istanbul ignore next */ - urlObject.hostname.slice(1, -1) : - urlObject.hostname, - hash: urlObject.hash, - search: urlObject.search, - pathname: urlObject.pathname, - path: urlObject.pathname + urlObject.search, - href: urlObject.href, - }; - if (urlObject.port !== "") { - options.port = Number(urlObject.port); +function parseUrl(input) { + var parsed; + // istanbul ignore else + if (useNativeURL) { + parsed = new URL(input); } - return options; + else { + // Ensure the URL is valid and absolute + parsed = validateUrl(url.parse(input)); + if (!isString(parsed.protocol)) { + throw new InvalidUrlError({ input }); + } + } + return parsed; +} + +function resolveUrl(relative, base) { + // istanbul ignore next + return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); +} + +function validateUrl(input) { + if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { + throw new InvalidUrlError({ input: input.href || input }); + } + if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { + throw new InvalidUrlError({ input: input.href || input }); + } + return input; +} + +function spreadUrlObject(urlObject, target) { + var spread = target || {}; + for (var key of preservedUrlFields) { + spread[key] = urlObject[key]; + } + + // Fix IPv6 hostname + if (spread.hostname.startsWith("[")) { + spread.hostname = spread.hostname.slice(1, -1); + } + // Ensure port is a number + if (spread.port !== "") { + spread.port = Number(spread.port); + } + // Concatenate path + spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; + + return spread; } function removeMatchingHeaders(regex, headers) { @@ -3220,7 +3267,10 @@ function removeMatchingHeaders(regex, headers) { function createErrorType(code, message, baseClass) { // Create constructor function CustomError(properties) { - Error.captureStackTrace(this, this.constructor); + // istanbul ignore else + if (isFunction(Error.captureStackTrace)) { + Error.captureStackTrace(this, this.constructor); + } Object.assign(this, properties || {}); this.code = code; this.message = this.cause ? 
message + ": " + this.cause.message : message; @@ -3228,8 +3278,16 @@ function createErrorType(code, message, baseClass) { // Attach constructor and set default properties CustomError.prototype = new (baseClass || Error)(); - CustomError.prototype.constructor = CustomError; - CustomError.prototype.name = "Error [" + code + "]"; + Object.defineProperties(CustomError.prototype, { + constructor: { + value: CustomError, + enumerable: false, + }, + name: { + value: "Error [" + code + "]", + enumerable: false, + }, + }); return CustomError; } @@ -3259,6 +3317,10 @@ function isBuffer(value) { return typeof value === "object" && ("length" in value); } +function isURL(value) { + return URL && value instanceof URL; +} + // Exports module.exports = wrap({ http: http, https: https }); module.exports.wrap = wrap; @@ -4423,6 +4485,7 @@ const MockAgent = __nccwpck_require__(6771) const MockPool = __nccwpck_require__(6193) const mockErrors = __nccwpck_require__(888) const ProxyAgent = __nccwpck_require__(7858) +const RetryHandler = __nccwpck_require__(2286) const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892) const DecoratorHandler = __nccwpck_require__(6930) const RedirectHandler = __nccwpck_require__(2860) @@ -4444,6 +4507,7 @@ module.exports.Pool = Pool module.exports.BalancedPool = BalancedPool module.exports.Agent = Agent module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler module.exports.DecoratorHandler = DecoratorHandler module.exports.RedirectHandler = RedirectHandler @@ -5344,6 +5408,7 @@ function request (opts, callback) { } module.exports = request +module.exports.RequestHandler = RequestHandler /***/ }), @@ -5726,6 +5791,8 @@ const kBody = Symbol('kBody') const kAbort = Symbol('abort') const kContentType = Symbol('kContentType') +const noop = () => {} + module.exports = class BodyReadable extends Readable { constructor ({ resume, @@ -5859,37 +5926,50 @@ module.exports = class BodyReadable extends Readable { return this[kBody] } - async dump (opts) { + dump (opts) { let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 const signal = opts && opts.signal - const abortFn = () => { - this.destroy() - } - let signalListenerCleanup + if (signal) { - if (typeof signal !== 'object' || !('aborted' in signal)) { - throw new InvalidArgumentError('signal must be an AbortSignal') - } - util.throwIfAborted(signal) - signalListenerCleanup = util.addAbortListener(signal, abortFn) - } - try { - for await (const chunk of this) { - util.throwIfAborted(signal) - limit -= Buffer.byteLength(chunk) - if (limit < 0) { - return + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') } - } - } catch { - util.throwIfAborted(signal) - } finally { - if (typeof signalListenerCleanup === 'function') { - signalListenerCleanup() - } else if (signalListenerCleanup) { - signalListenerCleanup[Symbol.dispose]() + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) } } + + if (this.closed) { + return Promise.resolve(null) + } + + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? 
util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) } } @@ -7269,13 +7349,13 @@ module.exports = { /***/ }), /***/ 9174: -/***/ ((module) => { +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = { - kConstruct: Symbol('constructable') + kConstruct: (__nccwpck_require__(2785).kConstruct) } @@ -8261,11 +8341,9 @@ class Parser { socket[kReset] = true } - let pause - try { - pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false - } catch (err) { - util.destroy(socket, err) + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { return -1 } @@ -8312,13 +8390,8 @@ class Parser { this.bytesRead += buf.length - try { - if (request.onData(buf) === false) { - return constants.ERROR.PAUSED - } - } catch (err) { - util.destroy(socket, err) - return -1 + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED } } @@ -8359,11 +8432,7 @@ class Parser { return -1 } - try { - request.onComplete(headers) - } catch (err) { - errorRequest(client, request, err) - } + request.onComplete(headers) client[kQueue][client[kRunningIdx]++] = null @@ -8527,7 +8596,7 @@ async function connect (client) { const idx = hostname.indexOf(']') assert(idx !== -1) - const ip = hostname.substr(1, idx - 1) + const ip = hostname.substring(1, idx) assert(net.isIP(ip)) hostname = ip @@ -9026,6 +9095,7 @@ function writeH2 (client, session, request) { return false } + /** @type {import('node:http2').ClientHttp2Stream} */ let stream const h2State = client[kHTTP2SessionState] @@ -9121,14 +9191,10 @@ function writeH2 (client, session, request) { const shouldEndStream = method === 'GET' || method === 'HEAD' if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = '100-continue' - /** - * @type {import('node:http2').ClientHttp2Stream} - */ stream = session.request(headers, { endStream: shouldEndStream, signal }) stream.once('continue', writeBodyH2) } else { - /** @type {import('node:http2').ClientHttp2Stream} */ stream = session.request(headers, { endStream: shouldEndStream, signal @@ -9140,7 +9206,9 @@ function writeH2 (client, session, request) { ++h2State.openStreams stream.once('response', headers => { - if (request.onHeaders(Number(headers[HTTP2_HEADER_STATUS]), headers, stream.resume.bind(stream), '') === false) { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { stream.pause() } }) @@ -9150,13 +9218,17 @@ function writeH2 (client, session, request) { }) stream.on('data', (chunk) => { - if (request.onData(chunk) === false) stream.pause() + if (request.onData(chunk) === false) { + stream.pause() + } }) stream.once('close', () => { h2State.openStreams -= 1 // TODO(HTTP/2): unref only if current streams count is 0 - if (h2State.openStreams === 0) session.unref() + if (h2State.openStreams === 0) { + session.unref() + } }) stream.once('error', function (err) { @@ -9316,7 +9388,11 @@ function writeStream ({ h2stream, body, client, request, socket, contentLength, } } const onAbort = 
function () { - onFinished(new RequestAbortedError()) + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) } const onFinished = function (err) { if (finished) { @@ -10720,6 +10796,132 @@ function onConnectTimeout (socket) { module.exports = buildConnector +/***/ }), + +/***/ 4462: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. 
+Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + /***/ }), /***/ 8045: @@ -10921,6 +11123,19 @@ class ResponseExceededMaxSizeError extends UndiciError { } } +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} + module.exports = { HTTPParserError, UndiciError, @@ -10940,7 +11155,8 @@ module.exports = { NotSupportedError, ResponseContentLengthMismatchError, BalancedPoolMissingUpstreamError, - ResponseExceededMaxSizeError + ResponseExceededMaxSizeError, + RequestRetryError } @@ -11182,9 +11398,9 @@ class Request { onBodySent (chunk) { if (this[kHandler].onBodySent) { try { - this[kHandler].onBodySent(chunk) + return this[kHandler].onBodySent(chunk) } catch (err) { - this.onError(err) + this.abort(err) } } } @@ -11196,9 +11412,9 @@ class Request { if (this[kHandler].onRequestSent) { try { - this[kHandler].onRequestSent() + return this[kHandler].onRequestSent() } catch (err) { - this.onError(err) + this.abort(err) } } } @@ -11223,14 +11439,23 @@ class Request { channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) } - return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } } onData (chunk) { assert(!this.aborted) assert(!this.completed) - return this[kHandler].onData(chunk) + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } } onUpgrade (statusCode, headers, socket) { @@ -11249,7 +11474,13 @@ class Request { if (channels.trailers.hasSubscribers) { channels.trailers.publish({ request: this, trailers }) } - return this[kHandler].onComplete(trailers) + + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? 
+ this.onError(err) + } } onError (error) { @@ -11263,6 +11494,7 @@ class Request { return } this.aborted = true + return this[kHandler].onError(error) } @@ -11499,7 +11731,9 @@ module.exports = { kHTTP2BuildRequest: Symbol('http2 build request'), kHTTP1BuildRequest: Symbol('http1 build request'), kHTTP2CopyHeaders: Symbol('http2 copy headers'), - kHTTPConnVersion: Symbol('http connection version') + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') } @@ -11520,6 +11754,7 @@ const { InvalidArgumentError } = __nccwpck_require__(8045) const { Blob } = __nccwpck_require__(4300) const nodeUtil = __nccwpck_require__(3837) const { stringify } = __nccwpck_require__(3477) +const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) @@ -11636,13 +11871,13 @@ function getHostname (host) { const idx = host.indexOf(']') assert(idx !== -1) - return host.substr(1, idx - 1) + return host.substring(1, idx) } const idx = host.indexOf(':') if (idx === -1) return host - return host.substr(0, idx) + return host.substring(0, idx) } // IP addresses are not valid server names per RFC6066 @@ -11729,6 +11964,15 @@ function parseKeepAliveTimeout (val) { return m ? parseInt(m[1], 10) * 1000 : null } +/** + * Retrieves a header name and returns its lowercase value. + * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers @@ -11739,7 +11983,7 @@ function parseHeaders (headers, obj = {}) { if (!val) { if (Array.isArray(headers[i + 1])) { - obj[key] = headers[i + 1] + obj[key] = headers[i + 1].map(x => x.toString('utf8')) } else { obj[key] = headers[i + 1].toString('utf8') } @@ -11942,16 +12186,7 @@ function throwIfAborted (signal) { } } -let events function addAbortListener (signal, listener) { - if (typeof Symbol.dispose === 'symbol') { - if (!events) { - events = __nccwpck_require__(2361) - } - if (typeof events.addAbortListener === 'function' && 'aborted' in signal) { - return events.addAbortListener(signal, listener) - } - } if ('addEventListener' in signal) { signal.addEventListener('abort', listener, { once: true }) return () => signal.removeEventListener('abort', listener) @@ -11975,6 +12210,21 @@ function toUSVString (val) { return `${val}` } +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? 
parseInt(m[3]) : null + } + : null +} + const kEnumerableProperty = Object.create(null) kEnumerableProperty.enumerable = true @@ -11994,6 +12244,7 @@ module.exports = { isIterable, isAsyncIterable, isDestroyed, + headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, @@ -12008,9 +12259,11 @@ module.exports = { buildURL, throwIfAborted, addAbortListener, + parseRangeHeader, nodeMajor, nodeMinor, - nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13) + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] } @@ -13139,17 +13392,14 @@ function dataURLProcessor (dataURL) { * @param {boolean} excludeFragment */ function URLSerializer (url, excludeFragment = false) { - const href = url.href - if (!excludeFragment) { - return href + return url.href } - const hash = href.lastIndexOf('#') - if (hash === -1) { - return href - } - return href.slice(0, hash) + const href = url.href + const hashLength = url.hash.length + + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) } // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points @@ -14333,7 +14583,7 @@ module.exports = { -const { kHeadersList } = __nccwpck_require__(2785) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) const { kGuard } = __nccwpck_require__(5861) const { kEnumerableProperty } = __nccwpck_require__(3983) const { @@ -14347,6 +14597,13 @@ const assert = __nccwpck_require__(9491) const kHeadersMap = Symbol('headers map') const kHeadersSortedMap = Symbol('headers map sorted') +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} + /** * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize * @param {string} potentialValue @@ -14355,12 +14612,12 @@ function headerValueNormalize (potentialValue) { // To normalize a byte sequence potentialValue, remove // any leading and trailing HTTP whitespace bytes from // potentialValue. + let i = 0; let j = potentialValue.length - // Trimming the end with `.replace()` and a RegExp is typically subject to - // ReDoS. This is safer and faster. - let i = potentialValue.length - while (/[\r\n\t ]/.test(potentialValue.charAt(--i))); - return potentialValue.slice(0, i + 1).replace(/^[\r\n\t ]+/, '') + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i + + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) } function fill (headers, object) { @@ -14369,7 +14626,8 @@ function fill (headers, object) { // 1. If object is a sequence, then for each header in object: // Note: webidl conversion to array has already been done. if (Array.isArray(object)) { - for (const header of object) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] // 1. If header does not contain exactly two items, then throw a TypeError. if (header.length !== 2) { throw webidl.errors.exception({ @@ -14379,15 +14637,16 @@ function fill (headers, object) { } // 2. Append (header’s first item, header’s second item) to headers. - headers.append(header[0], header[1]) + appendHeader(headers, header[0], header[1]) } } else if (typeof object === 'object' && object !== null) { // Note: null should throw // 2. 
Otherwise, object is a record, then for each key → value in object, // append (key, value) to headers - for (const [key, value] of Object.entries(object)) { - headers.append(key, value) + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) } } else { throw webidl.errors.conversionFailed({ @@ -14398,6 +14657,50 @@ function fill (headers, object) { } } +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } + + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } + + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) + + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} + class HeadersList { /** @type {[string, string][]|null} */ cookies = null @@ -14406,7 +14709,7 @@ class HeadersList { if (init instanceof HeadersList) { this[kHeadersMap] = new Map(init[kHeadersMap]) this[kHeadersSortedMap] = init[kHeadersSortedMap] - this.cookies = init.cookies + this.cookies = init.cookies === null ? null : [...init.cookies] } else { this[kHeadersMap] = new Map(init) this[kHeadersSortedMap] = null @@ -14468,7 +14771,7 @@ class HeadersList { // the first such header to value and remove the // others. // 2. Otherwise, append header (name, value) to list. - return this[kHeadersMap].set(lowercaseName, { name, value }) + this[kHeadersMap].set(lowercaseName, { name, value }) } // https://fetch.spec.whatwg.org/#concept-header-list-delete @@ -14481,20 +14784,18 @@ class HeadersList { this.cookies = null } - return this[kHeadersMap].delete(name) + this[kHeadersMap].delete(name) } // https://fetch.spec.whatwg.org/#concept-header-list-get get (name) { - // 1. If list does not contain name, then return null. - if (!this.contains(name)) { - return null - } + const value = this[kHeadersMap].get(name.toLowerCase()) + // 1. If list does not contain name, then return null. // 2. Return the values of all headers in list whose name // is a byte-case-insensitive match for name, // separated from each other by 0x2C 0x20, in order. - return this[kHeadersMap].get(name.toLowerCase())?.value ?? null + return value === undefined ? 
null : value.value } * [Symbol.iterator] () { @@ -14520,6 +14821,9 @@ class HeadersList { // https://fetch.spec.whatwg.org/#headers-class class Headers { constructor (init = undefined) { + if (init === kConstruct) { + return + } this[kHeadersList] = new HeadersList() // The new Headers(init) constructor steps are: @@ -14543,43 +14847,7 @@ class Headers { name = webidl.converters.ByteString(name) value = webidl.converters.ByteString(value) - // 1. Normalize value. - value = headerValueNormalize(value) - - // 2. If name is not a header name or value is not a - // header value, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value: name, - type: 'header name' - }) - } else if (!isValidHeaderValue(value)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value, - type: 'header value' - }) - } - - // 3. If headers’s guard is "immutable", then throw a TypeError. - // 4. Otherwise, if headers’s guard is "request" and name is a - // forbidden header name, return. - // Note: undici does not implement forbidden header names - if (this[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (this[kGuard] === 'request-no-cors') { - // 5. Otherwise, if headers’s guard is "request-no-cors": - // TODO - } - - // 6. Otherwise, if headers’s guard is "response" and name is a - // forbidden response-header name, return. - - // 7. Append (name, value) to headers’s header list. - // 8. If headers’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from headers - return this[kHeadersList].append(name, value) + return appendHeader(this, name, value) } // https://fetch.spec.whatwg.org/#dom-headers-delete @@ -14624,7 +14892,7 @@ class Headers { // 7. Delete name from this’s header list. // 8. If this’s guard is "request-no-cors", then remove // privileged no-CORS request headers from this. - return this[kHeadersList].delete(name) + this[kHeadersList].delete(name) } // https://fetch.spec.whatwg.org/#dom-headers-get @@ -14717,7 +14985,7 @@ class Headers { // 7. Set (name, value) in this’s header list. // 8. If this’s guard is "request-no-cors", then remove // privileged no-CORS request headers from this - return this[kHeadersList].set(name, value) + this[kHeadersList].set(name, value) } // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie @@ -14753,7 +15021,8 @@ class Headers { const cookies = this[kHeadersList].cookies // 3. For each name of names: - for (const [name, value] of names) { + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] // 1. If name is `set-cookie`, then: if (name === 'set-cookie') { // 1. Let values be a list of all values of headers in list whose name @@ -14761,8 +15030,8 @@ class Headers { // 2. For each value of values: // 1. Append (name, value) to headers. - for (const value of cookies) { - headers.push([name, value]) + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) } } else { // 2. 
Otherwise: @@ -14786,6 +15055,12 @@ class Headers { keys () { webidl.brandCheck(this, Headers) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } + return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', @@ -14796,6 +15071,12 @@ class Headers { values () { webidl.brandCheck(this, Headers) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } + return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', @@ -14806,6 +15087,12 @@ class Headers { entries () { webidl.brandCheck(this, Headers) + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } + return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', @@ -15177,7 +15464,7 @@ function finalizeAndReportTiming (response, initiatorType = 'other') { } // 8. If response’s timing allow passed flag is not set, then: - if (!timingInfo.timingAllowPassed) { + if (!response.timingAllowPassed) { // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. timingInfo = createOpaqueTimingInfo({ startTime: timingInfo.startTime @@ -16094,6 +16381,9 @@ function httpRedirectFetch (fetchParams, response) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') @@ -16848,7 +17138,7 @@ async function httpNetworkFetch ( path: url.pathname + url.search, origin: url.origin, method: request.method, - body: fetchParams.controller.dispatcher.isMockActive ? request.body && request.body.source : body, + body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, headers: request.headersList.entries, maxRedirections: 0, upgrade: request.mode === 'websocket' ? 
'websocket' : undefined @@ -16893,7 +17183,7 @@ async function httpNetworkFetch ( location = val } - headers.append(key, val) + headers[kHeadersList].append(key, val) } } else { const keys = Object.keys(headersList) @@ -16907,7 +17197,7 @@ async function httpNetworkFetch ( location = val } - headers.append(key, val) + headers[kHeadersList].append(key, val) } } @@ -17011,7 +17301,7 @@ async function httpNetworkFetch ( const key = headersList[n + 0].toString('latin1') const val = headersList[n + 1].toString('latin1') - headers.append(key, val) + headers[kHeadersList].append(key, val) } resolve({ @@ -17054,7 +17344,8 @@ const { isValidHTTPToken, sameOrigin, normalizeMethod, - makePolicyContainer + makePolicyContainer, + normalizeMethodRecord } = __nccwpck_require__(2538) const { forbiddenMethodsSet, @@ -17071,13 +17362,12 @@ const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861) const { webidl } = __nccwpck_require__(1744) const { getGlobalOrigin } = __nccwpck_require__(1246) const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList } = __nccwpck_require__(2785) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) const assert = __nccwpck_require__(9491) const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361) let TransformStream = globalThis.TransformStream -const kInit = Symbol('init') const kAbortController = Symbol('abortController') const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { @@ -17088,7 +17378,7 @@ const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { class Request { // https://fetch.spec.whatwg.org/#dom-request constructor (input, init = {}) { - if (input === kInit) { + if (input === kConstruct) { return } @@ -17227,8 +17517,10 @@ class Request { urlList: [...request.urlList] }) + const initHasKey = Object.keys(init).length !== 0 + // 13. If init is not empty, then: - if (Object.keys(init).length > 0) { + if (initHasKey) { // 1. If request’s mode is "navigate", then set it to "same-origin". if (request.mode === 'navigate') { request.mode = 'same-origin' @@ -17343,7 +17635,7 @@ class Request { } // 23. If init["integrity"] exists, then set request’s integrity metadata to it. - if (init.integrity !== undefined && init.integrity != null) { + if (init.integrity != null) { request.integrity = String(init.integrity) } @@ -17359,16 +17651,16 @@ class Request { // 2. If method is not a method or method is a forbidden method, then // throw a TypeError. - if (!isValidHTTPToken(init.method)) { - throw TypeError(`'${init.method}' is not a valid HTTP method.`) + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) } if (forbiddenMethodsSet.has(method.toUpperCase())) { - throw TypeError(`'${init.method}' HTTP method is unsupported.`) + throw new TypeError(`'${method}' HTTP method is unsupported.`) } // 3. Normalize method. - method = normalizeMethod(init.method) + method = normalizeMethodRecord[method] ?? normalizeMethod(method) // 4. Set request’s method to method. request.method = method @@ -17439,7 +17731,7 @@ class Request { // 30. Set this’s headers to a new Headers object with this’s relevant // Realm, whose header list is request’s header list and guard is // "request". 
- this[kHeaders] = new Headers() + this[kHeaders] = new Headers(kConstruct) this[kHeaders][kHeadersList] = request.headersList this[kHeaders][kGuard] = 'request' this[kHeaders][kRealm] = this[kRealm] @@ -17459,25 +17751,25 @@ class Request { } // 32. If init is not empty, then: - if (Object.keys(init).length !== 0) { + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] // 1. Let headers be a copy of this’s headers and its associated header // list. - let headers = new Headers(this[kHeaders]) - // 2. If init["headers"] exists, then set headers to init["headers"]. - if (init.headers !== undefined) { - headers = init.headers - } + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) // 3. Empty this’s headers’s header list. - this[kHeaders][kHeadersList].clear() + headersList.clear() // 4. If headers is a Headers object, then for each header in its header // list, append header’s name/header’s value to this’s headers. - if (headers.constructor.name === 'Headers') { + if (headers instanceof HeadersList) { for (const [key, val] of headers) { - this[kHeaders].append(key, val) + headersList.append(key, val) } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies } else { // 5. Otherwise, fill this’s headers with headers. fillHeaders(this[kHeaders], headers) @@ -17766,10 +18058,10 @@ class Request { // 3. Let clonedRequestObject be the result of creating a Request object, // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. - const clonedRequestObject = new Request(kInit) + const clonedRequestObject = new Request(kConstruct) clonedRequestObject[kState] = clonedRequest clonedRequestObject[kRealm] = this[kRealm] - clonedRequestObject[kHeaders] = new Headers() + clonedRequestObject[kHeaders] = new Headers(kConstruct) clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] @@ -18019,7 +18311,7 @@ const { webidl } = __nccwpck_require__(1744) const { FormData } = __nccwpck_require__(2015) const { getGlobalOrigin } = __nccwpck_require__(1246) const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList } = __nccwpck_require__(2785) +const { kHeadersList, kConstruct } = __nccwpck_require__(2785) const assert = __nccwpck_require__(9491) const { types } = __nccwpck_require__(3837) @@ -18140,7 +18432,7 @@ class Response { // 2. Set this’s headers to a new Headers object with this’s relevant // Realm, whose header list is this’s response’s header list and guard // is "response". 
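// --- Editor's illustrative sketch (not part of the dist bundle or of this diff) ---
// The Request/Response hunks around here replace the removed kInit symbol with the
// shared kConstruct sentinel: passing it to a constructor skips the public
// fill/validation path so internal code can wire up state (header list, guard,
// realm) directly. The tiny class below only demonstrates that pattern; it is not
// undici's Headers implementation.
const kConstruct = Symbol('constructable')

class SketchHeaders {
  list = []
  constructor (init = undefined) {
    if (init === kConstruct) {
      return // internal fast path: the caller assigns the header list itself
    }
    // public path: fill from an array of [name, value] pairs
    if (Array.isArray(init)) {
      for (const [name, value] of init) this.list.push([name.toLowerCase(), value])
    }
  }
}

const internal = new SketchHeaders(kConstruct)              // no fill work performed
const fromInit = new SketchHeaders([['Accept', 'text/plain']])
console.log(internal.list.length, fromInit.list.length)     // 0 1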
- this[kHeaders] = new Headers() + this[kHeaders] = new Headers(kConstruct) this[kHeaders][kGuard] = 'response' this[kHeaders][kHeadersList] = this[kState].headersList this[kHeaders][kRealm] = this[kRealm] @@ -18510,11 +18802,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V) { return webidl.converters.Blob(V, { strict: false }) } - if ( - types.isAnyArrayBuffer(V) || - types.isTypedArray(V) || - types.isDataView(V) - ) { + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { return webidl.converters.BufferSource(V) } @@ -18604,14 +18892,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983 const assert = __nccwpck_require__(9491) const { isUint8Array } = __nccwpck_require__(9830) +let supportedHashes = [] + // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(6113) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ } catch { - } function responseURL (response) { @@ -18700,52 +18992,57 @@ function isValidReasonPhrase (statusText) { return true } -function isTokenChar (c) { - return !( - c >= 0x7f || - c <= 0x20 || - c === '(' || - c === ')' || - c === '<' || - c === '>' || - c === '@' || - c === ',' || - c === ';' || - c === ':' || - c === '\\' || - c === '"' || - c === '/' || - c === '[' || - c === ']' || - c === '?' || - c === '=' || - c === '{' || - c === '}' - ) +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } } -// See RFC 7230, Section 3.2.6. -// https://github.com/chromium/chromium/blob/d7da0240cae77824d1eda25745c4022757499131/third_party/blink/renderer/platform/network/http_parsers.cc#L321 +/** + * @param {string} characters + */ function isValidHTTPToken (characters) { - if (!characters || typeof characters !== 'string') { + if (characters.length === 0) { return false } for (let i = 0; i < characters.length; ++i) { - const c = characters.charCodeAt(i) - if (c > 0x7f || !isTokenChar(c)) { + if (!isTokenCharCode(characters.charCodeAt(i))) { return false } } return true } -// https://fetch.spec.whatwg.org/#header-name -// https://github.com/chromium/chromium/blob/b3d37e6f94f87d59e44662d6078f6a12de845d17/net/http/http_util.cc#L342 +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ function isValidHeaderName (potentialValue) { - if (potentialValue.length === 0) { - return false - } - return isValidHTTPToken(potentialValue) } @@ -19134,66 +19431,56 @@ function bytesMatch (bytes, metadataList) { return true } - // 3. If parsedMetadata is the empty set, return true. + // 3. If response is not eligible for integrity validation, return false. + // TODO + + // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } - // 4. Let metadata be the result of getting the strongest + // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. 
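// --- Editor's illustrative sketch (not part of the dist bundle or of this diff) ---
// The bytesMatch hunk below drops the localeCompare sort and instead picks the
// strongest of sha256/sha384/sha512 in a single pass, then keeps only the entries
// that use it. The two helpers mirror the functions added further down in this
// diff; the metadata values in the demo are made up.
function getStrongestMetadata (metadataList) {
  let algorithm = metadataList[0].algo
  if (algorithm[3] === '5') return algorithm // sha512 is already the strongest
  for (let i = 1; i < metadataList.length; ++i) {
    const metadata = metadataList[i]
    if (metadata.algo[3] === '5') { // found sha512, nothing can beat it
      algorithm = 'sha512'
      break
    } else if (algorithm[3] === '3') { // already at sha384, ignore weaker entries
      continue
    } else if (metadata.algo[3] === '3') { // upgrade sha256 -> sha384
      algorithm = 'sha384'
    }
  }
  return algorithm
}

function filterMetadataListByAlgorithm (metadataList, algorithm) {
  if (metadataList.length === 1) return metadataList
  let pos = 0
  for (let i = 0; i < metadataList.length; ++i) {
    if (metadataList[i].algo === algorithm) metadataList[pos++] = metadataList[i]
  }
  metadataList.length = pos // compact in place, as the bundled helper does
  return metadataList
}

const parsed = [
  { algo: 'sha256', hash: 'aaa' },
  { algo: 'sha384', hash: 'bbb' },
  { algo: 'sha384', hash: 'ccc' }
]
const strongest = getStrongestMetadata(parsed)
console.log(strongest)                                        // 'sha384'
console.log(filterMetadataListByAlgorithm(parsed, strongest)) // the two sha384 entries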
- const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo)) - // get the strongest algorithm - const strongest = list[0].algo - // get all entries that use the strongest algorithm; ignore weaker - const metadata = list.filter((item) => item.algo === strongest) + const strongest = getStrongestMetadata(parsedMetadata) + const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 5. For each item in metadata: + // 6. For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. - let expectedValue = item.hash + const expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. - if (expectedValue.endsWith('==')) { - expectedValue = expectedValue.slice(0, -2) - } - // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue.endsWith('==')) { - actualValue = actualValue.slice(0, -2) + if (actualValue[actualValue.length - 1] === '=') { + if (actualValue[actualValue.length - 2] === '=') { + actualValue = actualValue.slice(0, -2) + } else { + actualValue = actualValue.slice(0, -1) + } } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. - if (actualValue === expectedValue) { - return true - } - - let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url') - - if (actualBase64URL.endsWith('==')) { - actualBase64URL = actualBase64URL.slice(0, -2) - } - - if (actualBase64URL === expectedValue) { + if (compareBase64Mixed(actualValue, expectedValue)) { return true } } - // 6. Return false. + // 7. Return false. return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /((?sha256|sha384|sha512)-(?[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i +const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata @@ -19207,8 +19494,6 @@ function parseMetadata (metadata) { // 2. Let empty be equal to true. let empty = true - const supportedHashes = crypto.getHashes() - // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. @@ -19218,7 +19503,11 @@ function parseMetadata (metadata) { const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. - if (parsedToken === null || parsedToken.groups === undefined) { + if ( + parsedToken === null || + parsedToken.groups === undefined || + parsedToken.groups.algo === undefined + ) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not @@ -19227,11 +19516,11 @@ function parseMetadata (metadata) { } // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo + const algorithm = parsedToken.groups.algo.toLowerCase() // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. 
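// --- Editor's illustrative sketch (not part of the dist bundle or of this diff) ---
// A minimal, runnable illustration (assuming Node's built-in crypto and a Node
// version that supports the 'base64url' digest encoding) of the comparison strategy
// the reworked bytesMatch above relies on: the computed digest has its '=' padding
// trimmed and is then compared character by character, with '+'/'-' and '/'/'_'
// treated as equal so both base64 and base64url integrity values are accepted.
// compareBase64Mixed mirrors the helper added below; the payload is made up.
const crypto = require('node:crypto')

function compareBase64Mixed (actualValue, expectedValue) {
  if (actualValue.length !== expectedValue.length) return false
  for (let i = 0; i < actualValue.length; ++i) {
    if (actualValue[i] !== expectedValue[i]) {
      if ((actualValue[i] === '+' && expectedValue[i] === '-') ||
          (actualValue[i] === '/' && expectedValue[i] === '_')) {
        continue
      }
      return false
    }
  }
  return true
}

const bytes = Buffer.from('integrity demo payload')
// base64 digest with trailing '=' padding stripped (what bytesMatch computes) ...
const actual = crypto.createHash('sha256').update(bytes).digest('base64').replace(/=+$/, '')
// ... compared against a base64url-encoded metadata value (unpadded by definition).
const expected = crypto.createHash('sha256').update(bytes).digest('base64url')
console.log(compareBase64Mixed(actual, expected)) // true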
- if (supportedHashes.includes(algorithm.toLowerCase())) { + if (supportedHashes.includes(algorithm)) { result.push(parsedToken.groups) } } @@ -19244,6 +19533,82 @@ function parseMetadata (metadata) { return result } +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. + // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. + * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO @@ -19290,11 +19655,30 @@ function isCancelled (fetchParams) { fetchParams.controller.state === 'terminated' } -// https://fetch.spec.whatwg.org/#concept-method-normalize +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ function normalizeMethod (method) { - return /^(DELETE|GET|HEAD|OPTIONS|POST|PUT)$/i.test(method) - ? method.toUpperCase() - : method + return normalizeMethodRecord[method.toLowerCase()] ?? method } // https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string @@ -19639,7 +20023,9 @@ module.exports = { urlIsLocal, urlHasHttpsScheme, urlIsHttpHttpsScheme, - readAllBytes + readAllBytes, + normalizeMethodRecord, + parseMetadata } @@ -20078,12 +20464,10 @@ webidl.converters.ByteString = function (V) { // 2. 
If the value of any element of x is greater than // 255, then throw a TypeError. for (let index = 0; index < x.length; index++) { - const charCode = x.charCodeAt(index) - - if (charCode > 255) { + if (x.charCodeAt(index) > 255) { throw new TypeError( 'Cannot convert argument to a ByteString because the character at ' + - `index ${index} has a value of ${charCode} which is greater than 255.` + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` ) } } @@ -21728,12 +22112,17 @@ function parseLocation (statusCode, headers) { // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { - return ( - (header.length === 4 && header.toString().toLowerCase() === 'host') || - (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || - (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || - (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') - ) + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false } // https://tools.ietf.org/html/rfc7231#section-6.4 @@ -21760,6 +22149,349 @@ function cleanRequestHeaders (headers, removeContent, unknownOrigin) { module.exports = RedirectHandler +/***/ }), + +/***/ 2286: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(9491) + +const { kRetryHandlerDefaultRetry } = __nccwpck_require__(2785) +const { RequestRetryError } = __nccwpck_require__(8045) +const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3983) + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? 
[ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) + } + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } + + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state + + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } + + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return + } + + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } + + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } + + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } + + const retryTimeout = + retryAfterHeader > 0 + ? 
Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + + state.currentTimeout = retryTimeout + + setTimeout(() => cb(null), retryTimeout) + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) + + this.retryCount += 1 + + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null + + if (statusCode !== 206) { + return true + } + + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + const { start, size, end = size } = contentRange + + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') + + this.resume = resume + return true + } + + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) + + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const { start, size, end = size } = range + + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) + + this.start = start + this.end = end + } + + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } + + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) + + this.resume = resume + this.etag = headers.etag != null ? headers.etag : null + + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + + this.abort(err) + + return false + } + + onData (chunk) { + this.start += chunk.length + + return this.handler.onData(chunk) + } + + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) + } + + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) + + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? 
''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) + } + } + } +} + +module.exports = RetryHandler + + /***/ }), /***/ 8861: @@ -23682,6 +24414,9 @@ class ProxyAgent extends DispatcherBase { this[kProxyTls] = opts.proxyTls this[kProxyHeaders] = opts.headers || {} + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + if (opts.auth && opts.token) { throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') } else if (opts.auth) { @@ -23689,11 +24424,10 @@ class ProxyAgent extends DispatcherBase { this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` } else if (opts.token) { this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` } - const resolvedUrl = new URL(opts.uri) - const { origin, port, host } = resolvedUrl - const connect = buildConnector({ ...opts.proxyTls }) this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) this[kClient] = clientFactory(resolvedUrl, { connect }) @@ -23717,7 +24451,7 @@ class ProxyAgent extends DispatcherBase { }) if (statusCode !== 200) { socket.on('error', () => {}).destroy() - callback(new RequestAbortedError('Proxy response !== 200 when HTTP Tunneling')) + callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) } if (opts.protocol !== 'https:') { callback(null, socket) @@ -26612,13 +27346,7 @@ const { GithubActions } = __nccwpck_require__(8169); * @returns {(response: axios.AxiosResponse) => void} */ const createMaskHandler = (actions) => (response) => { - let data = response.data - - if (typeof data == 'object') { - data = JSON.stringify(data) - } - - actions.setSecret(data) + actions.setSecret(JSON.stringify(response.data)) } module.exports = { createMaskHandler } @@ -26640,12 +27368,14 @@ const { GithubActions } = __nccwpck_require__(8169); * @returns {(response: axios.AxiosResponse) => void} */ const createOutputHandler = (actions) => (response) => { - actions.setOutput('response', response.data) + actions.setOutput('response', JSON.stringify(response.data)) actions.setOutput('headers', response.headers) + actions.setOutput('status', response.status) } module.exports = { createOutputHandler } + /***/ }), /***/ 6733: @@ -26671,8 +27401,8 @@ const createPersistHandler = (filePath, actions) => (response) => { data = JSON.stringify(data) } - fs.writeFile(filePath, data, err => { - if (!err) { + fs.writeFile(filePath, data, error => { + if (!error) { actions.info(`response persisted successfully at ${filePath}`) return } @@ -26746,8 +27476,8 @@ const retry = async (callback, options) => { lastErr = err; } - if (i < options.retries) { - options.actions.warning(`#${i + 1} request failed: ${err}`); + if (i < options.retry) { + options.actions.warning(`#${i + 1} request failed: ${lastErr}`); await sleep(options.sleep); } @@ -26870,10 +27600,10 @@ const request = async({ method, instanceConfig, data, files, file, actions, opti } catch(error) { if (error.response && options.ignoredCodes.includes(error.response.status)) { actions.warning(`ignored status code: ${JSON.stringify({ code: error.response.status, message: error.response.data })}`) - - return null + + return error.response } - + if (!error.response && error.request && options.preventFailureOnNoResponse) { 
actions.warning(`no response received: ${JSON.stringify(error)}`); @@ -26970,9 +27700,9 @@ const updateConfigForFile = (instanceConfig, filePath, actions) => { * @returns {Promise} */ const contentLength = (formData) => new Promise((resolve, reject) => { - formData.getLength((err, length) => { - if (err) { - reject (err) + formData.getLength((error, length) => { + if (error) { + reject(error) return } @@ -27305,7 +28035,7 @@ Dicer.prototype._write = function (data, encoding, cb) { if (this._headerFirst && this._isPreamble) { if (!this._part) { this._part = new PartStream(this._partOpts) - if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } + if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } } const r = this._hparser.push(data) if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } @@ -27362,7 +28092,7 @@ Dicer.prototype._oninfo = function (isMatch, data, start, end) { } } if (this._dashes === 2) { - if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } + if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } this.reset() this._finished = true // no more parts will be added @@ -27380,7 +28110,13 @@ Dicer.prototype._oninfo = function (isMatch, data, start, end) { this._part._read = function (n) { self._unpause() } - if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } + if (this._isPreamble && this.listenerCount('preamble') !== 0) { + this.emit('preamble', this._part) + } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { + this.emit('part', this._part) + } else { + this._ignore() + } if (!this._isPreamble) { this._inHeader = true } } if (data && start < end && !this._ignoreData) { @@ -28063,7 +28799,7 @@ function Multipart (boy, cfg) { ++nfiles - if (!boy._events.file) { + if (boy.listenerCount('file') === 0) { self.parser._ignore() return } @@ -28592,7 +29328,7 @@ const decoders = { if (textDecoders.has(this.toString())) { try { return textDecoders.get(this).decode(data) - } catch (e) { } + } catch {} } return typeof data === 'string' ? data @@ -28844,7 +29580,7 @@ module.exports = parseParams /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// Axios v1.6.2 Copyright (c) 2023 Matt Zabriskie and contributors +// Axios v1.7.7 Copyright (c) 2024 Matt Zabriskie and contributors const FormData$1 = __nccwpck_require__(4334); @@ -28856,7 +29592,7 @@ const util = __nccwpck_require__(3837); const followRedirects = __nccwpck_require__(7707); const zlib = __nccwpck_require__(9796); const stream = __nccwpck_require__(2781); -const EventEmitter = __nccwpck_require__(2361); +const events = __nccwpck_require__(2361); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
e : { 'default': e }; } @@ -28868,7 +29604,6 @@ const util__default = /*#__PURE__*/_interopDefaultLegacy(util); const followRedirects__default = /*#__PURE__*/_interopDefaultLegacy(followRedirects); const zlib__default = /*#__PURE__*/_interopDefaultLegacy(zlib); const stream__default = /*#__PURE__*/_interopDefaultLegacy(stream); -const EventEmitter__default = /*#__PURE__*/_interopDefaultLegacy(EventEmitter); function bind(fn, thisArg) { return function wrap() { @@ -29083,6 +29818,8 @@ const isFormData = (thing) => { */ const isURLSearchParams = kindOfTest('URLSearchParams'); +const [isReadableStream, isRequest, isResponse, isHeaders] = ['ReadableStream', 'Request', 'Response', 'Headers'].map(kindOfTest); + /** * Trim excess whitespace off the beginning and end of a string * @@ -29471,8 +30208,7 @@ const toObjectSet = (arrayOrString, delimiter) => { const noop = () => {}; const toFiniteNumber = (value, defaultValue) => { - value = +value; - return Number.isFinite(value) ? value : defaultValue; + return value != null && Number.isFinite(value = +value) ? value : defaultValue; }; const ALPHA = 'abcdefghijklmnopqrstuvwxyz'; @@ -29542,6 +30278,36 @@ const isAsyncFn = kindOfTest('AsyncFunction'); const isThenable = (thing) => thing && (isObject(thing) || isFunction(thing)) && isFunction(thing.then) && isFunction(thing.catch); +// original code +// https://github.com/DigitalBrainJS/AxiosPromise/blob/16deab13710ec09779922131f3fa5954320f83ab/lib/utils.js#L11-L34 + +const _setImmediate = ((setImmediateSupported, postMessageSupported) => { + if (setImmediateSupported) { + return setImmediate; + } + + return postMessageSupported ? ((token, callbacks) => { + _global.addEventListener("message", ({source, data}) => { + if (source === _global && data === token) { + callbacks.length && callbacks.shift()(); + } + }, false); + + return (cb) => { + callbacks.push(cb); + _global.postMessage(token, "*"); + } + })(`axios@${Math.random()}`, []) : (cb) => setTimeout(cb); +})( + typeof setImmediate === 'function', + isFunction(_global.postMessage) +); + +const asap = typeof queueMicrotask !== 'undefined' ? + queueMicrotask.bind(_global) : ( typeof process !== 'undefined' && process.nextTick || _setImmediate); + +// ********************* + const utils$1 = { isArray, isArrayBuffer, @@ -29553,6 +30319,10 @@ const utils$1 = { isBoolean, isObject, isPlainObject, + isReadableStream, + isRequest, + isResponse, + isHeaders, isUndefined, isDate, isFile, @@ -29593,7 +30363,9 @@ const utils$1 = { isSpecCompliantForm, toJSONObject, isAsyncFn, - isThenable + isThenable, + setImmediate: _setImmediate, + asap }; /** @@ -29621,7 +30393,10 @@ function AxiosError(message, code, config, request, response) { code && (this.code = code); config && (this.config = config); request && (this.request = request); - response && (this.response = response); + if (response) { + this.response = response; + this.status = response.status ? response.status : null; + } } utils$1.inherits(AxiosError, Error, { @@ -29641,7 +30416,7 @@ utils$1.inherits(AxiosError, Error, { // Axios config: utils$1.toJSONObject(this.config), code: this.code, - status: this.response && this.response.status ? 
this.response.status : null + status: this.status }; } }); @@ -30102,6 +30877,8 @@ const platform$1 = { const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined'; +const _navigator = typeof navigator === 'object' && navigator || undefined; + /** * Determine if we're running in a standard browser environment * @@ -30119,10 +30896,8 @@ const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'unde * * @returns {boolean} */ -const hasStandardBrowserEnv = ( - (product) => { - return hasBrowserEnv && ['ReactNative', 'NativeScript', 'NS'].indexOf(product) < 0 - })(typeof navigator !== 'undefined' && navigator.product); +const hasStandardBrowserEnv = hasBrowserEnv && + (!_navigator || ['ReactNative', 'NativeScript', 'NS'].indexOf(_navigator.product) < 0); /** * Determine if we're running in a standard browser webWorker environment @@ -30142,11 +30917,15 @@ const hasStandardBrowserWebWorkerEnv = (() => { ); })(); +const origin = hasBrowserEnv && window.location.href || 'http://localhost'; + const utils = /*#__PURE__*/Object.freeze({ __proto__: null, hasBrowserEnv: hasBrowserEnv, hasStandardBrowserWebWorkerEnv: hasStandardBrowserWebWorkerEnv, - hasStandardBrowserEnv: hasStandardBrowserEnv + hasStandardBrowserEnv: hasStandardBrowserEnv, + navigator: _navigator, + origin: origin }); const platform = { @@ -30214,6 +30993,9 @@ function arrayToObject(arr) { function formDataToJSON(formData) { function buildPath(path, value, target, index) { let name = path[index++]; + + if (name === '__proto__') return true; + const isNumericKey = Number.isFinite(+name); const isLast = index >= path.length; name = !name && utils$1.isArray(target) ? target.length : name; @@ -30283,7 +31065,7 @@ const defaults = { transitional: transitionalDefaults, - adapter: ['xhr', 'http'], + adapter: ['xhr', 'http', 'fetch'], transformRequest: [function transformRequest(data, headers) { const contentType = headers.getContentType() || ''; @@ -30297,9 +31079,6 @@ const defaults = { const isFormData = utils$1.isFormData(data); if (isFormData) { - if (!hasJSONContentType) { - return data; - } return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data; } @@ -30307,7 +31086,8 @@ const defaults = { utils$1.isBuffer(data) || utils$1.isStream(data) || utils$1.isFile(data) || - utils$1.isBlob(data) + utils$1.isBlob(data) || + utils$1.isReadableStream(data) ) { return data; } @@ -30350,6 +31130,10 @@ const defaults = { const forcedJSONParsing = transitional && transitional.forcedJSONParsing; const JSONRequested = this.responseType === 'json'; + if (utils$1.isResponse(data) || utils$1.isReadableStream(data)) { + return data; + } + if (data && utils$1.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) { const silentJSONParsing = transitional && transitional.silentJSONParsing; const strictJSONParsing = !silentJSONParsing && JSONRequested; @@ -30553,6 +31337,10 @@ class AxiosHeaders { setHeaders(header, valueOrRewrite); } else if(utils$1.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) { setHeaders(parseHeaders(header), valueOrRewrite); + } else if (utils$1.isHeaders(header)) { + for (const [key, value] of header.entries()) { + setHeader(value, key, rewrite); + } } else { header != null && setHeader(valueOrRewrite, header, rewrite); } @@ -30844,7 +31632,7 @@ function isAbsoluteURL(url) { */ function combineURLs(baseURL, relativeURL) { return relativeURL - ? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '') + ? 
baseURL.replace(/\/?\/$/, '') + '/' + relativeURL.replace(/^\/+/, '') : baseURL; } @@ -30865,7 +31653,7 @@ function buildFullPath(baseURL, requestedURL) { return requestedURL; } -const VERSION = "1.6.2"; +const VERSION = "1.7.7"; function parseProtocol(url) { const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url); @@ -30920,88 +31708,6 @@ function fromDataURI(uri, asBlob, options) { throw new AxiosError('Unsupported protocol ' + protocol, AxiosError.ERR_NOT_SUPPORT); } -/** - * Throttle decorator - * @param {Function} fn - * @param {Number} freq - * @return {Function} - */ -function throttle(fn, freq) { - let timestamp = 0; - const threshold = 1000 / freq; - let timer = null; - return function throttled(force, args) { - const now = Date.now(); - if (force || now - timestamp > threshold) { - if (timer) { - clearTimeout(timer); - timer = null; - } - timestamp = now; - return fn.apply(null, args); - } - if (!timer) { - timer = setTimeout(() => { - timer = null; - timestamp = Date.now(); - return fn.apply(null, args); - }, threshold - (now - timestamp)); - } - }; -} - -/** - * Calculate data maxRate - * @param {Number} [samplesCount= 10] - * @param {Number} [min= 1000] - * @returns {Function} - */ -function speedometer(samplesCount, min) { - samplesCount = samplesCount || 10; - const bytes = new Array(samplesCount); - const timestamps = new Array(samplesCount); - let head = 0; - let tail = 0; - let firstSampleTS; - - min = min !== undefined ? min : 1000; - - return function push(chunkLength) { - const now = Date.now(); - - const startedAt = timestamps[tail]; - - if (!firstSampleTS) { - firstSampleTS = now; - } - - bytes[head] = chunkLength; - timestamps[head] = now; - - let i = tail; - let bytesCount = 0; - - while (i !== head) { - bytesCount += bytes[i++]; - i = i % samplesCount; - } - - head = (head + 1) % samplesCount; - - if (head === tail) { - tail = (tail + 1) % samplesCount; - } - - if (now - firstSampleTS < min) { - return; - } - - const passed = startedAt && now - startedAt; - - return passed ? Math.round(bytesCount * 1000 / passed) : undefined; - }; -} - const kInternals = Symbol('internals'); class AxiosTransformStream extends stream__default["default"].Transform{ @@ -31021,12 +31727,8 @@ class AxiosTransformStream extends stream__default["default"].Transform{ readableHighWaterMark: options.chunkSize }); - const self = this; - const internals = this[kInternals] = { - length: options.length, timeWindow: options.timeWindow, - ticksRate: options.ticksRate, chunkSize: options.chunkSize, maxRate: options.maxRate, minChunkSize: options.minChunkSize, @@ -31038,8 +31740,6 @@ class AxiosTransformStream extends stream__default["default"].Transform{ onReadCallback: null }; - const _speedometer = speedometer(internals.ticksRate * options.samplesCount, internals.timeWindow); - this.on('newListener', event => { if (event === 'progress') { if (!internals.isCaptured) { @@ -31047,38 +31747,6 @@ class AxiosTransformStream extends stream__default["default"].Transform{ } } }); - - let bytesNotified = 0; - - internals.updateProgress = throttle(function throttledHandler() { - const totalBytes = internals.length; - const bytesTransferred = internals.bytesSeen; - const progressBytes = bytesTransferred - bytesNotified; - if (!progressBytes || self.destroyed) return; - - const rate = _speedometer(progressBytes); - - bytesNotified = bytesTransferred; - - process.nextTick(() => { - self.emit('progress', { - 'loaded': bytesTransferred, - 'total': totalBytes, - 'progress': totalBytes ? 
(bytesTransferred / totalBytes) : undefined, - 'bytes': progressBytes, - 'rate': rate ? rate : undefined, - 'estimated': rate && totalBytes && bytesTransferred <= totalBytes ? - (totalBytes - bytesTransferred) / rate : undefined - }); - }); - }, internals.ticksRate); - - const onFinish = () => { - internals.updateProgress(true); - }; - - this.once('end', onFinish); - this.once('error', onFinish); } _read(size) { @@ -31092,7 +31760,6 @@ class AxiosTransformStream extends stream__default["default"].Transform{ } _transform(chunk, encoding, callback) { - const self = this; const internals = this[kInternals]; const maxRate = internals.maxRate; @@ -31104,16 +31771,14 @@ class AxiosTransformStream extends stream__default["default"].Transform{ const bytesThreshold = (maxRate / divider); const minChunkSize = internals.minChunkSize !== false ? Math.max(internals.minChunkSize, bytesThreshold * 0.01) : 0; - function pushChunk(_chunk, _callback) { + const pushChunk = (_chunk, _callback) => { const bytes = Buffer.byteLength(_chunk); internals.bytesSeen += bytes; internals.bytes += bytes; - if (internals.isCaptured) { - internals.updateProgress(); - } + internals.isCaptured && this.emit('progress', internals.bytesSeen); - if (self.push(_chunk)) { + if (this.push(_chunk)) { process.nextTick(_callback); } else { internals.onReadCallback = () => { @@ -31121,7 +31786,7 @@ class AxiosTransformStream extends stream__default["default"].Transform{ process.nextTick(_callback); }; } - } + }; const transformChunk = (_chunk, _callback) => { const chunkSize = Buffer.byteLength(_chunk); @@ -31178,11 +31843,6 @@ class AxiosTransformStream extends stream__default["default"].Transform{ } }); } - - setLength(length) { - this[kInternals].length = +length; - return this; - } } const AxiosTransformStream$1 = AxiosTransformStream; @@ -31350,6 +32010,142 @@ const callbackify = (fn, reducer) => { const callbackify$1 = callbackify; +/** + * Calculate data maxRate + * @param {Number} [samplesCount= 10] + * @param {Number} [min= 1000] + * @returns {Function} + */ +function speedometer(samplesCount, min) { + samplesCount = samplesCount || 10; + const bytes = new Array(samplesCount); + const timestamps = new Array(samplesCount); + let head = 0; + let tail = 0; + let firstSampleTS; + + min = min !== undefined ? min : 1000; + + return function push(chunkLength) { + const now = Date.now(); + + const startedAt = timestamps[tail]; + + if (!firstSampleTS) { + firstSampleTS = now; + } + + bytes[head] = chunkLength; + timestamps[head] = now; + + let i = tail; + let bytesCount = 0; + + while (i !== head) { + bytesCount += bytes[i++]; + i = i % samplesCount; + } + + head = (head + 1) % samplesCount; + + if (head === tail) { + tail = (tail + 1) % samplesCount; + } + + if (now - firstSampleTS < min) { + return; + } + + const passed = startedAt && now - startedAt; + + return passed ? 
Math.round(bytesCount * 1000 / passed) : undefined; + }; +} + +/** + * Throttle decorator + * @param {Function} fn + * @param {Number} freq + * @return {Function} + */ +function throttle(fn, freq) { + let timestamp = 0; + let threshold = 1000 / freq; + let lastArgs; + let timer; + + const invoke = (args, now = Date.now()) => { + timestamp = now; + lastArgs = null; + if (timer) { + clearTimeout(timer); + timer = null; + } + fn.apply(null, args); + }; + + const throttled = (...args) => { + const now = Date.now(); + const passed = now - timestamp; + if ( passed >= threshold) { + invoke(args, now); + } else { + lastArgs = args; + if (!timer) { + timer = setTimeout(() => { + timer = null; + invoke(lastArgs); + }, threshold - passed); + } + } + }; + + const flush = () => lastArgs && invoke(lastArgs); + + return [throttled, flush]; +} + +const progressEventReducer = (listener, isDownloadStream, freq = 3) => { + let bytesNotified = 0; + const _speedometer = speedometer(50, 250); + + return throttle(e => { + const loaded = e.loaded; + const total = e.lengthComputable ? e.total : undefined; + const progressBytes = loaded - bytesNotified; + const rate = _speedometer(progressBytes); + const inRange = loaded <= total; + + bytesNotified = loaded; + + const data = { + loaded, + total, + progress: total ? (loaded / total) : undefined, + bytes: progressBytes, + rate: rate ? rate : undefined, + estimated: rate && total && inRange ? (total - loaded) / rate : undefined, + event: e, + lengthComputable: total != null, + [isDownloadStream ? 'download' : 'upload']: true + }; + + listener(data); + }, freq); +}; + +const progressEventDecorator = (total, throttled) => { + const lengthComputable = total != null; + + return [(loaded) => throttled[0]({ + lengthComputable, + total, + loaded + }), throttled[1]]; +}; + +const asyncDecorator = (fn) => (...args) => utils$1.asap(() => fn(...args)); + const zlibOptions = { flush: zlib__default["default"].constants.Z_SYNC_FLUSH, finishFlush: zlib__default["default"].constants.Z_SYNC_FLUSH @@ -31370,6 +32166,14 @@ const supportedProtocols = platform.protocols.map(protocol => { return protocol + ':'; }); +const flushOnFinish = (stream, [throttled, flush]) => { + stream + .on('end', flush) + .on('error', flush); + + return throttled; +}; + /** * If the proxy or config beforeRedirects functions are defined, call them with the options * object. @@ -31378,12 +32182,12 @@ const supportedProtocols = platform.protocols.map(protocol => { * * @returns {Object} */ -function dispatchBeforeRedirect(options) { +function dispatchBeforeRedirect(options, responseDetails) { if (options.beforeRedirects.proxy) { options.beforeRedirects.proxy(options); } if (options.beforeRedirects.config) { - options.beforeRedirects.config(options); + options.beforeRedirects.config(options, responseDetails); } } @@ -31496,6 +32300,10 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { // hotfix to support opt.all option which is required for node 20.x lookup = (hostname, opt, cb) => { _lookup(hostname, opt, (err, arg0, arg1) => { + if (err) { + return cb(err); + } + const addresses = utils$1.isArray(arg0) ? arg0.map(addr => buildAddressEntry(addr)) : [buildAddressEntry(arg0, arg1)]; opt.all ? 
cb(err, addresses) : cb(err, addresses[0].address, addresses[0].family); @@ -31504,7 +32312,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { } // temporary internal emitter until the AxiosRequest class will be implemented - const emitter = new EventEmitter__default["default"](); + const emitter = new events.EventEmitter(); const onFinished = () => { if (config.cancelToken) { @@ -31541,7 +32349,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { // Parse url const fullPath = buildFullPath(config.baseURL, config.url); - const parsed = new URL(fullPath, 'http://localhost'); + const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined); const protocol = parsed.protocol || supportedProtocols[0]; if (protocol === 'data:') { @@ -31599,8 +32407,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { // Only set header if it hasn't been set in config headers.set('User-Agent', 'axios/' + VERSION, false); - const onDownloadProgress = config.onDownloadProgress; - const onUploadProgress = config.onUploadProgress; + const {onUploadProgress, onDownloadProgress} = config; const maxRate = config.maxRate; let maxUploadRate = undefined; let maxDownloadRate = undefined; @@ -31671,15 +32478,16 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { } data = stream__default["default"].pipeline([data, new AxiosTransformStream$1({ - length: contentLength, maxRate: utils$1.toFiniteNumber(maxUploadRate) })], utils$1.noop); - onUploadProgress && data.on('progress', progress => { - onUploadProgress(Object.assign(progress, { - upload: true - })); - }); + onUploadProgress && data.on('progress', flushOnFinish( + data, + progressEventDecorator( + contentLength, + progressEventReducer(asyncDecorator(onUploadProgress), false, 3) + ) + )); } // HTTP basic authentication @@ -31737,7 +32545,7 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { if (config.socketPath) { options.socketPath = config.socketPath; } else { - options.hostname = parsed.hostname; + options.hostname = parsed.hostname.startsWith("[") ? parsed.hostname.slice(1, -1) : parsed.hostname; options.port = parsed.port; setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); } @@ -31778,17 +32586,18 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { const responseLength = +res.headers['content-length']; - if (onDownloadProgress) { + if (onDownloadProgress || maxDownloadRate) { const transformStream = new AxiosTransformStream$1({ - length: utils$1.toFiniteNumber(responseLength), maxRate: utils$1.toFiniteNumber(maxDownloadRate) }); - onDownloadProgress && transformStream.on('progress', progress => { - onDownloadProgress(Object.assign(progress, { - download: true - })); - }); + onDownloadProgress && transformStream.on('progress', flushOnFinish( + transformStream, + progressEventDecorator( + responseLength, + progressEventReducer(asyncDecorator(onDownloadProgress), true, 3) + ) + )); streams.push(transformStream); } @@ -32001,51 +32810,12 @@ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { }); }; -const cookies = platform.hasStandardBrowserEnv ? 
- - // Standard browser envs support document.cookie - { - write(name, value, expires, path, domain, secure) { - const cookie = [name + '=' + encodeURIComponent(value)]; - - utils$1.isNumber(expires) && cookie.push('expires=' + new Date(expires).toGMTString()); - - utils$1.isString(path) && cookie.push('path=' + path); - - utils$1.isString(domain) && cookie.push('domain=' + domain); - - secure === true && cookie.push('secure'); - - document.cookie = cookie.join('; '); - }, - - read(name) { - const match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); - return (match ? decodeURIComponent(match[3]) : null); - }, - - remove(name) { - this.write(name, '', Date.now() - 86400000); - } - } - - : - - // Non-standard browser env (web workers, react-native) lack needed support. - { - write() {}, - read() { - return null; - }, - remove() {} - }; - const isURLSameOrigin = platform.hasStandardBrowserEnv ? // Standard browser envs have full support of the APIs needed to test // whether the request URL is of the same origin as current location. (function standardBrowserEnv() { - const msie = /(msie|trident)/i.test(navigator.userAgent); + const msie = platform.navigator && /(msie|trident)/i.test(platform.navigator.userAgent); const urlParsingNode = document.createElement('a'); let originURL; @@ -32103,80 +32873,222 @@ const isURLSameOrigin = platform.hasStandardBrowserEnv ? }; })(); -function progressEventReducer(listener, isDownloadStream) { - let bytesNotified = 0; - const _speedometer = speedometer(50, 250); +const cookies = platform.hasStandardBrowserEnv ? - return e => { - const loaded = e.loaded; - const total = e.lengthComputable ? e.total : undefined; - const progressBytes = loaded - bytesNotified; - const rate = _speedometer(progressBytes); - const inRange = loaded <= total; + // Standard browser envs support document.cookie + { + write(name, value, expires, path, domain, secure) { + const cookie = [name + '=' + encodeURIComponent(value)]; - bytesNotified = loaded; + utils$1.isNumber(expires) && cookie.push('expires=' + new Date(expires).toGMTString()); - const data = { - loaded, - total, - progress: total ? (loaded / total) : undefined, - bytes: progressBytes, - rate: rate ? rate : undefined, - estimated: rate && total && inRange ? (total - loaded) / rate : undefined, - event: e - }; + utils$1.isString(path) && cookie.push('path=' + path); - data[isDownloadStream ? 'download' : 'upload'] = true; + utils$1.isString(domain) && cookie.push('domain=' + domain); - listener(data); + secure === true && cookie.push('secure'); + + document.cookie = cookie.join('; '); + }, + + read(name) { + const match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); + return (match ? decodeURIComponent(match[3]) : null); + }, + + remove(name) { + this.write(name, '', Date.now() - 86400000); + } + } + + : + + // Non-standard browser env (web workers, react-native) lack needed support. + { + write() {}, + read() { + return null; + }, + remove() {} }; + +const headersToObject = (thing) => thing instanceof AxiosHeaders$1 ? { ...thing } : thing; + +/** + * Config-specific merge-function which creates a new config-object + * by merging two configuration objects together. 
+ * + * @param {Object} config1 + * @param {Object} config2 + * + * @returns {Object} New object resulting from merging config2 to config1 + */ +function mergeConfig(config1, config2) { + // eslint-disable-next-line no-param-reassign + config2 = config2 || {}; + const config = {}; + + function getMergedValue(target, source, caseless) { + if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) { + return utils$1.merge.call({caseless}, target, source); + } else if (utils$1.isPlainObject(source)) { + return utils$1.merge({}, source); + } else if (utils$1.isArray(source)) { + return source.slice(); + } + return source; + } + + // eslint-disable-next-line consistent-return + function mergeDeepProperties(a, b, caseless) { + if (!utils$1.isUndefined(b)) { + return getMergedValue(a, b, caseless); + } else if (!utils$1.isUndefined(a)) { + return getMergedValue(undefined, a, caseless); + } + } + + // eslint-disable-next-line consistent-return + function valueFromConfig2(a, b) { + if (!utils$1.isUndefined(b)) { + return getMergedValue(undefined, b); + } + } + + // eslint-disable-next-line consistent-return + function defaultToConfig2(a, b) { + if (!utils$1.isUndefined(b)) { + return getMergedValue(undefined, b); + } else if (!utils$1.isUndefined(a)) { + return getMergedValue(undefined, a); + } + } + + // eslint-disable-next-line consistent-return + function mergeDirectKeys(a, b, prop) { + if (prop in config2) { + return getMergedValue(a, b); + } else if (prop in config1) { + return getMergedValue(undefined, a); + } + } + + const mergeMap = { + url: valueFromConfig2, + method: valueFromConfig2, + data: valueFromConfig2, + baseURL: defaultToConfig2, + transformRequest: defaultToConfig2, + transformResponse: defaultToConfig2, + paramsSerializer: defaultToConfig2, + timeout: defaultToConfig2, + timeoutMessage: defaultToConfig2, + withCredentials: defaultToConfig2, + withXSRFToken: defaultToConfig2, + adapter: defaultToConfig2, + responseType: defaultToConfig2, + xsrfCookieName: defaultToConfig2, + xsrfHeaderName: defaultToConfig2, + onUploadProgress: defaultToConfig2, + onDownloadProgress: defaultToConfig2, + decompress: defaultToConfig2, + maxContentLength: defaultToConfig2, + maxBodyLength: defaultToConfig2, + beforeRedirect: defaultToConfig2, + transport: defaultToConfig2, + httpAgent: defaultToConfig2, + httpsAgent: defaultToConfig2, + cancelToken: defaultToConfig2, + socketPath: defaultToConfig2, + responseEncoding: defaultToConfig2, + validateStatus: mergeDirectKeys, + headers: (a, b) => mergeDeepProperties(headersToObject(a), headersToObject(b), true) + }; + + utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) { + const merge = mergeMap[prop] || mergeDeepProperties; + const configValue = merge(config1[prop], config2[prop], prop); + (utils$1.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue); + }); + + return config; } +const resolveConfig = (config) => { + const newConfig = mergeConfig({}, config); + + let {data, withXSRFToken, xsrfHeaderName, xsrfCookieName, headers, auth} = newConfig; + + newConfig.headers = headers = AxiosHeaders$1.from(headers); + + newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url), config.params, config.paramsSerializer); + + // HTTP basic authentication + if (auth) { + headers.set('Authorization', 'Basic ' + + btoa((auth.username || '') + ':' + (auth.password ? 
unescape(encodeURIComponent(auth.password)) : '')) + ); + } + + let contentType; + + if (utils$1.isFormData(data)) { + if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) { + headers.setContentType(undefined); // Let the browser set it + } else if ((contentType = headers.getContentType()) !== false) { + // fix semicolon duplication issue for ReactNative FormData implementation + const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : []; + headers.setContentType([type || 'multipart/form-data', ...tokens].join('; ')); + } + } + + // Add xsrf header + // This is only done if running in a standard browser environment. + // Specifically not if we're in a web worker, or react-native. + + if (platform.hasStandardBrowserEnv) { + withXSRFToken && utils$1.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(newConfig)); + + if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(newConfig.url))) { + // Add xsrf header + const xsrfValue = xsrfHeaderName && xsrfCookieName && cookies.read(xsrfCookieName); + + if (xsrfValue) { + headers.set(xsrfHeaderName, xsrfValue); + } + } + } + + return newConfig; +}; + const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined'; const xhrAdapter = isXHRAdapterSupported && function (config) { return new Promise(function dispatchXhrRequest(resolve, reject) { - let requestData = config.data; - const requestHeaders = AxiosHeaders$1.from(config.headers).normalize(); - let {responseType, withXSRFToken} = config; + const _config = resolveConfig(config); + let requestData = _config.data; + const requestHeaders = AxiosHeaders$1.from(_config.headers).normalize(); + let {responseType, onUploadProgress, onDownloadProgress} = _config; let onCanceled; + let uploadThrottled, downloadThrottled; + let flushUpload, flushDownload; + function done() { - if (config.cancelToken) { - config.cancelToken.unsubscribe(onCanceled); - } + flushUpload && flushUpload(); // flush events + flushDownload && flushDownload(); // flush events - if (config.signal) { - config.signal.removeEventListener('abort', onCanceled); - } - } + _config.cancelToken && _config.cancelToken.unsubscribe(onCanceled); - let contentType; - - if (utils$1.isFormData(requestData)) { - if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) { - requestHeaders.setContentType(false); // Let the browser set it - } else if ((contentType = requestHeaders.getContentType()) !== false) { - // fix semicolon duplication issue for ReactNative FormData implementation - const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : []; - requestHeaders.setContentType([type || 'multipart/form-data', ...tokens].join('; ')); - } + _config.signal && _config.signal.removeEventListener('abort', onCanceled); } let request = new XMLHttpRequest(); - // HTTP basic authentication - if (config.auth) { - const username = config.auth.username || ''; - const password = config.auth.password ? 
unescape(encodeURIComponent(config.auth.password)) : ''; - requestHeaders.set('Authorization', 'Basic ' + btoa(username + ':' + password)); - } - - const fullPath = buildFullPath(config.baseURL, config.url); - - request.open(config.method.toUpperCase(), buildURL(fullPath, config.params, config.paramsSerializer), true); + request.open(_config.method.toUpperCase(), _config.url, true); // Set the request timeout in MS - request.timeout = config.timeout; + request.timeout = _config.timeout; function onloadend() { if (!request) { @@ -32256,10 +33168,10 @@ const xhrAdapter = isXHRAdapterSupported && function (config) { // Handle timeout request.ontimeout = function handleTimeout() { - let timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded'; - const transitional = config.transitional || transitionalDefaults; - if (config.timeoutErrorMessage) { - timeoutErrorMessage = config.timeoutErrorMessage; + let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded'; + const transitional = _config.transitional || transitionalDefaults; + if (_config.timeoutErrorMessage) { + timeoutErrorMessage = _config.timeoutErrorMessage; } reject(new AxiosError( timeoutErrorMessage, @@ -32271,22 +33183,6 @@ const xhrAdapter = isXHRAdapterSupported && function (config) { request = null; }; - // Add xsrf header - // This is only done if running in a standard browser environment. - // Specifically not if we're in a web worker, or react-native. - if(platform.hasStandardBrowserEnv) { - withXSRFToken && utils$1.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(config)); - - if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(fullPath))) { - // Add xsrf header - const xsrfValue = config.xsrfHeaderName && config.xsrfCookieName && cookies.read(config.xsrfCookieName); - - if (xsrfValue) { - requestHeaders.set(config.xsrfHeaderName, xsrfValue); - } - } - } - // Remove Content-Type if data is undefined requestData === undefined && requestHeaders.setContentType(null); @@ -32298,26 +33194,31 @@ const xhrAdapter = isXHRAdapterSupported && function (config) { } // Add withCredentials to request if needed - if (!utils$1.isUndefined(config.withCredentials)) { - request.withCredentials = !!config.withCredentials; + if (!utils$1.isUndefined(_config.withCredentials)) { + request.withCredentials = !!_config.withCredentials; } // Add responseType to request if needed if (responseType && responseType !== 'json') { - request.responseType = config.responseType; + request.responseType = _config.responseType; } // Handle progress if needed - if (typeof config.onDownloadProgress === 'function') { - request.addEventListener('progress', progressEventReducer(config.onDownloadProgress, true)); + if (onDownloadProgress) { + ([downloadThrottled, flushDownload] = progressEventReducer(onDownloadProgress, true)); + request.addEventListener('progress', downloadThrottled); } // Not all browsers support upload events - if (typeof config.onUploadProgress === 'function' && request.upload) { - request.upload.addEventListener('progress', progressEventReducer(config.onUploadProgress)); + if (onUploadProgress && request.upload) { + ([uploadThrottled, flushUpload] = progressEventReducer(onUploadProgress)); + + request.upload.addEventListener('progress', uploadThrottled); + + request.upload.addEventListener('loadend', flushUpload); } - if (config.cancelToken || config.signal) { + if (_config.cancelToken || _config.signal) { // Handle 
cancellation // eslint-disable-next-line func-names onCanceled = cancel => { @@ -32329,13 +33230,13 @@ const xhrAdapter = isXHRAdapterSupported && function (config) { request = null; }; - config.cancelToken && config.cancelToken.subscribe(onCanceled); - if (config.signal) { - config.signal.aborted ? onCanceled() : config.signal.addEventListener('abort', onCanceled); + _config.cancelToken && _config.cancelToken.subscribe(onCanceled); + if (_config.signal) { + _config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled); } } - const protocol = parseProtocol(fullPath); + const protocol = parseProtocol(_config.url); if (protocol && platform.protocols.indexOf(protocol) === -1) { reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config)); @@ -32348,9 +33249,360 @@ const xhrAdapter = isXHRAdapterSupported && function (config) { }); }; +const composeSignals = (signals, timeout) => { + const {length} = (signals = signals ? signals.filter(Boolean) : []); + + if (timeout || length) { + let controller = new AbortController(); + + let aborted; + + const onabort = function (reason) { + if (!aborted) { + aborted = true; + unsubscribe(); + const err = reason instanceof Error ? reason : this.reason; + controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err)); + } + }; + + let timer = timeout && setTimeout(() => { + timer = null; + onabort(new AxiosError(`timeout ${timeout} of ms exceeded`, AxiosError.ETIMEDOUT)); + }, timeout); + + const unsubscribe = () => { + if (signals) { + timer && clearTimeout(timer); + timer = null; + signals.forEach(signal => { + signal.unsubscribe ? signal.unsubscribe(onabort) : signal.removeEventListener('abort', onabort); + }); + signals = null; + } + }; + + signals.forEach((signal) => signal.addEventListener('abort', onabort)); + + const {signal} = controller; + + signal.unsubscribe = () => utils$1.asap(unsubscribe); + + return signal; + } +}; + +const composeSignals$1 = composeSignals; + +const streamChunk = function* (chunk, chunkSize) { + let len = chunk.byteLength; + + if (!chunkSize || len < chunkSize) { + yield chunk; + return; + } + + let pos = 0; + let end; + + while (pos < len) { + end = pos + chunkSize; + yield chunk.slice(pos, end); + pos = end; + } +}; + +const readBytes = async function* (iterable, chunkSize) { + for await (const chunk of readStream(iterable)) { + yield* streamChunk(chunk, chunkSize); + } +}; + +const readStream = async function* (stream) { + if (stream[Symbol.asyncIterator]) { + yield* stream; + return; + } + + const reader = stream.getReader(); + try { + for (;;) { + const {done, value} = await reader.read(); + if (done) { + break; + } + yield value; + } + } finally { + await reader.cancel(); + } +}; + +const trackStream = (stream, chunkSize, onProgress, onFinish) => { + const iterator = readBytes(stream, chunkSize); + + let bytes = 0; + let done; + let _onFinish = (e) => { + if (!done) { + done = true; + onFinish && onFinish(e); + } + }; + + return new ReadableStream({ + async pull(controller) { + try { + const {done, value} = await iterator.next(); + + if (done) { + _onFinish(); + controller.close(); + return; + } + + let len = value.byteLength; + if (onProgress) { + let loadedBytes = bytes += len; + onProgress(loadedBytes); + } + controller.enqueue(new Uint8Array(value)); + } catch (err) { + _onFinish(err); + throw err; + } + }, + cancel(reason) { + _onFinish(reason); + return iterator.return(); + } + }, { + 
highWaterMark: 2 + }) +}; + +const isFetchSupported = typeof fetch === 'function' && typeof Request === 'function' && typeof Response === 'function'; +const isReadableStreamSupported = isFetchSupported && typeof ReadableStream === 'function'; + +// used only inside the fetch adapter +const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ? + ((encoder) => (str) => encoder.encode(str))(new TextEncoder()) : + async (str) => new Uint8Array(await new Response(str).arrayBuffer()) +); + +const test = (fn, ...args) => { + try { + return !!fn(...args); + } catch (e) { + return false + } +}; + +const supportsRequestStream = isReadableStreamSupported && test(() => { + let duplexAccessed = false; + + const hasContentType = new Request(platform.origin, { + body: new ReadableStream(), + method: 'POST', + get duplex() { + duplexAccessed = true; + return 'half'; + }, + }).headers.has('Content-Type'); + + return duplexAccessed && !hasContentType; +}); + +const DEFAULT_CHUNK_SIZE = 64 * 1024; + +const supportsResponseStream = isReadableStreamSupported && + test(() => utils$1.isReadableStream(new Response('').body)); + + +const resolvers = { + stream: supportsResponseStream && ((res) => res.body) +}; + +isFetchSupported && (((res) => { + ['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => { + !resolvers[type] && (resolvers[type] = utils$1.isFunction(res[type]) ? (res) => res[type]() : + (_, config) => { + throw new AxiosError(`Response type '${type}' is not supported`, AxiosError.ERR_NOT_SUPPORT, config); + }); + }); +})(new Response)); + +const getBodyLength = async (body) => { + if (body == null) { + return 0; + } + + if(utils$1.isBlob(body)) { + return body.size; + } + + if(utils$1.isSpecCompliantForm(body)) { + const _request = new Request(platform.origin, { + method: 'POST', + body, + }); + return (await _request.arrayBuffer()).byteLength; + } + + if(utils$1.isArrayBufferView(body) || utils$1.isArrayBuffer(body)) { + return body.byteLength; + } + + if(utils$1.isURLSearchParams(body)) { + body = body + ''; + } + + if(utils$1.isString(body)) { + return (await encodeText(body)).byteLength; + } +}; + +const resolveBodyLength = async (headers, body) => { + const length = utils$1.toFiniteNumber(headers.getContentLength()); + + return length == null ? getBodyLength(body) : length; +}; + +const fetchAdapter = isFetchSupported && (async (config) => { + let { + url, + method, + data, + signal, + cancelToken, + timeout, + onDownloadProgress, + onUploadProgress, + responseType, + headers, + withCredentials = 'same-origin', + fetchOptions + } = resolveConfig(config); + + responseType = responseType ? 
(responseType + '').toLowerCase() : 'text'; + + let composedSignal = composeSignals$1([signal, cancelToken && cancelToken.toAbortSignal()], timeout); + + let request; + + const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => { + composedSignal.unsubscribe(); + }); + + let requestContentLength; + + try { + if ( + onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' && + (requestContentLength = await resolveBodyLength(headers, data)) !== 0 + ) { + let _request = new Request(url, { + method: 'POST', + body: data, + duplex: "half" + }); + + let contentTypeHeader; + + if (utils$1.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) { + headers.setContentType(contentTypeHeader); + } + + if (_request.body) { + const [onProgress, flush] = progressEventDecorator( + requestContentLength, + progressEventReducer(asyncDecorator(onUploadProgress)) + ); + + data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush); + } + } + + if (!utils$1.isString(withCredentials)) { + withCredentials = withCredentials ? 'include' : 'omit'; + } + + // Cloudflare Workers throws when credentials are defined + // see https://github.com/cloudflare/workerd/issues/902 + const isCredentialsSupported = "credentials" in Request.prototype; + request = new Request(url, { + ...fetchOptions, + signal: composedSignal, + method: method.toUpperCase(), + headers: headers.normalize().toJSON(), + body: data, + duplex: "half", + credentials: isCredentialsSupported ? withCredentials : undefined + }); + + let response = await fetch(request); + + const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response'); + + if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) { + const options = {}; + + ['status', 'statusText', 'headers'].forEach(prop => { + options[prop] = response[prop]; + }); + + const responseContentLength = utils$1.toFiniteNumber(response.headers.get('content-length')); + + const [onProgress, flush] = onDownloadProgress && progressEventDecorator( + responseContentLength, + progressEventReducer(asyncDecorator(onDownloadProgress), true) + ) || []; + + response = new Response( + trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => { + flush && flush(); + unsubscribe && unsubscribe(); + }), + options + ); + } + + responseType = responseType || 'text'; + + let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config); + + !isStreamResponse && unsubscribe && unsubscribe(); + + return await new Promise((resolve, reject) => { + settle(resolve, reject, { + data: responseData, + headers: AxiosHeaders$1.from(response.headers), + status: response.status, + statusText: response.statusText, + config, + request + }); + }) + } catch (err) { + unsubscribe && unsubscribe(); + + if (err && err.name === 'TypeError' && /fetch/i.test(err.message)) { + throw Object.assign( + new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request), + { + cause: err.cause || err + } + ) + } + + throw AxiosError.from(err, err && err.code, config, request); + } +}); + const knownAdapters = { http: httpAdapter, - xhr: xhrAdapter + xhr: xhrAdapter, + fetch: fetchAdapter }; utils$1.forEach(knownAdapters, (fn, value) => { @@ -32494,108 +33746,6 @@ function dispatchRequest(config) { }); } -const headersToObject = (thing) => thing instanceof AxiosHeaders$1 ? 
thing.toJSON() : thing; - -/** - * Config-specific merge-function which creates a new config-object - * by merging two configuration objects together. - * - * @param {Object} config1 - * @param {Object} config2 - * - * @returns {Object} New object resulting from merging config2 to config1 - */ -function mergeConfig(config1, config2) { - // eslint-disable-next-line no-param-reassign - config2 = config2 || {}; - const config = {}; - - function getMergedValue(target, source, caseless) { - if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) { - return utils$1.merge.call({caseless}, target, source); - } else if (utils$1.isPlainObject(source)) { - return utils$1.merge({}, source); - } else if (utils$1.isArray(source)) { - return source.slice(); - } - return source; - } - - // eslint-disable-next-line consistent-return - function mergeDeepProperties(a, b, caseless) { - if (!utils$1.isUndefined(b)) { - return getMergedValue(a, b, caseless); - } else if (!utils$1.isUndefined(a)) { - return getMergedValue(undefined, a, caseless); - } - } - - // eslint-disable-next-line consistent-return - function valueFromConfig2(a, b) { - if (!utils$1.isUndefined(b)) { - return getMergedValue(undefined, b); - } - } - - // eslint-disable-next-line consistent-return - function defaultToConfig2(a, b) { - if (!utils$1.isUndefined(b)) { - return getMergedValue(undefined, b); - } else if (!utils$1.isUndefined(a)) { - return getMergedValue(undefined, a); - } - } - - // eslint-disable-next-line consistent-return - function mergeDirectKeys(a, b, prop) { - if (prop in config2) { - return getMergedValue(a, b); - } else if (prop in config1) { - return getMergedValue(undefined, a); - } - } - - const mergeMap = { - url: valueFromConfig2, - method: valueFromConfig2, - data: valueFromConfig2, - baseURL: defaultToConfig2, - transformRequest: defaultToConfig2, - transformResponse: defaultToConfig2, - paramsSerializer: defaultToConfig2, - timeout: defaultToConfig2, - timeoutMessage: defaultToConfig2, - withCredentials: defaultToConfig2, - withXSRFToken: defaultToConfig2, - adapter: defaultToConfig2, - responseType: defaultToConfig2, - xsrfCookieName: defaultToConfig2, - xsrfHeaderName: defaultToConfig2, - onUploadProgress: defaultToConfig2, - onDownloadProgress: defaultToConfig2, - decompress: defaultToConfig2, - maxContentLength: defaultToConfig2, - maxBodyLength: defaultToConfig2, - beforeRedirect: defaultToConfig2, - transport: defaultToConfig2, - httpAgent: defaultToConfig2, - httpsAgent: defaultToConfig2, - cancelToken: defaultToConfig2, - socketPath: defaultToConfig2, - responseEncoding: defaultToConfig2, - validateStatus: mergeDirectKeys, - headers: (a, b) => mergeDeepProperties(headersToObject(a), headersToObject(b), true) - }; - - utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) { - const merge = mergeMap[prop] || mergeDeepProperties; - const configValue = merge(config1[prop], config2[prop], prop); - (utils$1.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue); - }); - - return config; -} - const validators$1 = {}; // eslint-disable-next-line func-names @@ -32709,7 +33859,34 @@ class Axios { * * @returns {Promise} The Promise to be fulfilled */ - request(configOrUrl, config) { + async request(configOrUrl, config) { + try { + return await this._request(configOrUrl, config); + } catch (err) { + if (err instanceof Error) { + let dummy; + + Error.captureStackTrace ? 
Error.captureStackTrace(dummy = {}) : (dummy = new Error()); + + // slice off the Error: ... line + const stack = dummy.stack ? dummy.stack.replace(/^.+\n/, '') : ''; + try { + if (!err.stack) { + err.stack = stack; + // match without the 2 top stack lines + } else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) { + err.stack += '\n' + stack; + } + } catch (e) { + // ignore the case where "stack" is an un-writable property + } + } + + throw err; + } + } + + _request(configOrUrl, config) { /*eslint no-param-reassign:0*/ // Allow for axios('example/url'[, config]) a la fetch API if (typeof configOrUrl === 'string') { @@ -32974,6 +34151,20 @@ class CancelToken { } } + toAbortSignal() { + const controller = new AbortController(); + + const abort = (err) => { + controller.abort(err); + }; + + this.subscribe(abort); + + controller.signal.unsubscribe = () => this.unsubscribe(abort); + + return controller.signal; + } + /** * Returns an object that contains a new `CancelToken` and a function that, when called, * cancels the `CancelToken`. @@ -33256,6 +34447,9 @@ if (!!core.getInput('bearerToken')) { /** @type {axios.AxiosRequestConfig} */ const instanceConfig = { + httpsAgent: new https.Agent({ + rejectUnauthorized: core.getInput('ignoreSsl') !== 'true', + }), baseURL: core.getInput('url', { required: true }), timeout: parseInt(core.getInput('timeout') || 5000, 10), headers: { ...headers, ...customHeaders } @@ -33285,7 +34479,7 @@ if (!!core.getInput('retry')) { let retryWait = 3000 if (!!core.getInput('retryWait')) { - retry = parseInt(core.getInput('retryWait')) + retryWait = parseInt(core.getInput('retryWait')) } const data = core.getInput('data') || '{}'; @@ -33330,7 +34524,6 @@ request({ data, method, instanceConfig, files, file, actions, options }).then(re handler.forEach(h => h(response)) } }) - })(); module.exports = __webpack_exports__; diff --git a/package-lock.json b/package-lock.json index d93cf78..5608fb7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,19 +1,19 @@ { "name": "http-request-action", - "version": "1.14.1", + "version": "1.14.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "http-request-action", - "version": "1.14.1", + "version": "1.14.2", "license": "MIT", "dependencies": { "@actions/core": "^1.10.1" }, "devDependencies": { "@vercel/ncc": "^0.38.1", - "axios": "^1.6", + "axios": "^1.7", "form-data": "^4.0.0", "yargs": "^17.7.2" }, @@ -25,24 +25,27 @@ "version": "1.10.1", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", + "license": "MIT", "dependencies": { "@actions/http-client": "^2.0.1", "uuid": "^8.3.2" } }, "node_modules/@actions/http-client": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.0.tgz", - "integrity": "sha512-q+epW0trjVUUHboliPb4UF9g2msf+w61b32tAkFEwL/IwP0DQWgbCMM0Hbe3e3WXSKz5VcUXbzJQgy8Hkra/Lg==", + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", + "license": "MIT", "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "node_modules/@fastify/busboy": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.0.tgz", - "integrity": 
"sha512-+KpH+QxZU7O4675t3mnkQKcZZg56u+K/Ct2K+N2AZYNVK8kyeo/bI18tI8aPm3tvNNRyTWfj6s5tnGNlcbQRsA==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "license": "MIT", "engines": { "node": ">=14" } @@ -52,6 +55,7 @@ "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.1.tgz", "integrity": "sha512-IBBb+iI2NLu4VQn3Vwldyi2QwaXt5+hTyh58ggAMoCGE6DJmPvwL3KPBWcJl1m9LYPChBLE980Jw+CS4Wokqxw==", "dev": true, + "license": "MIT", "bin": { "ncc": "dist/ncc/cli.js" } @@ -61,6 +65,7 @@ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -70,6 +75,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, + "license": "MIT", "dependencies": { "color-convert": "^2.0.1" }, @@ -84,15 +90,17 @@ "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/axios": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", - "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", + "version": "1.7.7", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", + "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", "dev": true, + "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.0", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -102,6 +110,7 @@ "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", "dev": true, + "license": "ISC", "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", @@ -116,6 +125,7 @@ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, + "license": "MIT", "dependencies": { "color-name": "~1.1.4" }, @@ -127,13 +137,15 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", "dev": true, + "license": "MIT", "dependencies": { "delayed-stream": "~1.0.0" }, @@ -146,6 +158,7 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.4.0" } @@ 
-154,21 +167,23 @@ "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", "dev": true, "funding": [ { @@ -176,6 +191,7 @@ "url": "https://github.com/sponsors/RubenVerborgh" } ], + "license": "MIT", "engines": { "node": ">=4.0" }, @@ -190,6 +206,7 @@ "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", "dev": true, + "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -204,6 +221,7 @@ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true, + "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" } @@ -213,6 +231,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } @@ -222,6 +241,7 @@ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -231,6 +251,7 @@ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "dev": true, + "license": "MIT", "dependencies": { "mime-db": "1.52.0" }, @@ -242,13 +263,15 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -258,6 +281,7 @@ "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, + "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -272,6 +296,7 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" }, @@ -283,14 +308,16 @@ "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "license": "MIT", "engines": { "node": ">=0.6.11 <=0.7.0 || >=0.7.3" } }, "node_modules/undici": { - "version": "5.27.2", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.27.2.tgz", - "integrity": "sha512-iS857PdOEy/y3wlM3yRp+6SNQQ6xU0mmZcwRSriqk+et/cwWAtwmIGf6WkoDN2EK/AMdCO/dfXzIwi+rFMrjjQ==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "license": "MIT", "dependencies": { "@fastify/busboy": "^2.0.0" }, @@ -302,6 +329,7 @@ "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "license": "MIT", "bin": { "uuid": "dist/bin/uuid" } @@ -311,6 +339,7 @@ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", @@ -328,6 +357,7 @@ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", "dev": true, + "license": "ISC", "engines": { "node": ">=10" } @@ -337,6 +367,7 @@ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dev": true, + "license": "MIT", "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -355,6 +386,7 @@ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "dev": true, + "license": "ISC", "engines": { "node": ">=12" } diff --git a/package.json b/package.json index 1376794..7b9a36b 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ "homepage": "https://github.com/fjogeleit/http-request-action#readme", "devDependencies": { "@vercel/ncc": "^0.38.1", - "axios": "^1.6", + "axios": "^1.7", "form-data": "^4.0.0", "yargs": "^17.7.2" }, diff --git a/src/handler/mask.js b/src/handler/mask.js index 5043de6..c619c01 100644 --- a/src/handler/mask.js +++ b/src/handler/mask.js @@ -9,13 +9,7 @@ const { GithubActions } = require('../githubActions'); * @returns {(response: axios.AxiosResponse) => void} */ const createMaskHandler = (actions) => (response) => { - let data = response.data - - if (typeof data == 'object') { - data = JSON.stringify(data) - } - - 
diff --git a/src/handler/output.js b/src/handler/output.js
index 8212672..253c09c 100644
--- a/src/handler/output.js
+++ b/src/handler/output.js
@@ -9,8 +9,9 @@ const { GithubActions } = require('../githubActions');
  * @returns {(response: axios.AxiosResponse) => void}
  */
 const createOutputHandler = (actions) => (response) => {
-  actions.setOutput('response', response.data)
+  actions.setOutput('response', JSON.stringify(response.data))
   actions.setOutput('headers', response.headers)
+  actions.setOutput('status', response.status)
 }
 
-module.exports = { createOutputHandler }
\ No newline at end of file
+module.exports = { createOutputHandler }
diff --git a/src/handler/persist.js b/src/handler/persist.js
index 58ffb38..043c66c 100644
--- a/src/handler/persist.js
+++ b/src/handler/persist.js
@@ -17,8 +17,8 @@ const createPersistHandler = (filePath, actions) => (response) => {
     data = JSON.stringify(data)
   }
 
-  fs.writeFile(filePath, data, err => {
-    if (!err) {
+  fs.writeFile(filePath, data, error => {
+    if (!error) {
       actions.info(`response persisted successfully at ${filePath}`)
       return
     }
diff --git a/src/helper.js b/src/helper.js
index 5144de2..da3207c 100644
--- a/src/helper.js
+++ b/src/helper.js
@@ -55,8 +55,8 @@ const retry = async (callback, options) => {
       lastErr = err;
     }
 
-    if (i < options.retries) {
-      options.actions.warning(`#${i + 1} request failed: ${err}`);
+    if (i < options.retry) {
+      options.actions.warning(`#${i + 1} request failed: ${lastErr}`);
       await sleep(options.sleep);
     }
 
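The `src/helper.js` hunk shows only the corrected branch. The sketch below approximates the surrounding retry loop, assuming `options.retry` is the number of re-attempts, `options.sleep` the wait in milliseconds, and `options.actions` the logger; see `src/helper.js` for the authoritative implementation.

```js
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Approximate shape of the retry helper; details outside the hunk above
// are assumptions, not the exact source.
const retry = async (callback, options) => {
  let lastErr = null;

  for (let i = 0; i <= options.retry; i++) {
    try {
      return await callback();
    } catch (err) {
      lastErr = err;
    }

    if (i < options.retry) {
      // The fix above: gate on options.retry and report lastErr.
      options.actions.warning(`#${i + 1} request failed: ${lastErr}`);
      await sleep(options.sleep);
    }
  }

  throw lastErr;
};

// Usage sketch: retry = 2 yields up to three attempts in total.
// retry(() => client.request(config), { retry: 2, sleep: 3000, actions });
```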
diff --git a/src/httpClient.js b/src/httpClient.js
index a384689..3f397dc 100644
--- a/src/httpClient.js
+++ b/src/httpClient.js
@@ -94,10 +94,10 @@ const request = async({ method, instanceConfig, data, files, file, actions, opti
   } catch(error) {
     if (error.response && options.ignoredCodes.includes(error.response.status)) {
       actions.warning(`ignored status code: ${JSON.stringify({ code: error.response.status, message: error.response.data })}`)
-
-      return null
+
+      return error.response
     }
-
+
     if (!error.response && error.request && options.preventFailureOnNoResponse) {
       actions.warning(`no response received: ${JSON.stringify(error)}`);
 
@@ -194,9 +194,9 @@ const updateConfigForFile = (instanceConfig, filePath, actions) => {
  * @returns {Promise}
  */
 const contentLength = (formData) => new Promise((resolve, reject) => {
-  formData.getLength((err, length) => {
-    if (err) {
-      reject (err)
+  formData.getLength((error, length) => {
+    if (error) {
+      reject(error)
       return
     }
 
diff --git a/src/index.js b/src/index.js
index fce7bd4..6d246dc 100644
--- a/src/index.js
+++ b/src/index.js
@@ -29,6 +29,9 @@ if (!!core.getInput('bearerToken')) {
 
 /** @type {axios.AxiosRequestConfig} */
 const instanceConfig = {
+  httpsAgent: new https.Agent({
+    rejectUnauthorized: core.getInput('ignoreSsl') !== 'true',
+  }),
   baseURL: core.getInput('url', { required: true }),
   timeout: parseInt(core.getInput('timeout') || 5000, 10),
   headers: { ...headers, ...customHeaders }
@@ -58,7 +61,7 @@ if (!!core.getInput('retry')) {
 let retryWait = 3000
 
 if (!!core.getInput('retryWait')) {
-  retry = parseInt(core.getInput('retryWait'))
+  retryWait = parseInt(core.getInput('retryWait'))
 }
 
 const data = core.getInput('data') || '{}';
@@ -102,4 +105,4 @@ request({ data, method, instanceConfig, files, file, actions, options }).then(re
   if (response && typeof response == 'object') {
     handler.forEach(h => h(response))
   }
-})
+})
\ No newline at end of file
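As a closing illustration, the `ignoreSsl` input added in `src/index.js` maps to an axios `httpsAgent` with certificate verification toggled off. The snippet below is a hedged sketch: the base URL is made up, and reading `INPUT_IGNORESSL` from the environment mirrors how `core.getInput('ignoreSsl')` resolves inputs inside a workflow run.

```js
const https = require('https');
const axios = require('axios');

// 'true' disables TLS certificate checks, matching
// rejectUnauthorized: core.getInput('ignoreSsl') !== 'true' in the hunk above.
const ignoreSsl = (process.env.INPUT_IGNORESSL || '') === 'true';

const client = axios.create({
  httpsAgent: new https.Agent({ rejectUnauthorized: !ignoreSsl }),
  baseURL: 'https://self-signed.example.invalid', // hypothetical endpoint
  timeout: 5000
});

client.get('/health')
  .then((response) => console.log(response.status))
  .catch((error) => console.error(error.message));
```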