From 73a0b9c9543c5bbf0637ed4d2c7798a21949ac8a Mon Sep 17 00:00:00 2001
From: Rob Herley
Date: Tue, 6 Aug 2024 10:23:43 -0400
Subject: [PATCH 1/2] revert back to @actions/artifact 2.1.8

---
 .licenses/npm/@actions/artifact.dep.yml | Bin 1233 -> 1233 bytes
 dist/merge/index.js                     |  58 ++++++++++++------------
 dist/upload/index.js                    |  58 ++++++++++++------------
 package-lock.json                       |  14 +++---
 package.json                            |   2 +-
 5 files changed, 68 insertions(+), 64 deletions(-)

diff --git a/.licenses/npm/@actions/artifact.dep.yml b/.licenses/npm/@actions/artifact.dep.yml
index 0234e11f75857b510d9397c89c5737eceb50e6d1..8aabb38e3859de510d37f2dae8442fd3ab750ae1 100644
GIT binary patch
delta 12
Tcmcb}d69F1HlxKxouw=Q9v1{J

delta 12
Tcmcb}d69F1HlyW6ouw=Q9vlQP

diff --git a/dist/merge/index.js b/dist/merge/index.js
index f1c5238..c09bb45 100644
--- a/dist/merge/index.js
+++ b/dist/merge/index.js
@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -3050,10 +3050,6 @@ function getConcurrency() {
     return concurrency > 300 ? 300 : concurrency;
 }
 exports.getConcurrency = getConcurrency;
-function getUploadChunkTimeout() {
-    return 30000; // 30 seconds
-}
-exports.getUploadChunkTimeout = getUploadChunkTimeout;
 //# sourceMappingURL=config.js.map
 
 /***/ }),
@@ -3302,34 +3298,37 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
         let lastProgressTime = Date.now();
-        const abortController = new AbortController();
-        const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
-            return new Promise((resolve, reject) => {
-                const timer = setInterval(() => {
-                    if (Date.now() - lastProgressTime > interval) {
-                        reject(new Error('Upload progress stalled.'));
-                    }
-                }, interval);
-                abortController.signal.addEventListener('abort', () => {
-                    clearInterval(timer);
-                    resolve();
-                });
-            });
-        });
+        let timeoutId;
+        const chunkTimer = (timeout) => {
+            // clear the previous timeout
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
+            timeoutId = setTimeout(() => {
+                const now = Date.now();
+                // if there's been more than 30 seconds since the
+                // last progress event, then we'll consider the upload stalled
+                if (now - lastProgressTime > timeout) {
+                    throw new Error('Upload progress stalled.');
+                }
+            }, timeout);
+            return timeoutId;
+        };
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
+        const timeoutDuration = 300000; // 30 seconds
         core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
+            chunkTimer(timeoutDuration);
             lastProgressTime = Date.now();
         };
         const options = {
             blobHTTPHeaders: { blobContentType: 'zip' },
-            onProgress: uploadCallback,
-            abortSignal: abortController.signal
+            onProgress: uploadCallback
         };
         let sha256Hash = undefined;
         const uploadStream = new stream.PassThrough();
@@ -3338,10 +3337,9 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
         try {
-            yield Promise.race([
-                blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
-                chunkTimer((0, config_1.getUploadChunkTimeout)())
-            ]);
+            // Start the chunk timer
+            timeoutId = chunkTimer(timeoutDuration);
+            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
         }
         catch (error) {
             if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3350,7 +3348,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
             throw error;
         }
         finally {
-            abortController.abort();
+            // clear the timeout whether or not the upload completes
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
@@ -3777,6 +3778,7 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(43084));
 const core = __importStar(__nccwpck_require__(42186));
+const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(74610);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
@@ -3808,7 +3810,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
     for (const file of uploadSpecification) {
         if (file.sourcePath !== null) {
             // Add a normal file to the zip
-            zip.file(file.sourcePath, {
+            zip.append((0, fs_1.createReadStream)(file.sourcePath), {
                 name: file.destinationPath
             });
         }
@@ -136150,7 +136152,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
 
diff --git a/dist/upload/index.js b/dist/upload/index.js
index 0a8827e..b28794f 100644
--- a/dist/upload/index.js
+++ b/dist/upload/index.js
@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -3050,10 +3050,6 @@ function getConcurrency() {
     return concurrency > 300 ? 300 : concurrency;
 }
 exports.getConcurrency = getConcurrency;
-function getUploadChunkTimeout() {
-    return 30000; // 30 seconds
-}
-exports.getUploadChunkTimeout = getUploadChunkTimeout;
 //# sourceMappingURL=config.js.map
 
 /***/ }),
@@ -3302,34 +3298,37 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
        let lastProgressTime = Date.now();
-        const abortController = new AbortController();
-        const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
-            return new Promise((resolve, reject) => {
-                const timer = setInterval(() => {
-                    if (Date.now() - lastProgressTime > interval) {
-                        reject(new Error('Upload progress stalled.'));
-                    }
-                }, interval);
-                abortController.signal.addEventListener('abort', () => {
-                    clearInterval(timer);
-                    resolve();
-                });
-            });
-        });
+        let timeoutId;
+        const chunkTimer = (timeout) => {
+            // clear the previous timeout
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
+            timeoutId = setTimeout(() => {
+                const now = Date.now();
+                // if there's been more than 30 seconds since the
+                // last progress event, then we'll consider the upload stalled
+                if (now - lastProgressTime > timeout) {
+                    throw new Error('Upload progress stalled.');
+                }
+            }, timeout);
+            return timeoutId;
+        };
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
+        const timeoutDuration = 300000; // 30 seconds
         core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
+            chunkTimer(timeoutDuration);
             lastProgressTime = Date.now();
         };
         const options = {
             blobHTTPHeaders: { blobContentType: 'zip' },
-            onProgress: uploadCallback,
-            abortSignal: abortController.signal
+            onProgress: uploadCallback
         };
         let sha256Hash = undefined;
         const uploadStream = new stream.PassThrough();
@@ -3338,10 +3337,9 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
         try {
-            yield Promise.race([
-                blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
-                chunkTimer((0, config_1.getUploadChunkTimeout)())
-            ]);
+            // Start the chunk timer
+            timeoutId = chunkTimer(timeoutDuration);
+            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
         }
         catch (error) {
             if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3350,7 +3348,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
             throw error;
         }
         finally {
-            abortController.abort();
+            // clear the timeout whether or not the upload completes
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
@@ -3777,6 +3778,7 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(43084));
 const core = __importStar(__nccwpck_require__(42186));
+const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(74610);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
@@ -3808,7 +3810,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
     for (const file of uploadSpecification) {
         if (file.sourcePath !== null) {
             // Add a normal file to the zip
-            zip.file(file.sourcePath, {
+            zip.append((0, fs_1.createReadStream)(file.sourcePath), {
                 name: file.destinationPath
             });
         }
@@ -136160,7 +136162,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
 
diff --git a/package-lock.json b/package-lock.json
index a6c4a56..3fad48e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,7 @@
       "version": "4.3.5",
       "license": "MIT",
       "dependencies": {
-        "@actions/artifact": "^2.1.9",
+        "@actions/artifact": "2.1.8",
         "@actions/core": "^1.10.1",
         "@actions/github": "^6.0.0",
         "@actions/glob": "^0.3.0",
@@ -34,9 +34,9 @@
       }
     },
     "node_modules/@actions/artifact": {
-      "version": "2.1.9",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.9.tgz",
-      "integrity": "sha512-f9JXC9JrwramDRJHZiIOKJo3PGw/V3riYegLj5kHi8YEJ2k72TNUd1zDW1BG50ILnzJ0cp1faDVJ2pSdolRQfg==",
+      "version": "2.1.8",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
+      "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
@@ -7902,9 +7902,9 @@
     },
     "dependencies": {
       "@actions/artifact": {
-        "version": "2.1.9",
-        "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.9.tgz",
-        "integrity": "sha512-f9JXC9JrwramDRJHZiIOKJo3PGw/V3riYegLj5kHi8YEJ2k72TNUd1zDW1BG50ILnzJ0cp1faDVJ2pSdolRQfg==",
+        "version": "2.1.8",
+        "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
+        "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
         "requires": {
           "@actions/core": "^1.10.0",
           "@actions/github": "^5.1.1",
diff --git a/package.json b/package.json
index 630afd1..ac158bc 100644
--- a/package.json
+++ b/package.json
@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
   "dependencies": {
-    "@actions/artifact": "^2.1.9",
+    "@actions/artifact": "2.1.8",
     "@actions/core": "^1.10.1",
     "@actions/github": "^6.0.0",
     "@actions/glob": "^0.3.0",
"4.3.6", "description": "Upload an Actions Artifact in a workflow run", "main": "dist/upload/index.js", "scripts": {