Skip to content

Commit 89ef406

Browse files
authored
Merge pull request #588 from actions/robherley/4.3.5
Bump @actions/artifact to v2.1.9
2 parents 0b2256b + 23d796d commit 89ef406

File tree

5 files changed

+68
-72
lines changed

5 files changed

+68
-72
lines changed

.licenses/npm/@actions/artifact.dep.yml

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

dist/merge/index.js

Lines changed: 28 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
29972997
return (mod && mod.__esModule) ? mod : { "default": mod };
29982998
};
29992999
Object.defineProperty(exports, "__esModule", ({ value: true }));
3000-
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
3000+
exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
30013001
const os_1 = __importDefault(__nccwpck_require__(22037));
30023002
// Used for controlling the highWaterMark value of the zip that is being streamed
30033003
// The same value is used as the chunk size that is used during upload to blob storage
@@ -3050,6 +3050,10 @@ function getConcurrency() {
30503050
return concurrency > 300 ? 300 : concurrency;
30513051
}
30523052
exports.getConcurrency = getConcurrency;
3053+
function getUploadChunkTimeout() {
3054+
return 30000; // 30 seconds
3055+
}
3056+
exports.getUploadChunkTimeout = getUploadChunkTimeout;
30533057
//# sourceMappingURL=config.js.map
30543058

30553059
/***/ }),
@@ -3298,37 +3302,34 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
32983302
return __awaiter(this, void 0, void 0, function* () {
32993303
let uploadByteCount = 0;
33003304
let lastProgressTime = Date.now();
3301-
let timeoutId;
3302-
const chunkTimer = (timeout) => {
3303-
// clear the previous timeout
3304-
if (timeoutId) {
3305-
clearTimeout(timeoutId);
3306-
}
3307-
timeoutId = setTimeout(() => {
3308-
const now = Date.now();
3309-
// if there's been more than 30 seconds since the
3310-
// last progress event, then we'll consider the upload stalled
3311-
if (now - lastProgressTime > timeout) {
3312-
throw new Error('Upload progress stalled.');
3313-
}
3314-
}, timeout);
3315-
return timeoutId;
3316-
};
3305+
const abortController = new AbortController();
3306+
const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
3307+
return new Promise((resolve, reject) => {
3308+
const timer = setInterval(() => {
3309+
if (Date.now() - lastProgressTime > interval) {
3310+
reject(new Error('Upload progress stalled.'));
3311+
}
3312+
}, interval);
3313+
abortController.signal.addEventListener('abort', () => {
3314+
clearInterval(timer);
3315+
resolve();
3316+
});
3317+
});
3318+
});
33173319
const maxConcurrency = (0, config_1.getConcurrency)();
33183320
const bufferSize = (0, config_1.getUploadChunkSize)();
33193321
const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
33203322
const blockBlobClient = blobClient.getBlockBlobClient();
3321-
const timeoutDuration = 300000; // 5 minutes (300,000 ms — the original comment incorrectly said 30 seconds)
33223323
core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
33233324
const uploadCallback = (progress) => {
33243325
core.info(`Uploaded bytes ${progress.loadedBytes}`);
33253326
uploadByteCount = progress.loadedBytes;
3326-
chunkTimer(timeoutDuration);
33273327
lastProgressTime = Date.now();
33283328
};
33293329
const options = {
33303330
blobHTTPHeaders: { blobContentType: 'zip' },
3331-
onProgress: uploadCallback
3331+
onProgress: uploadCallback,
3332+
abortSignal: abortController.signal
33323333
};
33333334
let sha256Hash = undefined;
33343335
const uploadStream = new stream.PassThrough();
@@ -3337,9 +3338,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
33373338
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
33383339
core.info('Beginning upload of artifact content to blob storage');
33393340
try {
3340-
// Start the chunk timer
3341-
timeoutId = chunkTimer(timeoutDuration);
3342-
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
3341+
yield Promise.race([
3342+
blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
3343+
chunkTimer((0, config_1.getUploadChunkTimeout)())
3344+
]);
33433345
}
33443346
catch (error) {
33453347
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3348,10 +3350,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
33483350
throw error;
33493351
}
33503352
finally {
3351-
// clear the timeout whether or not the upload completes
3352-
if (timeoutId) {
3353-
clearTimeout(timeoutId);
3354-
}
3353+
abortController.abort();
33553354
}
33563355
core.info('Finished uploading artifact content to blob storage!');
33573356
hashStream.end();
@@ -3778,7 +3777,6 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
37783777
const stream = __importStar(__nccwpck_require__(12781));
37793778
const archiver = __importStar(__nccwpck_require__(43084));
37803779
const core = __importStar(__nccwpck_require__(42186));
3781-
const fs_1 = __nccwpck_require__(57147);
37823780
const config_1 = __nccwpck_require__(74610);
37833781
exports.DEFAULT_COMPRESSION_LEVEL = 6;
37843782
// Custom stream transformer so we can set the highWaterMark property
@@ -3810,7 +3808,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
38103808
for (const file of uploadSpecification) {
38113809
if (file.sourcePath !== null) {
38123810
// Add a normal file to the zip
3813-
zip.append((0, fs_1.createReadStream)(file.sourcePath), {
3811+
zip.file(file.sourcePath, {
38143812
name: file.destinationPath
38153813
});
38163814
}
@@ -136152,7 +136150,7 @@ module.exports = index;
136152136150
/***/ ((module) => {
136153136151

136154136152
"use strict";
136155-
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
136153+
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
136156136154

136157136155
/***/ }),
136158136156

dist/upload/index.js

Lines changed: 28 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
29972997
return (mod && mod.__esModule) ? mod : { "default": mod };
29982998
};
29992999
Object.defineProperty(exports, "__esModule", ({ value: true }));
3000-
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
3000+
exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
30013001
const os_1 = __importDefault(__nccwpck_require__(22037));
30023002
// Used for controlling the highWaterMark value of the zip that is being streamed
30033003
// The same value is used as the chunk size that is used during upload to blob storage
@@ -3050,6 +3050,10 @@ function getConcurrency() {
30503050
return concurrency > 300 ? 300 : concurrency;
30513051
}
30523052
exports.getConcurrency = getConcurrency;
3053+
function getUploadChunkTimeout() {
3054+
return 30000; // 30 seconds
3055+
}
3056+
exports.getUploadChunkTimeout = getUploadChunkTimeout;
30533057
//# sourceMappingURL=config.js.map
30543058

30553059
/***/ }),
@@ -3298,37 +3302,34 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
32983302
return __awaiter(this, void 0, void 0, function* () {
32993303
let uploadByteCount = 0;
33003304
let lastProgressTime = Date.now();
3301-
let timeoutId;
3302-
const chunkTimer = (timeout) => {
3303-
// clear the previous timeout
3304-
if (timeoutId) {
3305-
clearTimeout(timeoutId);
3306-
}
3307-
timeoutId = setTimeout(() => {
3308-
const now = Date.now();
3309-
// if there's been more than 30 seconds since the
3310-
// last progress event, then we'll consider the upload stalled
3311-
if (now - lastProgressTime > timeout) {
3312-
throw new Error('Upload progress stalled.');
3313-
}
3314-
}, timeout);
3315-
return timeoutId;
3316-
};
3305+
const abortController = new AbortController();
3306+
const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
3307+
return new Promise((resolve, reject) => {
3308+
const timer = setInterval(() => {
3309+
if (Date.now() - lastProgressTime > interval) {
3310+
reject(new Error('Upload progress stalled.'));
3311+
}
3312+
}, interval);
3313+
abortController.signal.addEventListener('abort', () => {
3314+
clearInterval(timer);
3315+
resolve();
3316+
});
3317+
});
3318+
});
33173319
const maxConcurrency = (0, config_1.getConcurrency)();
33183320
const bufferSize = (0, config_1.getUploadChunkSize)();
33193321
const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
33203322
const blockBlobClient = blobClient.getBlockBlobClient();
3321-
const timeoutDuration = 300000; // 5 minutes (300,000 ms — the original comment incorrectly said 30 seconds)
33223323
core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
33233324
const uploadCallback = (progress) => {
33243325
core.info(`Uploaded bytes ${progress.loadedBytes}`);
33253326
uploadByteCount = progress.loadedBytes;
3326-
chunkTimer(timeoutDuration);
33273327
lastProgressTime = Date.now();
33283328
};
33293329
const options = {
33303330
blobHTTPHeaders: { blobContentType: 'zip' },
3331-
onProgress: uploadCallback
3331+
onProgress: uploadCallback,
3332+
abortSignal: abortController.signal
33323333
};
33333334
let sha256Hash = undefined;
33343335
const uploadStream = new stream.PassThrough();
@@ -3337,9 +3338,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
33373338
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
33383339
core.info('Beginning upload of artifact content to blob storage');
33393340
try {
3340-
// Start the chunk timer
3341-
timeoutId = chunkTimer(timeoutDuration);
3342-
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
3341+
yield Promise.race([
3342+
blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
3343+
chunkTimer((0, config_1.getUploadChunkTimeout)())
3344+
]);
33433345
}
33443346
catch (error) {
33453347
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3348,10 +3350,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
33483350
throw error;
33493351
}
33503352
finally {
3351-
// clear the timeout whether or not the upload completes
3352-
if (timeoutId) {
3353-
clearTimeout(timeoutId);
3354-
}
3353+
abortController.abort();
33553354
}
33563355
core.info('Finished uploading artifact content to blob storage!');
33573356
hashStream.end();
@@ -3778,7 +3777,6 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
37783777
const stream = __importStar(__nccwpck_require__(12781));
37793778
const archiver = __importStar(__nccwpck_require__(43084));
37803779
const core = __importStar(__nccwpck_require__(42186));
3781-
const fs_1 = __nccwpck_require__(57147);
37823780
const config_1 = __nccwpck_require__(74610);
37833781
exports.DEFAULT_COMPRESSION_LEVEL = 6;
37843782
// Custom stream transformer so we can set the highWaterMark property
@@ -3810,7 +3808,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
38103808
for (const file of uploadSpecification) {
38113809
if (file.sourcePath !== null) {
38123810
// Add a normal file to the zip
3813-
zip.append((0, fs_1.createReadStream)(file.sourcePath), {
3811+
zip.file(file.sourcePath, {
38143812
name: file.destinationPath
38153813
});
38163814
}
@@ -136162,7 +136160,7 @@ module.exports = index;
136162136160
/***/ ((module) => {
136163136161

136164136162
"use strict";
136165-
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
136163+
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
136166136164

136167136165
/***/ }),
136168136166

package-lock.json

Lines changed: 9 additions & 9 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)
pFad - Phonifier reborn

pFad — The Proxy. © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy