mirror of https://github.com/actions/download-artifact.git synced 2025-12-06 17:57:49 +01:00

Compare commits


4 Commits

SHA1        Author          Message                                                              Date
18323e2bed  Rob Herley      ncc                                                                  2023-10-26 23:16:14 -04:00
59c8579f60  Konrad Pabjan   Bump to node20 runtime                                               2023-10-26 22:34:45 -04:00
ac384941b4  Konrad Pabjan   Fix for all downloads                                                2023-10-26 22:29:16 -04:00
54ed8ca4ec  Konrad Pabjan   Fix extra root directory if downloading single artifact on v4-beta  2023-10-26 21:37:41 -04:00
3 changed files with 29 additions and 36 deletions

action.yml

@@ -25,5 +25,5 @@ outputs:
   download-path:
     description: 'Path of artifact download'
 runs:
-  using: 'node16'
+  using: 'node20'
   main: 'dist/index.js'

dist/index.js (vendored)

@@ -7719,8 +7719,8 @@ class ArtifactHttpClient {
  // JSON generated client.
  request(service, method, contentType, data) {
  return __awaiter(this, void 0, void 0, function* () {
- const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
- (0, core_1.debug)(`Requesting: ${url}`);
+ const url = `${this.baseUrl}/twirp/${service}/${method}`;
+ (0, core_1.debug)(`Requesting ${url}`);
  const headers = {
  'Content-Type': contentType
  };
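
A note on the two URL constructions above: new URL(path, base) resolves against the base's origin, so a leading-slash path drops any path segment the base already carries, while template-string concatenation keeps the base verbatim, trailing slash included. A minimal TypeScript sketch of the difference, using a hypothetical results URL:

// Sketch only: how the two constructions above differ for the same hypothetical inputs.
const baseUrl = 'https://results.example.com/sub/' // hypothetical base with a path and trailing slash

// URL-based resolution: a leading '/' path replaces '/sub/' from the base.
const viaUrl = new URL(`/twirp/Svc/Method`, baseUrl).href
// -> 'https://results.example.com/twirp/Svc/Method'

// Template-string concatenation keeps the base verbatim, trailing slash and all.
const viaTemplate = `${baseUrl}/twirp/Svc/Method`
// -> 'https://results.example.com/sub//twirp/Svc/Method'

console.log(viaUrl, viaTemplate)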
@@ -7813,16 +7813,12 @@ exports.createArtifactTwirpClient = createArtifactTwirpClient;
  /***/ }),
  /***/ 95042:
- /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+ /***/ ((__unused_webpack_module, exports) => {
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
  Object.defineProperty(exports, "__esModule", ({ value: true }));
- exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
- const os_1 = __importDefault(__nccwpck_require__(22037));
+ exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
  // Used for controlling the highWaterMark value of the zip that is being streamed
  // The same value is used as the chunk size that is use during upload to blob storage
  function getUploadChunkSize() {
@@ -7842,7 +7838,7 @@ function getResultsServiceUrl() {
  if (!resultsUrl) {
  throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
  }
- return new URL(resultsUrl).origin;
+ return resultsUrl;
  }
  exports.getResultsServiceUrl = getResultsServiceUrl;
  function isGhes() {
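
For the getResultsServiceUrl change above: new URL(resultsUrl).origin reduces ACTIONS_RESULTS_URL to scheme plus host, dropping any path or trailing slash, whereas returning the raw value preserves it. A short sketch with a hypothetical value:

// Sketch: effect of .origin on a hypothetical ACTIONS_RESULTS_URL value.
const resultsUrl = 'https://results.example.com/prefix/'
console.log(new URL(resultsUrl).origin) // 'https://results.example.com'
console.log(resultsUrl)                 // 'https://results.example.com/prefix/'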
@@ -7858,18 +7854,6 @@ function getGitHubWorkspaceDir() {
  return ghWorkspaceDir;
  }
  exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
- // Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
- // If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
- // Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
- function getConcurrency() {
-     const numCPUs = os_1.default.cpus().length;
-     if (numCPUs <= 4) {
-         return 32;
-     }
-     const concurrency = 16 * numCPUs;
-     return concurrency > 300 ? 300 : concurrency;
- }
- exports.getConcurrency = getConcurrency;
  //# sourceMappingURL=config.js.map
  /***/ }),
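
The helper removed above mirrors azcopy's tuning: 32 when the machine has at most 4 CPUs, otherwise 16 per CPU, capped at 300. For reference, the same heuristic as a standalone TypeScript sketch (nothing in this diff calls it any more):

import * as os from 'os'

// azcopy-style concurrency heuristic, as in the removed getConcurrency above:
// 32 for machines with 4 or fewer CPUs, otherwise 16 per CPU, capped at 300.
function getConcurrency(): number {
  const numCPUs = os.cpus().length
  if (numCPUs <= 4) {
    return 32
  }
  return Math.min(16 * numCPUs, 300)
}

console.log(`concurrency: ${getConcurrency()}`)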
@@ -8012,11 +7996,11 @@ const stream = __importStar(__nccwpck_require__(12781));
  function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
  return __awaiter(this, void 0, void 0, function* () {
  let uploadByteCount = 0;
- const maxConcurrency = (0, config_1.getConcurrency)();
+ const maxBuffers = 5;
  const bufferSize = (0, config_1.getUploadChunkSize)();
  const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
  const blockBlobClient = blobClient.getBlockBlobClient();
- core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
+ core.debug(`Uploading artifact zip to blob storage with maxBuffers: ${maxBuffers}, bufferSize: ${bufferSize}`);
  const uploadCallback = (progress) => {
  core.info(`Uploaded bytes ${progress.loadedBytes}`);
  uploadByteCount = progress.loadedBytes;
@@ -8032,7 +8016,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
  zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
  try {
  core.info('Beginning upload of artifact content to blob storage');
- yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+ yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxBuffers, options);
  core.info('Finished uploading artifact content to blob storage!');
  hashStream.end();
  sha256Hash = hashStream.read();
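
After the change above, uploadStream runs with a fixed maxBuffers of 5 instead of the CPU-scaled concurrency, so buffered upload memory is bounded by roughly bufferSize times maxBuffers. A rough sketch of that bound, assuming getUploadChunkSize returns an 8 MiB chunk (an assumption, not shown in this diff):

// Sketch: approximate upper bound on memory buffered by uploadStream after this change.
const bufferSize = 8 * 1024 * 1024 // assumed chunk size in bytes (getUploadChunkSize not shown here)
const maxBuffers = 5               // fixed value introduced above

const approxBytesInFlight = bufferSize * maxBuffers
console.log(`~${approxBytesInFlight / (1024 * 1024)} MiB buffered at most`) // ~40 MiB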
@@ -8256,7 +8240,7 @@ function uploadArtifact(name, files, rootDirectory, options) {
  success: false
  };
  }
- const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
+ const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification);
  // get the IDs needed for the artifact creation
  const backendIds = (0, util_1.getBackendIdsFromToken)();
  if (!backendIds.workflowRunBackendId || !backendIds.workflowJobRunBackendId) {
@@ -8487,13 +8471,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
  });
  };
  Object.defineProperty(exports, "__esModule", ({ value: true }));
- exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
+ exports.createZipUploadStream = exports.ZipUploadStream = void 0;
  const stream = __importStar(__nccwpck_require__(12781));
  const archiver = __importStar(__nccwpck_require__(71160));
  const core = __importStar(__nccwpck_require__(66526));
  const fs_1 = __nccwpck_require__(57147);
  const config_1 = __nccwpck_require__(95042);
- exports.DEFAULT_COMPRESSION_LEVEL = 6;
  // Custom stream transformer so we can set the highWaterMark property
  // See https://github.com/nodejs/node/issues/8855
  class ZipUploadStream extends stream.Transform {
@@ -8508,12 +8491,14 @@ class ZipUploadStream extends stream.Transform {
  }
  }
  exports.ZipUploadStream = ZipUploadStream;
- function createZipUploadStream(uploadSpecification, compressionLevel = exports.DEFAULT_COMPRESSION_LEVEL) {
+ function createZipUploadStream(uploadSpecification) {
  return __awaiter(this, void 0, void 0, function* () {
- core.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
  const zip = archiver.create('zip', {
  highWaterMark: (0, config_1.getUploadChunkSize)(),
- zlib: { level: compressionLevel }
+ zlib: { level: 9 } // Sets the compression level.
+ // Available options are 0-9
+ // 0 => no compression
+ // 1 => fastest with low compression
+ // 9 => highest compression ratio but the slowest
  });
  // register callbacks for various events during the zip lifecycle
  zip.on('error', zipErrorCallback);
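
On the zip change above: archiver hands the zlib.level option straight to Node's zlib, so hard-coding level 9 trades CPU time for the smallest archive, where the removed code let callers pick a level and defaulted to 6. A minimal sketch of creating the stream with an explicit level, assuming the archiver package is available:

import * as archiver from 'archiver'

// Sketch: an archiver zip stream with an explicit compression level.
// 0 stores entries uncompressed, 1 favours speed, 9 favours ratio (matching the comments above).
const zip = archiver.create('zip', {
  zlib: {level: 9}
})

zip.on('warning', err => console.warn(err.message))
zip.on('error', err => {
  throw err
})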
@@ -118721,6 +118706,7 @@ function run() {
  if (inputs.path.startsWith(`~`)) {
  inputs.path = inputs.path.replace('~', os.homedir());
  }
+ const isSingleArtifactDownload = !!inputs.name;
  const resolvedPath = path.resolve(inputs.path);
  core.debug(`Resolved path is ${resolvedPath}`);
  const [owner, repo] = inputs.repository.split('/');
@@ -118729,7 +118715,8 @@ function run() {
  }
  const artifactClient = artifact.create();
  let artifacts = [];
- if (inputs.name) {
+ if (isSingleArtifactDownload) {
+ core.info(`Downloading single artifact`);
  const { artifact: targetArtifact } = yield artifactClient.getArtifact(inputs.name, inputs.runID, owner, repo, inputs.token);
  if (!targetArtifact) {
  throw new Error(`Artifact '${inputs.name}' not found`);
@@ -118738,6 +118725,7 @@ function run() {
  artifacts = [targetArtifact];
  }
  else {
+ core.info(`No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`);
  const listArtifactResponse = yield artifactClient.listArtifacts(inputs.runID, owner, repo, inputs.token);
  if (listArtifactResponse.artifacts.length === 0) {
  throw new Error(`No artifacts found for run '${inputs.runID}' in '${inputs.repository}'`);
@@ -118746,7 +118734,7 @@ function run() {
  artifacts = listArtifactResponse.artifacts;
  }
  const downloadPromises = artifacts.map(artifact => artifactClient.downloadArtifact(artifact.id, owner, repo, inputs.token, {
- path: path.join(resolvedPath, artifact.name)
+ path: isSingleArtifactDownload ? resolvedPath : path.join(resolvedPath, artifact.name)
  }));
  const chunkedPromises = exports.chunk(downloadPromises, PARALLEL_DOWNLOADS);
  for (const chunk of chunkedPromises) {

src/download-artifact.ts

@@ -30,6 +30,7 @@ async function run(): Promise<void> {
  inputs.path = inputs.path.replace('~', os.homedir())
  }
+ const isSingleArtifactDownload: boolean = !!inputs.name
  const resolvedPath = path.resolve(inputs.path)
  core.debug(`Resolved path is ${resolvedPath}`)
@@ -43,7 +44,9 @@ async function run(): Promise<void> {
  const artifactClient = artifact.create()
  let artifacts: artifact.Artifact[] = []
- if (inputs.name) {
+ if (isSingleArtifactDownload) {
+ core.info(`Downloading single artifact`)
  const {artifact: targetArtifact} = await artifactClient.getArtifact(
  inputs.name,
  inputs.runID,
@@ -62,6 +65,8 @@ async function run(): Promise<void> {
  artifacts = [targetArtifact]
  } else {
+ core.info(`No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`)
  const listArtifactResponse = await artifactClient.listArtifacts(
  inputs.runID,
  owner,
@@ -81,7 +86,7 @@ async function run(): Promise<void> {
  const downloadPromises = artifacts.map(artifact =>
  artifactClient.downloadArtifact(artifact.id, owner, repo, inputs.token, {
- path: path.join(resolvedPath, artifact.name)
+ path: isSingleArtifactDownload ? resolvedPath : path.join(resolvedPath, artifact.name)
  })
  )
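
Net effect of the change above (mirrored in both dist/index.js and src/download-artifact.ts): a named, single-artifact download extracts straight into the resolved path with no extra root directory, while downloading all artifacts still creates one sub-directory per artifact. A small sketch of that path selection with hypothetical inputs:

import * as path from 'path'

// Sketch of the path selection introduced above, with hypothetical inputs.
function targetPath(
  resolvedPath: string,
  artifactName: string,
  isSingleArtifactDownload: boolean
): string {
  // Single download: no extra root directory, extract straight into the requested path.
  // Download-all: keep one sub-directory per artifact so contents do not collide.
  return isSingleArtifactDownload
    ? resolvedPath
    : path.join(resolvedPath, artifactName)
}

console.log(targetPath('/home/runner/out', 'build-logs', true))  // /home/runner/out
console.log(targetPath('/home/runner/out', 'build-logs', false)) // /home/runner/out/build-logs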