Mirror of https://git.mirrors.martin98.com/https://github.com/actions/toolkit (synced 2026-05-04 18:08:05 +08:00)
Updates to @actions/artifact package (#367)

* GZip implementation
* Optimizations and cleanup
* Update tests
* More test updates
* Update packages/artifact/src/internal-utils.ts
  Co-Authored-By: Josh Gross <joshmgross@github.com>
* Clarification around Upload Paths
* Refactor to make http clients classes
* GZip fixes
* Documentation around compression
* More detailed status information during large uploads
* Pretty format
* Percentage updates without rounding
* Fix edge cases with formatting numbers
* Update packages/artifact/src/internal-utils.ts
  Co-Authored-By: Josh Gross <joshmgross@github.com>
* Cleanup
* Small reorg with status reporter
* PR Feedback
* Cleanup + Simplification
* Test Cleanup
* Mock updates
* More cleanup
* Format fixes
* Overhaul to the http-manager
* Fix tests
* Promisify stats
* Documentation around implementation
* Improvements to documentation
* PR Feedback
* Remove Downloading multiple artifacts concurrently

Co-authored-by: Josh Gross <joshmgross@github.com>
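For context, a minimal sketch of how the reworked package is typically consumed from an action. The create() entry point is assumed here (it is not shown in this diff); the uploadArtifact/downloadArtifact signatures and the continueOnError option come from the code below.

import * as artifact from '@actions/artifact'

async function run(): Promise<void> {
  const client = artifact.create()

  // upload two files rooted at the current directory; keep going if one fails
  const uploadResponse = await client.uploadArtifact(
    'my-artifact',
    ['results/summary.txt', 'results/details.txt'],
    '.',
    {continueOnError: true}
  )
  console.log(`Uploaded ${uploadResponse.size} bytes`)

  // download the artifact again, into ./restored
  const downloadResponse = await client.downloadArtifact('my-artifact', 'restored')
  console.log(`Saved to ${downloadResponse.downloadPath}`)
}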
@@ -1,5 +1,16 @@
import {ArtifactClient, DefaultArtifactClient} from './internal-artifact-client'
export {ArtifactClient}
import {UploadOptions} from './internal/upload-options'
import {UploadResponse} from './internal/upload-response'
import {DownloadOptions} from './internal/download-options'
import {DownloadResponse} from './internal/download-response'
import {ArtifactClient, DefaultArtifactClient} from './internal/artifact-client'

export {
  ArtifactClient,
  UploadResponse,
  UploadOptions,
  DownloadResponse,
  DownloadOptions
}

/**
 * Constructs an ArtifactClient
@@ -1,133 +0,0 @@
import * as fs from 'fs'
import {
  createHttpClient,
  getArtifactUrl,
  getRequestOptions,
  isSuccessStatusCode,
  isRetryableStatusCode
} from './internal-utils'
import {URL} from 'url'
import {
  ListArtifactsResponse,
  QueryArtifactResponse
} from './internal-contracts'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {HttpClient} from '@actions/http-client'
import {DownloadItem} from './internal-download-specification'
import {getDownloadFileConcurrency} from './internal-config-variables'
import {warning} from '@actions/core'

/**
 * Gets a list of all artifacts that are in a specific container
 */
export async function listArtifacts(): Promise<ListArtifactsResponse> {
  const artifactUrl = getArtifactUrl()
  const client = createHttpClient()
  const requestOptions = getRequestOptions('application/json')

  const rawResponse = await client.get(artifactUrl, requestOptions)
  const body: string = await rawResponse.readBody()
  if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
    return JSON.parse(body)
  }
  // eslint-disable-next-line no-console
  console.log(rawResponse)
  throw new Error(`Unable to list artifacts for the run`)
}

/**
 * Fetches a set of container items that describe the contents of an artifact
 * @param artifactName the name of the artifact
 * @param containerUrl the artifact container URL for the run
 */
export async function getContainerItems(
  artifactName: string,
  containerUrl: string
): Promise<QueryArtifactResponse> {
  // The itemPath search parameter controls which containers will be returned
  const resourceUrl = new URL(containerUrl)
  resourceUrl.searchParams.append('itemPath', artifactName)

  const client = createHttpClient()
  const rawResponse = await client.get(resourceUrl.toString())
  const body: string = await rawResponse.readBody()
  if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
    return JSON.parse(body)
  }
  // eslint-disable-next-line no-console
  console.log(rawResponse)
  throw new Error(`Unable to get ContainersItems from ${resourceUrl}`)
}

/**
 * Concurrently downloads all the files that are part of an artifact
 * @param downloadItems information about what items to download and where to save them
 */
export async function downloadSingleArtifact(
  downloadItems: DownloadItem[]
): Promise<void> {
  const DOWNLOAD_CONCURRENCY = getDownloadFileConcurrency()
  // Limit the number of files downloaded at a single time
  const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]
  const client = createHttpClient()
  let downloadedFiles = 0
  await Promise.all(
    parallelDownloads.map(async () => {
      while (downloadedFiles < downloadItems.length) {
        const currentFileToDownload = downloadItems[downloadedFiles]
        downloadedFiles += 1
        await downloadIndividualFile(
          client,
          currentFileToDownload.sourceLocation,
          currentFileToDownload.targetPath
        )
      }
    })
  )
}

/**
 * Downloads an individual file
 * @param client http client that will be used to make the necessary calls
 * @param artifactLocation origin location where a file will be downloaded from
 * @param downloadPath destination location for the file being downloaded
 */
export async function downloadIndividualFile(
  client: HttpClient,
  artifactLocation: string,
  downloadPath: string
): Promise<void> {
  const stream = fs.createWriteStream(downloadPath)
  const response = await client.get(artifactLocation)
  if (isSuccessStatusCode(response.message.statusCode)) {
    await pipeResponseToStream(response, stream)
  } else if (isRetryableStatusCode(response.message.statusCode)) {
    warning(
      `Received http ${response.message.statusCode} during file download, will retry ${artifactLocation} after 10 seconds`
    )
    await new Promise(resolve => setTimeout(resolve, 10000))
    const retryResponse = await client.get(artifactLocation)
    if (isSuccessStatusCode(retryResponse.message.statusCode)) {
      await pipeResponseToStream(response, stream)
    } else {
      // eslint-disable-next-line no-console
      console.log(retryResponse)
      throw new Error(`Unable to download ${artifactLocation}`)
    }
  } else {
    // eslint-disable-next-line no-console
    console.log(response)
    throw new Error(`Unable to download ${artifactLocation}`)
  }
}

export async function pipeResponseToStream(
  response: IHttpClientResponse,
  stream: NodeJS.WritableStream
): Promise<void> {
  return new Promise(resolve => {
    response.message.pipe(stream).on('close', () => {
      resolve()
    })
  })
}
@@ -1,322 +0,0 @@
import {debug, warning, info} from '@actions/core'
import {HttpClientResponse, HttpClient} from '@actions/http-client/index'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {
  ArtifactResponse,
  CreateArtifactParameters,
  PatchArtifactSize,
  UploadResults
} from './internal-contracts'
import * as fs from 'fs'
import {UploadSpecification} from './internal-upload-specification'
import {UploadOptions} from './internal-upload-options'
import {URL} from 'url'
import {
  createHttpClient,
  getArtifactUrl,
  getContentRange,
  getRequestOptions,
  isRetryableStatusCode,
  isSuccessStatusCode
} from './internal-utils'
import {
  getUploadChunkConcurrency,
  getUploadChunkSize,
  getUploadFileConcurrency
} from './internal-config-variables'

/**
 * Creates a file container for the new artifact in the remote blob storage/file service
 * @param {string} artifactName Name of the artifact being created
 * @returns The response from the Artifact Service if the file container was successfully created
 */
export async function createArtifactInFileContainer(
  artifactName: string
): Promise<ArtifactResponse> {
  const parameters: CreateArtifactParameters = {
    Type: 'actions_storage',
    Name: artifactName
  }
  const data: string = JSON.stringify(parameters, null, 2)
  const artifactUrl = getArtifactUrl()
  const client = createHttpClient()
  const requestOptions = getRequestOptions('application/json')

  const rawResponse = await client.post(artifactUrl, data, requestOptions)
  const body: string = await rawResponse.readBody()

  if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
    return JSON.parse(body)
  } else {
    // eslint-disable-next-line no-console
    console.log(rawResponse)
    throw new Error(
      `Unable to create a container for the artifact ${artifactName}`
    )
  }
}

/**
 * Concurrently upload all of the files in chunks
 * @param {string} uploadUrl Base Url for the artifact that was created
 * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
 * @returns The size of all the files uploaded in bytes
 */
export async function uploadArtifactToFileContainer(
  uploadUrl: string,
  filesToUpload: UploadSpecification[],
  options?: UploadOptions
): Promise<UploadResults> {
  const client = createHttpClient()
  const FILE_CONCURRENCY = getUploadFileConcurrency()
  const CHUNK_CONCURRENCY = getUploadChunkConcurrency()
  const MAX_CHUNK_SIZE = getUploadChunkSize()
  debug(
    `File Concurrency: ${FILE_CONCURRENCY}, Chunk Concurrency: ${CHUNK_CONCURRENCY} and Chunk Size: ${MAX_CHUNK_SIZE}`
  )

  const parameters: UploadFileParameters[] = []

  // by default, file uploads will continue if there is an error unless specified differently in the options
  let continueOnError = true
  if (options) {
    if (options.continueOnError === false) {
      continueOnError = false
    }
  }

  // Prepare the necessary parameters to upload all the files
  for (const file of filesToUpload) {
    const resourceUrl = new URL(uploadUrl)
    resourceUrl.searchParams.append('itemPath', file.uploadFilePath)
    parameters.push({
      file: file.absoluteFilePath,
      resourceUrl: resourceUrl.toString(),
      restClient: client,
      concurrency: CHUNK_CONCURRENCY,
      maxChunkSize: MAX_CHUNK_SIZE,
      continueOnError
    })
  }

  const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]
  const failedItemsToReport: string[] = []
  let uploadedFiles = 0
  let fileSizes = 0
  let abortPendingFileUploads = false

  // Only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
  await Promise.all(
    parallelUploads.map(async () => {
      while (uploadedFiles < filesToUpload.length) {
        const currentFileParameters = parameters[uploadedFiles]
        uploadedFiles += 1
        if (abortPendingFileUploads) {
          failedItemsToReport.push(currentFileParameters.file)
          continue
        }

        const uploadFileResult = await uploadFileAsync(currentFileParameters)
        fileSizes += uploadFileResult.successfulUploadSize
        if (uploadFileResult.isSuccess === false) {
          failedItemsToReport.push(currentFileParameters.file)
          if (!continueOnError) {
            // Existing uploads will be able to finish however all pending uploads will fail fast
            abortPendingFileUploads = true
          }
        }
      }
    })
  )

  info(`Total size of all the files uploaded is ${fileSizes} bytes`)
  return {
    size: fileSizes,
    failedItems: failedItemsToReport
  }
}

/**
 * Asynchronously uploads a file. If the file is bigger than the max chunk size it will be uploaded via multiple calls
 * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
 * @returns The size of the file that was uploaded in bytes along with any failed uploads
 */
async function uploadFileAsync(
  parameters: UploadFileParameters
): Promise<UploadFileResult> {
  const fileSize: number = fs.statSync(parameters.file).size
  const parallelUploads = [...new Array(parameters.concurrency).keys()]
  let offset = 0
  let isUploadSuccessful = true
  let failedChunkSizes = 0
  let abortFileUpload = false

  await Promise.all(
    parallelUploads.map(async () => {
      while (offset < fileSize) {
        const chunkSize = Math.min(fileSize - offset, parameters.maxChunkSize)
        if (abortFileUpload) {
          // if we don't want to continue on error, any pending upload chunk will be marked as failed
          failedChunkSizes += chunkSize
          continue
        }

        const start = offset
        const end = offset + chunkSize - 1
        offset += parameters.maxChunkSize
        const chunk: NodeJS.ReadableStream = fs.createReadStream(
          parameters.file,
          {
            start,
            end,
            autoClose: false
          }
        )

        const result = await uploadChunk(
          parameters.restClient,
          parameters.resourceUrl,
          chunk,
          start,
          end,
          fileSize
        )

        if (!result) {
          /**
           * Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
           * successfully uploaded so the server may report a different size for what was uploaded
           **/
          isUploadSuccessful = false
          failedChunkSizes += chunkSize
          warning(`Aborting upload for ${parameters.file} due to failure`)
          abortFileUpload = true
        }
      }
    })
  )
  return {
    isSuccess: isUploadSuccessful,
    successfulUploadSize: fileSize - failedChunkSizes
  }
}

/**
 * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
 * indicates a retryable status, we try to upload the chunk again
 * @param {HttpClient} restClient RestClient that will be making the appropriate HTTP call
 * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
 * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
 * @param {number} start Starting byte index of file that the chunk belongs to
 * @param {number} end Ending byte index of file that the chunk belongs to
 * @param {number} totalSize Total size of the file in bytes that is being uploaded
 * @returns if the chunk was successfully uploaded
 */
async function uploadChunk(
  restClient: HttpClient,
  resourceUrl: string,
  data: NodeJS.ReadableStream,
  start: number,
  end: number,
  totalSize: number
): Promise<boolean> {
  info(
    `Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end, totalSize)}`
  )

  const requestOptions = getRequestOptions(
    'application/octet-stream',
    totalSize,
    getContentRange(start, end, totalSize)
  )

  const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
    return await restClient.sendStream('PUT', resourceUrl, data, requestOptions)
  }

  const response = await uploadChunkRequest()
  if (isSuccessStatusCode(response.message.statusCode)) {
    debug(
      `Chunk for ${start}:${end} was successfully uploaded to ${resourceUrl}`
    )
    return true
  } else if (isRetryableStatusCode(response.message.statusCode)) {
    info(
      `Received http ${response.message.statusCode} during chunk upload, will retry at offset ${start} after 10 seconds.`
    )
    await new Promise(resolve => setTimeout(resolve, 10000))
    const retryResponse = await uploadChunkRequest()
    if (isSuccessStatusCode(retryResponse.message.statusCode)) {
      return true
    } else {
      info(`Unable to upload chunk even after retrying`)
      // eslint-disable-next-line no-console
      console.log(response)
      return false
    }
  }

  // Upload must have failed spectacularly somehow, log full result for diagnostic purposes
  // eslint-disable-next-line no-console
  console.log(response)
  return false
}

/**
 * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact.
 * Updating the size indicates that we are done uploading all the contents of the artifact. A server side check will be run
 * to check that the artifact size is correct for billing purposes
 */
export async function patchArtifactSize(
  size: number,
  artifactName: string
): Promise<void> {
  const client = createHttpClient()
  const requestOptions = getRequestOptions('application/json')
  const resourceUrl = new URL(getArtifactUrl())
  resourceUrl.searchParams.append('artifactName', artifactName)

  const parameters: PatchArtifactSize = {Size: size}
  const data: string = JSON.stringify(parameters, null, 2)
  debug(`URL is ${resourceUrl.toString()}`)

  const rawResponse: HttpClientResponse = await client.patch(
    resourceUrl.toString(),
    data,
    requestOptions
  )
  const body: string = await rawResponse.readBody()

  if (isSuccessStatusCode(rawResponse.message.statusCode)) {
    debug(
      `Artifact ${artifactName} has been successfully uploaded, total size ${size}`
    )
    debug(body)
  } else if (rawResponse.message.statusCode === 404) {
    throw new Error(`An Artifact with the name ${artifactName} was not found`)
  } else {
    // eslint-disable-next-line no-console
    console.log(body)
    throw new Error(`Unable to finish uploading artifact ${artifactName}`)
  }
}

interface UploadFileParameters {
  file: string
  resourceUrl: string
  restClient: HttpClient
  concurrency: number
  maxChunkSize: number
  continueOnError: boolean
}

interface UploadFileResult {
  isSuccess: boolean
  successfulUploadSize: number
}
@@ -12,6 +12,19 @@ export function getUploadChunkConcurrency(): number {
export function getUploadChunkSize(): number {
  return 4 * 1024 * 1024 // 4 MB Chunks
}

export function getUploadRetryCount(): number {
  return 1
}

export function getRetryWaitTimeInMilliseconds(): number {
  return 1
}

export function getDownloadFileConcurrency(): number {
  return 1
}

/**
 * Mocks the 'ACTIONS_RUNTIME_TOKEN', 'ACTIONS_RUNTIME_URL' and 'GITHUB_RUN_ID' env variables
 * that are only available from a node context on the runner. This allows for tests to run
@@ -2,31 +2,18 @@ import * as core from '@actions/core'
import {
  UploadSpecification,
  getUploadSpecification
} from './internal-upload-specification'
import {
  createArtifactInFileContainer,
  uploadArtifactToFileContainer,
  patchArtifactSize
} from './internal-upload-http-client'
import {UploadResponse} from './internal-upload-response'
import {UploadOptions} from './internal-upload-options'
import {DownloadOptions} from './internal-download-options'
import {DownloadResponse} from './internal-download-response'
import {checkArtifactName, createDirectoriesForArtifact} from './internal-utils'
import {
  listArtifacts,
  downloadSingleArtifact,
  getContainerItems
} from './internal-download-http-client'
import {getDownloadSpecification} from './internal-download-specification'
import {
  getWorkSpaceDirectory,
  getDownloadArtifactConcurrency
} from './internal-config-variables'
} from './upload-specification'
import {UploadHttpClient} from './upload-http-client'
import {UploadResponse} from './upload-response'
import {UploadOptions} from './upload-options'
import {DownloadOptions} from './download-options'
import {DownloadResponse} from './download-response'
import {checkArtifactName, createDirectoriesForArtifact} from './utils'
import {DownloadHttpClient} from './download-http-client'
import {getDownloadSpecification} from './download-specification'
import {getWorkSpaceDirectory} from './config-variables'
import {normalize, resolve} from 'path'

export {UploadResponse, UploadOptions, DownloadResponse, DownloadOptions}

export interface ArtifactClient {
  /**
   * Uploads an artifact
@@ -96,11 +83,15 @@ export class DefaultArtifactClient implements ArtifactClient {
      failedItems: []
    }

    const uploadHttpClient = new UploadHttpClient()

    if (uploadSpecification.length === 0) {
      core.warning(`No files found that can be uploaded`)
    } else {
      // Create an entry for the artifact in the file container
      const response = await createArtifactInFileContainer(name)
      const response = await uploadHttpClient.createArtifactInFileContainer(
        name
      )
      if (!response.fileContainerResourceUrl) {
        core.debug(response.toString())
        throw new Error(
@@ -110,23 +101,24 @@ export class DefaultArtifactClient implements ArtifactClient {
      core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`)

      // Upload each of the files that were found concurrently
      const uploadResult = await uploadArtifactToFileContainer(
      const uploadResult = await uploadHttpClient.uploadArtifactToFileContainer(
        response.fileContainerResourceUrl,
        uploadSpecification,
        options
      )

      //Update the size of the artifact to indicate we are done uploading
      await patchArtifactSize(uploadResult.size, name)
      // Update the size of the artifact to indicate we are done uploading
      // The uncompressed size is used for display when downloading a zip of the artifact from the UI
      await uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name)

      core.info(
        `Finished uploading artifact ${name}. Reported size is ${uploadResult.size} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
        `Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`
      )

      uploadResponse.artifactItems = uploadSpecification.map(
        item => item.absoluteFilePath
      )
      uploadResponse.size = uploadResult.size
      uploadResponse.size = uploadResult.uploadSize
      uploadResponse.failedItems = uploadResult.failedItems
    }
    return uploadResponse
@@ -137,7 +129,9 @@ export class DefaultArtifactClient implements ArtifactClient {
    path?: string | undefined,
    options?: DownloadOptions | undefined
  ): Promise<DownloadResponse> {
    const artifacts = await listArtifacts()
    const downloadHttpClient = new DownloadHttpClient()

    const artifacts = await downloadHttpClient.listArtifacts()
    if (artifacts.count === 0) {
      throw new Error(
        `Unable to find any artifacts for the associated workflow`
@@ -151,7 +145,7 @@ export class DefaultArtifactClient implements ArtifactClient {
      throw new Error(`Unable to find an artifact with the name: ${name}`)
    }

    const items = await getContainerItems(
    const items = await downloadHttpClient.getContainerItems(
      artifactToDownload.name,
      artifactToDownload.fileContainerResourceUrl
    )
@@ -179,7 +173,9 @@ export class DefaultArtifactClient implements ArtifactClient {
      await createDirectoriesForArtifact(
        downloadSpecification.directoryStructure
      )
      await downloadSingleArtifact(downloadSpecification.filesToDownload)
      await downloadHttpClient.downloadSingleArtifact(
        downloadSpecification.filesToDownload
      )
    }

    return {
@@ -191,8 +187,10 @@ export class DefaultArtifactClient implements ArtifactClient {
  async downloadAllArtifacts(
    path?: string | undefined
  ): Promise<DownloadResponse[]> {
    const downloadHttpClient = new DownloadHttpClient()

    const response: DownloadResponse[] = []
    const artifacts = await listArtifacts()
    const artifacts = await downloadHttpClient.listArtifacts()
    if (artifacts.count === 0) {
      core.info('Unable to find any artifacts for the associated workflow')
      return response
@@ -204,46 +202,41 @@ export class DefaultArtifactClient implements ArtifactClient {
    path = normalize(path)
    path = resolve(path)

    const ARTIFACT_CONCURRENCY = getDownloadArtifactConcurrency()
    const parallelDownloads = [...new Array(ARTIFACT_CONCURRENCY).keys()]
    let downloadedArtifacts = 0
    await Promise.all(
      parallelDownloads.map(async () => {
        while (downloadedArtifacts < artifacts.count) {
          const currentArtifactToDownload = artifacts.value[downloadedArtifacts]
          downloadedArtifacts += 1
    while (downloadedArtifacts < artifacts.count) {
      const currentArtifactToDownload = artifacts.value[downloadedArtifacts]
      downloadedArtifacts += 1

          // Get container entries for the specific artifact
          const items = await getContainerItems(
            currentArtifactToDownload.name,
            currentArtifactToDownload.fileContainerResourceUrl
          )
      // Get container entries for the specific artifact
      const items = await downloadHttpClient.getContainerItems(
        currentArtifactToDownload.name,
        currentArtifactToDownload.fileContainerResourceUrl
      )

          // Promise.all is not correctly inferring that 'path' is no longer possibly undefined: https://github.com/microsoft/TypeScript/issues/34925
          const downloadSpecification = getDownloadSpecification(
            currentArtifactToDownload.name,
            items.value,
            path!, // eslint-disable-line @typescript-eslint/no-non-null-assertion
            true
          )
          if (downloadSpecification.filesToDownload.length === 0) {
            core.info(
              `No downloadable files were found for any artifact ${currentArtifactToDownload.name}`
            )
          } else {
            await createDirectoriesForArtifact(
              downloadSpecification.directoryStructure
            )
            await downloadSingleArtifact(downloadSpecification.filesToDownload)
          }
      const downloadSpecification = getDownloadSpecification(
        currentArtifactToDownload.name,
        items.value,
        path,
        true
      )
      if (downloadSpecification.filesToDownload.length === 0) {
        core.info(
          `No downloadable files were found for any artifact ${currentArtifactToDownload.name}`
        )
      } else {
        await createDirectoriesForArtifact(
          downloadSpecification.directoryStructure
        )
        await downloadHttpClient.downloadSingleArtifact(
          downloadSpecification.filesToDownload
        )
      }

          response.push({
            artifactName: currentArtifactToDownload.name,
            downloadPath: downloadSpecification.rootDownloadLocation
          })
        }
      response.push({
        artifactName: currentArtifactToDownload.name,
        downloadPath: downloadSpecification.rootDownloadLocation
      })
      )
    }
    return response
  }
}
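The pattern above, N async workers draining a shared cursor into a work list, recurs throughout this commit for both uploads and downloads. A self-contained sketch of the idea (the helper name is illustrative, not part of the package):

async function runWithConcurrency<T>(
  items: T[],
  concurrency: number,
  worker: (item: T) => Promise<void>
): Promise<void> {
  let cursor = 0
  await Promise.all(
    // spin up `concurrency` workers; each synchronously claims the next item
    [...new Array(concurrency).keys()].map(async () => {
      while (cursor < items.length) {
        const item = items[cursor]
        cursor += 1
        await worker(item)
      }
    })
  )
}

Because the read of `cursor` and its increment happen synchronously between awaits, single-threaded JavaScript guarantees no two workers claim the same item, so at most `concurrency` operations are in flight at once without any extra dependencies.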
@@ -2,21 +2,20 @@ export function getUploadFileConcurrency(): number {
  return 2
}

export function getUploadChunkConcurrency(): number {
  return 1
}

export function getUploadChunkSize(): number {
  return 4 * 1024 * 1024 // 4 MB Chunks
}

export function getDownloadFileConcurrency(): number {
  return 2
export function getUploadRetryCount(): number {
  return 3
}

export function getDownloadArtifactConcurrency(): number {
  // when downloading all artifacts at once, this is the number of concurrent artifacts being downloaded
  return 1
export function getRetryWaitTimeInMilliseconds(): number {
  return 10000
}

export function getDownloadFileConcurrency(): number {
  return 2
}

export function getRuntimeToken(): string {
@@ -28,7 +28,8 @@ export interface PatchArtifactSizeSuccessResponse {
}

export interface UploadResults {
  size: number
  uploadSize: number
  totalSize: number
  failedItems: string[]
}

packages/artifact/src/internal/download-http-client.ts (new file, 189 lines)
@@ -0,0 +1,189 @@
import * as fs from 'fs'
import * as zlib from 'zlib'
import {
  getArtifactUrl,
  getRequestOptions,
  isSuccessStatusCode,
  isRetryableStatusCode,
  createHttpClient
} from './utils'
import {URL} from 'url'
import {ListArtifactsResponse, QueryArtifactResponse} from './contracts'
import {IHttpClientResponse} from '@actions/http-client/interfaces'
import {HttpManager} from './http-manager'
import {DownloadItem} from './download-specification'
import {
  getDownloadFileConcurrency,
  getRetryWaitTimeInMilliseconds
} from './config-variables'
import {warning} from '@actions/core'
import {IncomingHttpHeaders} from 'http'

export class DownloadHttpClient {
  // http manager is used for concurrent connections when downloading multiple files at once
  private downloadHttpManager: HttpManager

  constructor() {
    this.downloadHttpManager = new HttpManager(getDownloadFileConcurrency())
  }

  /**
   * Gets a list of all artifacts that are in a specific container
   */
  async listArtifacts(): Promise<ListArtifactsResponse> {
    const artifactUrl = getArtifactUrl()
    // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
    const client = this.downloadHttpManager.getClient(0)
    const requestOptions = getRequestOptions('application/json')

    const rawResponse = await client.get(artifactUrl, requestOptions)
    const body: string = await rawResponse.readBody()
    if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
      return JSON.parse(body)
    }
    // eslint-disable-next-line no-console
    console.log(rawResponse)
    throw new Error(`Unable to list artifacts for the run`)
  }

  /**
   * Fetches a set of container items that describe the contents of an artifact
   * @param artifactName the name of the artifact
   * @param containerUrl the artifact container URL for the run
   */
  async getContainerItems(
    artifactName: string,
    containerUrl: string
  ): Promise<QueryArtifactResponse> {
    // the itemPath search parameter controls which containers will be returned
    const resourceUrl = new URL(containerUrl)
    resourceUrl.searchParams.append('itemPath', artifactName)

    // no concurrent calls so a single httpClient without the http-manager is sufficient
    const client = createHttpClient()

    // no keep-alive header, client disposal is not necessary
    const requestOptions = getRequestOptions('application/json')
    const rawResponse = await client.get(resourceUrl.toString(), requestOptions)
    const body: string = await rawResponse.readBody()
    if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
      return JSON.parse(body)
    }
    // eslint-disable-next-line no-console
    console.log(rawResponse)
    throw new Error(`Unable to get ContainersItems from ${resourceUrl}`)
  }

  /**
   * Concurrently downloads all the files that are part of an artifact
   * @param downloadItems information about what items to download and where to save them
   */
  async downloadSingleArtifact(downloadItems: DownloadItem[]): Promise<void> {
    const DOWNLOAD_CONCURRENCY = getDownloadFileConcurrency()
    // limit the number of files downloaded at a single time
    const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]
    let downloadedFiles = 0
    await Promise.all(
      parallelDownloads.map(async index => {
        while (downloadedFiles < downloadItems.length) {
          const currentFileToDownload = downloadItems[downloadedFiles]
          downloadedFiles += 1
          await this.downloadIndividualFile(
            index,
            currentFileToDownload.sourceLocation,
            currentFileToDownload.targetPath
          )
        }
      })
    )

    // done downloading, for safety dispose all connections
    this.downloadHttpManager.disposeAndReplaceAllClients()
  }

  /**
   * Downloads an individual file
   * @param httpClientIndex the index of the http client that is used to make all of the calls
   * @param artifactLocation origin location where a file will be downloaded from
   * @param downloadPath destination location for the file being downloaded
   */
  private async downloadIndividualFile(
    httpClientIndex: number,
    artifactLocation: string,
    downloadPath: string
  ): Promise<void> {
    const stream = fs.createWriteStream(downloadPath)
    const client = this.downloadHttpManager.getClient(httpClientIndex)
    const requestOptions = getRequestOptions('application/octet-stream', true)
    const response = await client.get(artifactLocation, requestOptions)

    // check the response headers to determine if the file was compressed using gzip
    const isGzip = (headers: IncomingHttpHeaders): boolean => {
      return (
        'content-encoding' in headers && headers['content-encoding'] === 'gzip'
      )
    }

    if (isSuccessStatusCode(response.message.statusCode)) {
      await this.pipeResponseToStream(
        response,
        stream,
        isGzip(response.message.headers)
      )
    } else if (isRetryableStatusCode(response.message.statusCode)) {
      warning(
        `Received http ${response.message.statusCode} during file download, will retry ${artifactLocation} after 10 seconds`
      )
      // if an error is encountered, dispose of the http connection, and create a new one
      this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex)
      await new Promise(resolve =>
        setTimeout(resolve, getRetryWaitTimeInMilliseconds())
      )
      // the disposed client was replaced above, so fetch the new instance for the retry
      const retryClient = this.downloadHttpManager.getClient(httpClientIndex)
      const retryResponse = await retryClient.get(artifactLocation, requestOptions)
      if (isSuccessStatusCode(retryResponse.message.statusCode)) {
        await this.pipeResponseToStream(
          retryResponse,
          stream,
          isGzip(retryResponse.message.headers)
        )
      } else {
        // eslint-disable-next-line no-console
        console.log(retryResponse)
        throw new Error(`Unable to download ${artifactLocation}`)
      }
    } else {
      // eslint-disable-next-line no-console
      console.log(response)
      throw new Error(`Unable to download ${artifactLocation}`)
    }
  }

  /**
   * Pipes the response from downloading an individual file to the appropriate stream
   * @param response the http response received when downloading a file
   * @param stream the stream where the file should be written to
   * @param isGzip does the response need to be uncompressed
   */
  private async pipeResponseToStream(
    response: IHttpClientResponse,
    stream: NodeJS.WritableStream,
    isGzip: boolean
  ): Promise<void> {
    return new Promise(resolve => {
      if (isGzip) {
        // pipe the response into gunzip to decompress
        const gunzip = zlib.createGunzip()
        response.message
          .pipe(gunzip)
          .pipe(stream)
          .on('close', () => {
            resolve()
          })
      } else {
        response.message.pipe(stream).on('close', () => {
          resolve()
        })
      }
    })
  }
}
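The promise in pipeResponseToStream resolves on 'close' but never rejects if the response, gunzip, or file stream errors. A variant using Node's stream.pipeline, which forwards errors from any stage, would look like this (a sketch, not the package's code; the helper name is illustrative):

import * as fs from 'fs'
import * as zlib from 'zlib'
import {pipeline} from 'stream'
import {promisify} from 'util'
const pipelineAsync = promisify(pipeline)

// decompress a gzip response body into a file, rejecting if any stage errors
async function pipeGzipToFile(
  body: NodeJS.ReadableStream,
  downloadPath: string
): Promise<void> {
  await pipelineAsync(body, zlib.createGunzip(), fs.createWriteStream(downloadPath))
}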
@@ -1,5 +1,5 @@
import * as path from 'path'
import {ContainerEntry} from './internal-contracts'
import {ContainerEntry} from './contracts'

export interface DownloadSpecification {
  // root download location for the artifact
packages/artifact/src/internal/http-manager.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import {HttpClient} from '@actions/http-client/index'
import {createHttpClient} from './utils'

/**
 * Used for managing http clients during either upload or download
 */
export class HttpManager {
  private clients: HttpClient[]

  constructor(clientCount: number) {
    if (clientCount < 1) {
      throw new Error('There must be at least one client')
    }
    // Array.from invokes the factory once per slot; fill(createHttpClient()) would evaluate it once and share a single client instance across every index
    this.clients = Array.from({length: clientCount}, () => createHttpClient())
  }

  getClient(index: number): HttpClient {
    return this.clients[index]
  }

  // client disposal is necessary if a keep-alive connection is used to properly close the connection
  // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
  disposeAndReplaceClient(index: number): void {
    this.clients[index].dispose()
    this.clients[index] = createHttpClient()
  }

  disposeAndReplaceAllClients(): void {
    for (const [index] of this.clients.entries()) {
      this.disposeAndReplaceClient(index)
    }
  }
}
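One JavaScript subtlety the constructor has to avoid: Array.prototype.fill stores the same value in every slot, so filling with the result of a single createHttpClient() call would give every index a reference to one shared client, and disposing slot 0 would dispose them all. A runnable demonstration of the difference (plain Node, no external dependencies):

// fill() reuses one object, Array.from creates one per slot
const shared = new Array(3).fill({id: 0})
console.log(shared[0] === shared[1]) // true: one object, three references

const distinct = Array.from({length: 3}, (_, id) => ({id}))
console.log(distinct[0] === distinct[1]) // false: separate objects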
packages/artifact/src/internal/upload-gzip.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import * as fs from 'fs'
import * as zlib from 'zlib'
import {promisify} from 'util'
const stat = promisify(fs.stat)

/**
 * Creates a Gzip compressed file of an original file at the provided temporary filepath location
 * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
 * @param {string} tempFilePath the location of where the Gzip file will be created
 * @returns the size of gzip file that gets created
 */
export async function createGZipFileOnDisk(
  originalFilePath: string,
  tempFilePath: string
): Promise<number> {
  return new Promise((resolve, reject) => {
    const inputStream = fs.createReadStream(originalFilePath)
    const gzip = zlib.createGzip()
    const outputStream = fs.createWriteStream(tempFilePath)
    inputStream.pipe(gzip).pipe(outputStream)
    outputStream.on('finish', async () => {
      // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
      const size = (await stat(tempFilePath)).size
      resolve(size)
    })
    outputStream.on('error', error => {
      // eslint-disable-next-line no-console
      console.log(error)
      reject(error)
    })
  })
}

/**
 * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
 * @param originalFilePath the path to the original file that is being GZipped
 * @returns a buffer with the GZip file
 */
export async function createGZipFileInBuffer(
  originalFilePath: string
): Promise<Buffer> {
  return new Promise(async resolve => {
    const inputStream = fs.createReadStream(originalFilePath)
    const gzip = zlib.createGzip()
    inputStream.pipe(gzip)
    // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043
    const chunks = []
    for await (const chunk of gzip) {
      chunks.push(chunk)
    }
    resolve(Buffer.concat(chunks))
  })
}
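These two helpers feed the size comparison made later in upload-http-client.ts: gzip output is only sent if it is actually smaller than the original. A sketch of that decision in isolation (the shouldSendGzip helper is illustrative, not part of the package):

import {createGZipFileInBuffer} from './upload-gzip'
import {promises as fsp} from 'fs'

// compress a small file in memory and compare sizes; already-compressed
// formats such as zip or png usually come out larger and are sent raw
async function shouldSendGzip(file: string): Promise<boolean> {
  const originalSize = (await fsp.stat(file)).size
  const compressedSize = (await createGZipFileInBuffer(file)).byteLength
  return compressedSize < originalSize
}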
packages/artifact/src/internal/upload-http-client.ts (new file, 465 lines)
@@ -0,0 +1,465 @@
|
||||
import * as fs from 'fs'
|
||||
import * as tmp from 'tmp-promise'
|
||||
import * as stream from 'stream'
|
||||
import {
|
||||
ArtifactResponse,
|
||||
CreateArtifactParameters,
|
||||
PatchArtifactSize,
|
||||
UploadResults
|
||||
} from './contracts'
|
||||
import {
|
||||
getArtifactUrl,
|
||||
getContentRange,
|
||||
getRequestOptions,
|
||||
isRetryableStatusCode,
|
||||
isSuccessStatusCode
|
||||
} from './utils'
|
||||
import {
|
||||
getUploadChunkSize,
|
||||
getUploadFileConcurrency,
|
||||
getUploadRetryCount,
|
||||
getRetryWaitTimeInMilliseconds
|
||||
} from './config-variables'
|
||||
import {promisify} from 'util'
|
||||
import {URL} from 'url'
|
||||
import {performance} from 'perf_hooks'
|
||||
import {UploadStatusReporter} from './upload-status-reporter'
|
||||
import {debug, warning, info} from '@actions/core'
|
||||
import {HttpClientResponse} from '@actions/http-client/index'
|
||||
import {IHttpClientResponse} from '@actions/http-client/interfaces'
|
||||
import {HttpManager} from './http-manager'
|
||||
import {UploadSpecification} from './upload-specification'
|
||||
import {UploadOptions} from './upload-options'
|
||||
import {createGZipFileOnDisk, createGZipFileInBuffer} from './upload-gzip'
|
||||
const stat = promisify(fs.stat)
|
||||
|
||||
export class UploadHttpClient {
|
||||
private uploadHttpManager: HttpManager
|
||||
private statusReporter: UploadStatusReporter
|
||||
|
||||
constructor() {
|
||||
this.uploadHttpManager = new HttpManager(getUploadFileConcurrency())
|
||||
this.statusReporter = new UploadStatusReporter()
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file container for the new artifact in the remote blob storage/file service
|
||||
* @param {string} artifactName Name of the artifact being created
|
||||
* @returns The response from the Artifact Service if the file container was successfully created
|
||||
*/
|
||||
async createArtifactInFileContainer(
|
||||
artifactName: string
|
||||
): Promise<ArtifactResponse> {
|
||||
const parameters: CreateArtifactParameters = {
|
||||
Type: 'actions_storage',
|
||||
Name: artifactName
|
||||
}
|
||||
const data: string = JSON.stringify(parameters, null, 2)
|
||||
const artifactUrl = getArtifactUrl()
|
||||
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediatly
|
||||
const client = this.uploadHttpManager.getClient(0)
|
||||
const requestOptions = getRequestOptions('application/json', false, false)
|
||||
const rawResponse = await client.post(artifactUrl, data, requestOptions)
|
||||
const body: string = await rawResponse.readBody()
|
||||
|
||||
if (isSuccessStatusCode(rawResponse.message.statusCode) && body) {
|
||||
return JSON.parse(body)
|
||||
} else {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(rawResponse)
|
||||
throw new Error(
|
||||
`Unable to create a container for the artifact ${artifactName}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Concurrently upload all of the files in chunks
|
||||
* @param {string} uploadUrl Base Url for the artifact that was created
|
||||
* @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
|
||||
* @returns The size of all the files uploaded in bytes
|
||||
*/
|
||||
async uploadArtifactToFileContainer(
|
||||
uploadUrl: string,
|
||||
filesToUpload: UploadSpecification[],
|
||||
options?: UploadOptions
|
||||
): Promise<UploadResults> {
|
||||
const FILE_CONCURRENCY = getUploadFileConcurrency()
|
||||
const MAX_CHUNK_SIZE = getUploadChunkSize()
|
||||
debug(
|
||||
`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`
|
||||
)
|
||||
|
||||
const parameters: UploadFileParameters[] = []
|
||||
// by default, file uploads will continue if there is an error unless specified differently in the options
|
||||
let continueOnError = true
|
||||
if (options) {
|
||||
if (options.continueOnError === false) {
|
||||
continueOnError = false
|
||||
}
|
||||
}
|
||||
|
||||
// prepare the necessary parameters to upload all the files
|
||||
for (const file of filesToUpload) {
|
||||
const resourceUrl = new URL(uploadUrl)
|
||||
resourceUrl.searchParams.append('itemPath', file.uploadFilePath)
|
||||
parameters.push({
|
||||
file: file.absoluteFilePath,
|
||||
resourceUrl: resourceUrl.toString(),
|
||||
maxChunkSize: MAX_CHUNK_SIZE,
|
||||
continueOnError
|
||||
})
|
||||
}
|
||||
|
||||
const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]
|
||||
const failedItemsToReport: string[] = []
|
||||
let currentFile = 0
|
||||
let completedFiles = 0
|
||||
let uploadFileSize = 0
|
||||
let totalFileSize = 0
|
||||
let abortPendingFileUploads = false
|
||||
|
||||
this.statusReporter.setTotalNumberOfFilesToUpload(filesToUpload.length)
|
||||
this.statusReporter.start()
|
||||
|
||||
// only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
|
||||
await Promise.all(
|
||||
parallelUploads.map(async index => {
|
||||
while (currentFile < filesToUpload.length) {
|
||||
const currentFileParameters = parameters[currentFile]
|
||||
currentFile += 1
|
||||
if (abortPendingFileUploads) {
|
||||
failedItemsToReport.push(currentFileParameters.file)
|
||||
continue
|
||||
}
|
||||
|
||||
const startTime = performance.now()
|
||||
const uploadFileResult = await this.uploadFileAsync(
|
||||
index,
|
||||
currentFileParameters
|
||||
)
|
||||
|
||||
debug(
|
||||
`File: ${++completedFiles}/${filesToUpload.length}. ${
|
||||
currentFileParameters.file
|
||||
} took ${(performance.now() - startTime).toFixed(
|
||||
3
|
||||
)} milliseconds to finish upload`
|
||||
)
|
||||
uploadFileSize += uploadFileResult.successfullUploadSize
|
||||
totalFileSize += uploadFileResult.totalSize
|
||||
if (uploadFileResult.isSuccess === false) {
|
||||
failedItemsToReport.push(currentFileParameters.file)
|
||||
if (!continueOnError) {
|
||||
// existing uploads will be able to finish however all pending uploads will fail fast
|
||||
abortPendingFileUploads = true
|
||||
}
|
||||
}
|
||||
this.statusReporter.incrementProcessedCount()
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
this.statusReporter.stop()
|
||||
// done uploading, safety dispose all connections
|
||||
this.uploadHttpManager.disposeAndReplaceAllClients()
|
||||
|
||||
info(`Total size of all the files uploaded is ${uploadFileSize} bytes`)
|
||||
return {
|
||||
uploadSize: uploadFileSize,
|
||||
totalSize: totalFileSize,
|
||||
failedItems: failedItemsToReport
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
|
||||
* If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
|
||||
* @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
|
||||
* @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
|
||||
* @returns The size of the file that was uploaded in bytes along with any failed uploads
|
||||
*/
|
||||
private async uploadFileAsync(
|
||||
httpClientIndex: number,
|
||||
parameters: UploadFileParameters
|
||||
): Promise<UploadFileResult> {
|
||||
const totalFileSize: number = (await stat(parameters.file)).size
|
||||
let offset = 0
|
||||
let isUploadSuccessful = true
|
||||
let failedChunkSizes = 0
|
||||
let uploadFileSize = 0
|
||||
let isGzip = true
|
||||
|
||||
// the file that is being uploaded is less than 64k in size, to increase thoroughput and to minimize disk I/O
|
||||
// for creating a new GZip file, an in-memory buffer is used for compression
|
||||
if (totalFileSize < 65536) {
|
||||
const buffer = await createGZipFileInBuffer(parameters.file)
|
||||
let uploadStream: NodeJS.ReadableStream
|
||||
|
||||
if (totalFileSize < buffer.byteLength) {
|
||||
// compression did not help with reducing the size, use a readable stream from the original file for upload
|
||||
uploadStream = fs.createReadStream(parameters.file)
|
||||
isGzip = false
|
||||
uploadFileSize = totalFileSize
|
||||
} else {
|
||||
// create a readable stream using a PassThrough stream that is both readable and writable
|
||||
const passThrough = new stream.PassThrough()
|
||||
passThrough.end(buffer)
|
||||
uploadStream = passThrough
|
||||
uploadFileSize = buffer.byteLength
|
||||
}
|
||||
|
||||
const result = await this.uploadChunk(
|
||||
httpClientIndex,
|
||||
parameters.resourceUrl,
|
||||
uploadStream,
|
||||
0,
|
||||
uploadFileSize - 1,
|
||||
uploadFileSize,
|
||||
isGzip,
|
||||
totalFileSize
|
||||
)
|
||||
|
||||
if (!result) {
|
||||
// chunk failed to upload
|
||||
isUploadSuccessful = false
|
||||
failedChunkSizes += uploadFileSize
|
||||
warning(`Aborting upload for ${parameters.file} due to failure`)
|
||||
}
|
||||
|
||||
return {
|
||||
isSuccess: isUploadSuccessful,
|
||||
successfullUploadSize: uploadFileSize - failedChunkSizes,
|
||||
totalSize: totalFileSize
|
||||
}
|
||||
} else {
|
||||
// the file that is being uploaded is greater than 64k in size, a temprorary file gets created on disk using the
|
||||
// npm tmp-promise package and this file gets used during compression for the GZip file that gets created
|
||||
return tmp
|
||||
.file()
|
||||
.then(async tmpFile => {
|
||||
// create a GZip file of the original file being uploaded, the original file should not be modified in any way
|
||||
uploadFileSize = await createGZipFileOnDisk(
|
||||
parameters.file,
|
||||
tmpFile.path
|
||||
)
|
||||
let uploadFilePath = tmpFile.path
|
||||
|
||||
// compression did not help with size reduction, use the original file for upload and delete the temp GZip file
|
||||
if (totalFileSize < uploadFileSize) {
|
||||
uploadFileSize = totalFileSize
|
||||
uploadFilePath = parameters.file
|
||||
isGzip = false
|
||||
tmpFile.cleanup()
|
||||
}
|
||||
|
||||
let abortFileUpload = false
|
||||
// upload only a single chunk at a time
|
||||
while (offset < uploadFileSize) {
|
||||
const chunkSize = Math.min(
|
||||
uploadFileSize - offset,
|
||||
parameters.maxChunkSize
|
||||
)
|
||||
if (abortFileUpload) {
|
||||
// if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
|
||||
failedChunkSizes += chunkSize
|
||||
continue
|
||||
}
|
||||
|
||||
// if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status
|
||||
if (uploadFileSize > 104857600) {
|
||||
this.statusReporter.updateLargeFileStatus(
|
||||
parameters.file,
|
||||
offset,
|
||||
uploadFileSize
|
||||
)
|
||||
}
|
||||
|
||||
const start = offset
|
||||
const end = offset + chunkSize - 1
|
||||
offset += parameters.maxChunkSize
|
||||
|
||||
const result = await this.uploadChunk(
|
||||
httpClientIndex,
|
||||
parameters.resourceUrl,
|
||||
fs.createReadStream(uploadFilePath, {
|
||||
start,
|
||||
end,
|
||||
autoClose: false
|
||||
}),
|
||||
start,
|
||||
end,
|
||||
uploadFileSize,
|
||||
isGzip,
|
||||
totalFileSize
|
||||
)
|
||||
|
||||
if (!result) {
|
||||
// Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
|
||||
// successfully uploaded so the server may report a different size for what was uploaded
|
||||
isUploadSuccessful = false
|
||||
failedChunkSizes += chunkSize
|
||||
warning(`Aborting upload for ${parameters.file} due to failure`)
|
||||
abortFileUpload = true
|
||||
}
|
||||
}
|
||||
})
|
||||
.then(
|
||||
async (): Promise<UploadFileResult> => {
|
||||
// only after the file upload is complete and the temporary file is deleted, return the UploadResult
|
||||
return new Promise(resolve => {
|
||||
resolve({
|
||||
isSuccess: isUploadSuccessful,
|
||||
successfullUploadSize: uploadFileSize - failedChunkSizes,
|
||||
totalSize: totalFileSize
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
}

  /**
   * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
   * indicates a retryable status, the chunk upload will be retried
   * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
   * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
   * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
   * @param {number} start Starting byte index of the file that the chunk belongs to
   * @param {number} end Ending byte index of the file that the chunk belongs to
   * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
   * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
   * @param {number} totalFileSize Original total size of the file that is being uploaded
   * @returns whether the chunk was successfully uploaded
   */
  private async uploadChunk(
    httpClientIndex: number,
    resourceUrl: string,
    data: NodeJS.ReadableStream,
    start: number,
    end: number,
    uploadFileSize: number,
    isGzip: boolean,
    totalFileSize: number
  ): Promise<boolean> {
    // prepare all the necessary headers before making any http call
    const requestOptions = getRequestOptions(
      'application/octet-stream',
      true,
      isGzip,
      totalFileSize,
      end - start + 1,
      getContentRange(start, end, uploadFileSize)
    )

    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
      const client = this.uploadHttpManager.getClient(httpClientIndex)
      return await client.sendStream('PUT', resourceUrl, data, requestOptions)
    }

    let retryCount = 0
    const retryLimit = getUploadRetryCount()

    // allow for failed chunks to be retried multiple times
    while (retryCount <= retryLimit) {
      try {
        const response = await uploadChunkRequest()

        // Always read the body of the response. There is potential for a resource leak if the body is not read, which
        // will result in the connection remaining open along with unintended consequences when trying to dispose of the client
        await response.readBody()

        if (isSuccessStatusCode(response.message.statusCode)) {
          return true
        } else if (isRetryableStatusCode(response.message.statusCode)) {
          retryCount++
          if (retryCount > retryLimit) {
            info(
              `Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`
            )
            return false
          } else {
            info(
              `HTTP ${response.message.statusCode} during chunk upload, will retry at offset ${start} after ${getRetryWaitTimeInMilliseconds()} milliseconds. Retry count #${retryCount}. URL ${resourceUrl}`
            )
            this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex)
            await new Promise(resolve =>
              setTimeout(resolve, getRetryWaitTimeInMilliseconds())
            )
          }
        } else {
          info(`#ERROR# Unable to upload chunk to ${resourceUrl}`)
          // eslint-disable-next-line no-console
          console.log(response)
          return false
        }
      } catch (error) {
        // eslint-disable-next-line no-console
        console.log(error)

        retryCount++
        if (retryCount > retryLimit) {
          info(
            `Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`
          )
          return false
        } else {
          info(`Retrying chunk upload after encountering an error`)
          this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex)
          await new Promise(resolve =>
            setTimeout(resolve, getRetryWaitTimeInMilliseconds())
          )
        }
      }
    }
    return false
  }

  /**
   * Updates the size of the artifact from -1, which was initially set when the container was first created for the artifact.
   * Updating the size indicates that we are done uploading all the contents of the artifact
   */
  async patchArtifactSize(size: number, artifactName: string): Promise<void> {
    const requestOptions = getRequestOptions('application/json', false, false)
    const resourceUrl = new URL(getArtifactUrl())
    resourceUrl.searchParams.append('artifactName', artifactName)

    const parameters: PatchArtifactSize = {Size: size}
    const data: string = JSON.stringify(parameters, null, 2)
    debug(`URL is ${resourceUrl.toString()}`)

    // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
    const client = this.uploadHttpManager.getClient(0)
    const rawResponse: HttpClientResponse = await client.patch(
      resourceUrl.toString(),
      data,
      requestOptions
    )
    const body: string = await rawResponse.readBody()
    if (isSuccessStatusCode(rawResponse.message.statusCode)) {
      debug(
        `Artifact ${artifactName} has been successfully uploaded, total size ${size}`
      )
    } else if (rawResponse.message.statusCode === 404) {
      throw new Error(`An Artifact with the name ${artifactName} was not found`)
    } else {
      // eslint-disable-next-line no-console
      console.log(body)
      throw new Error(`Unable to finish uploading artifact ${artifactName}`)
    }
  }
}

interface UploadFileParameters {
  file: string
  resourceUrl: string
  maxChunkSize: number
  continueOnError: boolean
}

interface UploadFileResult {
  isSuccess: boolean
  successfulUploadSize: number
  totalSize: number
}
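The retry handling in uploadChunk above follows a simple bounded-retry pattern: retry on retryable status codes or thrown errors, dispose and replace the keep-alive client, wait, and give up once the limit is reached. A minimal standalone sketch of that same pattern, where sendChunk, RETRY_LIMIT, and RETRY_WAIT_MS are hypothetical stand-ins for the package's HTTP call and config helpers:

// A minimal sketch of the bounded-retry pattern used by uploadChunk.
// `sendChunk`, RETRY_LIMIT, and RETRY_WAIT_MS are hypothetical stand-ins.
const RETRY_LIMIT = 3
const RETRY_WAIT_MS = 1000

async function uploadWithRetry(
  sendChunk: () => Promise<{statusCode: number}>
): Promise<boolean> {
  for (let retryCount = 0; retryCount <= RETRY_LIMIT; retryCount++) {
    try {
      const {statusCode} = await sendChunk()
      if (statusCode >= 200 && statusCode < 300) {
        return true // success, nothing left to do
      }
      if (![408, 429, 500, 502, 503, 504].includes(statusCode)) {
        return false // non-retryable failure, give up immediately
      }
    } catch {
      // network errors are treated the same as retryable status codes
    }
    // wait before the next attempt; the real client also disposes and
    // replaces its keep-alive connection at this point
    await new Promise(resolve => setTimeout(resolve, RETRY_WAIT_MS))
  }
  return false // retry limit exhausted
}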
@@ -1,7 +1,7 @@
import * as fs from 'fs'
import {debug} from '@actions/core'
import {join, normalize, resolve} from 'path'
import {checkArtifactName} from './internal-utils'
import {checkArtifactName, checkArtifactFilePath} from './utils'

export interface UploadSpecification {
  absoluteFilePath: string
@@ -58,7 +58,6 @@ export function getUploadSpecification(
    if (!fs.existsSync(file)) {
      throw new Error(`File ${file} does not exist`)
    }

    if (!fs.lstatSync(file).isDirectory()) {
      // Normalize and resolve, this allows for either absolute or relative paths to be used
      file = normalize(file)
@@ -69,6 +68,10 @@ export function getUploadSpecification(
        )
      }

      // Check for forbidden characters in file paths that will be rejected during upload
      const uploadPath = file.replace(rootDirectory, '')
      checkArtifactFilePath(uploadPath)

      /*
        uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
        be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
@@ -81,7 +84,7 @@ export function getUploadSpecification(
      */
      specifications.push({
        absoluteFilePath: file,
        uploadFilePath: join(artifactName, file.replace(rootDirectory, ''))
        uploadFilePath: join(artifactName, uploadPath)
      })
    } else {
      // Directories are rejected by the server during upload
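To make the uploadFilePath construction above concrete, a worked example with hypothetical paths:

// Hypothetical example of how uploadFilePath is derived
// artifactName  = 'my-artifact'
// rootDirectory = '/home/user/files/plz-upload'
// file          = '/home/user/files/plz-upload/dir/file1.txt'
const uploadPath = '/home/user/files/plz-upload/dir/file1.txt'.replace(
  '/home/user/files/plz-upload',
  ''
) // -> '/dir/file1.txt'
// join('my-artifact', '/dir/file1.txt') -> 'my-artifact/dir/file1.txt'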

packages/artifact/src/internal/upload-status-reporter.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
import {info} from '@actions/core'

/**
 * Upload Status Reporter that displays information about the progress/status of an artifact that is being uploaded
 *
 * Every 10 seconds, the total status of the upload gets displayed. If there is a large file that is being uploaded,
 * extra information about the individual status of that upload can also be displayed
 */

export class UploadStatusReporter {
  private totalNumberOfFilesToUpload = 0
  private processedCount = 0
  private largeUploads = new Map<string, string>()
  private totalUploadStatus: NodeJS.Timeout | undefined
  private largeFileUploadStatus: NodeJS.Timeout | undefined

  constructor() {
    this.totalUploadStatus = undefined
    this.largeFileUploadStatus = undefined
  }

  setTotalNumberOfFilesToUpload(fileTotal: number): void {
    this.totalNumberOfFilesToUpload = fileTotal
  }

  start(): void {
    const _this = this

    // displays information about the total upload status every 10 seconds
    this.totalUploadStatus = setInterval(function() {
      // display 1 decimal place without any rounding
      const percentage = _this.formatPercentage(
        _this.processedCount,
        _this.totalNumberOfFilesToUpload
      )
      info(
        `Total file(s): ${
          _this.totalNumberOfFilesToUpload
        } ---- Processed file #${_this.processedCount} (${percentage.slice(
          0,
          percentage.indexOf('.') + 2
        )}%)`
      )
    }, 10000)

    // displays, every second, extra information about any large files that take a significant amount of time to upload
    this.largeFileUploadStatus = setInterval(function() {
      for (const value of Array.from(_this.largeUploads.values())) {
        info(value)
      }
      // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added
      _this.largeUploads = new Map<string, string>()
    }, 1000)
  }

  updateLargeFileStatus(
    fileName: string,
    numerator: number,
    denominator: number
  ): void {
    // display 1 decimal place without any rounding
    const percentage = this.formatPercentage(numerator, denominator)
    const displayInformation = `Uploading ${fileName} (${percentage.slice(
      0,
      percentage.indexOf('.') + 2
    )}%)`

    // any previously added display information should be overwritten for the specific large file because a map is being used
    this.largeUploads.set(fileName, displayInformation)
  }

  stop(): void {
    if (this.totalUploadStatus) {
      clearInterval(this.totalUploadStatus)
    }

    if (this.largeFileUploadStatus) {
      clearInterval(this.largeFileUploadStatus)
    }
  }

  incrementProcessedCount(): void {
    this.processedCount++
  }

  private formatPercentage(numerator: number, denominator: number): string {
    // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
    return ((numerator / denominator) * 100).toFixed(4).toString()
  }
}
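A short usage sketch of the reporter above (the file name and byte counts are illustrative only). It also shows how the truncating percentage formatting behaves: toFixed(4) produces "65.0000", and slicing to one digit past the decimal point yields "65.0" without ever rounding up.

// Illustrative usage of UploadStatusReporter; names and sizes are made up
const reporter = new UploadStatusReporter()
reporter.setTotalNumberOfFilesToUpload(25)
reporter.start() // begins the 10s total-status and 1s large-file timers

// while uploading a large file, report per-file progress
reporter.updateLargeFileStatus('big-build.tar.gz', 6500000, 10000000)
// -> next 1s tick logs: Uploading big-build.tar.gz (65.0%)

reporter.incrementProcessedCount() // one file finished
reporter.stop() // clear both timers so the process can exit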

@@ -7,7 +7,7 @@ import {
  getRuntimeToken,
  getRuntimeUrl,
  getWorkFlowRunId
} from './internal-config-variables'
} from './config-variables'

/**
 * Parses an env variable that is a number
@@ -59,17 +59,40 @@ export function getContentRange(
  return `bytes ${start}-${end}/${total}`
}
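For reference, the Content-Range value produced for the first 1 KiB chunk of a 4 KiB upload (byte indexes are inclusive on both ends):

// Example output of getContentRange for the first 1024-byte chunk of a 4096-byte upload
getContentRange(0, 1023, 4096) // -> 'bytes 0-1023/4096'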

/**
 * Sets all the necessary headers when making HTTP calls
 * @param {string} contentType the type of content being uploaded
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} isGzip is the connection being used to upload GZip compressed content
 * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
 * @param {number} contentLength the length of the content that is being uploaded
 * @param {string} contentRange the range of the content that is being uploaded
 * @returns appropriate request options to make a specific http call
 */
export function getRequestOptions(
  contentType?: string,
  isKeepAlive?: boolean,
  isGzip?: boolean,
  uncompressedLength?: number,
  contentLength?: number,
  contentRange?: string
): IHeaders {
  const requestOptions: IHeaders = {
    // same Accept type for each http call that gets made
    Accept: `application/json;api-version=${getApiVersion()}`
  }
  if (contentType) {
    requestOptions['Content-Type'] = contentType
  }
  if (isKeepAlive) {
    requestOptions['Connection'] = 'Keep-Alive'
    // keep alive for at least 10 seconds before closing the connection
    requestOptions['Keep-Alive'] = '10'
  }
  if (isGzip) {
    requestOptions['Content-Encoding'] = 'gzip'
    requestOptions['x-tfs-filelength'] = uncompressedLength
  }
  if (contentLength) {
    requestOptions['Content-Length'] = contentLength
  }
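As an illustration, the headers assembled for a gzipped chunk upload. The hunk above is truncated before the contentRange parameter is consumed, so the Content-Range entry shown in the result is an assumption; all of the values are made up (original file 8 KiB, compressed to 4 KiB, uploaded in 1 KiB chunks):

// Illustrative call for a gzipped chunk upload; values are hypothetical
getRequestOptions(
  'application/octet-stream', // contentType
  true, // isKeepAlive
  true, // isGzip
  8192, // uncompressedLength: original size of the file
  1024, // contentLength: size of this chunk
  'bytes 0-1023/4096' // contentRange over the compressed upload size
)
// -> {
//   Accept: 'application/json;api-version=...',
//   'Content-Type': 'application/octet-stream',
//   Connection: 'Keep-Alive',
//   'Keep-Alive': '10',
//   'Content-Encoding': 'gzip',
//   'x-tfs-filelength': 8192,
//   'Content-Length': 1024,
//   'Content-Range': 'bytes 0-1023/4096' // assumed, set by the truncated tail
// }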

@@ -96,20 +119,54 @@ export function getArtifactUrl(): string {
 * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
 * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
 * individual filesystem/platform will not be supported on all filesystems/platforms
 *
 * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
 */
const invalidCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?', ' ']
const invalidArtifactFilePathCharacters = [
  '"',
  ':',
  '<',
  '>',
  '|',
  '*',
  '?',
  ' '
]
const invalidArtifactNameCharacters = [
  ...invalidArtifactFilePathCharacters,
  '\\',
  '/'
]

/**
 * Scans the name of the item being uploaded to make sure there are no illegal characters
 * Scans the name of the artifact to make sure there are no illegal characters
 */
export function checkArtifactName(name: string): void {
  if (!name) {
    throw new Error(`Artifact name: ${name}, is incorrectly provided`)
  }
  for (const invalidChar of invalidCharacters) {

  for (const invalidChar of invalidArtifactNameCharacters) {
    if (name.includes(invalidChar)) {
      throw new Error(
        `Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid characters include: ${invalidCharacters.toString()}.`
        `Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`
      )
    }
  }
}
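A quick illustration of the validator above (the error text is abbreviated):

// Artifact names containing path separators are rejected
checkArtifactName('my-artifact') // passes, no characters from the invalid list
checkArtifactName('dir/artifact')
// throws: Artifact name is not valid: dir/artifact. Contains character: "/". ...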

/**
 * Scans the filePath used to make sure there are no illegal characters
 */
export function checkArtifactFilePath(path: string): void {
  if (!path) {
    throw new Error(`Artifact path: ${path}, is incorrectly provided`)
  }

  for (const invalidChar of invalidArtifactFilePathCharacters) {
    if (path.includes(invalidChar)) {
      throw new Error(
        `Artifact path is not valid: ${path}. Contains character: "${invalidChar}". Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`
      )
    }
  }
}