Compare commits


No commits in common. "main" and "0.0.1" have entirely different histories.
main ... 0.0.1

317 changed files with 11395 additions and 56840 deletions


@@ -1,5 +1,3 @@
 node_modules/
 packages/*/node_modules/
 packages/*/lib/
-packages/glob/__tests__/_temp
-packages/*/src/generated/*/


@@ -1,76 +1,28 @@
 {
-  "plugins": [
-    "jest",
-    "@typescript-eslint",
-    "prettier"
-  ],
-  "extends": [
-    "plugin:github/recommended",
-    "plugin:prettier/recommended"
-  ],
+  "plugins": ["jest", "@typescript-eslint"],
+  "extends": ["plugin:github/es6"],
   "parser": "@typescript-eslint/parser",
   "parserOptions": {
     "ecmaVersion": 9,
     "sourceType": "module",
-    "project": "./tsconfig.eslint.json"
+    "project": "./tsconfig.json"
   },
   "rules": {
-    "prettier/prettier": [
-      "error",
-      {
-        "endOfLine": "auto"
-      }
-    ],
     "eslint-comments/no-use": "off",
-    "no-constant-condition": ["error", { "checkLoops": false }],
-    "github/no-then": "off",
     "import/no-namespace": "off",
-    "no-shadow": "off",
     "no-unused-vars": "off",
-    "i18n-text/no-en": "off",
-    "filenames/match-regex": "off",
-    "import/no-commonjs": "off",
-    "import/named": "off",
-    "no-sequences": "off",
-    "import/no-unresolved": "off",
-    "no-undef": "off",
-    "no-only-tests/no-only-tests": "off",
     "@typescript-eslint/no-unused-vars": "error",
-    "@typescript-eslint/explicit-member-accessibility": [
-      "error",
-      {
-        "accessibility": "no-public"
-      }
-    ],
+    "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
     "@typescript-eslint/no-require-imports": "error",
     "@typescript-eslint/array-type": "error",
     "@typescript-eslint/await-thenable": "error",
-    "@typescript-eslint/ban-ts-comment": "error",
+    "@typescript-eslint/ban-ts-ignore": "error",
     "camelcase": "off",
-    "@typescript-eslint/camelcase": "off",
-    "@typescript-eslint/consistent-type-assertions": "off",
-    "@typescript-eslint/explicit-function-return-type": [
-      "error",
-      {
-        "allowExpressions": true
-      }
-    ],
-    "@typescript-eslint/func-call-spacing": [
-      "error",
-      "never"
-    ],
-    "@typescript-eslint/naming-convention": [
-      "error",
-      {
-        "format": null,
-        "filter": {
-          // you can expand this regex as you find more cases that require quoting that you want to allow
-          "regex": "^[A-Z][A-Za-z]*$",
-          "match": true
-        },
-        "selector": "memberLike"
-      }
-    ],
+    "@typescript-eslint/camelcase": "error",
+    "@typescript-eslint/class-name-casing": "error",
+    "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
+    "@typescript-eslint/func-call-spacing": ["error", "never"],
+    "@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
     "@typescript-eslint/no-array-constructor": "error",
     "@typescript-eslint/no-empty-interface": "error",
     "@typescript-eslint/no-explicit-any": "error",
@@ -80,6 +32,7 @@
     "@typescript-eslint/no-misused-new": "error",
     "@typescript-eslint/no-namespace": "error",
     "@typescript-eslint/no-non-null-assertion": "warn",
+    "@typescript-eslint/no-object-literal-type-assertion": "error",
     "@typescript-eslint/no-unnecessary-qualifier": "error",
     "@typescript-eslint/no-unnecessary-type-assertion": "error",
     "@typescript-eslint/no-useless-constructor": "error",
@@ -87,18 +40,16 @@
     "@typescript-eslint/prefer-for-of": "warn",
     "@typescript-eslint/prefer-function-type": "warn",
     "@typescript-eslint/prefer-includes": "error",
+    "@typescript-eslint/prefer-interface": "error",
     "@typescript-eslint/prefer-string-starts-ends-with": "error",
     "@typescript-eslint/promise-function-async": "error",
     "@typescript-eslint/require-array-sort-compare": "error",
+    "@typescript-eslint/restrict-plus-operands": "error",
     "semi": "off",
-    "@typescript-eslint/semi": [
-      "error",
-      "never"
-    ],
+    "@typescript-eslint/semi": ["error", "never"],
     "@typescript-eslint/type-annotation-spacing": "error",
     "@typescript-eslint/unbound-method": "error"
   },
-  "ignorePatterns": "packages/glob/__tests__/_temp/**/",
   "env": {
     "node": true,
     "es6": true,


@@ -1,59 +0,0 @@
# Contributions
We welcome contributions in the form of issues and pull requests. We view the contributions and process as the same for internal and external contributors.
## Issues
Log issues for both bugs and enhancement requests. Logging issues is important for the open community.
Issues in this repository should be for the toolkit packages. General feedback for GitHub Actions should be filed in the [community forums.](https://github.community/t5/GitHub-Actions/bd-p/actions) Runner specific issues can be filed [in the runner repository](https://github.com/actions/runner).
## Enhancements and Feature Requests
We ask that, before significant effort is put into code changes, we have agreement on taking the change.
1. Create a feature request.
2. When we agree to take the enhancement, create an ADR to agree on the details of the change.
An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](../docs/adrs/README.md).
## Development Life Cycle
This repository uses [Lerna](https://github.com/lerna/lerna#readme) to manage multiple packages. Read the documentation there to begin contributing.
Note that before a PR will be accepted, you must ensure:
- all tests are passing
- `npm run format` reports no issues
- `npm run lint` reports no issues
### Useful Scripts
- `npm run bootstrap` This runs `lerna exec -- npm install` which will install dependencies in this repository's packages and cross-link packages where necessary.
- `npm run build` This compiles TypeScript code in each package (this is especially important if one package relies on changes in another when you're running tests). This is just an alias for `lerna run tsc`.
- `npm run format` This checks that formatting has been applied with Prettier.
- `npm test` This runs all Jest tests in all packages in this repository.
- If you need to run tests for only one package, you can pass normal Jest CLI options:
```console
$ npm test -- packages/toolkit
```
- `npm run create-package [name]` This runs a script that automates a couple of parts of creating a new package.
### Creating a Package
1. In a new branch, create a new Lerna package:
```console
$ npm run create-package [name]
```
This will ask you some questions about the new package. Start with `0.0.0` as the first version (look generally at some of the other packages for how the package.json is structured).
2. Add `tsc` script to the new package's package.json file:
```json
"scripts": {
"tsc": "tsc"
}
```
3. Start developing 😄.


@@ -1,46 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
Thank you 🙇‍♀ for wanting to create an issue in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened if the issue **relates to code in this repository**.
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions about writing workflows or action files, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If your issue is relevant to this repository, please include the information below:
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.


@@ -1,25 +0,0 @@
---
name: Feature Request
about: Create a request to help us improve
title: ''
labels: enhancement
assignees: ''
---
Thank you 🙇‍♀ for wanting to create an issue in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened if the issue **relates to code in this repository**.
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions about writing workflows or action files, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If your issue is relevant to this repository, please include the information below:
**Describe the enhancement**
A clear and concise description of the feature or enhancement you need.
**Code Snippet**
If applicable, add a code snippet to show the API enhancement.
**Additional information**
Add any other context about the feature here.

.github/main.workflow (new file)

@@ -0,0 +1,39 @@
workflow "CI" {
on = "push"
resolves = ["Format", "Lint", "Test"]
}
action "Dependencies" {
uses = "actions/npm@v2.0.0"
args = "ci"
}
action "Bootstrap" {
needs = "Dependencies"
uses = "actions/npm@v2.0.0"
args = "run bootstrap"
}
action "Compile" {
needs = "Bootstrap"
uses = "actions/npm@v2.0.0"
args = "run build"
}
action "Format" {
needs = "Dependencies"
uses = "actions/npm@v2.0.0"
args = "run format-check"
}
action "Lint" {
needs = "Dependencies"
uses = "actions/npm@v2.0.0"
args = "run lint"
}
action "Test" {
needs = "Compile"
uses = "actions/npm@v2.0.0"
args = "test"
}


@@ -1,194 +0,0 @@
name: artifact-unit-tests
on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
jobs:
upload:
name: Upload
strategy:
matrix:
runs-on: [ubuntu-latest, windows-latest, macos-latest]
fail-fast: false
runs-on: ${{ matrix.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
# Need the root node_modules because certain npm packages like jest are configured for the entire repository,
# and without them it isn't possible to compile just the artifact package
- name: Install root npm packages
run: npm ci
- name: Compile artifact package
run: |
npm ci
npm run tsc
working-directory: packages/artifact
- name: Create files that will be uploaded
run: |
mkdir artifact-path
echo -n 'hello from file 1' > artifact-path/first.txt
echo -n 'hello from file 2' > artifact-path/second.txt
- name: Upload Artifacts
uses: actions/github-script@v7
with:
script: |
const {default: artifact} = require('./packages/artifact/lib/artifact')
const artifactName = 'my-artifact-${{ matrix.runs-on }}'
console.log('artifactName: ' + artifactName)
const fileContents = ['artifact-path/first.txt','artifact-path/second.txt']
const uploadResult = await artifact.uploadArtifact(artifactName, fileContents, './')
console.log(uploadResult)
const size = uploadResult.size
const id = uploadResult.id
console.log(`Successfully uploaded artifact ${id}`)
try {
await artifact.uploadArtifact(artifactName, fileContents, './')
throw new Error('should have failed second upload')
} catch (err) {
console.log('Successfully blocked second artifact upload')
}
verify:
name: Verify and Delete
runs-on: ubuntu-latest
needs: [upload]
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
# Need the root node_modules because certain npm packages like jest are configured for the entire repository,
# and without them it isn't possible to compile just the artifact package
- name: Install root npm packages
run: npm ci
- name: Compile artifact package
run: |
npm ci
npm run tsc
working-directory: packages/artifact
- name: List and Download Artifacts
uses: actions/github-script@v7
with:
script: |
const {default: artifactClient} = require('./packages/artifact/lib/artifact')
const {readFile} = require('fs/promises')
const path = require('path')
const findBy = {
repositoryOwner: process.env.GITHUB_REPOSITORY.split('/')[0],
repositoryName: process.env.GITHUB_REPOSITORY.split('/')[1],
token: '${{ secrets.GITHUB_TOKEN }}',
workflowRunId: process.env.GITHUB_RUN_ID
}
const listResult = await artifactClient.listArtifacts({latest: true, findBy})
console.log(listResult)
const artifacts = listResult.artifacts
const expected = [
'my-artifact-ubuntu-latest',
'my-artifact-windows-latest',
'my-artifact-macos-latest'
]
const foundArtifacts = artifacts.filter(artifact =>
expected.includes(artifact.name)
)
if (foundArtifacts.length !== 3) {
console.log('Unexpected length of found artifacts', foundArtifacts)
throw new Error(
`Expected 3 artifacts but found ${foundArtifacts.length} artifacts.`
)
}
console.log('Successfully listed artifacts that were uploaded')
const files = [
{name: 'artifact-path/first.txt', content: 'hello from file 1'},
{name: 'artifact-path/second.txt', content: 'hello from file 2'}
]
for (const artifact of foundArtifacts) {
const {downloadPath} = await artifactClient.downloadArtifact(artifact.id, {
path: artifact.name,
findBy
})
console.log('Downloaded artifact to:', downloadPath)
for (const file of files) {
const filepath = path.join(
process.env.GITHUB_WORKSPACE,
downloadPath,
file.name
)
console.log('Checking file:', filepath)
const content = await readFile(filepath, 'utf8')
if (content.trim() !== file.content.trim()) {
throw new Error(
`Expected file '${file.name}' to contain '${file.content}' but found '${content}'`
)
}
}
}
- name: Delete Artifacts
uses: actions/github-script@v7
with:
script: |
const {default: artifactClient} = require('./packages/artifact/lib/artifact')
const artifactsToDelete = [
'my-artifact-ubuntu-latest',
'my-artifact-windows-latest',
'my-artifact-macos-latest'
]
for (const artifactName of artifactsToDelete) {
const {id} = await artifactClient.deleteArtifact(artifactName)
}
const {artifacts} = await artifactClient.listArtifacts({latest: true})
const foundArtifacts = artifacts.filter(artifact =>
artifactsToDelete.includes(artifact.name)
)
if (foundArtifacts.length !== 0) {
console.log('Unexpected length of found artifacts:', foundArtifacts)
throw new Error(
`Expected 0 artifacts but found ${foundArtifacts.length} artifacts.`
)
}


@@ -1,38 +0,0 @@
name: toolkit-audit
on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
jobs:
build:
name: Audit
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
- name: npm install
run: npm install
- name: Bootstrap
run: npm run bootstrap
- name: audit tools (without allow-list)
run: npm audit --audit-level=moderate --omit dev
- name: audit packages
run: npm run audit-all


@@ -1,91 +0,0 @@
name: cache-unit-tests
on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
jobs:
build:
name: Build
strategy:
matrix:
runs-on: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
# In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
# node context. This runs a local action that gets and sets the necessary env variables
- name: Set env variables
uses: ./packages/cache/__tests__/__fixtures__/
# Need the root node_modules because certain npm packages like jest are configured for the entire repository,
# and without them it isn't possible to compile just the cache package
- name: Install root npm packages
run: npm ci
- name: Compile cache package
run: |
npm ci
npm run tsc
working-directory: packages/cache
- name: Generate files in working directory
shell: bash
run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
- name: Generate files outside working directory
shell: bash
run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
# We're using node -e to call the functions directly available in the @actions/cache package
- name: Save cache using saveCache()
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
- name: Delete cache folders before restoring
shell: bash
run: |
rm -rf test-cache
rm -rf ~/test-cache
- name: Restore cache using restoreCache() with http-client
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))"
- name: Verify cache restored with http-client
shell: bash
run: |
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
- name: Delete cache folders before restoring
shell: bash
run: |
rm -rf test-cache
rm -rf ~/test-cache
- name: Restore cache using restoreCache() with Azure SDK
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
- name: Verify cache restored with Azure SDK
shell: bash
run: |
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache


@@ -1,90 +0,0 @@
name: cache-windows-bsd-unit-tests
on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
jobs:
build:
name: Build
runs-on: windows-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- shell: bash
run: |
rm "C:\Program Files\Git\usr\bin\tar.exe"
- name: Set Node.js 20.x
uses: actions/setup-node@v1
with:
node-version: 20.x
# In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
# node context. This runs a local action that gets and sets the necessary env variables
- name: Set env variables
uses: ./packages/cache/__tests__/__fixtures__/
# Need the root node_modules because certain npm packages like jest are configured for the entire repository,
# and without them it isn't possible to compile just the cache package
- name: Install root npm packages
run: npm ci
- name: Compile cache package
run: |
npm ci
npm run tsc
working-directory: packages/cache
- name: Generate files in working directory
shell: bash
run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} test-cache
- name: Generate files outside working directory
shell: bash
run: packages/cache/__tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
# We're using node -e to call the functions directly available in the @actions/cache package
- name: Save cache using saveCache()
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').saveCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
- name: Delete cache folders before restoring
shell: bash
run: |
rm -rf test-cache
rm -rf ~/test-cache
- name: Restore cache using restoreCache() with http-client
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}',[],{useAzureSdk: false}))"
- name: Verify cache restored with http-client
shell: bash
run: |
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
- name: Delete cache folders before restoring
shell: bash
run: |
rm -rf test-cache
rm -rf ~/test-cache
- name: Restore cache using restoreCache() with Azure SDK
run: |
node -e "Promise.resolve(require('./packages/cache/lib/cache').restoreCache(['test-cache','~/test-cache'],'test-${{ runner.os }}-${{ github.run_id }}'))"
- name: Verify cache restored with Azure SDK
shell: bash
run: |
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} test-cache
packages/cache/__tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache


@@ -1,37 +0,0 @@
name: "Code Scanning - Action"
on:
push:
branches:
- main
pull_request:
schedule:
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: javascript
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2


@@ -1,98 +0,0 @@
name: Publish NPM
run-name: Publish NPM - ${{ github.event.inputs.package }}
on:
workflow_dispatch:
inputs:
package:
type: choice
required: true
description: 'Which package to release'
options:
- artifact
- attest
- cache
- core
- exec
- github
- glob
- http-client
- io
- tool-cache
jobs:
test:
runs-on: macos-latest-large
steps:
- name: setup repo
uses: actions/checkout@v4
- name: verify package exists
run: ls packages/${{ github.event.inputs.package }}
- name: Set Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
- name: npm install
run: npm install
- name: bootstrap
run: npm run bootstrap
- name: build
run: npm run build
- name: test
run: npm run test
- name: pack
run: npm pack
working-directory: packages/${{ github.event.inputs.package }}
- name: upload artifact
uses: actions/upload-artifact@v4
with:
name: ${{ github.event.inputs.package }}
path: packages/${{ github.event.inputs.package }}/*.tgz
publish:
runs-on: macos-latest-large
needs: test
environment: npm-publish
permissions:
contents: read
id-token: write
steps:
- name: download artifact
uses: actions/download-artifact@v4
with:
name: ${{ github.event.inputs.package }}
- name: setup authentication
run: echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" >> .npmrc
env:
NPM_TOKEN: ${{ secrets.TOKEN }}
- name: publish
run: npm publish --provenance *.tgz
- name: notify slack on failure
if: failure()
run: |
curl -X POST -H 'Content-type: application/json' --data '{"text":":pb__failed: Failed to publish a new version of ${{ github.event.inputs.package }}"}' $SLACK_WEBHOOK
env:
SLACK_WEBHOOK: ${{ secrets.SLACK }}
- name: notify slack on success
if: success()
run: |
curl -X POST -H 'Content-type: application/json' --data '{"text":":dance: Successfully published a new version of ${{ github.event.inputs.package }}"}' $SLACK_WEBHOOK
env:
SLACK_WEBHOOK: ${{ secrets.SLACK }}


@@ -1,55 +0,0 @@
name: toolkit-unit-tests
on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
jobs:
build:
name: Build
strategy:
matrix:
runs-on: [ubuntu-latest, macos-latest-large, windows-latest]
# Node 18 is the current default Node version in hosted runners, so users may still use the toolkit with it when running tests (see https://github.com/actions/toolkit/issues/1841)
# Node 20 is the currently supported Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions
node-version: [18.x, 20.x]
fail-fast: false
runs-on: ${{ matrix.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Node ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: npm install
run: npm install
- name: Bootstrap
run: npm run bootstrap
- name: Compile
run: npm run build
- name: npm test
run: npm test -- --runInBand --forceExit
env:
GITHUB_TOKEN: ${{ github.token }}
- name: Lint
run: npm run lint
- name: Format
run: npm run format-check


@@ -1,45 +0,0 @@
name: "UpdateOctokit"
on:
workflow_dispatch:
jobs:
UpdateOctokit:
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'actions' }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Update Octokit
working-directory: packages/github
run: |
npx npm-check-updates -u --dep prod
npm install
- name: Check Status
id: status
working-directory: packages/github
run: |
if [[ "$(git status --porcelain)" != "" ]]; then
echo "::set-output name=createPR::true"
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
git checkout -b bots/updateGitHubDependencies-${{github.run_number}}
git add .
git commit -m "Update Dependencies"
git push --set-upstream origin bots/updateGitHubDependencies-${{github.run_number}}
fi
- name: Create PR
if: ${{steps.status.outputs.createPR}}
uses: actions/github-script@v6
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.pulls.create(
{
base: "main",
owner: "${{github.repository_owner}}",
repo: "toolkit",
title: "Update Octokit dependencies",
body: "Update Octokit dependencies",
head: "bots/updateGitHubDependencies-${{github.run_number}}"
})

.gitignore

@@ -2,6 +2,3 @@ node_modules/
 packages/*/node_modules/
 packages/*/lib/
 packages/*/__tests__/_temp/
-.DS_Store
-*.xar
-packages/*/audit.json


@@ -1,5 +1,3 @@
 node_modules/
 packages/*/node_modules/
 packages/*/lib/
-packages/glob/__tests__/_temp/**/
-packages/*/src/generated/*/


@@ -7,6 +7,5 @@
   "trailingComma": "none",
   "bracketSpacing": false,
   "arrowParens": "avoid",
-  "parser": "typescript",
-  "endOfLine": "auto"
+  "parser": "typescript"
 }


@@ -1,5 +0,0 @@
* @actions/actions-runtime
/packages/artifact/ @actions/artifacts-actions
/packages/cache/ @actions/actions-cache
/packages/attest/ @actions/package-security


@@ -1,76 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at opensource@github.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq


@@ -1,5 +1,3 @@
-The MIT License (MIT)
 Copyright 2019 GitHub
 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

README.md

@@ -1,236 +1,53 @@
-<p align="center">
-<img src="res/at-logo.png">
-</p>
-<p align="center">
-<a href="https://github.com/actions/toolkit/actions?query=workflow%3Atoolkit-unit-tests"><img alt="Toolkit unit tests status" src="https://github.com/actions/toolkit/workflows/toolkit-unit-tests/badge.svg"></a>
-<a href="https://github.com/actions/toolkit/actions?query=workflow%3Atoolkit-audit"><img alt="Toolkit audit status" src="https://github.com/actions/toolkit/workflows/toolkit-audit/badge.svg"></a>
-</p>
-## GitHub Actions Toolkit
-The GitHub Actions ToolKit provides a set of packages to make creating actions easier.
-<br/>
-<h3 align="center">Get started with the <a href="https://github.com/actions/javascript-action">javascript-action template</a>!</h3>
-<br/>
+# Actions Toolkit 🛠
 ## Packages
-:heavy_check_mark: [@actions/core](packages/core)
-Provides functions for inputs, outputs, results, logging, secrets and variables. Read more [here](packages/core)
-```bash
-npm install @actions/core
+| Package | Description |
+| ------- | ----------- |
+| [@actions/core](packages/core) | Core functions for setting results, logging, secrets and environment variables |
+| [@actions/exec](packages/exec) | Functions necessary for running tools on the command line |
+| [@actions/exit](packages/exit) | Provides utilities for exiting from an action |
+| [@actions/io](packages/io) | Core functions for CLI filesystem scenarios |
+| [@actions/tool-cache](packages/tool-cache) | Functions necessary for downloading and caching tools |
+| [@actions/toolkit](packages/toolkit) | A general-purpose toolkit for writing actions |
+## Development
+This repository uses [Lerna](https://github.com/lerna/lerna#readme) to manage multiple packages. Read the documentation there to begin contributing.
+Note that before a PR will be accepted, you must ensure:
+- all tests are passing
+- `npm run format` reports no issues
+- `npm run lint` reports no issues
+### Useful Scripts
+- `npm run bootstrap` This runs `lerna bootstrap` which will install dependencies in this repository's packages and cross-link packages where necessary.
+- `npm run build` This compiles TypeScript code in each package (this is especially important if one package relies on changes in another when you're running tests). This is just an alias for `lerna run tsc`.
+- `npm run format` This checks that formatting has been applied with Prettier.
+- `npm test` This runs all Jest tests in all packages in this repository.
+- If you need to run tests for only one package, you can pass normal Jest CLI options:
+```console
+$ npm test -- packages/toolkit
+```
+- `npm run create-package [name]` This runs a script that automates a couple of parts of creating a new package.
+### Creating a Package
+1. In a new branch, create a new Lerna package:
+```console
+$ npm run create-package new-package
 ```
-<br/>
-:runner: [@actions/exec](packages/exec)
-Provides functions to exec cli tools and process output. Read more [here](packages/exec)
-```bash
-npm install @actions/exec
+This will ask you some questions about the new package. Start with `0.0.0` as the first version (look generally at some of the other packages for how the package.json is structured).
+2. Add `tsc` script to the new package's package.json file:
+```json
+"scripts": {
+"tsc": "tsc"
+}
 ```
-<br/>
-:ice_cream: [@actions/glob](packages/glob)
-Provides functions to search for files matching glob patterns. Read more [here](packages/glob)
-```bash
-npm install @actions/glob
-```
-<br/>
-:phone: [@actions/http-client](packages/http-client)
-A lightweight HTTP client optimized for building actions. Read more [here](packages/http-client)
-```bash
-npm install @actions/http-client
-```
-<br/>
-:pencil2: [@actions/io](packages/io)
-Provides disk i/o functions like cp, mv, rmRF, which etc. Read more [here](packages/io)
-```bash
-npm install @actions/io
-```
-<br/>
-:hammer: [@actions/tool-cache](packages/tool-cache)
-Provides functions for downloading and caching tools. e.g. setup-* actions. Read more [here](packages/tool-cache)
-See @actions/cache for caching workflow dependencies.
-```bash
-npm install @actions/tool-cache
-```
-<br/>
-:octocat: [@actions/github](packages/github)
-Provides an Octokit client hydrated with the context that the current action is being run in. Read more [here](packages/github)
-```bash
-npm install @actions/github
-```
-<br/>
-:floppy_disk: [@actions/artifact](packages/artifact)
-Provides functions to interact with actions artifacts. Read more [here](packages/artifact)
-```bash
-npm install @actions/artifact
-```
-<br/>
-:dart: [@actions/cache](packages/cache)
-Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)
-```bash
-npm install @actions/cache
-```
-<br/>
-:lock_with_ink_pen: [@actions/attest](packages/attest)
-Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)
-```bash
-npm install @actions/attest
-```
-<br/>
-## Creating an Action with the Toolkit
-:question: [Choosing an action type](docs/action-types.md)
-Outlines the differences and why you would want to create a JavaScript or a container based action.
-<br/>
-<br/>
-:curly_loop: [Versioning](docs/action-versioning.md)
-Actions are downloaded and run from the GitHub graph of repos. This contains guidance for versioning actions and safe releases.
-<br/>
-<br/>
-:warning: [Problem Matchers](docs/problem-matchers.md)
-Problem Matchers are a way to scan the output of actions for a specified regex pattern and surface that information prominently in the UI.
-<br/>
-<br/>
-:warning: [Proxy Server Support](docs/proxy-support.md)
-Self-hosted runners can be configured to run behind proxy servers.
-<br/>
-<br/>
-<h3><a href="https://github.com/actions/hello-world-javascript-action">Hello World JavaScript Action</a></h3>
-Illustrates how to create a simple hello world javascript action.
-```javascript
-...
-const nameToGreet = core.getInput('who-to-greet');
-console.log(`Hello ${nameToGreet}!`);
-...
-```
-<br/>
-<h3><a href="https://github.com/actions/javascript-action">JavaScript Action Walkthrough</a></h3>
-Walkthrough and template for creating a JavaScript Action with tests, linting, workflow, publishing, and versioning.
-```javascript
-async function run() {
-try {
-const ms = core.getInput('milliseconds');
-console.log(`Waiting ${ms} milliseconds ...`)
-...
-```
-```javascript
-PASS ./index.test.js
-✓ throws invalid number
-✓ wait 500 ms
-✓ test runs
-Test Suites: 1 passed, 1 total
-Tests: 3 passed, 3 total
-```
-<br/>
-<h3><a href="https://github.com/actions/typescript-action">TypeScript Action Walkthrough</a></h3>
-Walkthrough creating a TypeScript Action with compilation, tests, linting, workflow, publishing, and versioning.
-```javascript
-import * as core from '@actions/core';
-async function run() {
-try {
-const ms = core.getInput('milliseconds');
-console.log(`Waiting ${ms} milliseconds ...`)
-...
-```
-```javascript
-PASS ./index.test.js
-✓ throws invalid number
-✓ wait 500 ms
-✓ test runs
-Test Suites: 1 passed, 1 total
-Tests: 3 passed, 3 total
-```
-<br/>
-<br/>
-<h3><a href="docs/container-action.md">Docker Action Walkthrough</a></h3>
-Create an action that is delivered as a container and run with docker.
-```docker
-FROM alpine:3.10
-COPY LICENSE README.md /
-COPY entrypoint.sh /entrypoint.sh
-ENTRYPOINT ["/entrypoint.sh"]
-```
-<br/>
-<h3><a href="https://github.com/actions/container-toolkit-action">Docker Action Walkthrough with Octokit</a></h3>
-Create an action that is delivered as a container which uses the toolkit. This example uses the GitHub context to construct an Octokit client.
-```docker
-FROM node:slim
-COPY . .
-RUN npm install --production
-ENTRYPOINT ["node", "/lib/main.js"]
-```
-```javascript
-const myInput = core.getInput('myInput');
-core.debug(`Hello ${myInput} from inside a container`);
-const context = github.context;
-console.log(`We can even get context data, like the repo: ${context.repo.repo}`)
-```
-<br/>
-## Contributing
-We welcome contributions. See [how to contribute](.github/CONTRIBUTING.md).
-## Code of Conduct
-See [our code of conduct](CODE_OF_CONDUCT.md).
+3. Start developing 😄 and open a pull request.


@@ -1,3 +0,0 @@
If you discover a security issue in this repo, please submit it through the [GitHub Security Bug Bounty](https://hackerone.com/github)
Thanks for helping make GitHub Actions safe for everyone.


@@ -1,31 +0,0 @@
# Debugging
If the job logs do not provide enough detail on why a job may be failing, some other options exist to assist with troubleshooting.
## Step Debug Logs
This is the primary way for customers to debug job failures caused by failed steps.
Step debug logs increase the verbosity of a job's logs during and after a job's execution to assist with troubleshooting.
Additional log events with the prefix `::debug::` will now also appear in the job's logs; these log events are provided by the Action's author and the runner process.
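For example, a script step can emit its own debug line using the same prefix (the message text here is just an illustration):
```sh
echo "::debug::finished installing dependencies, starting build"
```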
### How to Access Step Debug Logs
This flag can be enabled by [setting the secret](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) `ACTIONS_STEP_DEBUG` to `true`.
All actions run while this secret is enabled will show debug events in the [Downloaded Logs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/managing-a-workflow-run#downloading-logs) and [Web Logs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/managing-a-workflow-run#viewing-logs-to-diagnose-failures).
## Runner Diagnostic Logs
Runner Diagnostic Logs provide additional log files detailing how the Runner is executing an action.
You need the runner diagnostic logs only if you think there is an infrastructure problem with GitHub Actions and you want the product team to check the logs.
Each file contains different logging information that corresponds to that process:
* The Runner process coordinates setting up workers to execute jobs.
* The Worker process executes the job.
These files contain the prefix `Runner_` or `Worker_` to indicate the log source.
### How to Access Runner Diagnostic Logs
These log files are enabled by [setting the secret](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets) `ACTIONS_RUNNER_DEBUG` to `true`.
All actions run while this secret is enabled will contain additional diagnostic log files in the `runner-diagnostic-logs` folder of the [log archive](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/managing-a-workflow-run#downloading-logs).


@@ -1,45 +0,0 @@
# Action Types
There are two types of actions: JavaScript actions and Docker actions.
- **JavaScript Actions**: JavaScript actions run on the host machine. The unit of work is decoupled from the environment.
- **Docker Actions**: A Docker action carries both the unit of work and the environment, with its dependencies packaged up as a container.
Both have access to the workspace and the github event payload and context.
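As a sketch of that shared access from the JavaScript side (using the toolkit's `@actions/github` package; the log text is illustrative):
```js
const github = require('@actions/github')

// The event payload and repo context are available to both action types
const context = github.context
console.log(`event: ${context.eventName}`)
console.log(`repo: ${context.repo.owner}/${context.repo.repo}`)

// The workspace is exposed through the GITHUB_WORKSPACE environment variable
console.log(`workspace: ${process.env.GITHUB_WORKSPACE}`)
```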
## Why would I choose a Docker action?
Docker actions carry both the unit of work and the environment.
This creates a more consistent and reliable unit of work where the consumer of the action does not need to worry about the toolsets and its dependencies.
Docker actions are currently limited to Linux only.
## Why would I choose a host action?
JavaScript actions decouple the unit of work from the environment and run directly on the host machine or VM.
Consider a simple example of testing a node lib on node 8 and 10 while also running a custom action. Each job will set up a node version on the host, and custom-action will run its unit of work in each environment (node8+ubuntu16, node8+windows-2019, etc.)
```yaml
on: push
jobs:
build:
strategy:
matrix:
node: [8.x, 10.x]
os: [ubuntu-16.04, windows-2019]
runs-on: ${{matrix.os}}
steps:
- uses: actions/setup-node@v4
with:
node-version: ${{matrix.node}}
- run: |
npm install
- run: |
npm test
- uses: actions/custom-action@v1
```
JavaScript actions work in any environment that the host action runtime supports, which is currently node 12. However, a host action that runs a toolset expects the environment it's running on to have that toolset in its PATH, or to use a setup-* action to acquire it on demand.


@@ -1,63 +0,0 @@
# Versioning
Actions are downloaded and run from the GitHub graph of repos. The workflow references an action using a ref.
Examples:
```yaml
steps:
- uses: actions/javascript-action@v1 # recommended. starter workflows use this
- uses: actions/javascript-action@v1.0.0 # if an action offers specific releases
- uses: actions/javascript-action@41775a4da8ffae865553a738ab8ac1cd5a3c0044 # sha
```
# Compatibility
Binding to a major version means binding to the latest release of that major version (e.g. `v1` == `1.*`).
Major versions should guarantee compatibility. A major version can add net new capabilities but should not break existing input compatibility or break existing workflows.
Major version binding allows you to take advantage of bug fixes, critical functionality, and security fixes. The `main` branch has the latest code and is unstable to bind to, since changes get committed to main and released to the marketplace by creating a tag. In addition, a new major version carrying breaking changes will get implemented in main after branching off the previous major version.
> Warning: do not reference `main` since that is the latest code and can be carrying breaking changes of the next major version.
```yaml
steps:
- uses: actions/javascript-action@main # do not do this
```
Binding to the immutable full SHA-1 may offer more reliability. However, note that the toolsets on hosted images (e.g. ubuntu-latest) move forward, and if there is a tool-breaking issue, actions may react with a patch to a major version to compensate, so binding to a specific SHA may prevent you from getting fixes.
> Recommendation: bind to major versions to get functionality and fixes but reserve binding to a specific release or SHA as a mitigation strategy for unforeseen breaks.
# Recommendations
1. **Create a GitHub release for each specific version**: Creating a release like [ v1.0.0 ](https://github.com/actions/javascript-action/releases/tag/v1.0.0) allows users to bind back to a specific version if an issue is encountered with the latest major version.
2. **Publish the specific version to the marketplace**: When you release a specific version, choose the option to "Publish this Action to the GitHub Marketplace".
<img src="https://user-images.githubusercontent.com/33549821/78670739-36f5ae00-78ac-11ea-9660-57d5687ce520.png" alt="screenshot" height="250"/>
3. **Make the new release available to those binding to the major version tag**: Move the major version tag (v1, v2, etc.) to point to the ref of the current release. This will act as the stable release for that major version. You should keep this tag updated to the most recent stable minor/patch release.
```
git tag -fa v1 -m "Update v1 tag"
git push origin v1 --force
```
# Major Versions
All releases for a major version should maintain compatibility, including input compatibility and behavior compatibility.
Introduce a major version for compatibility breaks and major rewrites of the action.
Ideally, a major version would carry other benefits to the user to entice them to upgrade their workflows. Since updating their workflows will need to be done with an understanding of the changes and what compatibility was broken, introducing a new major version shouldn't be taken lightly.
To get feedback and to set expectations, the new major version can be initially released with a `v2-beta` tag to indicate that you can try it out but it's still undergoing some churn. Upon release, the `-beta` can be dropped and there's an expectation of compatibility from that point forward.
[An example of v2-beta with checkout](https://github.com/actions/checkout/tree/c170eefc2657d93cc91397be50a299bff978a052#checkout-v2-beta)
# Sample Workflow
This illustrates the versioning workflow covered above.
![versioning](assets/action-releases.png)


@@ -1,216 +0,0 @@
# ADR 381: `glob` module
**Date**: 2019-12-05
**Status**: Accepted
## Context
This ADR proposes adding a `glob` function to the toolkit.
First party actions should have a consistent glob experience.
Related to artifact upload/download v2.
## Decision
### New module
Create a new module `@actions/glob` that can be versioned at its own pace, not tied to `@actions/io`.
### Signature
```js
/**
* Constructs a globber from patterns
*
* @param patterns Patterns separated by newlines
* @param options Glob options
*/
export function create(
patterns: string,
options?: GlobOptions
): Promise<Globber> {}
/**
* Used to match files and directories
*/
export interface Globber {
/**
* Returns the search path preceding the first glob segment, from each pattern.
* Duplicates and descendants of other paths are filtered out.
*
* Example 1: The patterns `/foo/*` and `/bar/*` returns `/foo` and `/bar`.
*
* Example 2: The patterns `/foo/*` and `/foo/bar/*` returns `/foo`.
*/
getSearchPaths(): string[]
/**
* Returns files and directories matching the glob patterns.
*
* Order of the results is not guaranteed.
*/
glob(): Promise<string[]>
/**
* Returns files and directories matching the glob patterns.
*
* Order of the results is not guaranteed.
*/
globGenerator(): AsyncGenerator<string, void>
}
/**
* Options to control globbing behavior
*/
export interface GlobOptions {
/**
* Indicates whether to follow symbolic links. Generally this should be set to false
* when deleting files.
*
* @default true
*/
followSymbolicLinks?: boolean
/**
* Indicates whether directories that match a glob pattern should implicitly
* cause all descendant paths to be matched.
*
* For example, given the directory `my-dir`, the following glob patterns
* would produce the same results: `my-dir/**`, `my-dir/`, `my-dir`
*
* @default true
*/
implicitDescendants?: boolean
/**
* Indicates whether broken symbolic links should be ignored and omitted from the
* result set. Otherwise an error will be thrown.
*
* @default true
*/
omitBrokenSymbolicLinks?: boolean
}
```
### Toolkit usage
Example, do not follow symbolic links:
```js
const patterns = core.getInput('path')
const globber = await glob.create(patterns, {followSymbolicLinks: false})
const files = await globber.glob()
```
Example, iterator:
```js
const patterns = core.getInput('path')
const globber = await glob.create(patterns)
for await (const file of globber.globGenerator()) {
console.log(file)
}
```
### Action usage
Actions should follow symbolic links by default.
Users can opt-out.
Example:
```yaml
jobs:
build:
steps:
- uses: actions/upload-artifact@v1
with:
path: |
**/*.tar.gz
**/*.pkg
follow-symbolic-links: false # opt out, should default to true
```
### HashFiles function
Hash files should not follow symbolic links by default.
User can opt-in by specifying flag `--follow-symbolic-links`.
Example:
```yaml
jobs:
build:
steps:
- uses: actions/cache@v1
with:
hash: ${{ hashFiles('--follow-symbolic-links', '**/package-lock.json') }}
```
### Glob behavior
Patterns `*`, `?`, `[...]`, `**` (globstar) are supported.
With the following behaviors:
- File names that begin with `.` may be included in the results
- Case insensitive on Windows
- Directory separator `/` and `\` both supported on Windows
Note:
- Refer [here](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html#Pattern-Matching) for more information about Bash glob patterns.
- Refer [here](https://www.gnu.org/software/bash/manual/html_node/The-Shopt-Builtin.html) for more information about Bash glob options.
### Tilde expansion
Support basic tilde expansion, for current user HOME replacement only.
For example, on macOS:
- `~` may expand to `/Users/johndoe`
- `~/foo` may expand to `/Users/johndoe/foo`
Note:
- Refer [here](https://www.gnu.org/software/bash/manual/html_node/Tilde-Expansion.html) for more information about Bash tilde expansion.
- All other forms of tilde expansion are not supported.
- Use `os.homedir()` to resolve the HOME path
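A minimal sketch of the proposed behavior (the `/Users/johndoe` home directory is hypothetical, matching the example above):
```js
const glob = require('@actions/glob')

// '~/foo/**' is expanded using os.homedir() before searching,
// e.g. to '/Users/johndoe/foo/**' on macOS
const globber = await glob.create('~/foo/**')
const files = await globber.glob()
```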
### Root and normalize paths
An unrooted pattern will be rooted using the current working directory, prior to searching. Additionally the search path will be normalized prior to searching (relative pathing removed, slashes normalized on Windows, extra slashes removed).
The two side effects are:
1. Rooted and normalized paths are always returned
2. The pattern `**` will include the working directory in the results
These side effects diverge from Bash behavior. Whereas Bash is designed to be a shell, we are designing an API. This decision is intended to improve predictability of the API results.
Note:
- In Bash, the results are not rooted when the pattern is relative.
- In Bash, the results are not normalized. For example, the results from `./*` may look like: `./foo ./bar`
- In Bash, the results from the pattern `**` do not include the working directory. However the results from `/foo/**` would include the directory `/foo`. Also the results from `foo/**` would include the directory `foo`.
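For example (a sketch; the working directory `/home/user/workspace` is hypothetical):
```js
const glob = require('@actions/glob')

// Assume the current working directory is /home/user/workspace
const globber = await glob.create('**')
const files = await globber.glob()
// Results are rooted and normalized, and '**' includes the working
// directory itself, e.g.:
//   ['/home/user/workspace', '/home/user/workspace/src', ...]
```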
### Comments
Patterns that begin with `#` are treated as comments.
### Exclude patterns
Leading `!` changes the meaning of an include pattern to exclude.
Note:
- Multiple leading `!` flips the meaning.
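For example, a pattern list could include everything under the working directory and then carve out an exclusion (a sketch; the patterns are illustrative):
```js
const glob = require('@actions/glob')

// Patterns are separated by newlines; the leading '!' turns the
// second pattern into an exclusion
const globber = await glob.create('**/*.tar.gz\n!**/temp/**')
```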
### Escaping
Wrapping special characters in `[]` can be used to escape literal glob characters in a file name. For example the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`.
On Linux/macOS `\` is also treated as an escape character.
## Consequences
- Publish new module `@actions/glob`
- Publish docs for the module (add link from `./README.md` to new doc `./packages/glob/README.md`)


@@ -1,19 +0,0 @@
# ADRs
ADR, short for "Architecture Decision Record", is a way of capturing important architectural decisions, along with their context and consequences.
This folder includes ADRs for the actions toolkit. ADRs are proposed in the form of a pull request, and they commonly follow this format:
* **Title**: short present tense imperative phrase, less than 50 characters, like a git commit message.
* **Status**: proposed, accepted, rejected, deprecated, superseded, etc.
* **Context**: what is the issue that we're seeing that is motivating this decision or change.
* **Decision**: what is the change that we're actually proposing or doing.
* **Consequences**: what becomes easier or more difficult to do because of this change.
---
- More information about ADRs can be found [here](https://github.com/joelparkerhenderson/architecture_decision_record).


@@ -1 +0,0 @@
(deleted draw.io diagram source; mxfile XML omitted)

Binary file not shown.
Before: 52 KiB
Binary file not shown.
Before: 46 KiB
Binary file not shown.
Before: 54 KiB

View File

@ -1,217 +0,0 @@
# :: Commands
The [core toolkit package](https://github.com/actions/toolkit/tree/main/packages/core) offers a number of convenience functions for
setting results, logging, registering secrets and exporting variables across actions. Sometimes, however, it's useful to be able to do
these things in a script or other tool.
To allow this, we provide a special `::` syntax which, if logged to `stdout` on a new line, allows the runner to perform special behavior when it encounters your commands. The following commands are all supported:
### Set outputs
To set an output for the step, use `::set-output`:
```sh
echo "::set-output name=FOO::BAR"
```
Running `steps.[step-id].outputs.FOO` in your YAML will now give you `BAR`.
```yaml
steps:
- name: Set the value
id: step_one
run: echo "::set-output name=FOO::BAR"
- name: Use it
run: echo ${{ steps.step_one.outputs.FOO }}
```
This is wrapped by the core setOutput method:
```javascript
export function setOutput(name: string, value: string): void {}
```
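For example, the same output could be set from a JavaScript action:
```ts
import * as core from '@actions/core'

// Equivalent to: echo "::set-output name=FOO::BAR"
core.setOutput('FOO', 'BAR')
```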
### Register a secret
If a script or action does work to create a secret at runtime, it can be registered with the runner to be masked in logs.
To mask a value in the logs, use `::add-mask`:
```sh
echo "::add-mask::mysecretvalue"
```
This is wrapped by the core setSecret method:
```javascript
function setSecret(secret: string): void {}
```
Now, future logs containing `mysecretvalue` will be masked. E.g. running `echo "Hello mysecretvalue World"` will now print `Hello **** World`.
**WARNING** The add-mask and setSecret commands only support single-line
secrets or multi-line secrets that have been escaped. `@actions/core`
`setSecret` will escape the string you provide by default. When an escaped
multi-line string is provided the whole string and each of its lines
individually will be masked. For example, you can mask `first\nsecond\r\nthird`
using:
```sh
echo "::add-mask::first%0Asecond%0D%0Athird"
```
This will mask `first%0Asecond%0D%0Athird`, `first`, `second` and `third`.
**WARNING** Do **not** mask short values if you can avoid it, it could render your output unreadable (and future steps' output as well).
For example, if you mask the letter `l`, running `echo "Hello FOO BAR World"` will now print `He*********o FOO BAR Wor****d`
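As a sketch, registering a runtime-generated secret via the core wrapper might look like this (`fetchDeployToken` is a hypothetical helper, not part of the toolkit):
```ts
import * as core from '@actions/core'

// Hypothetical helper that creates a secret at runtime
declare function fetchDeployToken(): string

const token = fetchDeployToken()
core.setSecret(token) // future occurrences of the value are masked in logs
core.info(`registered a ${token.length}-character secret`)
```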
### Group and Ungroup Log Lines
Emitting a group with a title creates a collapsible region in the logs up to the next `endgroup` command.
```bash
echo "::group::my title"
echo "::endgroup::"
```
This is wrapped by the core methods:
```javascript
function startGroup(name: string): void {}
function endGroup(): void {}
```
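For example:
```ts
import * as core from '@actions/core'

core.startGroup('my title')
core.info('these lines are collapsed under "my title"')
core.endGroup()
```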
### Problem Matchers
Problem matchers can be used to scan a build's output and automatically surface lines that match the provided pattern to the user. A file path to a `.json` Problem Matcher must be provided. See [Problem Matchers](problem-matchers.md) for more information on how to define a Problem Matcher.
```bash
echo "::add-matcher::eslint-compact-problem-matcher.json"
echo "::remove-matcher owner=eslint-compact::"
```
`add-matcher` takes a path to a Problem Matcher file
`remove-matcher` removes a Problem Matcher by owner
### Save State
Save a state to an environment variable that can later be used in the main or post action.
```bash
echo "::save-state name=FOO::foovalue"
```
Because `save-state` prepends the string `STATE_` to the name, the environment variable `STATE_FOO` will be available to use in the post or main action. See [Sending Values to the pre and post actions](https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#sending-values-to-the-pre-and-post-actions) for more information.
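A minimal sketch using the `saveState`/`getState` helpers in `@actions/core` (which wrap this behavior):
```ts
import * as core from '@actions/core'

// main entry point: persists STATE_FOO for later stages
core.saveState('FOO', 'foovalue')

// post entry point (a separate file in a real action): reads STATE_FOO back
const foo = core.getState('FOO') // 'foovalue'
core.info(`state FOO = ${foo}`)
```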
### Log Level
There are several commands to emit different levels of log output:
| log level | example usage |
|---|---|
| [debug](action-debugging.md) | `echo "::debug::My debug message"` |
| notice | `echo "::notice::My notice message"` |
| warning | `echo "::warning::My warning message"` |
| error | `echo "::error::My error message"` |
Additional syntax options are described at [the workflow command documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-a-debug-message).
### Command Echoing
By default, the echoing of commands to stdout only occurs if [Step Debugging is enabled](./action-debugging.md#How-to-Access-Step-Debug-Logs).
You can enable or disable this for the current step by using the `echo` command.
```bash
echo "::echo::on"
```
You can also disable echoing.
```bash
echo "::echo::off"
```
This is wrapped by the core method:
```javascript
function setCommandEcho(enabled: boolean): void {}
```
The `add-mask`, `debug`, `warning` and `error` commands do not support echoing.
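For example, toggling echoing from a JavaScript action:
```ts
import * as core from '@actions/core'

core.setCommandEcho(true)  // equivalent to echo "::echo::on"
core.setCommandEcho(false) // equivalent to echo "::echo::off"
```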
### Command Prompt
CMD processes the `"` character differently from other shells when echoing. In CMD, the above snippets should have the `"` characters removed in order to be processed correctly. For example, the set output command would be:
```cmd
echo ::set-output name=FOO::BAR
```
## Environment files
During the execution of a workflow, the runner generates temporary files that can be used to perform certain actions. The paths to these files are exposed via environment variables. You will need to use the `utf-8` encoding when writing to these files to ensure proper processing of the commands. Multiple commands can be written to the same file, separated by newlines.
### Set an environment variable
To set an environment variable for future out-of-process steps, write to the file whose path is in `GITHUB_ENV`, or use the equivalent `actions/core` function:
```sh
echo "FOO=BAR" >> $GITHUB_ENV
```
Referencing `$FOO` in a future step will now give you `BAR`.
For multiline strings, you may use a heredoc-style syntax with your choice of delimiter. In the example below, we use `EOF`.
```
steps:
- name: Set the value
id: step_one
run: |
echo 'JSON_RESPONSE<<EOF' >> $GITHUB_ENV
curl https://httpbin.org/json >> $GITHUB_ENV
echo 'EOF' >> $GITHUB_ENV
```
This would set the value of the `JSON_RESPONSE` env variable to the value of the curl response.
The expected syntax for the heredoc style is:
```
{VARIABLE_NAME}<<{DELIMITER}
{VARIABLE_VALUE}
{DELIMITER}
```
This is wrapped by the core `exportVariable` method, which sets the variable for future steps and also updates it for the current step.
```javascript
export function exportVariable(name: string, val: string): void {}
```
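For example:
```ts
import * as core from '@actions/core'

// Writes the heredoc format to the file at $GITHUB_ENV for you (multiline
// values included) and also sets process.env.JSON_RESPONSE for this step
core.exportVariable('JSON_RESPONSE', '{"slideshow": {"title": "Sample"}}')
```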
### PATH Manipulation
To prepend a string to `PATH`, write to the file whose path is in `GITHUB_PATH`, or use the equivalent `actions/core` function:
```sh
echo "/Users/test/.nvm/versions/node/v12.18.3/bin" >> $GITHUB_PATH
```
Inspecting `$PATH` in a future step will now show `/Users/test/.nvm/versions/node/v12.18.3/bin:{Previous Path}`.
This is wrapped by the core addPath method:
```javascript
export function addPath(inputPath: string): void {}
```
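For example:
```ts
import * as core from '@actions/core'

// Prepends the directory to PATH for this step and all future steps
core.addPath('/Users/test/.nvm/versions/node/v12.18.3/bin')
```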
### Powershell
PowerShell does not use UTF-8 by default. Make sure you write in the correct encoding. For example, to set the path:
```
steps:
- run: echo "mypath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
```

View File

@ -1,3 +0,0 @@
# Creating a Container Action Using the Toolkit
In progress.

View File

@ -1,71 +0,0 @@
# Creating a Docker Action
The [container-template](https://github.com/actions/container-template) repo contains the base files to create a Docker action.
# Create a Repo from the Template
Navigate to https://github.com/actions/container-template
Click on `Use this template` to create the repo for your action.
![template](assets/node12-template.png)
Complete creating your repo and clone the repo.
> NOTE: The location of the repo determines how users will reference your action in their workflow files with the `uses:` keyword.
e.g. To use https://github.com/actions/setup-node, users will author:
```yaml
steps:
- uses: actions/setup-node@v4
```
# Define Metadata
Your action has a name and a description. Update the author.
Create inputs that your unit of work will need. These will be what workflow authors set with the `with:` keyword.
```yaml
name: 'My Container Action'
description: 'Get started with Container actions'
author: 'GitHub'
inputs:
myInput:
description: 'Input to use'
default: 'world'
runs:
using: 'docker'
image: 'Dockerfile'
args:
- ${{ inputs.myInput }}
```
It will be run with Docker, and the input is mapped into the args.
# Change Code
The entry point is in `entrypoint.sh`:
```bash
#!/bin/sh -l
echo "hello $1"
```
# Publish
Simply push your action to publish.
```bash
$ git push
```
The runner will download the action and build the Docker container on the fly at runtime.
> Consider versioning your actions with tags. See [versioning](/docs/action-versioning.md)

View File

@ -1,288 +0,0 @@
# Creating an Action using the GitHub Context
## Goal
In this walkthrough we will learn how to build a basic action using GitHub context data to greet users when they open an issue or PR. In the process we will explore how to access this context and how to make authenticated requests to the GitHub API.
Note that a complete version of this action can be found at https://github.com/damccorm/issue-greeter.
## Prerequisites
This walkthrough assumes that you have gone through the basic [javascript action walkthrough](https://github.com/actions/javascript-action) and have a basic action set up. If not, we recommend you go through that first.
## Installing dependencies
All of the dependencies we need should come packaged for us in this library's `core` and `github` packages. To install, run the following in your action:
`npm install @actions/core && npm install @actions/github`
## Metadata
Next, we will need a welcome message and a repo token as inputs. Recall that inputs are defined in the `action.yml` metadata file - update your `action.yml` file to define `welcome-message` and `repo-token` as inputs.
```yaml
name: "Welcome"
description: "A basic welcome action"
author: "GitHub"
inputs:
welcome-message:
description: "Message to display when a user opens an issue or PR"
default: "Thanks for opening an issue! Make sure you've followed CONTRIBUTING.md"
repo-token:
description: "Token for the repo. Can be passed in using {{ secrets.GITHUB_TOKEN }}"
required: true
runs:
using: "node12"
main: "lib/main.js"
```
## Action logic
Now that we've installed our dependencies and defined our inputs, we're ready to start writing the action logic in `src/main.ts`! For clarity, we'll structure our action as follows:
```ts
import * as core from '@actions/core';
import * as github from '@actions/github';
export async function run() {
try {
const welcomeMessage: string = core.getInput('welcome-message');
// TODO - Get context data
// TODO - make request to the GitHub API to comment on the issue
}
catch (error) {
core.setFailed(error.message);
throw error;
}
}
run();
```
### Getting context data
For the purpose of this walkthrough, we will need the following pieces of context data:
- the name of the repo that the action is being run on
- the organization/owner of that repo
- the number of the issue that has been opened
Fortunately, the GitHub package provides all of this to us with [a single convenience function](https://github.com/actions/toolkit/blob/ac007c06984bc483fae2ba649788dfc858bc6a8b/packages/github/src/context.ts#L34), so we can simply do:
`const issue: {owner: string; repo: string; number: number} = github.context.issue;`
The context object also contains a number of convenient properties, as well as access to the full [GitHub payload](https://developer.github.com/v3/activity/events/types/). We can use this to check and make sure we're actually looking at a recently opened issue (and not something else, like a comment on an existing issue):
```ts
if (github.context.payload.action !== 'opened') {
console.log('No issue or PR was opened, skipping');
return;
}
```
Our whole `src/main.ts` file now looks like:
```ts
import * as core from '@actions/core';
import * as github from '@actions/github';
export async function run() {
try {
const welcomeMessage: string = core.getInput('welcome-message', {required: true});
const repoToken: string = core.getInput('repo-token', {required: true});
const issue: {owner: string; repo: string; number: number} = github.context.issue;
if (github.context.payload.action !== 'opened') {
console.log('No issue or pull request was opened, skipping');
return;
}
// TODO - make request to the GitHub API to comment on the issue
}
catch (error) {
core.setFailed(error.message);
throw error;
}
}
run();
```
### Sending requests to the GitHub API
Now that we have our context data, we are able to send a request to the GitHub API using the [Octokit REST client](https://github.com/octokit/rest.js). The REST client exposes a number of convenience functions, including one for adding comments to issues/PRs (issues and PRs are treated as one concept by the Octokit client):
```ts
const client: github.GitHub = new github.GitHub(repoToken);
await client.issues.createComment({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
body: welcomeMessage
});
```
For more docs on the client, you can visit the [Octokit REST documentation](https://octokit.github.io/rest.js/). Now our action code should be complete:
```ts
import * as core from '@actions/core';
import * as github from '@actions/github';
export async function run() {
try {
const welcomeMessage: string = core.getInput('welcome-message', {required: true});
const repoToken: string = core.getInput('repo-token', {required: true});
const issue: {owner: string; repo: string; number: number} = github.context.issue;
if (github.context.payload.action !== 'opened') {
console.log('No issue or pull request was opened, skipping');
return;
}
const client: github.GitHub = new github.GitHub(repoToken);
await client.issues.createComment({
owner: issue.owner,
repo: issue.repo,
issue_number: issue.number,
body: welcomeMessage
});
}
catch (error) {
core.setFailed(error.message);
throw error;
}
}
run();
```
## Writing unit tests for your action
Next, we're going to write a basic unit test for our action using jest. If you followed the [javascript walkthrough](https://github.com/actions/javascript-action), you should have a file `__tests__/main.test.ts` that runs tests when `npm test` is called. We're going to start by populating that with one test:
```ts
const nock = require('nock');
const path = require('path');
describe('action test suite', () => {
it('It posts a comment on an opened issue', async () => {
// TODO
});
});
```
For the purposes of this walkthrough, we'll focus on populating this test and leave the remaining test coverage as an exercise for the reader.
### Mocking inputs
First, we want to make sure that we can mock our inputs (`welcome-message` and `repo-token`). Actions handles inputs by populating `INPUT_<INPUT NAME IN ALL CAPS>` environment variables, so we can mock that simply by setting those environment variables:
```ts
const nock = require('nock');
const path = require('path');
describe('action test suite', () => {
it('It posts a comment on an opened issue', async () => {
const welcomeMessage = 'hello';
const repoToken = 'token';
process.env['INPUT_WELCOME-MESSAGE'] = welcomeMessage;
process.env['INPUT_REPO-TOKEN'] = repoToken;
// TODO
});
});
```
### Mocking the GitHub context
Mocking the GitHub context is relatively straightforward. Since most of it is simply populated by environment variables, you can just set the corresponding environment variables defined [here](https://github.com/actions/toolkit/blob/ac007c06984bc483fae2ba649788dfc858bc6a8b/packages/github/src/context.ts#L23) and test that it works in that environment. In this case, we can set up our test with:
```ts
const nock = require('nock');
const path = require('path');
describe('action test suite', () => {
it('It posts a comment on an opened issue', async () => {
const welcomeMessage = 'hello';
const repoToken = 'token';
process.env['INPUT_WELCOME-MESSAGE'] = welcomeMessage;
process.env['INPUT_REPO-TOKEN'] = repoToken;
process.env['GITHUB_REPOSITORY'] = 'foo/bar';
process.env['GITHUB_EVENT_PATH'] = path.join(__dirname, 'payload.json');
// TODO
});
});
```
Note that the payload is loaded from GITHUB_EVENT_PATH. Since we set that to `path.join(__dirname, 'payload.json')`, we need to go save our payload there. For the purposes of this test, we can simply save the following to `__tests__/payload.json`:
```json
{
"issue": {
"number": 10
},
"action": "opened"
}
```
Now, calling `github.context.issue` should return `{owner: foo, repo: bar, number: 10}`, and `github.context.payload.action` should be set to `'opened'`.
> One important detail here is that because the GitHub context loads these environment variables as soon as it is required, you should set them before you require your action. In most cases, this means you need to re-require your action in every test. If this is a problem, you can get around it by mocking the class directly using jest (or whatever framework you choose).
### Mocking the Octokit Client
To mock the client calls, we recommend using [nock](https://github.com/nock/nock), which allows you to mock the HTTP requests made by the client. First, install nock with `npm install nock --save-dev`.
For this test, we expect the following call:
```ts
client.issues.createComment({
owner: 'foo',
repo: 'bar',
issue_number: 10,
body: 'hello'
});
```
From [the GitHub endpoint docs](https://developer.github.com/v3/issues/comments/#create-a-comment), we expect this to make a POST request to `https://api.github.com/repos/foo/bar/issues/10/comments` with a body of `{"body":"hello"}`.
We can mock this with:
```ts
const nock = require('nock');
const path = require('path');
describe('action test suite', () => {
it('It posts a comment on an opened issue', async () => {
const welcomeMessage = 'hello';
const repoToken = 'token';
process.env['INPUT_WELCOME-MESSAGE'] = welcomeMessage;
process.env['INPUT_REPO-TOKEN'] = repoToken;
process.env['GITHUB_REPOSITORY'] = 'foo/bar';
process.env['GITHUB_EVENT_PATH'] = path.join(__dirname, 'payload.json');
nock('https://api.github.com')
.persist()
.post('/repos/foo/bar/issues/10/comments', '{\"body\":\"hello\"}')
.reply(200);
const main = require('../src/main');
await main.run();
});
});
```
This will fail if the URL or body doesn't exactly match the parameters passed into the nock function. We can now run `npm test` and the test should succeed.
## Build and publish
Now that we've written and unit tested our action, we can build our action with `npm run build` and push it to a repo where it can be consumed by workflows. For more info on versioning your action, see [our versioning docs](./action-versioning.md).
## Next steps
If you're interested in building out this action further, try extending your action to only run on a user's first issue. See our [first-contribution action](https://github.com/actions/first-interaction) for inspiration.

View File

@ -6,7 +6,7 @@ In order to support the node-config action, I propose adding the following into
Holds all the functions necessary for interacting with the runner/environment. Holds all the functions necessary for interacting with the runner/environment.
```ts ```
// Logging functions // Logging functions
export function debug(message: string): void export function debug(message: string): void
export function warning(message: string): void export function warning(message: string): void
@ -66,7 +66,7 @@ export function setFailed(message: string): void
Holds all the functions necessary for file system manipulation (cli scenarios, not fs replacements): Holds all the functions necessary for file system manipulation (cli scenarios, not fs replacements):
```ts ```
/** /**
* Interface for cp/mv options * Interface for cp/mv options
*/ */
@ -132,7 +132,7 @@ export function which(tool: string, options?: WhichOptions): Promise<string>
Holds all the functions necessary for running the tools node-config depends on (aka 7-zip and tar) Holds all the functions necessary for running the tools node-config depends on (aka 7-zip and tar)
```ts ```
/** /**
* Interface for exec options * Interface for exec options
*/ */
@ -155,7 +155,7 @@ export function exec(commandLine: string, args?: string[], options?: IExecOption
Holds all the functions necessary for downloading and caching node. Holds all the functions necessary for downloading and caching node.
```ts ```
/** /**
* Download a tool from an url and stream it into a file * Download a tool from an url and stream it into a file
* *

View File

@ -1,149 +0,0 @@
# Problem Matchers
Problem Matchers are a way to scan the output of actions for a specified regex pattern and surface that information prominently in the UI. Both [GitHub Annotations](https://developer.github.com/v3/checks/runs/#annotations-object-1) and log file decorations are created when a match is detected.
## Limitations
Currently, GitHub Actions limits the annotation count in a workflow run.
- 10 warning annotations, 10 error annotations, and 10 notice annotations per step
- 50 annotations per job (sum of annotations from all the steps)
- 50 annotations per run (separate from the job annotations, these annotations aren't created by users)
If your workflow may exceed these annotation counts, consider filtering the log messages that the Problem Matcher is exposed to (e.g. by files touched in the PR, lines, or other criteria).
## Single Line Matchers
Let's consider the ESLint compact output:
```
badFile.js: line 50, col 11, Error - 'myVar' is defined but never used. (no-unused-vars)
```
We can define a problem matcher in JSON that detects input in that format:
```json
{
"problemMatcher": [
{
"owner": "eslint-compact",
"pattern": [
{
"regexp": "^(.+):\\sline\\s(\\d+),\\scol\\s(\\d+),\\s(Error|Warning|Info)\\s-\\s(.+)\\s\\((.+)\\)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
"code": 6
}
]
}
]
}
```
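As a quick sanity check, the pattern's capture groups can be verified against the sample line using ECMAScript regex syntax (the syntax the runner expects; see Troubleshooting below):
```ts
const re =
  /^(.+):\sline\s(\d+),\scol\s(\d+),\s(Error|Warning|Info)\s-\s(.+)\s\((.+)\)$/
const sample =
  "badFile.js: line 50, col 11, Error - 'myVar' is defined but never used. (no-unused-vars)"
const m = sample.match(re)
// m[1]=file, m[2]=line, m[3]=column, m[4]=severity, m[5]=message, m[6]=code
console.log(m?.slice(1))
```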
The following fields are available for problem matchers:
```
{
owner: an ID field that can be used to remove or replace the problem matcher. **required**
severity: indicates the default severity, either 'warning' or 'error' case-insensitive. Defaults to 'error'
pattern: [
{
regexp: the regex pattern that provides the groups to match against **required**
file: a group number containing the file name
fromPath: a group number containing a filepath used to root the file (e.g. a project file)
line: a group number containing the line number
column: a group number containing the column information
severity: a group number containing either 'warning' or 'error' case-insensitive. Defaults to `error`
code: a group number containing the error code
message: a group number containing the error message. **required** at least one pattern must set the message
loop: whether to loop until a match is not found, only valid on the last pattern of a multipattern matcher
}
]
}
```
## Multiline Matching
Consider the following output:
```
test.js
1:0 error Missing "use strict" statement strict
5:10 error 'addOne' is defined but never used no-unused-vars
✖ 2 problems (2 errors, 0 warnings)
```
The file name is printed once, yet multiple error lines are printed. The `loop` keyword provides a way to discover multiple errors in outputs.
The eslint-stylish problem matcher defined below catches that output, and creates two annotations from it.
```
{
"problemMatcher": [
{
"owner": "eslint-stylish",
"pattern": [
{
// Matches the 1st line in the output
"regexp": "^([^\\s].*)$",
"file": 1
},
{
// Matches the 2nd and 3rd line in the output
"regexp": "^\\s+(\\d+):(\\d+)\\s+(error|warning|info)\\s+(.*)\\s\\s+(.*)$",
// File is carried through from above, so we define the rest of the groups
"line": 1,
"column": 2,
"severity": 3,
"message": 4,
"code": 5,
"loop": true
}
]
}
]
}
```
The first pattern matches the `test.js` line and records the file information. This line is not decorated in the UI.
The second pattern loops through the remaining lines with `loop: true` until it fails to find a match, and surfaces these lines prominently in the UI.
Note that the pattern matches must be on consecutive lines. The following would not result in any match findings.
```
test.js
extraneous log line of no interest
1:0 error Missing "use strict" statement strict
5:10 error 'addOne' is defined but never used no-unused-vars
✖ 2 problems (2 errors, 0 warnings)
```
## Adding and Removing Problem Matchers
Problem Matchers are enabled and removed via the toolkit [commands](commands.md#problem-matchers).
## Duplicate Problem Matchers
Registering two problem matchers with the same owner will result in only the most recently registered matcher running.
## Examples
Some of the starter actions are already using problem matchers, for example:
- [setup-node](https://github.com/actions/setup-node/tree/main/.github)
- [setup-python](https://github.com/actions/setup-python/tree/main/.github)
- [setup-go](https://github.com/actions/setup-go/tree/main/.github)
- [setup-dotnet](https://github.com/actions/setup-dotnet/tree/main/.github)
## Troubleshooting
### Regular expression not matching
Use ECMAScript regular expression syntax when testing patterns.
### File property getting dropped
[Enable debug logging](https://docs.github.com/en/actions/managing-workflow-runs/enabling-debug-logging) to determine why the file is getting dropped.
This usually happens when the file does not exist or is not under the workflow repo.

View File

@ -1,10 +0,0 @@
# Proxy Server Support
Self-hosted runners [can be configured](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners) to run behind a proxy server in enterprises.
For actions to **just work** behind a proxy server:
1. Use [tool-cache](/packages/tool-cache) version >= 1.3.1
2. Optionally use [actions/http-client](/packages/http-client)
If you are using other http clients, refer to the [environment variables set by the runner](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners).
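For instance, a minimal sketch with `@actions/http-client`, which honors the proxy environment variables set by the runner without extra configuration (the `'my-action'` user agent is an arbitrary example):
```ts
import {HttpClient} from '@actions/http-client'

async function run(): Promise<void> {
  // The client reads https_proxy / no_proxy from the environment automatically
  const http = new HttpClient('my-action')
  const res = await http.get('https://api.github.com')
  console.log(res.message.statusCode)
}

run()
```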

View File

@ -1,100 +0,0 @@
# Github Package
In order to support using actions to interact with GitHub, I propose adding a `github` package to the toolkit.
Its main purpose will be to provide a hydrated GitHub context/Octokit client with some convenience functions. It is largely pulled from the GitHub utilities provided in https://github.com/JasonEtco/actions-toolkit, though it has been condensed.
### Spec
##### interfaces.ts
```ts
/*
* Interfaces
*/
export interface PayloadRepository {
[key: string]: any
full_name?: string
name: string
owner: {
[key: string]: any
login: string
name?: string
}
html_url?: string
}
export interface WebhookPayloadWithRepository {
[key: string]: any
repository?: PayloadRepository
issue?: {
[key: string]: any
number: number
html_url?: string
body?: string
}
pull_request?: {
[key: string]: any
number: number
html_url?: string
body?: string
}
sender?: {
[key: string]: any
type: string
}
action?: string
installation?: {
id: number
[key: string]: any
}
}
```
##### context.ts
Contains a GitHub context
```ts
export class Context {
/**
* Webhook payload object that triggered the workflow
*/
public payload: WebhookPayloadWithRepository
/**
* Name of the event that triggered the workflow
*/
public event: string
public sha: string
public ref: string
public workflow: string
public action: string
public actor: string
/**
* Hydrate the context from the environment
*/
constructor ()
public get issue ()
public get repo ()
}
```
##### github.ts
Contains a hydrated Octokit client
```ts
export class GithubClient extends Octokit {
// For making GraphQL requests
public graphql: (query: string, variables?: Variables) => Promise<GraphQlQueryResponse>
// Calls super and initializes graphql
constructor (token: string)
}
```
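A hypothetical usage sketch of the proposed package (names and shapes as specced above, not a published API):
```ts
import {Context} from './context'
import {GithubClient} from './github'

const context = new Context() // hydrated from the environment
const {owner, repo} = context.repo // assumes the `repo` getter returns these fields

const client = new GithubClient(process.env.GITHUB_TOKEN || '')
// client extends Octokit, so REST and GraphQL calls are available
```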

View File

@ -4,6 +4,7 @@ module.exports = {
roots: ['<rootDir>/packages'], roots: ['<rootDir>/packages'],
testEnvironment: 'node', testEnvironment: 'node',
testMatch: ['**/__tests__/*.test.ts'], testMatch: ['**/__tests__/*.test.ts'],
testRunner: 'jest-circus/runner',
transform: { transform: {
'^.+\\.ts$': 'ts-jest' '^.+\\.ts$': 'ts-jest'
}, },

View File

@ -1,6 +1,6 @@
{ {
"packages": [ "packages": [
"packages/**/*" "packages/*"
], ],
"version": "independent" "version": "independent"
} }

nx.json
View File

@ -1,24 +0,0 @@
{
"tasksRunnerOptions": {
"default": {
"runner": "nx/tasks-runners/default",
"options": {
"cacheableOperations": []
}
}
},
"affected": {
"defaultBase": "master"
},
"$schema": "./node_modules/nx/schemas/nx-schema.json",
"namedInputs": {
"default": [
"{projectRoot}/**/*",
"sharedGlobals"
],
"sharedGlobals": [],
"production": [
"default"
]
}
}

package-lock.json generated

File diff suppressed because it is too large

View File

@ -2,35 +2,29 @@
"name": "root", "name": "root",
"private": true, "private": true,
"scripts": { "scripts": {
"audit-all": "lerna run audit-moderate", "bootstrap": "lerna bootstrap",
"bootstrap": "lerna exec -- npm install",
"build": "lerna run tsc", "build": "lerna run tsc",
"clean": "lerna clean",
"repair": "lerna repair",
"check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build -- -- --noEmit\"", "check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build -- -- --noEmit\"",
"format": "prettier --write packages/**/*.ts", "format": "prettier --write packages/**/*.ts",
"format-check": "prettier --check packages/**/*.ts", "format-check": "prettier --check packages/**/*.ts",
"lint": "eslint packages/**/*.ts", "lint": "eslint packages/**/*.ts",
"lint-fix": "eslint packages/**/*.ts --fix",
"new-package": "scripts/create-package", "new-package": "scripts/create-package",
"test": "jest --testTimeout 70000" "test": "jest"
}, },
"devDependencies": { "devDependencies": {
"@types/jest": "^29.5.4", "@types/jest": "^24.0.11",
"@types/node": "^20.5.7", "@types/node": "^11.13.5",
"@types/signale": "^1.4.1", "@types/signale": "^1.2.1",
"concurrently": "^6.1.0", "@typescript-eslint/parser": "^1.9.0",
"eslint": "^8.0.1", "concurrently": "^4.1.0",
"eslint-config-prettier": "^8.9.0", "eslint": "^5.16.0",
"eslint-plugin-github": "^4.9.2", "eslint-plugin-github": "^2.0.0",
"eslint-plugin-jest": "^27.2.3", "eslint-plugin-jest": "^22.5.1",
"eslint-plugin-prettier": "^5.0.0", "jest": "^24.7.1",
"flow-bin": "^0.115.0", "jest-circus": "^24.7.1",
"jest": "^29.6.4", "lerna": "^3.13.3",
"lerna": "^6.4.1", "prettier": "^1.17.0",
"nx": "16.6.0", "ts-jest": "^24.0.2",
"prettier": "^3.0.0", "typescript": "^3.4.4"
"ts-jest": "^29.1.1",
"typescript": "^5.2.2"
} }
} }

View File

@ -1,44 +0,0 @@
# Contributions
This package is used internally by the v4 versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate to these actions, so it is important that major changes or updates get properly tested.
Any issues or feature requests that are related to the artifact actions should be filed in the appropriate repo.
A limited range of unit tests run as part of each PR when making changes to the artifact packages. For small contributions and fixes, they should be sufficient.
If making large changes, there are a few scenarios that should be tested:
- Uploading very large artifacts
- Uploading artifacts with lots of small files
- Uploading artifacts using a self-hosted runner (uploads and downloads behave differently due to extra latency)
- Downloading a single artifact (large and small, if lots of small files are part of an artifact, timeouts and non-success HTTP responses can be expected)
- Downloading all artifacts at once
Large architectural changes can impact upload/download performance, so it is important to run extra tests separately. We request that any large contributions/changes have extra detailed testing so we can verify performance and possible regressions.
Tests will run for every push/pull_request [via Actions](https://github.com/actions/toolkit/blob/main/.github/workflows/artifact-tests.yml).
# Testing
## Package tests
To run unit tests for the `@actions/artifact` package:
1. Clone `actions/toolkit` locally
2. Install dependencies: `npm run bootstrap`
3. Change working directory to `packages/artifact`
4. Run jest tests: `npm run test`
## Within upload-artifact or download-artifact actions
An easy way to test changes for the official upload/download actions is to fork them, compile changes, and run them.
1. For your local `actions/toolkit` changes:
1. Change directory to `packages/artifact`
2. Compile the changes: `npm run tsc`
3. Symlink your package change: `npm link`
2. Fork and clone either [upload-artifact](https://github.com/actions/upload-artifact) or [download-artifact](https://github.com/actions/download-artifact)
1. In the locally cloned fork, link to your local toolkit changes: `npm link @actions/artifact`
2. Then, compile your changes with: `npm run release`. The local `dist/index.js` should be updated with your changes.
3. Commit and push to your fork; you can then test with a `uses:` in your workflow pointed at your fork.
4. The format for the above is `<username>/<repository-name>@<ref>`, e.g. `me/myrepo@main`

View File

@ -1,192 +0,0 @@
# `@actions/artifact`
Interact programmatically with [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts).
This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact) and [`@actions/download-artifact`](https://github.com/actions/download-artifact) actions.
- [`@actions/artifact`](#actionsartifact)
- [v2 - What's New](#v2---whats-new)
- [Improvements](#improvements)
- [Breaking changes](#breaking-changes)
- [Quick Start](#quick-start)
- [Examples](#examples)
- [Upload and Download](#upload-and-download)
- [Delete an Artifact](#delete-an-artifact)
- [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
- [Speeding up large uploads](#speeding-up-large-uploads)
- [Additional Resources](#additional-resources)
## v2 - What's New
> [!IMPORTANT]
> @actions/artifact v2+, upload-artifact@v4+, and download-artifact@v4+ are not yet supported on GHES. The previous version of this package can be found at [this tag](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.2/packages/artifact) and [on npm](https://www.npmjs.com/package/@actions/artifact/v/1.1.2).
The release of `@actions/artifact@v2` (including `upload-artifact@v4` and `download-artifact@v4`) marks a major change to the backend architecture of Artifacts, with numerous performance and behavioral improvements.
### Improvements
1. All upload and download operations are much quicker, with up to 80% faster download times and 96% faster upload times in worst-case scenarios.
2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
3. Artifacts can now be downloaded and deleted from the UI _before_ the entire workflow run finishes.
4. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs. Both of these factors help reduce the possibility of accidentally corrupting Artifact files. (Digest/integrity hash coming soon in the API!)
5. This library (and `actions/download-artifact`) now support downloading Artifacts from _other_ repositories and runs if a `GITHUB_TOKEN` with sufficient `actions:read` permissions is provided.
### Breaking changes
1. Firewall rules required for self-hosted runners.
If you are using self-hosted runners behind a firewall, you must have flows open to [Actions endpoints](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github). If you cannot use wildcard rules for your firewall, see the GitHub [meta endpoint](https://api.github.com/meta) for specific endpoints.
e.g.
```bash
curl https://api.github.com/meta | jq .domains.actions
```
2. Uploading to the same named Artifact multiple times.
Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once.
3. Limit of Artifacts for an individual job.
Each job in a workflow run now has a limit of 10 artifacts.
## Quick Start
Install the package:
```bash
npm i @actions/artifact
```
Import the module:
```js
// ES6 module
import {DefaultArtifactClient} from '@actions/artifact'
// CommonJS
const {DefaultArtifactClient} = require('@actions/artifact')
```
Then instantiate:
```js
const artifact = new DefaultArtifactClient()
```
For a comprehensive list of classes, interfaces, functions and more, see the [generated documentation](./docs/generated/README.md).
## Examples
### Upload and Download
The most basic scenario is uploading one or more files to an Artifact, then downloading that Artifact. Downloads are based on the Artifact ID, which can be obtained in the response of `uploadArtifact`, `getArtifact`, `listArtifacts` or via the [REST API](https://docs.github.com/en/rest/actions/artifacts).
```js
const {id, size} = await artifact.uploadArtifact(
// name of the artifact
'my-artifact',
// files to include (supports absolute and relative paths)
['/absolute/path/file1.txt', './relative/file2.txt'],
{
// optional: how long to retain the artifact
// if unspecified, defaults to repository/org retention settings (the limit of this value)
retentionDays: 10
}
)
console.log(`Created artifact with id: ${id} (bytes: ${size})`)
const {downloadPath} = await artifact.downloadArtifact(id, {
// optional: download destination path. otherwise defaults to $GITHUB_WORKSPACE
path: '/tmp/dst/path',
})
console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
```
### Delete an Artifact
To delete an artifact, all you need is the name.
```js
const {id} = await artifact.deleteArtifact(
// name of the artifact
'my-artifact'
)
console.log(`Deleted Artifact ID '${id}'`)
```
It also supports options to delete Artifacts from other repos/runs, provided a GitHub token with `actions:write` permission on the target repository is supplied.
```js
const findBy = {
// must have actions:write permission on target repository
token: process.env['GITHUB_TOKEN'],
workflowRunId: 123,
repositoryOwner: 'actions',
repositoryName: 'toolkit'
}
const {id} = await artifact.deleteArtifact(
// name of the artifact
'my-artifact',
// options to find by other repo/owner
{ findBy }
)
console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/${findBy.repositoryName}`)
```
### Downloading from other workflow runs or repos
It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must pass `options.findBy` to `downloadArtifact`.
```ts
const findBy = {
// must have actions:read permission on target repository
token: process.env['GITHUB_TOKEN'],
workflowRunId: 123,
repositoryOwner: 'actions',
repositoryName: 'toolkit'
}
await artifact.downloadArtifact(1337, {
findBy
})
// can also be used in other methods
await artifact.getArtifact('my-artifact', {
findBy
})
await artifact.listArtifacts({
findBy
})
```
### Speeding up large uploads
If you have large files that need to be uploaded (or file types that don't compress well), you may benefit from changing the compression level of the Artifact archive. NOTE: This is a tradeoff between artifact upload time and stored data size.
```ts
await artifact.uploadArtifact('my-massive-artifact', ['big_file.bin'], {
// The level of compression for Zlib to be applied to the artifact archive.
// - 0: No compression
// - 1: Best speed
// - 6: Default compression (same as GNU Gzip)
// - 9: Best compression
compressionLevel: 0
})
```
## Additional Resources
- [Releases](./RELEASES.md)
- [Contribution Guide](./CONTRIBUTIONS.md)
- [Frequently Asked Questions](./docs/faq.md)

View File

@ -1,188 +0,0 @@
# @actions/artifact Releases
### 2.3.3
- Dependency updates [#2049](https://github.com/actions/toolkit/pull/2049)
### 2.3.2
- Added masking for Shared Access Signature (SAS) artifact URLs [#1982](https://github.com/actions/toolkit/pull/1982)
- Change hash to digest for consistent terminology across runner logs [#1991](https://github.com/actions/toolkit/pull/1991)
### 2.3.1
- Fix comment typo on expectedHash. [#1986](https://github.com/actions/toolkit/pull/1986)
### 2.3.0
- Allow ArtifactClient to perform digest comparisons, if supplied. [#1975](https://github.com/actions/toolkit/pull/1975)
### 2.2.2
- Default concurrency to 5 for uploading artifacts [#1962](https://github.com/actions/toolkit/pull/1962)
### 2.2.1
- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)
### 2.2.0
- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)
### 2.1.11
- Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)
- Use native `crypto` [#1815](https://github.com/actions/toolkit/pull/1815)
### 2.1.10
- Fixed a regression with symlinks not being automatically resolved [#1830](https://github.com/actions/toolkit/pull/1830)
- Fixed a regression with chunk timeout [#1786](https://github.com/actions/toolkit/pull/1786)
### 2.1.9
- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)
### 2.1.8
- Allows `*.localhost` domains for hostname checks for local development.
### 2.1.7
- Updated unzip-stream dependency and reverted to using `unzip.Extract()`
### 2.1.6
- Will retry on invalid request responses.
### 2.1.5
- Bumped `archiver` dependency to 7.0.1
### 2.1.4
- Adds info-level logging for zip extraction
### 2.1.3
- Fixes a bug in the extract logic updated in 2.1.2
### 2.1.2
- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts
### 2.1.1
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
### 2.1.0
- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)
### 2.0.1
- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
### 2.0.0
- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
- Numerous API changes, [some breaking](./README.md#breaking-changes).
- [Blog post with more info](https://github.blog/2024-02-12-get-started-with-v4-of-github-actions-artifacts/)
### 1.1.1
- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
### 1.1.0
- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
### 1.0.2
- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
### 1.0.1
- Update to v2.0.0 of `@actions/http-client`
### 1.0.0
- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
### 0.6.1
- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
### 0.6.0
- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
### 0.5.2
- Add HTTP 500 as a retryable status code for artifact upload and download.
### 0.5.1
- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
### 0.5.0
- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
### 0.4.2
- Improved retry-ability when a partial artifact download is encountered
### 0.4.1
- Update to latest @actions/core version
### 0.4.0
- Add option to specify custom retentions on artifacts
### 0.3.5
- Retry in the event of a 413 response
### 0.3.3
- Increase chunk size during upload from 4MB to 8MB
- Improve user-agent strings during API calls to help internally diagnose issues
### 0.3.2
- Fix to ensure readstreams get correctly reset in the event of a retry
### 0.3.1
- Fix to ensure temporary gzip files get correctly deleted during artifact upload
- Remove spaces as a forbidden character during upload
### 0.3.0
- Fixes to gzip decompression when downloading artifacts
- Support handling 429 response codes
- Improved download experience when dealing with empty files
- Exponential backoff when retryable status codes are encountered
- Clearer error message if storage quota has been reached
- Improved logging and output during artifact download
### 0.2.0
- Fixes to TCP connections not closing
- GZip file compression to speed up downloads
- Improved logging and output
- Extra documentation
### 0.1.0
- Initial release

View File

@ -1,348 +0,0 @@
import * as http from 'http'
import * as net from 'net'
import {HttpClient} from '@actions/http-client'
import * as config from '../src/internal/shared/config'
import {internalArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
import {noopLogs} from './common'
import {NetworkError, UsageError} from '../src/internal/shared/errors'
jest.mock('@actions/http-client')
const clientOptions = {
maxAttempts: 5,
retryIntervalMs: 1,
retryMultiplier: 1.5
}
describe('artifact-http-client', () => {
beforeAll(() => {
noopLogs()
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('http://localhost:8080')
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('token')
})
beforeEach(() => {
jest.clearAllMocks()
})
it('should successfully create a client', () => {
const client = internalArtifactTwirpClient()
expect(client).toBeDefined()
})
it('should make a request', async () => {
const mockPost = jest.fn(() => {
const msg = new http.IncomingMessage(new net.Socket())
msg.statusCode = 200
return {
message: msg,
readBody: async () => {
return Promise.resolve(
`{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
)
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient()
const artifact = await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(mockPost).toHaveBeenCalledTimes(1)
expect(artifact).toBeDefined()
expect(artifact.ok).toBe(true)
expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
})
it('should retry if the request fails', async () => {
const mockPost = jest
.fn(() => {
const msgSucceeded = new http.IncomingMessage(new net.Socket())
msgSucceeded.statusCode = 200
return {
message: msgSucceeded,
readBody: async () => {
return Promise.resolve(
`{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
)
}
}
})
.mockImplementationOnce(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 500
msgFailed.statusMessage = 'Internal Server Error'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve(`{"ok": false}`)
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient(clientOptions)
const artifact = await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(artifact).toBeDefined()
expect(artifact.ok).toBe(true)
expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
expect(mockPost).toHaveBeenCalledTimes(2)
})
it('should retry if invalid body response', async () => {
const mockPost = jest
.fn(() => {
const msgSucceeded = new http.IncomingMessage(new net.Socket())
msgSucceeded.statusCode = 200
return {
message: msgSucceeded,
readBody: async () => {
return Promise.resolve(
`{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
)
}
}
})
.mockImplementationOnce(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 502
msgFailed.statusMessage = 'Bad Gateway'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve('💥')
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient(clientOptions)
const artifact = await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(artifact).toBeDefined()
expect(artifact.ok).toBe(true)
expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
expect(mockPost).toHaveBeenCalledTimes(2)
})
it('should fail if the request fails 5 times', async () => {
const mockPost = jest.fn(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 500
msgFailed.statusMessage = 'Internal Server Error'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve(`{"ok": false}`)
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient(clientOptions)
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
}).rejects.toThrowError(
'Failed to make request after 5 attempts: Failed request: (500) Internal Server Error'
)
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(mockPost).toHaveBeenCalledTimes(5)
})
it('should fail immediately if there is a non-retryable error', async () => {
const mockPost = jest.fn(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 401
msgFailed.statusMessage = 'Unauthorized'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve(`{"ok": false}`)
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient(clientOptions)
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
}).rejects.toThrowError(
'Received non-retryable error: Failed request: (401) Unauthorized'
)
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(mockPost).toHaveBeenCalledTimes(1)
})
it('should fail with a descriptive error', async () => {
// 409 duplicate error
const mockPost = jest.fn(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 409
msgFailed.statusMessage = 'Conflict'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve(
`{"msg": "an artifact with this name already exists on the workflow run"}`
)
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient(clientOptions)
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
}).rejects.toThrowError(
'Failed to CreateArtifact: Received non-retryable error: Failed request: (409) Conflict: an artifact with this name already exists on the workflow run'
)
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(mockPost).toHaveBeenCalledTimes(1)
})
it('should properly describe a network failure', async () => {
class FakeNodeError extends Error {
code: string
constructor(code: string) {
super()
this.code = code
}
}
const mockPost = jest.fn(() => {
throw new FakeNodeError('ENOTFOUND')
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient()
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
}).rejects.toThrowError(new NetworkError('ENOTFOUND').message)
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(mockPost).toHaveBeenCalledTimes(1)
})
it('should properly describe a usage error', async () => {
const mockPost = jest.fn(() => {
const msgFailed = new http.IncomingMessage(new net.Socket())
msgFailed.statusCode = 403
msgFailed.statusMessage = 'Forbidden'
return {
message: msgFailed,
readBody: async () => {
return Promise.resolve(
`{"msg": "insufficient usage to create artifact"}`
)
}
}
})
const mockHttpClient = (
HttpClient as unknown as jest.Mock
).mockImplementation(() => {
return {
post: mockPost
}
})
const client = internalArtifactTwirpClient()
await expect(async () => {
await client.CreateArtifact({
workflowRunBackendId: '1234',
workflowJobRunBackendId: '5678',
name: 'artifact',
version: 4
})
}).rejects.toThrowError(new UsageError().message)
expect(mockHttpClient).toHaveBeenCalledTimes(1)
expect(mockPost).toHaveBeenCalledTimes(1)
})
})

View File

@ -1,9 +0,0 @@
import * as core from '@actions/core'
// noopLogs mocks the console.log and core.* functions to prevent output in the console while testing
export const noopLogs = (): void => {
jest.spyOn(console, 'log').mockImplementation(() => {})
jest.spyOn(core, 'debug').mockImplementation(() => {})
jest.spyOn(core, 'info').mockImplementation(() => {})
jest.spyOn(core, 'warning').mockImplementation(() => {})
}

View File

@ -1,103 +0,0 @@
import * as config from '../src/internal/shared/config'
import os from 'os'
// Mock the 'os' module
jest.mock('os', () => ({
cpus: jest.fn()
}))
beforeEach(() => {
jest.resetModules()
})
describe('isGhes', () => {
it('should return false when the request domain is github.com', () => {
process.env.GITHUB_SERVER_URL = 'https://github.com'
expect(config.isGhes()).toBe(false)
})
it('should return false when the request domain ends with ghe.com', () => {
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.com'
expect(config.isGhes()).toBe(false)
})
it('should return false when the request domain ends with ghe.localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
expect(config.isGhes()).toBe(false)
})
it('should return false when the request domain ends with .localhost', () => {
process.env.GITHUB_SERVER_URL = 'https://github.localhost'
expect(config.isGhes()).toBe(false)
})
it('should return true when the request domain is specific to an enterprise', () => {
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
expect(config.isGhes()).toBe(true)
})
})
describe('uploadChunkTimeoutEnv', () => {
it('should return default 300000 when no env set', () => {
expect(config.getUploadChunkTimeout()).toBe(300000)
})
it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
expect(config.getUploadChunkTimeout()).toBe(150000)
})
it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
expect(() => {
config.getUploadChunkTimeout()
}).toThrow()
})
})
describe('uploadConcurrencyEnv', () => {
it('Concurrency defaults to 5', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
expect(config.getConcurrency()).toBe(5)
})
it('Concurrency maxes out at 300 on systems with many CPUs', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '301'
expect(config.getConcurrency()).toBe(300)
})
it('Concurrency can be set to 32 when cpu num is <= 4', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '32'
expect(config.getConcurrency()).toBe(32)
})
it('Concurrency can be set to 16 * CPU count when the CPU count is > 4', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '96'
expect(config.getConcurrency()).toBe(96)
})
it('Concurrency can be overridden by env var ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
expect(config.getConcurrency()).toBe(10)
})
it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
expect(() => {
config.getConcurrency()
}).toThrow()
})
it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
expect(() => {
config.getConcurrency()
}).toThrow()
})
})
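
Read together, these assertions fully specify the two config helpers: a 5-minute default chunk timeout overridable via ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS, and an upload concurrency that defaults to 5, scales with CPU count, and is capped at 300. A sketch consistent with the tests (not the package's actual source; the scaling rule and error wording are assumptions) could be:

import os from 'os'

export function getUploadChunkTimeout(): number {
  const timeout = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
  if (timeout === undefined) {
    return 300000 // 5 minutes by default
  }
  const ms = parseInt(timeout)
  if (isNaN(ms)) {
    throw new Error('Invalid ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS: must be a number')
  }
  return ms
}

export function getConcurrency(): number {
  const raw = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
  if (raw === undefined) {
    return 5 // default fan-out when the env var is unset
  }
  const concurrency = parseInt(raw)
  if (isNaN(concurrency) || concurrency < 1) {
    throw new Error('Invalid ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY: must be a positive integer')
  }
  // The ceiling scales with the host: 32 on small machines (<= 4 cores),
  // 16 per core on larger ones, never above a hard cap of 300.
  const numCPUs = os.cpus().length
  const cap = Math.min(300, numCPUs > 4 ? 16 * numCPUs : 32)
  return Math.min(concurrency, cap)
}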

View File

@@ -1,192 +0,0 @@
import * as github from '@actions/github'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import type {RequestInterface} from '@octokit/types'
import {
deleteArtifactInternal,
deleteArtifactPublic
} from '../src/internal/delete/delete-artifact'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
type MockedRequest = jest.MockedFunction<RequestInterface<object>>
type MockedDeleteArtifact = jest.MockedFunction<
RestEndpointMethods['actions']['deleteArtifact']
>
jest.mock('@actions/github', () => ({
getOctokit: jest.fn().mockReturnValue({
request: jest.fn(),
rest: {
actions: {
deleteArtifact: jest.fn()
}
}
})
}))
const fixtures = {
repo: 'toolkit',
owner: 'actions',
token: 'ghp_1234567890',
runId: 123,
backendIds: {
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
},
artifacts: [
{
id: 1,
name: 'my-artifact',
size: 456,
createdAt: new Date('2023-12-01')
},
{
id: 2,
name: 'my-artifact',
size: 456,
createdAt: new Date('2023-12-02')
}
]
}
describe('delete-artifact', () => {
beforeAll(() => {
noopLogs()
})
describe('public', () => {
it('should delete an artifact', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: {
artifacts: [
{
name: fixtures.artifacts[0].name,
id: fixtures.artifacts[0].id,
size_in_bytes: fixtures.artifacts[0].size,
created_at: fixtures.artifacts[0].createdAt.toISOString()
}
]
}
})
const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
.deleteArtifact as MockedDeleteArtifact
mockDeleteArtifact.mockResolvedValueOnce({
status: 204,
headers: {},
url: '',
data: null as never
})
const response = await deleteArtifactPublic(
fixtures.artifacts[0].name,
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token
)
expect(response).toEqual({
id: fixtures.artifacts[0].id
})
})
it('should fail if non-200 response', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: {
artifacts: [
{
name: fixtures.artifacts[0].name,
id: fixtures.artifacts[0].id,
size_in_bytes: fixtures.artifacts[0].size,
created_at: fixtures.artifacts[0].createdAt.toISOString()
}
]
}
})
const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
.deleteArtifact as MockedDeleteArtifact
mockDeleteArtifact.mockRejectedValue(new Error('boom'))
await expect(
deleteArtifactPublic(
fixtures.artifacts[0].name,
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token
)
).rejects.toThrow('boom')
})
})
describe('internal', () => {
beforeEach(() => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIds)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
})
it('should delete an artifact', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: fixtures.artifacts.map(artifact => ({
...fixtures.backendIds,
databaseId: artifact.id.toString(),
name: artifact.name,
size: artifact.size.toString(),
createdAt: Timestamp.fromDate(artifact.createdAt)
}))
})
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
.mockResolvedValue({
ok: true,
artifactId: fixtures.artifacts[0].id.toString()
})
const response = await deleteArtifactInternal(fixtures.artifacts[0].name)
expect(response).toEqual({
id: fixtures.artifacts[0].id
})
})
it('should fail if non-200 response', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: fixtures.artifacts.map(artifact => ({
...fixtures.backendIds,
databaseId: artifact.id.toString(),
name: artifact.name,
size: artifact.size.toString(),
createdAt: Timestamp.fromDate(artifact.createdAt)
}))
})
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
.mockRejectedValue(new Error('boom'))
await expect(
deleteArtifactInternal(fixtures.artifacts[0].name)
).rejects.toThrow('boom')
})
})
})

View File

@@ -1,614 +0,0 @@
import fs from 'fs'
import * as http from 'http'
import * as net from 'net'
import * as path from 'path'
import * as github from '@actions/github'
import {HttpClient} from '@actions/http-client'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import archiver from 'archiver'
import {
downloadArtifactInternal,
downloadArtifactPublic,
streamExtractExternal
} from '../src/internal/download/download-artifact'
import {getUserAgentString} from '../src/internal/shared/user-agent'
import {noopLogs} from './common'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as util from '../src/internal/shared/util'
type MockedDownloadArtifact = jest.MockedFunction<
RestEndpointMethods['actions']['downloadArtifact']
>
const testDir = path.join(__dirname, '_temp', 'download-artifact')
const fixtures = {
workspaceDir: path.join(testDir, 'workspace'),
exampleArtifact: {
path: path.join(testDir, 'artifact.zip'),
files: [
{
path: 'hello.txt',
content: 'Hello World!'
},
{
path: 'goodbye.txt',
content: 'Goodbye World!'
}
]
},
artifactID: 1234,
artifactName: 'my-artifact',
artifactSize: 123456,
repositoryOwner: 'actions',
repositoryName: 'toolkit',
token: 'ghp_1234567890',
blobStorageUrl: 'https://blob-storage.local?signed=true',
backendIds: {
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
}
}
jest.mock('@actions/github', () => ({
getOctokit: jest.fn().mockReturnValue({
rest: {
actions: {
downloadArtifact: jest.fn()
}
}
})
}))
jest.mock('@actions/http-client')
// Create a zip archive with the contents of the example artifact
const createTestArchive = async (): Promise<void> => {
const archive = archiver('zip', {
zlib: {level: 9}
})
for (const file of fixtures.exampleArtifact.files) {
archive.append(file.content, {name: file.path})
}
archive.finalize()
return new Promise((resolve, reject) => {
archive.pipe(fs.createWriteStream(fixtures.exampleArtifact.path))
archive.on('error', reject)
archive.on('finish', resolve)
})
}
const expectExtractedArchive = async (dir: string): Promise<void> => {
for (const file of fixtures.exampleArtifact.files) {
const filePath = path.join(dir, file.path)
expect(fs.readFileSync(filePath, 'utf8')).toEqual(file.content)
}
}
const setup = async (): Promise<void> => {
noopLogs()
await fs.promises.mkdir(testDir, {recursive: true})
await createTestArchive()
process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
}
const cleanup = async (): Promise<void> => {
jest.restoreAllMocks()
await fs.promises.rm(testDir, {recursive: true})
delete process.env['GITHUB_WORKSPACE']
}
const mockGetArtifactSuccess = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 200
message.push(fs.readFileSync(fixtures.exampleArtifact.path))
message.push(null)
return {
message
}
})
const mockGetArtifactFailure = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 500
message.push('Internal Server Error')
message.push(null)
return {
message
}
})
const mockGetArtifactMalicious = jest.fn(() => {
const message = new http.IncomingMessage(new net.Socket())
message.statusCode = 200
message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains entries with traversal paths like x/../../etc/hosts
message.push(null)
return {
message
}
})
describe('download-artifact', () => {
describe('public', () => {
beforeEach(setup)
afterEach(cleanup)
it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)
const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
await expectExtractedArchive(fixtures.workspaceDir)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
})
it('should not allow path traversal from malicious artifacts', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactMalicious
}
}
)
const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
// ensure the traversal entries were extracted inside the workspace rather than escaping it
expect(
fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
).toBe(true)
expect(
fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
).toBe(true)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
})
it('should successfully download an artifact to user defined path', async () => {
const customPath = path.join(testDir, 'custom')
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)
const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token,
{
path: customPath
}
)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
await expectExtractedArchive(customPath)
expect(response.downloadPath).toBe(customPath)
})
it('should fail if download artifact API does not respond with location', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {},
status: 302,
url: '',
data: Buffer.from('')
})
await expect(
downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
})
it('should fail if a blob storage chunk does not respond within 30s', async () => {
// mock the http client so the chunk response stalls past the 30s timeout
const msg = new http.IncomingMessage(new net.Socket())
msg.statusCode = 200
const mockGet = jest.fn(async () => {
return new Promise((resolve, reject) => {
// Reject with an error after 31 seconds
setTimeout(() => {
reject(new Error('Request timeout'))
}, 31000) // Timeout after 31 seconds
})
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGet
}
}
)
await expect(
streamExtractExternal(fixtures.blobStorageUrl, fixtures.workspaceDir)
).rejects.toBeInstanceOf(Error)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
}, 35000) // add longer timeout to allow for timer to run out
it('should fail if blob storage response is non-200 after 5 retries', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactFailure
}
}
)
await expect(
downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
).rejects.toBeInstanceOf(Error)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactFailure).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
expect(mockGetArtifactFailure).toHaveBeenCalledTimes(5)
}, 38000)
it('should retry if blob storage response is non-200 and then succeed with a 200', async () => {
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
.actions.downloadArtifact as MockedDownloadArtifact
downloadArtifactMock.mockResolvedValueOnce({
headers: {
location: fixtures.blobStorageUrl
},
status: 302,
url: '',
data: Buffer.from('')
})
const mockGetArtifact = jest
.fn(mockGetArtifactSuccess)
.mockImplementationOnce(mockGetArtifactFailure)
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifact
}
}
)
const response = await downloadArtifactPublic(
fixtures.artifactID,
fixtures.repositoryOwner,
fixtures.repositoryName,
fixtures.token
)
expect(downloadArtifactMock).toHaveBeenCalledWith({
owner: fixtures.repositoryOwner,
repo: fixtures.repositoryName,
artifact_id: fixtures.artifactID,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockGetArtifactFailure).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
expect(mockGetArtifactFailure).toHaveBeenCalledTimes(1)
expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
fixtures.blobStorageUrl
)
expect(mockGetArtifactSuccess).toHaveBeenCalledTimes(1)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
}, 28000)
})
describe('internal', () => {
beforeEach(async () => {
await setup()
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIds)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
})
afterEach(async () => {
await cleanup()
})
it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
const mockListArtifacts = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifactID.toString(),
name: fixtures.artifactName,
size: fixtures.artifactSize.toString()
}
]
})
const mockGetSignedArtifactURL = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
.mockReturnValue(
Promise.resolve({
signedUrl: fixtures.blobStorageUrl
})
)
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)
const response = await downloadArtifactInternal(fixtures.artifactID)
await expectExtractedArchive(fixtures.workspaceDir)
expect(response.downloadPath).toBe(fixtures.workspaceDir)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockListArtifacts).toHaveBeenCalledWith({
idFilter: {
value: fixtures.artifactID.toString()
},
...fixtures.backendIds
})
expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
...fixtures.backendIds,
name: fixtures.artifactName
})
})
it('should successfully download an artifact to user defined path', async () => {
const customPath = path.join(testDir, 'custom')
const mockListArtifacts = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifactID.toString(),
name: fixtures.artifactName,
size: fixtures.artifactSize.toString()
}
]
})
const mockGetSignedArtifactURL = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
.mockReturnValue(
Promise.resolve({
signedUrl: fixtures.blobStorageUrl
})
)
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactSuccess
}
}
)
const response = await downloadArtifactInternal(fixtures.artifactID, {
path: customPath
})
await expectExtractedArchive(customPath)
expect(response.downloadPath).toBe(customPath)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockListArtifacts).toHaveBeenCalledWith({
idFilter: {
value: fixtures.artifactID.toString()
},
...fixtures.backendIds
})
expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
...fixtures.backendIds,
name: fixtures.artifactName
})
})
it('should fail if download artifact API does not respond with location', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockRejectedValue(new Error('boom'))
await expect(
downloadArtifactInternal(fixtures.artifactID)
).rejects.toBeInstanceOf(Error)
})
it('should fail if blob storage response is non-200', async () => {
const mockListArtifacts = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifactID.toString(),
name: fixtures.artifactName,
size: fixtures.artifactSize.toString()
}
]
})
const mockGetSignedArtifactURL = jest
.spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
.mockReturnValue(
Promise.resolve({
signedUrl: fixtures.blobStorageUrl
})
)
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
() => {
return {
get: mockGetArtifactFailure
}
}
)
await expect(
downloadArtifactInternal(fixtures.artifactID)
).rejects.toBeInstanceOf(Error)
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
expect(mockListArtifacts).toHaveBeenCalledWith({
idFilter: {
value: fixtures.artifactID.toString()
},
...fixtures.backendIds
})
expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
...fixtures.backendIds,
name: fixtures.artifactName
})
})
})
})
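
The retry cases above imply a bounded retry loop around the blob-storage GET: up to five attempts in total, any non-200 status retried, and an error surfaced once attempts run out (the generous test timeouts suggest a delay between attempts). A rough sketch under those assumptions, with hypothetical names; only the five-attempt bound and retry-on-non-200 behavior are actually asserted:

// Hypothetical retry wrapper around an HttpClient-like object.
async function getWithRetry(
  client: {get(url: string): Promise<{message: {statusCode?: number}}>},
  url: string,
  maxAttempts = 5
): Promise<{message: {statusCode?: number}}> {
  let lastStatus: number | undefined
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    const response = await client.get(url)
    lastStatus = response.message.statusCode
    if (lastStatus === 200) {
      return response
    }
    if (attempt < maxAttempts) {
      // Assumed backoff between attempts; the exact schedule is not asserted.
      await new Promise(resolve => setTimeout(resolve, 1000 * 2 ** (attempt - 1)))
    }
  }
  throw new Error(
    `Blob storage request failed after ${maxAttempts} attempts (last status: ${lastStatus})`
  )
}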

View File

@@ -1,239 +0,0 @@
import * as github from '@actions/github'
import type {RequestInterface} from '@octokit/types'
import {
getArtifactInternal,
getArtifactPublic
} from '../src/internal/find/get-artifact'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {
ArtifactNotFoundError,
InvalidResponseError
} from '../src/internal/shared/errors'
type MockedRequest = jest.MockedFunction<RequestInterface<object>>
jest.mock('@actions/github', () => ({
getOctokit: jest.fn().mockReturnValue({
request: jest.fn()
})
}))
const fixtures = {
repo: 'toolkit',
owner: 'actions',
token: 'ghp_1234567890',
runId: 123,
backendIds: {
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
},
artifacts: [
{
id: 1,
name: 'my-artifact',
size: 456,
createdAt: new Date('2023-12-01')
},
{
id: 2,
name: 'my-artifact',
size: 456,
createdAt: new Date('2023-12-02')
}
]
}
describe('get-artifact', () => {
beforeAll(() => {
noopLogs()
})
describe('public', () => {
it('should return the artifact if it is found', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: {
artifacts: [
{
name: fixtures.artifacts[0].name,
id: fixtures.artifacts[0].id,
size_in_bytes: fixtures.artifacts[0].size,
created_at: fixtures.artifacts[0].createdAt.toISOString()
}
]
}
})
const response = await getArtifactPublic(
fixtures.artifacts[0].name,
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token
)
expect(response).toEqual({
artifact: fixtures.artifacts[0]
})
})
it('should return the latest artifact if multiple are found', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: {
artifacts: fixtures.artifacts.map(artifact => ({
name: artifact.name,
id: artifact.id,
size_in_bytes: artifact.size,
created_at: artifact.createdAt.toISOString()
}))
}
})
const response = await getArtifactPublic(
fixtures.artifacts[0].name,
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token
)
expect(response).toEqual({
artifact: fixtures.artifacts[1]
})
})
it('should fail if no artifacts are found', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: {
artifacts: []
}
})
const response = getArtifactPublic(
fixtures.artifacts[0].name,
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token
)
await expect(response).rejects.toThrowError(ArtifactNotFoundError)
})
it('should fail if non-200 response', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 404,
headers: {},
url: '',
data: {}
})
const response = getArtifactPublic(
fixtures.artifacts[0].name,
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token
)
await expect(response).rejects.toThrowError(InvalidResponseError)
})
})
describe('internal', () => {
beforeEach(() => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIds)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
})
it('should return the artifact if it is found', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: [
{
...fixtures.backendIds,
databaseId: fixtures.artifacts[0].id.toString(),
name: fixtures.artifacts[0].name,
size: fixtures.artifacts[0].size.toString(),
createdAt: Timestamp.fromDate(fixtures.artifacts[0].createdAt)
}
]
})
const response = await getArtifactInternal(fixtures.artifacts[0].name)
expect(response).toEqual({
artifact: fixtures.artifacts[0]
})
})
it('should return the latest artifact if multiple are found', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: fixtures.artifacts.map(artifact => ({
...fixtures.backendIds,
databaseId: artifact.id.toString(),
name: artifact.name,
size: artifact.size.toString(),
createdAt: Timestamp.fromDate(artifact.createdAt)
}))
})
const response = await getArtifactInternal(fixtures.artifacts[0].name)
expect(response).toEqual({
artifact: fixtures.artifacts[1]
})
})
it('should fail if no artifacts are found', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: []
})
const response = getArtifactInternal(fixtures.artifacts[0].name)
await expect(response).rejects.toThrowError(ArtifactNotFoundError)
})
it('should fail if non-200 response', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockRejectedValue(new Error('boom'))
const response = getArtifactInternal(fixtures.artifacts[0].name)
await expect(response).rejects.toThrow()
})
})
})
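
Both the public and internal suites assert the same tie-break: when several artifacts share a name, the one with the most recent createdAt wins. A hypothetical helper capturing that rule (the tests do not show the package's actual implementation):

import {Artifact} from '../src/internal/shared/interfaces'

// Assumes the caller has already handled the empty case; the tests above
// expect ArtifactNotFoundError when no artifacts match. Artifacts without
// a createdAt are treated as oldest.
function pickLatestArtifact(artifacts: Artifact[]): Artifact {
  return artifacts.reduce((latest, current) =>
    (current.createdAt?.getTime() ?? 0) > (latest.createdAt?.getTime() ?? 0)
      ? current
      : latest
  )
}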

View File

@@ -1,241 +0,0 @@
import * as github from '@actions/github'
import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
import {
listArtifactsInternal,
listArtifactsPublic
} from '../src/internal/find/list-artifacts'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {Artifact} from '../src/internal/shared/interfaces'
import {RequestInterface} from '@octokit/types'
type MockedRequest = jest.MockedFunction<RequestInterface<object>>
jest.mock('@actions/github', () => ({
getOctokit: jest.fn().mockReturnValue({
request: jest.fn(),
rest: {
actions: {
listWorkflowRunArtifacts: jest.fn()
}
}
})
}))
const artifactsToListResponse = (
artifacts: Artifact[]
): RestEndpointMethodTypes['actions']['listWorkflowRunArtifacts']['response']['data'] => {
return {
total_count: artifacts.length,
artifacts: artifacts.map(artifact => ({
name: artifact.name,
id: artifact.id,
size_in_bytes: artifact.size,
created_at: artifact.createdAt?.toISOString() || '',
run_id: fixtures.runId,
// unused fields for tests
url: '',
archive_download_url: '',
expired: false,
expires_at: '',
node_id: '',
run_url: '',
type: '',
updated_at: ''
}))
}
}
const fixtures = {
repo: 'toolkit',
owner: 'actions',
token: 'ghp_1234567890',
runId: 123,
backendIds: {
workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
},
artifacts: [
{
id: 1,
name: 'my-artifact',
size: 456,
createdAt: new Date('2023-12-01')
},
{
id: 2,
name: 'my-artifact',
size: 456,
createdAt: new Date('2023-12-02')
}
]
}
describe('list-artifact', () => {
beforeAll(() => {
noopLogs()
})
describe('public', () => {
it('should return a list of artifacts', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: artifactsToListResponse(fixtures.artifacts)
})
const response = await listArtifactsPublic(
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token,
false
)
expect(response).toEqual({
artifacts: fixtures.artifacts
})
})
it('should return the latest artifact when latest is specified', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: artifactsToListResponse(fixtures.artifacts)
})
const response = await listArtifactsPublic(
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token,
true
)
expect(response).toEqual({
artifacts: [fixtures.artifacts[1]]
})
})
it('can return empty artifacts', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockResolvedValueOnce({
status: 200,
headers: {},
url: '',
data: {
total_count: 0,
artifacts: []
}
})
const response = await listArtifactsPublic(
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token,
true
)
expect(response).toEqual({
artifacts: []
})
})
it('should fail if non-200 response', async () => {
const mockRequest = github.getOctokit(fixtures.token)
.request as MockedRequest
mockRequest.mockRejectedValueOnce(new Error('boom'))
await expect(
listArtifactsPublic(
fixtures.runId,
fixtures.owner,
fixtures.repo,
fixtures.token,
false
)
).rejects.toThrow('boom')
})
})
describe('internal', () => {
beforeEach(() => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIds)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
})
it('should return a list of artifacts', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: fixtures.artifacts.map(artifact => ({
...fixtures.backendIds,
databaseId: artifact.id.toString(),
name: artifact.name,
size: artifact.size.toString(),
createdAt: Timestamp.fromDate(artifact.createdAt)
}))
})
const response = await listArtifactsInternal(false)
expect(response).toEqual({
artifacts: fixtures.artifacts
})
})
it('should return the latest artifact when latest is specified', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: fixtures.artifacts.map(artifact => ({
...fixtures.backendIds,
databaseId: artifact.id.toString(),
name: artifact.name,
size: artifact.size.toString(),
createdAt: Timestamp.fromDate(artifact.createdAt)
}))
})
const response = await listArtifactsInternal(true)
expect(response).toEqual({
artifacts: [fixtures.artifacts[1]]
})
})
it('can return empty artifacts', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockResolvedValue({
artifacts: []
})
const response = await listArtifactsInternal(false)
expect(response).toEqual({
artifacts: []
})
})
it('should fail if non-200 response', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
.mockRejectedValue(new Error('boom'))
await expect(listArtifactsInternal(false)).rejects.toThrow('boom')
})
})
})

View File

@@ -1,75 +0,0 @@
import {
validateArtifactName,
validateFilePath
} from '../src/internal/upload/path-and-artifact-name-validation'
import {noopLogs} from './common'
describe('Path and artifact name validation', () => {
beforeAll(() => {
noopLogs()
})
it('Check Artifact Name for any invalid characters', () => {
const invalidNames = [
'my\\artifact',
'my/artifact',
'my"artifact',
'my:artifact',
'my<artifact',
'my>artifact',
'my|artifact',
'my*artifact',
'my?artifact',
''
]
for (const invalidName of invalidNames) {
expect(() => {
validateArtifactName(invalidName)
}).toThrow()
}
const validNames = [
'my-normal-artifact',
'myNormalArtifact',
'm¥ñðrmålÄr†ï£å¢†'
]
for (const validName of validNames) {
expect(() => {
validateArtifactName(validName)
}).not.toThrow()
}
})
it('Check Artifact File Path for any invalid characters', () => {
const invalidNames = [
'some/invalid"artifact/path',
'some/invalid:artifact/path',
'some/invalid<artifact/path',
'some/invalid>artifact/path',
'some/invalid|artifact/path',
'some/invalid*artifact/path',
'some/invalid?artifact/path',
'some/invalid\rartifact/path',
'some/invalid\nartifact/path',
'some/invalid\r\nartifact/path',
''
]
for (const invalidName of invalidNames) {
expect(() => {
validateFilePath(invalidName)
}).toThrow()
}
const validNames = [
'my/perfectly-normal/artifact-path',
'my/perfectly\\Normal/Artifact-path',
'm¥/ñðrmål/Är†ï£å¢†'
]
for (const validName of validNames) {
expect(() => {
validateFilePath(validName)
}).not.toThrow()
}
})
})
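
These cases outline the character policy: artifact names reject both path separators and the Windows-reserved set, while file paths additionally reject carriage returns and newlines but allow either separator. A sketch of that rule with assumed character sets and error wording; the tests pin down which inputs throw, not the exact implementation:

const invalidArtifactNameCharacters = ['\\', '/', '"', ':', '<', '>', '|', '*', '?']
const invalidFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?', '\r', '\n']

function assertValid(value: string, invalid: string[], kind: string): void {
  if (!value) {
    throw new Error(`The provided ${kind} must not be empty`)
  }
  for (const char of invalid) {
    if (value.includes(char)) {
      throw new Error(`The provided ${kind} contains invalid character ${JSON.stringify(char)}`)
    }
  }
}

export const validateArtifactName = (name: string): void =>
  assertValid(name, invalidArtifactNameCharacters, 'artifact name')
export const validateFilePath = (filePath: string): void =>
  assertValid(filePath, invalidFilePathCharacters, 'file path')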

View File

@@ -1,65 +0,0 @@
import {Timestamp} from '../src/generated'
import * as retention from '../src/internal/upload/retention'
describe('retention', () => {
beforeEach(() => {
delete process.env['GITHUB_RETENTION_DAYS']
})
it('should return the inputted retention days if it is less than the max retention days', () => {
// setup
const mockDate = new Date('2020-01-01')
jest.useFakeTimers().setSystemTime(mockDate)
process.env['GITHUB_RETENTION_DAYS'] = '90'
const exp = retention.getExpiration(30)
expect(exp).toBeDefined()
if (exp) {
const expDate = Timestamp.toDate(exp)
const expected = new Date()
expected.setDate(expected.getDate() + 30)
expect(expDate).toEqual(expected)
}
})
it('should return the max retention days if the inputted retention days is greater than the max retention days', () => {
// setup
const mockDate = new Date('2020-01-01')
jest.useFakeTimers().setSystemTime(mockDate)
process.env['GITHUB_RETENTION_DAYS'] = '90'
const exp = retention.getExpiration(120)
expect(exp).toBeDefined()
if (exp) {
const expDate = Timestamp.toDate(exp) // we check whether exp is defined above
const expected = new Date()
expected.setDate(expected.getDate() + 90)
expect(expDate).toEqual(expected)
}
})
it('should return undefined if the inputted retention days is undefined', () => {
const exp = retention.getExpiration()
expect(exp).toBeUndefined()
})
it('should return the inputted retention days if there is no max retention days', () => {
// setup
const mockDate = new Date('2020-01-01')
jest.useFakeTimers().setSystemTime(mockDate)
const exp = retention.getExpiration(30)
expect(exp).toBeDefined()
if (exp) {
const expDate = Timestamp.toDate(exp) // we check whether exp is defined above
const expected = new Date()
expected.setDate(expected.getDate() + 30)
expect(expDate).toEqual(expected)
}
})
})
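
The four cases above fix the retention contract: no requested retention means no expiration, GITHUB_RETENTION_DAYS acts as an upper bound when set, and the expiration is computed from the current time. A minimal sketch matching those assertions (not the package's source):

import {Timestamp} from '../src/generated'

export function getExpiration(retentionDays?: number): Timestamp | undefined {
  if (!retentionDays) {
    return undefined // no retention requested, no expiration recorded
  }
  const maxRetentionDays = process.env['GITHUB_RETENTION_DAYS']
    ? parseInt(process.env['GITHUB_RETENTION_DAYS'])
    : undefined
  if (maxRetentionDays && maxRetentionDays < retentionDays) {
    retentionDays = maxRetentionDays // clamp to the repository maximum
  }
  const expiration = new Date()
  expiration.setDate(expiration.getDate() + retentionDays)
  return Timestamp.fromDate(expiration)
}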

View File

@@ -1,373 +0,0 @@
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'
import {FilesNotFoundError} from '../src/internal/shared/errors'
import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import * as fs from 'fs'
import * as path from 'path'
import unzip from 'unzip-stream'
const uploadStreamMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
uploadStream: uploadStreamMock
}))
jest.mock('@azure/storage-blob', () => ({
BlobClient: jest.fn().mockImplementation(() => {
return {
getBlockBlobClient: blockBlobClientMock
}
})
}))
const fixtures = {
uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
files: [
{name: 'file1.txt', content: 'test 1 file content'},
{name: 'file2.txt', content: 'test 2 file content'},
{name: 'file3.txt', content: 'test 3 file content'},
{
name: 'real.txt',
content: 'from a symlink'
},
{
name: 'relative.txt',
content: 'from a symlink',
symlink: 'real.txt',
relative: true
},
{
name: 'absolute.txt',
content: 'from a symlink',
symlink: 'real.txt',
relative: false
}
],
backendIDs: {
workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
},
runtimeToken: 'test-token',
resultsServiceURL: 'http://results.local',
inputs: {
artifactName: 'test-artifact',
files: [
'/home/user/files/plz-upload/file1.txt',
'/home/user/files/plz-upload/file2.txt',
'/home/user/files/plz-upload/dir/file3.txt'
],
rootDirectory: '/home/user/files/plz-upload'
}
}
describe('upload-artifact', () => {
beforeAll(() => {
fs.mkdirSync(fixtures.uploadDirectory, {
recursive: true
})
for (const file of fixtures.files) {
if (file.symlink) {
let symlinkPath = file.symlink
if (!file.relative) {
symlinkPath = path.join(fixtures.uploadDirectory, file.symlink)
}
if (!fs.existsSync(path.join(fixtures.uploadDirectory, file.name))) {
fs.symlinkSync(
symlinkPath,
path.join(fixtures.uploadDirectory, file.name),
'file'
)
}
} else {
fs.writeFileSync(
path.join(fixtures.uploadDirectory, file.name),
file.content
)
}
}
})
beforeEach(() => {
noopLogs()
jest
.spyOn(uploadZipSpecification, 'validateRootDirectory')
.mockReturnValue()
jest
.spyOn(util, 'getBackendIdsFromToken')
.mockReturnValue(fixtures.backendIDs)
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockReturnValue(
fixtures.files.map(file => ({
sourcePath: path.join(fixtures.uploadDirectory, file.name),
destinationPath: file.name,
stats: new fs.Stats()
}))
)
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue(fixtures.resultsServiceURL)
})
afterEach(() => {
jest.restoreAllMocks()
})
it('should reject if there are no files to upload', async () => {
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockClear()
.mockReturnValue([])
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
})
it('should reject if no backend IDs are found', async () => {
jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow()
})
it('should return false if the creation request fails', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow()
})
it('should return false if blob storage upload is unsuccessful', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.com'
})
)
jest
.spyOn(blobUpload, 'uploadZipToBlobStorage')
.mockReturnValue(Promise.reject(new Error('boom')))
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow()
})
it('should reject if finalize artifact fails', async () => {
jest
.spyOn(zip, 'createZipUploadStream')
.mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.com'
})
)
jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
Promise.resolve({
uploadSize: 1234,
sha256Hash: 'test-sha256-hash'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow()
})
it('should successfully upload an artifact', async () => {
jest
.spyOn(uploadZipSpecification, 'getUploadZipSpecification')
.mockRestore()
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.local'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
artifactId: '1'
})
)
let loadedBytes = 0
const uploadedZip = path.join(
fixtures.uploadDirectory,
'..',
'uploaded.zip'
)
uploadStreamMock.mockImplementation(
async (
stream: NodeJS.ReadableStream,
bufferSize?: number,
maxConcurrency?: number,
options?: BlockBlobUploadStreamOptions
) => {
const {onProgress} = options || {}
if (fs.existsSync(uploadedZip)) {
fs.unlinkSync(uploadedZip)
}
const uploadedZipStream = fs.createWriteStream(uploadedZip)
onProgress?.({loadedBytes: 0})
return new Promise((resolve, reject) => {
stream.on('data', chunk => {
loadedBytes += chunk.length
uploadedZipStream.write(chunk)
onProgress?.({loadedBytes})
})
stream.on('end', () => {
onProgress?.({loadedBytes})
uploadedZipStream.end()
resolve({})
})
stream.on('error', err => {
reject(err)
})
})
}
)
const {id, size, digest} = await uploadArtifact(
fixtures.inputs.artifactName,
fixtures.files.map(file =>
path.join(fixtures.uploadDirectory, file.name)
),
fixtures.uploadDirectory
)
expect(id).toBe(1)
expect(size).toBe(loadedBytes)
expect(digest).toBeDefined()
expect(digest).toHaveLength(64)
const extractedDirectory = path.join(
fixtures.uploadDirectory,
'..',
'extracted'
)
if (fs.existsSync(extractedDirectory)) {
fs.rmdirSync(extractedDirectory, {recursive: true})
}
const extract = new Promise((resolve, reject) => {
fs.createReadStream(uploadedZip)
.pipe(unzip.Extract({path: extractedDirectory}))
.on('close', () => {
resolve(true)
})
.on('error', err => {
reject(err)
})
})
await expect(extract).resolves.toBe(true)
for (const file of fixtures.files) {
const filePath = path.join(extractedDirectory, file.name)
expect(fs.existsSync(filePath)).toBe(true)
expect(fs.readFileSync(filePath, 'utf8')).toBe(file.content)
}
})
it('should throw an error when blob chunk uploads are delayed', async () => {
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
signedUploadUrl: 'https://signed-upload-url.local'
})
)
jest
.spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
.mockReturnValue(
Promise.resolve({
ok: true,
artifactId: '1'
})
)
jest
.spyOn(config, 'getResultsServiceUrl')
.mockReturnValue('https://results.local')
jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)
uploadStreamMock.mockImplementation(
async (
stream: NodeJS.ReadableStream,
bufferSize?: number,
maxConcurrency?: number,
options?: BlockBlobUploadStreamOptions
) => {
const {onProgress, abortSignal} = options || {}
onProgress?.({loadedBytes: 0})
return new Promise(resolve => {
abortSignal?.addEventListener('abort', () => {
resolve({})
})
})
}
)
const uploadResp = uploadArtifact(
fixtures.inputs.artifactName,
fixtures.inputs.files,
fixtures.inputs.rootDirectory
)
await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
})
})

View File

@@ -1,326 +0,0 @@
import * as io from '../../io/src/io'
import * as path from 'path'
import {promises as fs} from 'fs'
import {
getUploadZipSpecification,
validateRootDirectory
} from '../src/internal/upload/upload-zip-specification'
import {noopLogs} from './common'
const root = path.join(__dirname, '_temp', 'upload-specification')
const goodItem1Path = path.join(
root,
'folder-a',
'folder-b',
'folder-c',
'good-item1.txt'
)
const goodItem2Path = path.join(root, 'folder-d', 'good-item2.txt')
const goodItem3Path = path.join(root, 'folder-d', 'good-item3.txt')
const goodItem4Path = path.join(root, 'folder-d', 'good-item4.txt')
const goodItem5Path = path.join(root, 'good-item5.txt')
const badItem1Path = path.join(
root,
'folder-a',
'folder-b',
'folder-c',
'bad-item1.txt'
)
const badItem2Path = path.join(root, 'folder-d', 'bad-item2.txt')
const badItem3Path = path.join(root, 'folder-f', 'bad-item3.txt')
const badItem4Path = path.join(root, 'folder-h', 'folder-i', 'bad-item4.txt')
const badItem5Path = path.join(root, 'folder-h', 'folder-i', 'bad-item5.txt')
const extraFileInFolderCPath = path.join(
root,
'folder-a',
'folder-b',
'folder-c',
'extra-file-in-folder-c.txt'
)
const amazingFileInFolderHPath = path.join(root, 'folder-h', 'amazing-item.txt')
const artifactFilesToUpload = [
goodItem1Path,
goodItem2Path,
goodItem3Path,
goodItem4Path,
goodItem5Path,
extraFileInFolderCPath,
amazingFileInFolderHPath
]
describe('Search', () => {
beforeAll(async () => {
noopLogs()
// clear temp directory
await io.rmRF(root)
await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-c'), {
recursive: true
})
await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-e'), {
recursive: true
})
await fs.mkdir(path.join(root, 'folder-d'), {
recursive: true
})
await fs.mkdir(path.join(root, 'folder-f'), {
recursive: true
})
await fs.mkdir(path.join(root, 'folder-g'), {
recursive: true
})
await fs.mkdir(path.join(root, 'folder-h', 'folder-i'), {
recursive: true
})
await fs.writeFile(goodItem1Path, 'good item1 file')
await fs.writeFile(goodItem2Path, 'good item2 file')
await fs.writeFile(goodItem3Path, 'good item3 file')
await fs.writeFile(goodItem4Path, 'good item4 file')
await fs.writeFile(goodItem5Path, 'good item5 file')
await fs.writeFile(badItem1Path, 'bad item1 file')
await fs.writeFile(badItem2Path, 'bad item2 file')
await fs.writeFile(badItem3Path, 'bad item3 file')
await fs.writeFile(badItem4Path, 'bad item4 file')
await fs.writeFile(badItem5Path, 'bad item5 file')
await fs.writeFile(extraFileInFolderCPath, 'extra file')
await fs.writeFile(amazingFileInFolderHPath, 'amazing file')
/*
Directory structure of files that get created:
root/
folder-a/
folder-b/
folder-c/
good-item1.txt
bad-item1.txt
extra-file-in-folder-c.txt
folder-e/
folder-d/
good-item2.txt
good-item3.txt
good-item4.txt
bad-item2.txt
folder-f/
bad-item3.txt
folder-g/
folder-h/
amazing-item.txt
folder-i/
bad-item4.txt
bad-item5.txt
good-item5.txt
*/
})
it('Upload Specification - Fail non-existent rootDirectory', async () => {
const invalidRootDirectory = path.join(
__dirname,
'_temp',
'upload-specification-invalid'
)
expect(() => {
validateRootDirectory(invalidRootDirectory)
}).toThrow(
`The provided rootDirectory ${invalidRootDirectory} does not exist`
)
})
it('Upload Specification - Fail invalid rootDirectory', async () => {
expect(() => {
validateRootDirectory(goodItem1Path)
}).toThrow(
`The provided rootDirectory ${goodItem1Path} is not a valid directory`
)
})
it('Upload Specification - File does not exist', async () => {
const fakeFilePath = path.join(
'folder-a',
'folder-b',
'non-existent-file.txt'
)
expect(() => {
getUploadZipSpecification([fakeFilePath], root)
}).toThrow(`File ${fakeFilePath} does not exist`)
})
it('Upload Specification - Non parent directory', async () => {
const folderADirectory = path.join(root, 'folder-a')
const artifactFiles = [
goodItem1Path,
badItem1Path,
extraFileInFolderCPath,
goodItem5Path
]
expect(() => {
getUploadZipSpecification(artifactFiles, folderADirectory)
}).toThrow(
`The rootDirectory: ${folderADirectory} is not a parent directory of the file: ${goodItem5Path}`
)
})
it('Upload Specification - Success', async () => {
const specifications = getUploadZipSpecification(
artifactFilesToUpload,
root
)
expect(specifications.length).toEqual(7)
const absolutePaths = specifications.map(item => item.sourcePath)
expect(absolutePaths).toContain(goodItem1Path)
expect(absolutePaths).toContain(goodItem2Path)
expect(absolutePaths).toContain(goodItem3Path)
expect(absolutePaths).toContain(goodItem4Path)
expect(absolutePaths).toContain(goodItem5Path)
expect(absolutePaths).toContain(extraFileInFolderCPath)
expect(absolutePaths).toContain(amazingFileInFolderHPath)
for (const specification of specifications) {
if (specification.sourcePath === goodItem1Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
)
} else if (specification.sourcePath === goodItem2Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item2.txt')
)
} else if (specification.sourcePath === goodItem3Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item3.txt')
)
} else if (specification.sourcePath === goodItem4Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item4.txt')
)
} else if (specification.sourcePath === goodItem5Path) {
expect(specification.destinationPath).toEqual(
path.join('/good-item5.txt')
)
} else if (specification.sourcePath === extraFileInFolderCPath) {
expect(specification.destinationPath).toEqual(
path.join(
'/folder-a',
'folder-b',
'folder-c',
'extra-file-in-folder-c.txt'
)
)
} else if (specification.sourcePath === amazingFileInFolderHPath) {
expect(specification.destinationPath).toEqual(
path.join('/folder-h', 'amazing-item.txt')
)
} else {
throw new Error(
'Invalid specification found. This should never be reached'
)
}
}
})
it('Upload Specification - Success with extra slash', async () => {
const rootWithSlash = `${root}/`
const specifications = getUploadZipSpecification(
artifactFilesToUpload,
rootWithSlash
)
expect(specifications.length).toEqual(7)
const absolutePaths = specifications.map(item => item.sourcePath)
expect(absolutePaths).toContain(goodItem1Path)
expect(absolutePaths).toContain(goodItem2Path)
expect(absolutePaths).toContain(goodItem3Path)
expect(absolutePaths).toContain(goodItem4Path)
expect(absolutePaths).toContain(goodItem5Path)
expect(absolutePaths).toContain(extraFileInFolderCPath)
expect(absolutePaths).toContain(amazingFileInFolderHPath)
for (const specification of specifications) {
if (specification.sourcePath === goodItem1Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
)
} else if (specification.sourcePath === goodItem2Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item2.txt')
)
} else if (specification.sourcePath === goodItem3Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item3.txt')
)
} else if (specification.sourcePath === goodItem4Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-d', 'good-item4.txt')
)
} else if (specification.sourcePath === goodItem5Path) {
expect(specification.destinationPath).toEqual(
path.join('/good-item5.txt')
)
} else if (specification.sourcePath === extraFileInFolderCPath) {
expect(specification.destinationPath).toEqual(
path.join(
'/folder-a',
'folder-b',
'folder-c',
'extra-file-in-folder-c.txt'
)
)
} else if (specification.sourcePath === amazingFileInFolderHPath) {
expect(specification.destinationPath).toEqual(
path.join('/folder-h', 'amazing-item.txt')
)
} else {
throw new Error(
'Invalid specification found. This should never be reached'
)
}
}
})
it('Upload Specification - Empty Directories are included', async () => {
const folderEPath = path.join(root, 'folder-a', 'folder-b', 'folder-e')
const filesWithDirectory = [goodItem1Path, folderEPath]
const specifications = getUploadZipSpecification(filesWithDirectory, root)
expect(specifications.length).toEqual(2)
const absolutePaths = specifications.map(item => item.sourcePath)
expect(absolutePaths).toContain(goodItem1Path)
expect(absolutePaths).toContain(null)
for (const specification of specifications) {
if (specification.sourcePath === goodItem1Path) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
)
} else if (specification.sourcePath === null) {
expect(specification.destinationPath).toEqual(
path.join('/folder-a', 'folder-b', 'folder-e')
)
} else {
throw new Error(
'Invalid specification found. This should never be reached'
)
}
}
})
it('Upload Specification - Includes symlinks', async () => {
const targetPath = path.join(root, 'link-dir', 'symlink-me.txt')
await fs.mkdir(path.dirname(targetPath), {recursive: true})
await fs.writeFile(targetPath, 'symlink file content')
const uploadPath = path.join(root, 'upload-dir', 'symlink.txt')
await fs.mkdir(path.dirname(uploadPath), {recursive: true})
await fs.symlink(targetPath, uploadPath, 'file')
const specifications = getUploadZipSpecification([uploadPath], root)
expect(specifications.length).toEqual(1)
expect(specifications[0].sourcePath).toEqual(uploadPath)
expect(specifications[0].destinationPath).toEqual(
path.join('/upload-dir', 'symlink.txt')
)
expect(specifications[0].stats.isSymbolicLink()).toBe(true)
})
})
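
The success cases encode the mapping rule: each file's destinationPath is its path relative to the rootDirectory, rooted with a leading separator, and an entry whose sourcePath is null denotes an empty directory. A sketch of the per-file mapping implied by those assertions (hypothetical helper, not the package's code):

import * as fs from 'fs'
import * as path from 'path'

function toSpecification(
  file: string,
  rootDirectory: string
): {sourcePath: string; destinationPath: string; stats: fs.Stats} {
  const stats = fs.lstatSync(file) // lstat, so symlinks report isSymbolicLink()
  // path.join('/', ...) roots the relative path and tolerates a trailing
  // slash on rootDirectory, matching the "extra slash" case above.
  const destinationPath = path.join('/', path.relative(rootDirectory, file))
  return {sourcePath: file, destinationPath, stats}
}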

View File

@@ -1,219 +0,0 @@
import * as config from '../src/internal/shared/config'
import * as util from '../src/internal/shared/util'
import {maskSigUrl, maskSecretUrls} from '../src/internal/shared/util'
import {setSecret, debug} from '@actions/core'
export const testRuntimeToken =
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'
describe('get-backend-ids-from-token', () => {
it('should return backend ids when the token is valid', () => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(testRuntimeToken)
const backendIds = util.getBackendIdsFromToken()
expect(backendIds.workflowRunBackendId).toBe(
'ce7f54c7-61c7-4aae-887f-30da475f5f1a'
)
expect(backendIds.workflowJobRunBackendId).toBe(
'ca395085-040a-526b-2ce8-bdc85f692774'
)
})
it("should throw an error when the token doesn't have the right scope", () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCIsImlhdCI6MTUxNjIzOTAyMn0.K0IEoULZteGevF38G94xiaA8zcZ5UlKWfGfqE6q3dhw'
)
expect(util.getBackendIdsFromToken).toThrowError(
'Failed to get backend IDs: The provided JWT token is invalid'
)
})
it('should throw an error when the token has a malformed scope', () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhIiwiaWF0IjoxNTE2MjM5MDIyfQ.7D0_LRfRFRZFImHQ7GxH2S6ZyFjjZ5U0ujjGCfle1XE'
)
expect(util.getBackendIdsFromToken).toThrowError(
'Failed to get backend IDs: The provided JWT token is invalid'
)
})
it('should throw an error when the token is in an invalid format', () => {
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('token')
expect(util.getBackendIdsFromToken).toThrowError('Invalid token specified')
})
it("should throw an error when the token doesn't have the right field", () => {
jest
.spyOn(config, 'getRuntimeToken')
.mockReturnValue(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'
)
expect(util.getBackendIdsFromToken).toThrowError(
'Failed to get backend IDs: The provided JWT token is invalid'
)
})
})
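
The happy-path token above decodes to an scp claim of space-separated scopes, one of which is Actions.Results:<workflowRunBackendId>:<workflowJobRunBackendId>; the failure cases cover a missing Results scope, a scope with only one ID, and an undecodable token. A sketch of that parsing, assuming a JWT decoding dependency such as jwt-decode (whose own 'Invalid token specified' error would explain the malformed-token case):

import jwtDecode from 'jwt-decode' // assumed dependency; throws on garbage input

interface BackendIds {
  workflowRunBackendId: string
  workflowJobRunBackendId: string
}

function parseBackendIds(token: string): BackendIds {
  const decoded = jwtDecode<{scp?: string}>(token)
  const scopes = decoded.scp?.split(' ') ?? []
  const resultsScope = scopes.find(scope => scope.startsWith('Actions.Results:'))
  const parts = resultsScope?.split(':')
  if (!parts || parts.length !== 3) {
    throw new Error('Failed to get backend IDs: The provided JWT token is invalid')
  }
  return {workflowRunBackendId: parts[1], workflowJobRunBackendId: parts[2]}
}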
jest.mock('@actions/core')
describe('maskSigUrl', () => {
beforeEach(() => {
jest.clearAllMocks()
})
it('does nothing if no sig parameter is present', () => {
const url = 'https://example.com'
maskSigUrl(url)
expect(setSecret).not.toHaveBeenCalled()
})
it('masks the sig parameter in the middle of the URL and sets it as a secret', () => {
const url = 'https://example.com/?param1=value1&sig=12345&param2=value2'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('12345')
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
})
it('does nothing if the URL is empty', () => {
const url = ''
maskSigUrl(url)
expect(setSecret).not.toHaveBeenCalled()
})
it('handles URLs with fragments', () => {
const url = 'https://example.com?sig=12345#fragment'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('12345')
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
})
})
describe('maskSigUrl handles special characters in signatures', () => {
beforeEach(() => {
jest.clearAllMocks()
})
it('handles signatures with slashes', () => {
const url = 'https://example.com/?sig=abc/123'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('abc/123')
expect(setSecret).toHaveBeenCalledWith('abc%2F123')
})
it('handles signatures with plus signs', () => {
const url = 'https://example.com/?sig=abc+123'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('abc 123')
expect(setSecret).toHaveBeenCalledWith('abc%20123')
})
it('handles signatures with equals signs', () => {
const url = 'https://example.com/?sig=abc=123'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('abc=123')
expect(setSecret).toHaveBeenCalledWith('abc%3D123')
})
it('handles already percent-encoded signatures', () => {
const url = 'https://example.com/?sig=abc%2F123%3D'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('abc/123=')
expect(setSecret).toHaveBeenCalledWith('abc%2F123%3D')
})
it('handles complex Azure SAS signatures', () => {
const url =
'https://example.com/container/file.txt?sig=nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D&se=2023-12-31'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith(
'nXyQIUj//06Cxt80pBRYiiJlYqtPYg5sz/vEh5iHAhw='
)
expect(setSecret).toHaveBeenCalledWith(
'nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D'
)
})
it('handles signatures with multiple special characters', () => {
const url = 'https://example.com/?sig=a/b+c=d&e=f'
maskSigUrl(url)
expect(setSecret).toHaveBeenCalledWith('a/b c=d')
expect(setSecret).toHaveBeenCalledWith('a%2Fb%20c%3Dd')
})
})
describe('maskSecretUrls', () => {
beforeEach(() => {
jest.clearAllMocks()
})
it('masks sig parameters in signed_upload_url and signed_url', () => {
const body = {
signed_upload_url: 'https://upload.com?sig=upload123',
signed_url: 'https://download.com?sig=download123'
}
maskSecretUrls(body)
expect(setSecret).toHaveBeenCalledWith('upload123')
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
expect(setSecret).toHaveBeenCalledWith('download123')
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
})
it('handles case where only upload_url is present', () => {
const body = {
signed_upload_url: 'https://upload.com?sig=upload123'
}
maskSecretUrls(body)
expect(setSecret).toHaveBeenCalledWith('upload123')
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
})
it('handles case where only download_url is present', () => {
const body = {
signed_url: 'https://download.com?sig=download123'
}
maskSecretUrls(body)
expect(setSecret).toHaveBeenCalledWith('download123')
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
})
it('handles case where URLs do not contain sig parameters', () => {
const body = {
signed_upload_url: 'https://upload.com?token=abc',
signed_url: 'https://download.com?token=xyz'
}
maskSecretUrls(body)
expect(setSecret).not.toHaveBeenCalled()
})
it('handles empty string URLs', () => {
const body = {
signed_upload_url: '',
signed_url: ''
}
maskSecretUrls(body)
expect(setSecret).not.toHaveBeenCalled()
})
it('does nothing if body is not an object or is null', () => {
maskSecretUrls(null)
expect(debug).toHaveBeenCalledWith('body is not an object or is null')
expect(setSecret).not.toHaveBeenCalled()
})
it('does nothing if signed_upload_url and signed_url are not strings', () => {
const body = {
signed_upload_url: 123,
signed_url: 456
}
maskSecretUrls(body)
expect(setSecret).not.toHaveBeenCalled()
})
})
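Taken together, these tests pin down the masking behavior: the `sig` query parameter is registered as a secret in both its decoded and percent-encoded forms, and empty, unparsable, or non-string inputs are ignored. A minimal sketch consistent with the tests (an assumed shape, not the shipped implementation):
```typescript
import {debug, setSecret} from '@actions/core'

// Mask the `sig` query parameter of a URL in both decoded and
// percent-encoded forms so neither variant can leak into logs.
export function maskSigUrl(url: string): void {
  if (!url) return
  try {
    const sig = new URL(url).searchParams.get('sig')
    if (sig) {
      setSecret(sig)
      setSecret(encodeURIComponent(sig))
    }
  } catch {
    debug(`Failed to parse URL: ${url}`)
  }
}

export function maskSecretUrls(body: unknown): void {
  if (typeof body !== 'object' || body === null) {
    debug('body is not an object or is null')
    return
  }
  const record = body as Record<string, unknown>
  for (const key of ['signed_upload_url', 'signed_url']) {
    const value = record[key]
    if (typeof value === 'string') {
      maskSigUrl(value)
    }
  }
}
```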

View File

@ -1,62 +0,0 @@
# Frequently Asked Questions
- [Frequently Asked Questions](#frequently-asked-questions)
- [Supported Characters](#supported-characters)
- [Compression? ZIP? How is my artifact stored?](#compression-zip-how-is-my-artifact-stored)
- [Which versions of the artifacts packages are compatible?](#which-versions-of-the-artifacts-packages-are-compatible)
- [How long will my artifact be available?](#how-long-will-my-artifact-be-available)
## Supported Characters
When uploading an artifact, neither the `name` parameter nor the paths specified in `files` may contain any of the following characters. If any are present in `name` or `files`, the artifact will be rejected by the server and the upload will fail. These characters are not allowed due to limitations and restrictions of certain file systems, such as NTFS. To maintain platform-agnostic behavior, a character that is unsupported on any individual filesystem/platform is not supported on any filesystem/platform.
- "
- :
- <
- \>
- |
- \*
- ?
In addition to the characters above, the `name` parameter also cannot include the following
- \
- /
## Compression? ZIP? How is my artifact stored?
When creating an artifact, the files are compressed on the fly and streamed into a ZIP archive. Because the entries are stored in a ZIP, Zlib can compress them at varying levels.
The value can range from 0 to 9:
- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression
Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
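As an illustration, here is a sketch of disabling compression through the `compressionLevel` upload option (the artifact name and file paths are placeholders):
```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function uploadWithoutCompression(): Promise<void> {
  const client = new DefaultArtifactClient()
  // Level 0 skips Zlib compression, which is significantly faster for
  // large files that are already compressed (videos, archives, binaries).
  const {id, size} = await client.uploadArtifact(
    'large-binaries', // artifact name
    ['dist/app.bin'], // files to upload (placeholder path)
    'dist', // root directory the file paths are relative to
    {compressionLevel: 0}
  )
  console.log(`uploaded artifact ${id} (${size} bytes)`)
}
```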
## Which versions of the artifacts packages are compatible?
[actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact) leverage the [GitHub Actions toolkit](https://github.com/actions/toolkit) and are typically used together to upload and download artifacts in your workflows.
| upload-artifact | download-artifact | toolkit |
|---|---|---|
| v4 | v4 | v2 |
| < v3 | < v3 | < v1 |
Use matching versions of `actions/upload-artifact` and `actions/download-artifact` to ensure compatibility.
In your GitHub Actions workflow YAML file, you specify the version of the actions you want to use. For example:
```yaml
uses: actions/upload-artifact@v4
# ...
uses: actions/download-artifact@v4
# ...
```
**Release Notes:**
Check the release notes for each repository to see if there are any specific notes about compatibility or changes in behavior.
## How long will my artifact be available?
The default retention period is **90 days**. For more information, visit: https://github.com/actions/upload-artifact?tab=readme-ov-file#retention-period

View File

@ -1,43 +0,0 @@
@actions/artifact
# @actions/artifact
## Table of contents
### Classes
- [ArtifactNotFoundError](classes/ArtifactNotFoundError.md)
- [DefaultArtifactClient](classes/DefaultArtifactClient.md)
- [FilesNotFoundError](classes/FilesNotFoundError.md)
- [GHESNotSupportedError](classes/GHESNotSupportedError.md)
- [InvalidResponseError](classes/InvalidResponseError.md)
- [NetworkError](classes/NetworkError.md)
- [UsageError](classes/UsageError.md)
### Interfaces
- [Artifact](interfaces/Artifact.md)
- [ArtifactClient](interfaces/ArtifactClient.md)
- [DeleteArtifactResponse](interfaces/DeleteArtifactResponse.md)
- [DownloadArtifactOptions](interfaces/DownloadArtifactOptions.md)
- [DownloadArtifactResponse](interfaces/DownloadArtifactResponse.md)
- [FindOptions](interfaces/FindOptions.md)
- [GetArtifactResponse](interfaces/GetArtifactResponse.md)
- [ListArtifactsOptions](interfaces/ListArtifactsOptions.md)
- [ListArtifactsResponse](interfaces/ListArtifactsResponse.md)
- [UploadArtifactOptions](interfaces/UploadArtifactOptions.md)
- [UploadArtifactResponse](interfaces/UploadArtifactResponse.md)
### Variables
- [default](README.md#default)
## Variables
### default
`Const` **default**: [`ArtifactClient`](interfaces/ArtifactClient.md)
#### Defined in
[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7)

View File

@ -1,169 +0,0 @@
[@actions/artifact](../README.md) / ArtifactNotFoundError
# Class: ArtifactNotFoundError
## Hierarchy
- `Error`
**`ArtifactNotFoundError`**
## Table of contents
### Constructors
- [constructor](ArtifactNotFoundError.md#constructor)
### Properties
- [message](ArtifactNotFoundError.md#message)
- [name](ArtifactNotFoundError.md#name)
- [stack](ArtifactNotFoundError.md#stack)
- [prepareStackTrace](ArtifactNotFoundError.md#preparestacktrace)
- [stackTraceLimit](ArtifactNotFoundError.md#stacktracelimit)
### Methods
- [captureStackTrace](ArtifactNotFoundError.md#capturestacktrace)
## Constructors
### constructor
**new ArtifactNotFoundError**(`message?`): [`ArtifactNotFoundError`](ArtifactNotFoundError.md)
#### Parameters
| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'Artifact not found'` |
#### Returns
[`ArtifactNotFoundError`](ArtifactNotFoundError.md)
#### Overrides
Error.constructor
#### Defined in
[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24)
## Properties
### message
**message**: `string`
#### Inherited from
Error.message
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1068
___
### name
**name**: `string`
#### Inherited from
Error.name
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1067
___
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1069
___
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
#### Type declaration
▸ (`err`, `stackTraces`): `any`
Optional override for formatting stack traces
##### Parameters
| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Inherited from
Error.prepareStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:11
___
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
#### Defined in
node_modules/@types/node/globals.d.ts:13
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:4

View File

@ -1,193 +0,0 @@
[@actions/artifact](../README.md) / DefaultArtifactClient
# Class: DefaultArtifactClient
The default artifact client that is used by the artifact action(s).
## Implements
- [`ArtifactClient`](../interfaces/ArtifactClient.md)
## Table of contents
### Constructors
- [constructor](DefaultArtifactClient.md#constructor)
### Methods
- [deleteArtifact](DefaultArtifactClient.md#deleteartifact)
- [downloadArtifact](DefaultArtifactClient.md#downloadartifact)
- [getArtifact](DefaultArtifactClient.md#getartifact)
- [listArtifacts](DefaultArtifactClient.md#listartifacts)
- [uploadArtifact](DefaultArtifactClient.md#uploadartifact)
## Constructors
### constructor
**new DefaultArtifactClient**(): [`DefaultArtifactClient`](DefaultArtifactClient.md)
#### Returns
[`DefaultArtifactClient`](DefaultArtifactClient.md)
## Methods
### deleteArtifact
**deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>
Delete an Artifact
If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the delete behavior |
#### Returns
`Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>
single DeleteArtifactResponse object
#### Implementation of
[ArtifactClient](../interfaces/ArtifactClient.md).[deleteArtifact](../interfaces/ArtifactClient.md#deleteartifact)
#### Defined in
[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248)
___
### downloadArtifact
**downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>
Downloads an artifact and unzips the content.
If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](../interfaces/DownloadArtifactOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the download behavior |
#### Returns
`Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>
single DownloadArtifactResponse object
#### Implementation of
[ArtifactClient](../interfaces/ArtifactClient.md).[downloadArtifact](../interfaces/ArtifactClient.md#downloadartifact)
#### Defined in
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138)
___
### getArtifact
**getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>
Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.
If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow creating multiple artifacts with the same name in the same workflow run.
It is still possible to end up with multiple artifacts of the same name in one workflow run when older versions of upload-artifact (v1, v2, and v3) or `@actions/artifact` < v2 were used, or when the run is a rerun.
If there are multiple artifacts with the same name in the same workflow run, this function will return the latest artifact that matches the name.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the get behavior |
#### Returns
`Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>
#### Implementation of
[ArtifactClient](../interfaces/ArtifactClient.md).[getArtifact](../interfaces/ArtifactClient.md#getartifact)
#### Defined in
[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212)
___
### listArtifacts
**listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>
Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.
If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](../interfaces/ListArtifactsOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the list behavior |
#### Returns
`Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>
ListArtifactResponse object
#### Implementation of
[ArtifactClient](../interfaces/ArtifactClient.md).[listArtifacts](../interfaces/ArtifactClient.md#listartifacts)
#### Defined in
[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176)
___
### uploadArtifact
**uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>
Uploads an artifact.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](../interfaces/UploadArtifactOptions.md) | Extra options for customizing the upload behavior |
#### Returns
`Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>
single UploadArtifactResponse object
#### Implementation of
[ArtifactClient](../interfaces/ArtifactClient.md).[uploadArtifact](../interfaces/ArtifactClient.md#uploadartifact)
#### Defined in
[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113)
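Putting the methods above together, a short usage sketch of the client (the artifact name and paths are placeholders):
```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function roundTrip(): Promise<void> {
  const client = new DefaultArtifactClient()

  // Upload two files rooted at ./out
  const {id} = await client.uploadArtifact(
    'test-results',
    ['out/results.json', 'out/log.txt'],
    'out'
  )

  // Find the artifact by name within the current run
  const {artifact} = await client.getArtifact('test-results')
  console.log(`found ${artifact.name} (${artifact.size} bytes)`)

  // Download by id; without a path option this goes to GITHUB_WORKSPACE
  if (id !== undefined) {
    const {downloadPath} = await client.downloadArtifact(id)
    console.log(`downloaded to ${downloadPath}`)
  }

  // Clean up
  await client.deleteArtifact('test-results')
}
```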

View File

@ -1,180 +0,0 @@
[@actions/artifact](../README.md) / FilesNotFoundError
# Class: FilesNotFoundError
## Hierarchy
- `Error`
**`FilesNotFoundError`**
## Table of contents
### Constructors
- [constructor](FilesNotFoundError.md#constructor)
### Properties
- [files](FilesNotFoundError.md#files)
- [message](FilesNotFoundError.md#message)
- [name](FilesNotFoundError.md#name)
- [stack](FilesNotFoundError.md#stack)
- [prepareStackTrace](FilesNotFoundError.md#preparestacktrace)
- [stackTraceLimit](FilesNotFoundError.md#stacktracelimit)
### Methods
- [captureStackTrace](FilesNotFoundError.md#capturestacktrace)
## Constructors
### constructor
**new FilesNotFoundError**(`files?`): [`FilesNotFoundError`](FilesNotFoundError.md)
#### Parameters
| Name | Type | Default value |
| :------ | :------ | :------ |
| `files` | `string`[] | `[]` |
#### Returns
[`FilesNotFoundError`](FilesNotFoundError.md)
#### Overrides
Error.constructor
#### Defined in
[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4)
## Properties
### files
**files**: `string`[]
#### Defined in
[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2)
___
### message
**message**: `string`
#### Inherited from
Error.message
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1068
___
### name
**name**: `string`
#### Inherited from
Error.name
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1067
___
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1069
___
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
#### Type declaration
▸ (`err`, `stackTraces`): `any`
Optional override for formatting stack traces
##### Parameters
| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Inherited from
Error.prepareStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:11
___
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
#### Defined in
node_modules/@types/node/globals.d.ts:13
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:4

View File

@ -1,169 +0,0 @@
[@actions/artifact](../README.md) / GHESNotSupportedError
# Class: GHESNotSupportedError
## Hierarchy
- `Error`
**`GHESNotSupportedError`**
## Table of contents
### Constructors
- [constructor](GHESNotSupportedError.md#constructor)
### Properties
- [message](GHESNotSupportedError.md#message)
- [name](GHESNotSupportedError.md#name)
- [stack](GHESNotSupportedError.md#stack)
- [prepareStackTrace](GHESNotSupportedError.md#preparestacktrace)
- [stackTraceLimit](GHESNotSupportedError.md#stacktracelimit)
### Methods
- [captureStackTrace](GHESNotSupportedError.md#capturestacktrace)
## Constructors
### constructor
**new GHESNotSupportedError**(`message?`): [`GHESNotSupportedError`](GHESNotSupportedError.md)
#### Parameters
| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'` |
#### Returns
[`GHESNotSupportedError`](GHESNotSupportedError.md)
#### Overrides
Error.constructor
#### Defined in
[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31)
## Properties
### message
**message**: `string`
#### Inherited from
Error.message
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1068
___
### name
**name**: `string`
#### Inherited from
Error.name
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1067
___
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1069
___
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
#### Type declaration
▸ (`err`, `stackTraces`): `any`
Optional override for formatting stack traces
##### Parameters
| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Inherited from
Error.prepareStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:11
___
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
#### Defined in
node_modules/@types/node/globals.d.ts:13
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:4

View File

@ -1,169 +0,0 @@
[@actions/artifact](../README.md) / InvalidResponseError
# Class: InvalidResponseError
## Hierarchy
- `Error`
**`InvalidResponseError`**
## Table of contents
### Constructors
- [constructor](InvalidResponseError.md#constructor)
### Properties
- [message](InvalidResponseError.md#message)
- [name](InvalidResponseError.md#name)
- [stack](InvalidResponseError.md#stack)
- [prepareStackTrace](InvalidResponseError.md#preparestacktrace)
- [stackTraceLimit](InvalidResponseError.md#stacktracelimit)
### Methods
- [captureStackTrace](InvalidResponseError.md#capturestacktrace)
## Constructors
### constructor
**new InvalidResponseError**(`message`): [`InvalidResponseError`](InvalidResponseError.md)
#### Parameters
| Name | Type |
| :------ | :------ |
| `message` | `string` |
#### Returns
[`InvalidResponseError`](InvalidResponseError.md)
#### Overrides
Error.constructor
#### Defined in
[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17)
## Properties
### message
**message**: `string`
#### Inherited from
Error.message
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1068
___
### name
**name**: `string`
#### Inherited from
Error.name
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1067
___
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1069
___
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
#### Type declaration
▸ (`err`, `stackTraces`): `any`
Optional override for formatting stack traces
##### Parameters
| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Inherited from
Error.prepareStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:11
___
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
#### Defined in
node_modules/@types/node/globals.d.ts:13
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:4

View File

@ -1,201 +0,0 @@
[@actions/artifact](../README.md) / NetworkError
# Class: NetworkError
## Hierarchy
- `Error`
**`NetworkError`**
## Table of contents
### Constructors
- [constructor](NetworkError.md#constructor)
### Properties
- [code](NetworkError.md#code)
- [message](NetworkError.md#message)
- [name](NetworkError.md#name)
- [stack](NetworkError.md#stack)
- [prepareStackTrace](NetworkError.md#preparestacktrace)
- [stackTraceLimit](NetworkError.md#stacktracelimit)
### Methods
- [captureStackTrace](NetworkError.md#capturestacktrace)
- [isNetworkErrorCode](NetworkError.md#isnetworkerrorcode)
## Constructors
### constructor
**new NetworkError**(`code`): [`NetworkError`](NetworkError.md)
#### Parameters
| Name | Type |
| :------ | :------ |
| `code` | `string` |
#### Returns
[`NetworkError`](NetworkError.md)
#### Overrides
Error.constructor
#### Defined in
[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42)
## Properties
### code
**code**: `string`
#### Defined in
[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40)
___
### message
**message**: `string`
#### Inherited from
Error.message
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1068
___
### name
**name**: `string`
#### Inherited from
Error.name
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1067
___
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1069
___
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
#### Type declaration
▸ (`err`, `stackTraces`): `any`
Optional override for formatting stack traces
##### Parameters
| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Inherited from
Error.prepareStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:11
___
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
#### Defined in
node_modules/@types/node/globals.d.ts:13
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:4
___
### isNetworkErrorCode
**isNetworkErrorCode**(`code?`): `boolean`
#### Parameters
| Name | Type |
| :------ | :------ |
| `code?` | `string` |
#### Returns
`boolean`
#### Defined in
[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49)

View File

@ -1,184 +0,0 @@
[@actions/artifact](../README.md) / UsageError
# Class: UsageError
## Hierarchy
- `Error`
**`UsageError`**
## Table of contents
### Constructors
- [constructor](UsageError.md#constructor)
### Properties
- [message](UsageError.md#message)
- [name](UsageError.md#name)
- [stack](UsageError.md#stack)
- [prepareStackTrace](UsageError.md#preparestacktrace)
- [stackTraceLimit](UsageError.md#stacktracelimit)
### Methods
- [captureStackTrace](UsageError.md#capturestacktrace)
- [isUsageErrorMessage](UsageError.md#isusageerrormessage)
## Constructors
### constructor
**new UsageError**(): [`UsageError`](UsageError.md)
#### Returns
[`UsageError`](UsageError.md)
#### Overrides
Error.constructor
#### Defined in
[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62)
## Properties
### message
**message**: `string`
#### Inherited from
Error.message
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1068
___
### name
**name**: `string`
#### Inherited from
Error.name
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1067
___
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
#### Defined in
node_modules/typescript/lib/lib.es5.d.ts:1069
___
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
#### Type declaration
▸ (`err`, `stackTraces`): `any`
Optional override for formatting stack traces
##### Parameters
| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Inherited from
Error.prepareStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:11
___
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
#### Defined in
node_modules/@types/node/globals.d.ts:13
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace
#### Defined in
node_modules/@types/node/globals.d.ts:4
___
### isUsageErrorMessage
**isUsageErrorMessage**(`msg?`): `boolean`
#### Parameters
| Name | Type |
| :------ | :------ |
| `msg?` | `string` |
#### Returns
`boolean`
#### Defined in
[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68)

View File

@ -1,62 +0,0 @@
[@actions/artifact](../README.md) / Artifact
# Interface: Artifact
An Actions Artifact
## Table of contents
### Properties
- [createdAt](Artifact.md#createdat)
- [id](Artifact.md#id)
- [name](Artifact.md#name)
- [size](Artifact.md#size)
## Properties
### createdAt
`Optional` **createdAt**: `Date`
The time when the artifact was created
#### Defined in
[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128)
___
### id
**id**: `number`
The ID of the artifact
#### Defined in
[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118)
___
### name
**name**: `string`
The name of the artifact
#### Defined in
[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113)
___
### size
**size**: `number`
The size of the artifact in bytes
#### Defined in
[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123)

View File

@ -1,159 +0,0 @@
[@actions/artifact](../README.md) / ArtifactClient
# Interface: ArtifactClient
Generic interface for the artifact client.
## Implemented by
- [`DefaultArtifactClient`](../classes/DefaultArtifactClient.md)
## Table of contents
### Methods
- [deleteArtifact](ArtifactClient.md#deleteartifact)
- [downloadArtifact](ArtifactClient.md#downloadartifact)
- [getArtifact](ArtifactClient.md#getartifact)
- [listArtifacts](ArtifactClient.md#listartifacts)
- [uploadArtifact](ArtifactClient.md#uploadartifact)
## Methods
### deleteArtifact
**deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>
Delete an Artifact
If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the delete behavior |
#### Returns
`Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>
single DeleteArtifactResponse object
#### Defined in
[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103)
___
### downloadArtifact
**downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>
Downloads an artifact and unzips the content.
If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](DownloadArtifactOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the download behavior |
#### Returns
`Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>
single DownloadArtifactResponse object
#### Defined in
[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89)
___
### getArtifact
**getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>
Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.
If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow creating multiple artifacts with the same name in the same workflow run.
It is still possible to end up with multiple artifacts of the same name in one workflow run when older versions of upload-artifact (v1, v2, and v3) or `@actions/artifact` < v2 were used, or when the run is a rerun.
If there are multiple artifacts with the same name in the same workflow run, this function will return the latest artifact that matches the name.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the get behavior |
#### Returns
`Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>
#### Defined in
[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75)
___
### listArtifacts
**listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>
Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.
If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](ListArtifactsOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the list behavior |
#### Returns
`Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>
ListArtifactResponse object
#### Defined in
[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57)
___
### uploadArtifact
**uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>
Uploads an artifact.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](UploadArtifactOptions.md) | Extra options for customizing the upload behavior |
#### Returns
`Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>
single UploadArtifactResponse object
#### Defined in
[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40)

View File

@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / DeleteArtifactResponse
# Interface: DeleteArtifactResponse
Response from the server when deleting an artifact
## Table of contents
### Properties
- [id](DeleteArtifactResponse.md#id)
## Properties
### id
**id**: `number`
The id of the artifact that was deleted
#### Defined in
[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163)

View File

@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / DownloadArtifactOptions
# Interface: DownloadArtifactOptions
Options for downloading an artifact
## Table of contents
### Properties
- [path](DownloadArtifactOptions.md#path)
## Properties
### path
`Optional` **path**: `string`
Denotes where the artifact will be downloaded to. If not specified, the artifact is downloaded to GITHUB_WORKSPACE
#### Defined in
[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103)

View File

@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / DownloadArtifactResponse
# Interface: DownloadArtifactResponse
Response from the server when downloading an artifact
## Table of contents
### Properties
- [downloadPath](DownloadArtifactResponse.md#downloadpath)
## Properties
### downloadPath
`Optional` **downloadPath**: `string`
The path where the artifact was downloaded to
#### Defined in
[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93)

View File

@ -1,30 +0,0 @@
[@actions/artifact](../README.md) / FindOptions
# Interface: FindOptions
## Table of contents
### Properties
- [findBy](FindOptions.md#findby)
## Properties
### findBy
`Optional` **findBy**: `Object`
The criteria for finding Artifact(s) outside the scope of the current run.
#### Type declaration
| Name | Type | Description |
| :------ | :------ | :------ |
| `repositoryName` | `string` | Repository name (eg. 'toolkit') |
| `repositoryOwner` | `string` | Repository owner (eg. 'actions') |
| `token` | `string` | Token with actions:read permissions |
| `workflowRunId` | `number` | WorkflowRun of the artifact(s) to lookup |
#### Defined in
[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136)
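For example, a sketch of a cross-run lookup using `findBy` (the token, run id, owner, and repository are placeholders):
```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function findFromAnotherRun(
  token: string, // token with actions:read permissions
  runId: number // the workflow run that produced the artifact
): Promise<void> {
  const client = new DefaultArtifactClient()
  const {artifact} = await client.getArtifact('test-results', {
    findBy: {
      token,
      workflowRunId: runId,
      repositoryOwner: 'actions', // placeholder owner
      repositoryName: 'toolkit' // placeholder repository
    }
  })
  console.log(`found artifact ${artifact.id}`)
}
```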

View File

@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / GetArtifactResponse
# Interface: GetArtifactResponse
Response from the server when getting an artifact
## Table of contents
### Properties
- [artifact](GetArtifactResponse.md#artifact)
## Properties
### artifact
**artifact**: [`Artifact`](Artifact.md)
Metadata about the artifact that was found
#### Defined in
[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62)

View File

@ -1,24 +0,0 @@
[@actions/artifact](../README.md) / ListArtifactsOptions
# Interface: ListArtifactsOptions
Options for listing artifacts
## Table of contents
### Properties
- [latest](ListArtifactsOptions.md#latest)
## Properties
### latest
`Optional` **latest**: `boolean`
Filter the workflow run's artifacts to the latest by name
In the case of reruns, this can be useful to avoid duplicates
#### Defined in
[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73)

View File

@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / ListArtifactsResponse
# Interface: ListArtifactsResponse
Response from the server when listing artifacts
## Table of contents
### Properties
- [artifacts](ListArtifactsResponse.md#artifacts)
## Properties
### artifacts
**artifacts**: [`Artifact`](Artifact.md)[]
A list of artifacts that were found
#### Defined in
[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83)

View File

@ -1,55 +0,0 @@
[@actions/artifact](../README.md) / UploadArtifactOptions
# Interface: UploadArtifactOptions
Options for uploading an artifact
## Table of contents
### Properties
- [compressionLevel](UploadArtifactOptions.md#compressionlevel)
- [retentionDays](UploadArtifactOptions.md#retentiondays)
## Properties
### compressionLevel
`Optional` **compressionLevel**: `number`
The level of compression for Zlib to be applied to the artifact archive.
The value can range from 0 to 9:
- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression
Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
#### Defined in
[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52)
___
### retentionDays
`Optional` **retentionDays**: `number`
Duration, in days, after which the artifact will expire.
By default, artifacts expire after 90 days:
https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
Use this option to override the default expiry.
Min value: 1
Max value: 90 unless changed by repository setting
If this is set to a value greater than the retention settings allow, the retention on the artifact
will be reduced to match the maximum value allowed on the server, and the upload process will continue. An
input of 0 assumes the default retention setting.
#### Defined in
[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41)
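For instance, a sketch combining both options, with values inside the documented ranges (the artifact name and path are placeholders):
```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function uploadShortLived(): Promise<void> {
  const client = new DefaultArtifactClient()
  await client.uploadArtifact('nightly-debug-logs', ['logs/run.log'], 'logs', {
    retentionDays: 1, // expire after one day instead of the 90-day default
    compressionLevel: 9 // text logs compress well, so trade speed for size
  })
}
```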

View File

@ -1,50 +0,0 @@
[@actions/artifact](../README.md) / UploadArtifactResponse
# Interface: UploadArtifactResponse
Response from the server when an artifact is uploaded
## Table of contents
### Properties
- [digest](UploadArtifactResponse.md#digest)
- [id](UploadArtifactResponse.md#id)
- [size](UploadArtifactResponse.md#size)
## Properties
### digest
`Optional` **digest**: `string`
The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
#### Defined in
[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19)
___
### id
`Optional` **id**: `number`
The id of the artifact that was created. Not provided if no artifact was uploaded
This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
#### Defined in
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14)
___
### size
`Optional` **size**: `number`
Total size of the artifact in bytes. Not provided if no artifact was uploaded
#### Defined in
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8)

File diff suppressed because it is too large

View File

@ -1,64 +0,0 @@
{
"name": "@actions/artifact",
"version": "2.3.3",
"preview": true,
"description": "Actions artifact lib",
"keywords": [
"github",
"actions",
"artifact"
],
"homepage": "https://github.com/actions/toolkit/tree/main/packages/artifact",
"license": "MIT",
"main": "lib/artifact.js",
"types": "lib/artifact.d.ts",
"directories": {
"lib": "lib",
"test": "__tests__"
},
"files": [
"lib",
"!.DS_Store"
],
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/toolkit.git",
"directory": "packages/artifact"
},
"scripts": {
"audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
"test": "cd ../../ && npm run test ./packages/artifact",
"bootstrap": "cd ../../ && npm run bootstrap",
"tsc-run": "tsc",
"tsc": "npm run bootstrap && npm run tsc-run",
"gen:docs": "typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"
},
"bugs": {
"url": "https://github.com/actions/toolkit/issues"
},
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/github": "^6.0.1",
"@actions/http-client": "^2.1.0",
"@azure/storage-blob": "^12.15.0",
"@octokit/core": "^5.2.1",
"@octokit/plugin-request-log": "^1.0.4",
"@octokit/plugin-retry": "^3.0.9",
"@octokit/request": "^8.4.1",
"@octokit/request-error": "^5.1.1",
"@protobuf-ts/plugin": "^2.2.3-alpha.1",
"archiver": "^7.0.1",
"jwt-decode": "^3.1.2",
"unzip-stream": "^0.3.1"
},
"devDependencies": {
"@types/archiver": "^5.3.2",
"@types/unzip-stream": "^0.3.4",
"typedoc": "^0.25.4",
"typedoc-plugin-markdown": "^3.17.1",
"typescript": "^5.2.2"
}
}

View File

@ -1,8 +0,0 @@
import {ArtifactClient, DefaultArtifactClient} from './internal/client'
export * from './internal/shared/interfaces'
export * from './internal/shared/errors'
export * from './internal/client'
const client: ArtifactClient = new DefaultArtifactClient()
export default client
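Because of the default export, consumers can use a ready-made client without constructing one; a minimal sketch:
```typescript
import artifact from '@actions/artifact'

async function listCurrentRunArtifacts(): Promise<void> {
  // `artifact` is the ArtifactClient instance exported above
  const {artifacts} = await artifact.listArtifacts({latest: true})
  for (const a of artifacts) {
    console.log(`${a.name} (${a.size} bytes)`)
  }
}
```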

View File

@ -1,277 +0,0 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import { typeofJsonValue } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { PbLong } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone
* or calendar, represented as seconds and fractions of seconds at
* nanosecond resolution in UTC Epoch time. It is encoded using the
* Proleptic Gregorian Calendar which extends the Gregorian calendar
* backwards to year one. It is encoded assuming all minutes are 60
* seconds long, i.e. leap seconds are "smeared" so that no leap second
* table is needed for interpretation. Range is from
* 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
* By restricting to that range, we ensure that we can convert to
* and from RFC 3339 date strings.
* See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
* with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
* can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends MessageType<Timestamp> {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp {
const msg = this.create();
const ms = Date.now();
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date {
return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp {
const msg = this.create();
const ms = date.getTime();
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue {
let ms = PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value?: PartialMessage<Timestamp>): Timestamp {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Timestamp>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export const Timestamp = new Timestamp$Type();
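
A minimal usage sketch of the helpers above (not part of the generated file; the barrel import path is an assumption based on the `generated` index later in this diff):

import {Timestamp} from './generated'

const ts = Timestamp.now()                       // seconds as a decimal string, nanos as a number
const asDate = Timestamp.toDate(ts)              // back to a JavaScript Date
const asJson = Timestamp.toJson(ts)              // RFC 3339 string, e.g. "2024-05-01T12:00:00Z"
const roundTripped = Timestamp.fromJson(asJson)  // exercises internalJsonRead above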

View File

@ -1,748 +0,0 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
import { ScalarType } from "@protobuf-ts/runtime";
import { LongType } from "@protobuf-ts/runtime";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import { WireType } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { reflectionMergePartial } from "@protobuf-ts/runtime";
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends MessageType<DoubleValue> {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<DoubleValue>): DoubleValue {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<DoubleValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export const DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends MessageType<FloatValue> {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<FloatValue>): FloatValue {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<FloatValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export const FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends MessageType<Int64Value> {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
return target;
}
create(value?: PartialMessage<Int64Value>): Int64Value {
const message = { value: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Int64Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export const Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends MessageType<UInt64Value> {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
return target;
}
create(value?: PartialMessage<UInt64Value>): UInt64Value {
const message = { value: "0" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<UInt64Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint64 value */ 1:
message.value = reader.uint64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* uint64 value = 1; */
if (message.value !== "0")
writer.tag(1, WireType.Varint).uint64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export const UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends MessageType<Int32Value> {
constructor() {
super("google.protobuf.Int32Value", [
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Encode `Int32Value` to JSON string.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
}
/**
* Decode `Int32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<Int32Value>): Int32Value {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<Int32Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int32 value */ 1:
message.value = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* int32 value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Varint).int32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export const Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends MessageType<UInt32Value> {
constructor() {
super("google.protobuf.UInt32Value", [
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
]);
}
/**
* Encode `UInt32Value` to JSON string.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
}
/**
* Decode `UInt32Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number;
return target;
}
create(value?: PartialMessage<UInt32Value>): UInt32Value {
const message = { value: 0 };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<UInt32Value>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint32 value */ 1:
message.value = reader.uint32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* uint32 value = 1; */
if (message.value !== 0)
writer.tag(1, WireType.Varint).uint32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export const UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends MessageType<BoolValue> {
constructor() {
super("google.protobuf.BoolValue", [
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue {
return message.value;
}
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean;
return target;
}
create(value?: PartialMessage<BoolValue>): BoolValue {
const message = { value: false };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<BoolValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool value */ 1:
message.value = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bool value = 1; */
if (message.value !== false)
writer.tag(1, WireType.Varint).bool(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export const BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends MessageType<StringValue> {
constructor() {
super("google.protobuf.StringValue", [
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue {
return message.value;
}
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string;
return target;
}
create(value?: PartialMessage<StringValue>): StringValue {
const message = { value: "" };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<StringValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string value */ 1:
message.value = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* string value = 1; */
if (message.value !== "")
writer.tag(1, WireType.LengthDelimited).string(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export const StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends MessageType<BytesValue> {
constructor() {
super("google.protobuf.BytesValue", [
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue {
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
}
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array;
return target;
}
create(value?: PartialMessage<BytesValue>): BytesValue {
const message = { value: new Uint8Array(0) };
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
reflectionMergePartial<BytesValue>(this, message, value);
return message;
}
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue {
let message = target ?? this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bytes value */ 1:
message.value = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
/* bytes value = 1; */
if (message.value.length)
writer.tag(1, WireType.LengthDelimited).bytes(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export const BytesValue = new BytesValue$Type();
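
As the header comment notes, these wrappers let a message distinguish an absent primitive field from its default value. A short sketch, assuming the same barrel import, of how they serve as optional filters (mirroring `ListArtifactsRequest` elsewhere in this diff):

import {StringValue, Int64Value} from './generated'

// Omitted filter: simply leave the field undefined on the request object.
let nameFilter: StringValue | undefined = undefined

// Present filter, even when the wrapped value equals the scalar default:
nameFilter = StringValue.create({value: 'my-artifact'})
const idFilter = Int64Value.create({value: '42'})  // int64 is carried as a string (long_type_string)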

View File

@ -1,4 +0,0 @@
export * from './google/protobuf/timestamp'
export * from './google/protobuf/wrappers'
export * from './results/api/v1/artifact'
export * from './results/api/v1/artifact.twirp-client'

File diff suppressed because it is too large

View File

@ -1,232 +0,0 @@
import {
CreateArtifactRequest,
CreateArtifactResponse,
FinalizeArtifactRequest,
FinalizeArtifactResponse,
ListArtifactsRequest,
ListArtifactsResponse,
GetSignedArtifactURLRequest,
GetSignedArtifactURLResponse,
DeleteArtifactRequest,
DeleteArtifactResponse,
} from "./artifact";
//==================================//
// Client Code //
//==================================//
interface Rpc {
request(
service: string,
method: string,
contentType: "application/json" | "application/protobuf",
data: object | Uint8Array
): Promise<object | Uint8Array>;
}
export interface ArtifactServiceClient {
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse>;
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse>;
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse>;
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse>;
}
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
    this.CreateArtifact = this.CreateArtifact.bind(this);
    this.FinalizeArtifact = this.FinalizeArtifact.bind(this);
    this.ListArtifacts = this.ListArtifacts.bind(this);
    this.GetSignedArtifactURL = this.GetSignedArtifactURL.bind(this);
    this.DeleteArtifact = this.DeleteArtifact.bind(this);
}
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse> {
const data = CreateArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"CreateArtifact",
"application/json",
data as object
);
return promise.then((data) =>
CreateArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse> {
const data = FinalizeArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"FinalizeArtifact",
"application/json",
data as object
);
return promise.then((data) =>
FinalizeArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
const data = ListArtifactsRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"ListArtifacts",
"application/json",
data as object
);
return promise.then((data) =>
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
);
}
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse> {
const data = GetSignedArtifactURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"GetSignedArtifactURL",
"application/json",
data as object
);
return promise.then((data) =>
GetSignedArtifactURLResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse> {
const data = DeleteArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"DeleteArtifact",
"application/json",
data as object
);
return promise.then((data) =>
DeleteArtifactResponse.fromJson(data as any, {
ignoreUnknownFields: true,
})
);
}
}
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
private readonly rpc: Rpc;
constructor(rpc: Rpc) {
this.rpc = rpc;
    this.CreateArtifact = this.CreateArtifact.bind(this);
    this.FinalizeArtifact = this.FinalizeArtifact.bind(this);
    this.ListArtifacts = this.ListArtifacts.bind(this);
    this.GetSignedArtifactURL = this.GetSignedArtifactURL.bind(this);
    this.DeleteArtifact = this.DeleteArtifact.bind(this);
}
CreateArtifact(
request: CreateArtifactRequest
): Promise<CreateArtifactResponse> {
const data = CreateArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"CreateArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
CreateArtifactResponse.fromBinary(data as Uint8Array)
);
}
FinalizeArtifact(
request: FinalizeArtifactRequest
): Promise<FinalizeArtifactResponse> {
const data = FinalizeArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"FinalizeArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
);
}
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
const data = ListArtifactsRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"ListArtifacts",
"application/protobuf",
data
);
return promise.then((data) =>
ListArtifactsResponse.fromBinary(data as Uint8Array)
);
}
GetSignedArtifactURL(
request: GetSignedArtifactURLRequest
): Promise<GetSignedArtifactURLResponse> {
const data = GetSignedArtifactURLRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"GetSignedArtifactURL",
"application/protobuf",
data
);
return promise.then((data) =>
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
);
}
DeleteArtifact(
request: DeleteArtifactRequest
): Promise<DeleteArtifactResponse> {
const data = DeleteArtifactRequest.toBinary(request);
const promise = this.rpc.request(
"github.actions.results.api.v1.ArtifactService",
"DeleteArtifact",
"application/protobuf",
data
);
return promise.then((data) =>
DeleteArtifactResponse.fromBinary(data as Uint8Array)
);
}
}
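
The `Rpc` interface above intentionally leaves the transport open. Below is a minimal JSON-only transport sketch built on `@actions/http-client`; the base URL, path scheme, and auth header are assumptions for illustration, not the package's real `internalArtifactTwirpClient`:

import {HttpClient} from '@actions/http-client'

class JsonRpc implements Rpc {
  constructor(
    private readonly baseUrl: string,
    private readonly token: string
  ) {}

  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    if (contentType !== 'application/json') {
      throw new Error('this sketch only implements the JSON path')
    }
    // Twirp routes are conventionally <base>/twirp/<service>/<method>.
    const url = `${this.baseUrl}/twirp/${service}/${method}`
    const client = new HttpClient('artifact-rpc-sketch')
    const res = await client.post(url, JSON.stringify(data), {
      'Content-Type': contentType,
      Authorization: `Bearer ${this.token}`
    })
    return JSON.parse(await res.readBody()) as object
  }
}

// Usage: const artifacts = new ArtifactServiceClientJSON(new JsonRpc(url, token))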

View File

@ -1,284 +0,0 @@
import {warning} from '@actions/core'
import {isGhes} from './shared/config'
import {
UploadArtifactOptions,
UploadArtifactResponse,
DownloadArtifactOptions,
GetArtifactResponse,
ListArtifactsOptions,
ListArtifactsResponse,
DownloadArtifactResponse,
FindOptions,
DeleteArtifactResponse
} from './shared/interfaces'
import {uploadArtifact} from './upload/upload-artifact'
import {
downloadArtifactPublic,
downloadArtifactInternal
} from './download/download-artifact'
import {
deleteArtifactPublic,
deleteArtifactInternal
} from './delete/delete-artifact'
import {getArtifactPublic, getArtifactInternal} from './find/get-artifact'
import {listArtifactsPublic, listArtifactsInternal} from './find/list-artifacts'
import {GHESNotSupportedError} from './shared/errors'
/**
* Generic interface for the artifact client.
*/
export interface ArtifactClient {
/**
* Uploads an artifact.
*
* @param name The name of the artifact, required
* @param files A list of absolute or relative paths that denote what files should be uploaded
* @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
* @param options Extra options for customizing the upload behavior
* @returns single UploadArtifactResponse object
*/
uploadArtifact(
name: string,
files: string[],
rootDirectory: string,
options?: UploadArtifactOptions
): Promise<UploadArtifactResponse>
/**
* Lists all artifacts that are part of the current workflow run.
* This function will return at most 1000 artifacts per workflow run.
*
* If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
* https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
*
* @param options Extra options that allow for the customization of the list behavior
* @returns ListArtifactResponse object
*/
listArtifacts(
options?: ListArtifactsOptions & FindOptions
): Promise<ListArtifactsResponse>
/**
* Finds an artifact by name.
* If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
* If the artifact is not found, it will throw.
*
* If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
* https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
* `@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
   * Multiple artifacts with the same name can still exist in a single workflow run when old versions of upload-artifact (v1, v2 and v3) or `@actions/artifact` < v2 were used, or when the run is a rerun.
   * In that case this function returns the newest artifact that matches the name.
*
* @param artifactName The name of the artifact to find
* @param options Extra options that allow for the customization of the get behavior
*/
getArtifact(
artifactName: string,
options?: FindOptions
): Promise<GetArtifactResponse>
/**
* Downloads an artifact and unzips the content.
*
* If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
*
* @param artifactId The id of the artifact to download
* @param options Extra options that allow for the customization of the download behavior
* @returns single DownloadArtifactResponse object
*/
downloadArtifact(
artifactId: number,
options?: DownloadArtifactOptions & FindOptions
): Promise<DownloadArtifactResponse>
/**
   * Deletes an artifact.
*
* If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
*
* @param artifactName The name of the artifact to delete
* @param options Extra options that allow for the customization of the delete behavior
* @returns single DeleteArtifactResponse object
*/
deleteArtifact(
artifactName: string,
options?: FindOptions
): Promise<DeleteArtifactResponse>
}
/**
* The default artifact client that is used by the artifact action(s).
*/
export class DefaultArtifactClient implements ArtifactClient {
async uploadArtifact(
name: string,
files: string[],
rootDirectory: string,
options?: UploadArtifactOptions
): Promise<UploadArtifactResponse> {
try {
if (isGhes()) {
throw new GHESNotSupportedError()
}
return uploadArtifact(name, files, rootDirectory, options)
} catch (error) {
warning(
`Artifact upload failed with error: ${error}.
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
)
throw error
}
}
async downloadArtifact(
artifactId: number,
options?: DownloadArtifactOptions & FindOptions
): Promise<DownloadArtifactResponse> {
try {
if (isGhes()) {
throw new GHESNotSupportedError()
}
if (options?.findBy) {
const {
findBy: {repositoryOwner, repositoryName, token},
...downloadOptions
} = options
return downloadArtifactPublic(
artifactId,
repositoryOwner,
repositoryName,
token,
downloadOptions
)
}
return downloadArtifactInternal(artifactId, options)
} catch (error) {
warning(
`Download Artifact failed with error: ${error}.
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
)
throw error
}
}
async listArtifacts(
options?: ListArtifactsOptions & FindOptions
): Promise<ListArtifactsResponse> {
try {
if (isGhes()) {
throw new GHESNotSupportedError()
}
if (options?.findBy) {
const {
findBy: {workflowRunId, repositoryOwner, repositoryName, token}
} = options
return listArtifactsPublic(
workflowRunId,
repositoryOwner,
repositoryName,
token,
options?.latest
)
}
return listArtifactsInternal(options?.latest)
} catch (error: unknown) {
warning(
`Listing Artifacts failed with error: ${error}.
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
)
throw error
}
}
async getArtifact(
artifactName: string,
options?: FindOptions
): Promise<GetArtifactResponse> {
try {
if (isGhes()) {
throw new GHESNotSupportedError()
}
if (options?.findBy) {
const {
findBy: {workflowRunId, repositoryOwner, repositoryName, token}
} = options
return getArtifactPublic(
artifactName,
workflowRunId,
repositoryOwner,
repositoryName,
token
)
}
return getArtifactInternal(artifactName)
} catch (error: unknown) {
warning(
`Get Artifact failed with error: ${error}.
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
)
throw error
}
}
async deleteArtifact(
artifactName: string,
options?: FindOptions
): Promise<DeleteArtifactResponse> {
try {
if (isGhes()) {
throw new GHESNotSupportedError()
}
if (options?.findBy) {
const {
findBy: {repositoryOwner, repositoryName, workflowRunId, token}
} = options
return deleteArtifactPublic(
artifactName,
workflowRunId,
repositoryOwner,
repositoryName,
token
)
}
return deleteArtifactInternal(artifactName)
} catch (error) {
warning(
`Delete Artifact failed with error: ${error}.
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
)
throw error
}
}
}
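
A minimal end-to-end sketch of the client above; the artifact name and file paths are illustrative, and the response fields follow the shapes in `./shared/interfaces`:

import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const client = new DefaultArtifactClient()

  // Upload two files rooted at the current working directory.
  const {id} = await client.uploadArtifact(
    'my-artifact',
    ['out/a.txt', 'out/b.txt'],
    '.'
  )

  // Download the same artifact back into the workspace.
  if (id !== undefined) {
    const {downloadPath} = await client.downloadArtifact(id)
    console.log(`Downloaded to ${downloadPath}`)
  }
}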

View File

@ -1,109 +0,0 @@
import {info, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {DeleteArtifactResponse} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from '../find/retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {
DeleteArtifactRequest,
ListArtifactsRequest,
StringValue
} from '../../generated'
import {getArtifactPublic} from '../find/get-artifact'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'
export async function deleteArtifactPublic(
artifactName: string,
workflowRunId: number,
repositoryOwner: string,
repositoryName: string,
token: string
): Promise<DeleteArtifactResponse> {
const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)
const opts: OctokitOptions = {
log: undefined,
userAgent: getUserAgentString(),
previews: undefined,
retry: retryOpts,
request: requestOpts
}
const github = getOctokit(token, opts, retry, requestLog)
const getArtifactResp = await getArtifactPublic(
artifactName,
workflowRunId,
repositoryOwner,
repositoryName,
token
)
const deleteArtifactResp = await github.rest.actions.deleteArtifact({
owner: repositoryOwner,
repo: repositoryName,
artifact_id: getArtifactResp.artifact.id
})
if (deleteArtifactResp.status !== 204) {
throw new InvalidResponseError(
`Invalid response from GitHub API: ${deleteArtifactResp.status} (${deleteArtifactResp?.headers?.['x-github-request-id']})`
)
}
return {
id: getArtifactResp.artifact.id
}
}
export async function deleteArtifactInternal(
  artifactName: string
): Promise<DeleteArtifactResponse> {
const artifactClient = internalArtifactTwirpClient()
const {workflowRunBackendId, workflowJobRunBackendId} =
getBackendIdsFromToken()
const listReq: ListArtifactsRequest = {
workflowRunBackendId,
workflowJobRunBackendId,
nameFilter: StringValue.create({value: artifactName})
}
const listRes = await artifactClient.ListArtifacts(listReq)
if (listRes.artifacts.length === 0) {
throw new ArtifactNotFoundError(
`Artifact not found for name: ${artifactName}`
)
}
let artifact = listRes.artifacts[0]
if (listRes.artifacts.length > 1) {
artifact = listRes.artifacts.sort(
(a, b) => Number(b.databaseId) - Number(a.databaseId)
)[0]
debug(
`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
)
}
const req: DeleteArtifactRequest = {
workflowRunBackendId: artifact.workflowRunBackendId,
workflowJobRunBackendId: artifact.workflowJobRunBackendId,
name: artifact.name
}
const res = await artifactClient.DeleteArtifact(req)
info(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`)
return {
id: Number(res.artifactId)
}
}
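
A short sketch of the public path above: supplying `findBy` routes the delete through the REST API via `getArtifactPublic`. The token and identifiers are placeholders:

const client = new DefaultArtifactClient()
await client.deleteArtifact('my-artifact', {
  findBy: {
    token: process.env['GITHUB_TOKEN'] ?? '',
    workflowRunId: 123,
    repositoryOwner: 'my-org',
    repositoryName: 'my-repo'
  }
})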

View File

@ -1,254 +0,0 @@
import fs from 'fs/promises'
import * as crypto from 'crypto'
import * as stream from 'stream'
import * as github from '@actions/github'
import * as core from '@actions/core'
import * as httpClient from '@actions/http-client'
import unzip from 'unzip-stream'
import {
DownloadArtifactOptions,
DownloadArtifactResponse,
StreamExtractResponse
} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getGitHubWorkspaceDir} from '../shared/config'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {
GetSignedArtifactURLRequest,
Int64Value,
ListArtifactsRequest
} from '../../generated'
import {getBackendIdsFromToken} from '../shared/util'
import {ArtifactNotFoundError} from '../shared/errors'
const scrubQueryParameters = (url: string): string => {
const parsed = new URL(url)
parsed.search = ''
return parsed.toString()
}
async function exists(path: string): Promise<boolean> {
try {
await fs.access(path)
return true
} catch (error) {
if (error.code === 'ENOENT') {
return false
} else {
throw error
}
}
}
async function streamExtract(
url: string,
directory: string
): Promise<StreamExtractResponse> {
let retryCount = 0
while (retryCount < 5) {
try {
return await streamExtractExternal(url, directory)
} catch (error) {
retryCount++
core.debug(
        `Failed to download artifact (attempt ${retryCount} of 5) due to ${error.message}. Retrying in 5 seconds...`
)
// wait 5 seconds before retrying
await new Promise(resolve => setTimeout(resolve, 5000))
}
}
throw new Error(`Artifact download failed after ${retryCount} retries.`)
}
export async function streamExtractExternal(
url: string,
directory: string
): Promise<StreamExtractResponse> {
const client = new httpClient.HttpClient(getUserAgentString())
const response = await client.get(url)
if (response.message.statusCode !== 200) {
throw new Error(
`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`
)
}
const timeout = 30 * 1000 // 30 seconds
let sha256Digest: string | undefined = undefined
return new Promise((resolve, reject) => {
const timerFn = (): void => {
response.message.destroy(
new Error(`Blob storage chunk did not respond in ${timeout}ms`)
)
}
const timer = setTimeout(timerFn, timeout)
const hashStream = crypto.createHash('sha256').setEncoding('hex')
const passThrough = new stream.PassThrough()
response.message.pipe(passThrough)
passThrough.pipe(hashStream)
const extractStream = passThrough
extractStream
.on('data', () => {
timer.refresh()
})
.on('error', (error: Error) => {
core.debug(
`response.message: Artifact download failed: ${error.message}`
)
clearTimeout(timer)
reject(error)
})
.pipe(unzip.Extract({path: directory}))
.on('close', () => {
clearTimeout(timer)
if (hashStream) {
hashStream.end()
sha256Digest = hashStream.read() as string
core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`)
}
resolve({sha256Digest: `sha256:${sha256Digest}`})
})
.on('error', (error: Error) => {
reject(error)
})
})
}
export async function downloadArtifactPublic(
artifactId: number,
repositoryOwner: string,
repositoryName: string,
token: string,
options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
const downloadPath = await resolveOrCreateDirectory(options?.path)
const api = github.getOctokit(token)
let digestMismatch = false
core.info(
`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`
)
const {headers, status} = await api.rest.actions.downloadArtifact({
owner: repositoryOwner,
repo: repositoryName,
artifact_id: artifactId,
archive_format: 'zip',
request: {
redirect: 'manual'
}
})
if (status !== 302) {
throw new Error(`Unable to download artifact. Unexpected status: ${status}`)
}
const {location} = headers
if (!location) {
throw new Error(`Unable to redirect to artifact download url`)
}
core.info(
`Redirecting to blob download url: ${scrubQueryParameters(location)}`
)
try {
core.info(`Starting download of artifact to: ${downloadPath}`)
const extractResponse = await streamExtract(location, downloadPath)
core.info(`Artifact download completed successfully.`)
if (options?.expectedHash) {
if (options?.expectedHash !== extractResponse.sha256Digest) {
digestMismatch = true
core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
core.debug(`Expected digest: ${options.expectedHash}`)
}
}
} catch (error) {
throw new Error(`Unable to download and extract artifact: ${error.message}`)
}
return {downloadPath, digestMismatch}
}
export async function downloadArtifactInternal(
artifactId: number,
options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
const downloadPath = await resolveOrCreateDirectory(options?.path)
const artifactClient = internalArtifactTwirpClient()
let digestMismatch = false
const {workflowRunBackendId, workflowJobRunBackendId} =
getBackendIdsFromToken()
const listReq: ListArtifactsRequest = {
workflowRunBackendId,
workflowJobRunBackendId,
idFilter: Int64Value.create({value: artifactId.toString()})
}
const {artifacts} = await artifactClient.ListArtifacts(listReq)
if (artifacts.length === 0) {
throw new ArtifactNotFoundError(
`No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`
)
}
if (artifacts.length > 1) {
core.warning('Multiple artifacts found, defaulting to first.')
}
const signedReq: GetSignedArtifactURLRequest = {
workflowRunBackendId: artifacts[0].workflowRunBackendId,
workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
name: artifacts[0].name
}
const {signedUrl} = await artifactClient.GetSignedArtifactURL(signedReq)
core.info(
`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`
)
try {
core.info(`Starting download of artifact to: ${downloadPath}`)
const extractResponse = await streamExtract(signedUrl, downloadPath)
core.info(`Artifact download completed successfully.`)
if (options?.expectedHash) {
if (options?.expectedHash !== extractResponse.sha256Digest) {
digestMismatch = true
core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
core.debug(`Expected digest: ${options.expectedHash}`)
}
}
} catch (error) {
throw new Error(`Unable to download and extract artifact: ${error.message}`)
}
return {downloadPath, digestMismatch}
}
async function resolveOrCreateDirectory(
downloadPath = getGitHubWorkspaceDir()
): Promise<string> {
if (!(await exists(downloadPath))) {
core.debug(
`Artifact destination folder does not exist, creating: ${downloadPath}`
)
await fs.mkdir(downloadPath, {recursive: true})
} else {
core.debug(`Artifact destination folder already exists: ${downloadPath}`)
}
return downloadPath
}
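
A short sketch of acting on `digestMismatch` from the download functions above; the artifact id, path, and expected hash are placeholders:

const {downloadPath, digestMismatch} = await downloadArtifactInternal(42, {
  path: '/tmp/my-artifact',
  expectedHash:
    'sha256:0000000000000000000000000000000000000000000000000000000000000000'
})
if (digestMismatch) {
  core.warning(`Artifact extracted to ${downloadPath} did not match the expected digest`)
}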

View File

@ -1,125 +0,0 @@
import {getOctokit} from '@actions/github'
import {retry} from '@octokit/plugin-retry'
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {getRetryOptions} from './retry-options'
import {requestLog} from '@octokit/plugin-request-log'
import {GetArtifactResponse} from '../shared/interfaces'
import {getBackendIdsFromToken} from '../shared/util'
import {getUserAgentString} from '../shared/user-agent'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {ListArtifactsRequest, StringValue, Timestamp} from '../../generated'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'
export async function getArtifactPublic(
artifactName: string,
workflowRunId: number,
repositoryOwner: string,
repositoryName: string,
token: string
): Promise<GetArtifactResponse> {
const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)
const opts: OctokitOptions = {
log: undefined,
userAgent: getUserAgentString(),
previews: undefined,
retry: retryOpts,
request: requestOpts
}
const github = getOctokit(token, opts, retry, requestLog)
const getArtifactResp = await github.request(
'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}',
{
owner: repositoryOwner,
repo: repositoryName,
run_id: workflowRunId,
name: artifactName
}
)
if (getArtifactResp.status !== 200) {
throw new InvalidResponseError(
`Invalid response from GitHub API: ${getArtifactResp.status} (${getArtifactResp?.headers?.['x-github-request-id']})`
)
}
if (getArtifactResp.data.artifacts.length === 0) {
throw new ArtifactNotFoundError(
`Artifact not found for name: ${artifactName}
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
)
}
let artifact = getArtifactResp.data.artifacts[0]
if (getArtifactResp.data.artifacts.length > 1) {
artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]
core.debug(
`More than one artifact found for a single name, returning newest (id: ${artifact.id})`
)
}
return {
artifact: {
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined,
digest: artifact.digest
}
}
}
export async function getArtifactInternal(
artifactName: string
): Promise<GetArtifactResponse> {
const artifactClient = internalArtifactTwirpClient()
const {workflowRunBackendId, workflowJobRunBackendId} =
getBackendIdsFromToken()
const req: ListArtifactsRequest = {
workflowRunBackendId,
workflowJobRunBackendId,
nameFilter: StringValue.create({value: artifactName})
}
const res = await artifactClient.ListArtifacts(req)
if (res.artifacts.length === 0) {
throw new ArtifactNotFoundError(
`Artifact not found for name: ${artifactName}
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
)
}
let artifact = res.artifacts[0]
if (res.artifacts.length > 1) {
artifact = res.artifacts.sort(
(a, b) => Number(b.databaseId) - Number(a.databaseId)
)[0]
core.debug(
`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
)
}
return {
artifact: {
name: artifact.name,
id: Number(artifact.databaseId),
size: Number(artifact.size),
createdAt: artifact.createdAt
? Timestamp.toDate(artifact.createdAt)
: undefined,
digest: artifact.digest?.value
}
}
}
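
Both lookups above resolve duplicate names the same way: sort by id descending and take the first element, so the newest artifact wins. The selection in isolation:

const candidates = [
  {name: 'report', id: 11},
  {name: 'report', id: 42},
  {name: 'report', id: 27}
]
const newest = candidates.sort((a, b) => b.id - a.id)[0]  // {name: 'report', id: 42}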

View File

@ -1,187 +0,0 @@
import {info, warning, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {ListArtifactsResponse, Artifact} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from './retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {ListArtifactsRequest, Timestamp} from '../../generated'
// Limiting to 1000 for perf reasons
const maximumArtifactCount = 1000
const paginationCount = 100
const maxNumberOfPages = maximumArtifactCount / paginationCount
export async function listArtifactsPublic(
workflowRunId: number,
repositoryOwner: string,
repositoryName: string,
token: string,
latest = false
): Promise<ListArtifactsResponse> {
info(
`Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`
)
let artifacts: Artifact[] = []
const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)
const opts: OctokitOptions = {
log: undefined,
userAgent: getUserAgentString(),
previews: undefined,
retry: retryOpts,
request: requestOpts
}
const github = getOctokit(token, opts, retry, requestLog)
let currentPageNumber = 1
const {data: listArtifactResponse} = await github.request(
'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
{
owner: repositoryOwner,
repo: repositoryName,
run_id: workflowRunId,
per_page: paginationCount,
page: currentPageNumber
}
)
let numberOfPages = Math.ceil(
listArtifactResponse.total_count / paginationCount
)
const totalArtifactCount = listArtifactResponse.total_count
if (totalArtifactCount > maximumArtifactCount) {
warning(
`Workflow run ${workflowRunId} has more than 1000 artifacts. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
)
numberOfPages = maxNumberOfPages
}
// Iterate over the first page
for (const artifact of listArtifactResponse.artifacts) {
artifacts.push({
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined,
digest: (artifact as ArtifactResponse).digest
})
}
// Move to the next page
currentPageNumber++
// Iterate over any remaining pages
for (
currentPageNumber;
    currentPageNumber <= numberOfPages;
currentPageNumber++
) {
debug(`Fetching page ${currentPageNumber} of artifact list`)
const {data: listArtifactResponse} = await github.request(
'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
{
owner: repositoryOwner,
repo: repositoryName,
run_id: workflowRunId,
per_page: paginationCount,
page: currentPageNumber
}
)
for (const artifact of listArtifactResponse.artifacts) {
artifacts.push({
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined,
digest: (artifact as ArtifactResponse).digest
})
}
}
if (latest) {
artifacts = filterLatest(artifacts)
}
info(`Found ${artifacts.length} artifact(s)`)
return {
artifacts
}
}
export async function listArtifactsInternal(
latest = false
): Promise<ListArtifactsResponse> {
const artifactClient = internalArtifactTwirpClient()
const {workflowRunBackendId, workflowJobRunBackendId} =
getBackendIdsFromToken()
const req: ListArtifactsRequest = {
workflowRunBackendId,
workflowJobRunBackendId
}
const res = await artifactClient.ListArtifacts(req)
let artifacts: Artifact[] = res.artifacts.map(artifact => ({
name: artifact.name,
id: Number(artifact.databaseId),
size: Number(artifact.size),
createdAt: artifact.createdAt
? Timestamp.toDate(artifact.createdAt)
: undefined,
digest: artifact.digest?.value
}))
if (latest) {
artifacts = filterLatest(artifacts)
}
info(`Found ${artifacts.length} artifact(s)`)
return {
artifacts
}
}
/**
 * This exists so that we don't have to use 'any' when receiving the artifact list from the GitHub API.
 * The digest field is not present in the OpenAPI types at time of writing, hence this local interface.
*/
interface ArtifactResponse {
name: string
id: number
size_in_bytes: number
created_at?: string
digest?: string
}
/**
* Filters a list of artifacts to only include the latest artifact for each name
* @param artifacts The artifacts to filter
* @returns The filtered list of artifacts
*/
function filterLatest(artifacts: Artifact[]): Artifact[] {
artifacts.sort((a, b) => b.id - a.id)
const latestArtifacts: Artifact[] = []
const seenArtifactNames = new Set<string>()
for (const artifact of artifacts) {
if (!seenArtifactNames.has(artifact.name)) {
latestArtifacts.push(artifact)
seenArtifactNames.add(artifact.name)
}
}
return latestArtifacts
}
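
`filterLatest` keeps only the newest upload per name: sort descending by id, then keep the first occurrence of each name. A self-contained sketch of the same algorithm, with invented sample data:

```typescript
interface Artifact {
  name: string
  id: number
  size: number
}

// Hypothetical duplicates from a re-run: 'coverage' was uploaded twice.
const sample: Artifact[] = [
  {name: 'coverage', id: 101, size: 2048},
  {name: 'logs', id: 102, size: 512},
  {name: 'coverage', id: 205, size: 4096}
]

// Newest id wins per name, exactly as in filterLatest above.
sample.sort((a, b) => b.id - a.id)
const seen = new Set<string>()
const latest = sample.filter(a => {
  if (seen.has(a.name)) return false
  seen.add(a.name)
  return true
})

console.log(latest.map(a => `${a.name}#${a.id}`)) // ['coverage#205', 'logs#102']
```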


@@ -1,48 +0,0 @@
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {RequestRequestOptions} from '@octokit/types'
export type RetryOptions = {
doNotRetry?: number[]
enabled?: boolean
}
// Defaults for fetching artifacts
const defaultMaxRetryNumber = 5
const defaultExemptStatusCodes = [400, 401, 403, 404, 422] // https://github.com/octokit/plugin-retry.js/blob/9a2443746c350b3beedec35cf26e197ea318a261/src/index.ts#L14
export function getRetryOptions(
defaultOptions: OctokitOptions,
retries: number = defaultMaxRetryNumber,
exemptStatusCodes: number[] = defaultExemptStatusCodes
): [RetryOptions, RequestRequestOptions | undefined] {
if (retries <= 0) {
return [{enabled: false}, defaultOptions.request]
}
const retryOptions: RetryOptions = {
enabled: true
}
if (exemptStatusCodes.length > 0) {
retryOptions.doNotRetry = exemptStatusCodes
}
// The GitHub type has some defaults for `options.request`
// see: https://github.com/actions/toolkit/blob/4fbc5c941a57249b19562015edbd72add14be93d/packages/github/src/utils.ts#L15
// We pass these in here so they are not overridden.
const requestOptions: RequestRequestOptions = {
...defaultOptions.request,
retries
}
core.debug(
`GitHub client configured with: (retries: ${
requestOptions.retries
}, retry-exempt-status-code: ${
retryOptions.doNotRetry ?? 'octokit default: [400, 401, 403, 404, 422]'
})`
)
return [retryOptions, requestOptions]
}
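
A usage sketch showing how these options get wired into Octokit, the same way `listArtifactsPublic` does; the token is a placeholder:

```typescript
import {getOctokit} from '@actions/github'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'

const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

// Passing retries <= 0 disables the retry plugin entirely:
// const [disabled] = getRetryOptions(defaultGitHubOptions, 0)

const github = getOctokit(
  '<token>', // placeholder; use a token with actions:read
  {retry: retryOpts, request: requestOpts},
  retry,
  requestLog
)
```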


@@ -1,198 +0,0 @@
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {info, debug} from '@actions/core'
import {ArtifactServiceClientJSON} from '../../generated'
import {getResultsServiceUrl, getRuntimeToken} from './config'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'
import {maskSecretUrls} from './util'
// The twirp http client must implement this interface
interface Rpc {
request(
service: string,
method: string,
contentType: 'application/json' | 'application/protobuf',
data: object | Uint8Array
): Promise<object | Uint8Array>
}
class ArtifactHttpClient implements Rpc {
private httpClient: HttpClient
private baseUrl: string
private maxAttempts = 5
private baseRetryIntervalMilliseconds = 3000
private retryMultiplier = 1.5
constructor(
userAgent: string,
maxAttempts?: number,
baseRetryIntervalMilliseconds?: number,
retryMultiplier?: number
) {
const token = getRuntimeToken()
this.baseUrl = getResultsServiceUrl()
if (maxAttempts) {
this.maxAttempts = maxAttempts
}
if (baseRetryIntervalMilliseconds) {
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
}
if (retryMultiplier) {
this.retryMultiplier = retryMultiplier
}
this.httpClient = new HttpClient(userAgent, [
new BearerCredentialHandler(token)
])
}
  // This function satisfies the Rpc interface. It is compatible with the
  // JSON generated client.
async request(
service: string,
method: string,
contentType: 'application/json' | 'application/protobuf',
data: object | Uint8Array
): Promise<object | Uint8Array> {
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
debug(`[Request] ${method} ${url}`)
const headers = {
'Content-Type': contentType
}
try {
const {body} = await this.retryableRequest(async () =>
this.httpClient.post(url, JSON.stringify(data), headers)
)
return body
} catch (error) {
throw new Error(`Failed to ${method}: ${error.message}`)
}
}
async retryableRequest(
operation: () => Promise<HttpClientResponse>
): Promise<{response: HttpClientResponse; body: object}> {
let attempt = 0
let errorMessage = ''
let rawBody = ''
while (attempt < this.maxAttempts) {
let isRetryable = false
try {
const response = await operation()
const statusCode = response.message.statusCode
rawBody = await response.readBody()
debug(`[Response] - ${response.message.statusCode}`)
debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
const body = JSON.parse(rawBody)
maskSecretUrls(body)
debug(`Body: ${JSON.stringify(body, null, 2)}`)
if (this.isSuccessStatusCode(statusCode)) {
return {response, body}
}
isRetryable = this.isRetryableHttpStatusCode(statusCode)
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
if (body.msg) {
if (UsageError.isUsageErrorMessage(body.msg)) {
throw new UsageError()
}
errorMessage = `${errorMessage}: ${body.msg}`
}
} catch (error) {
if (error instanceof SyntaxError) {
debug(`Raw Body: ${rawBody}`)
}
if (error instanceof UsageError) {
throw error
}
if (NetworkError.isNetworkErrorCode(error?.code)) {
throw new NetworkError(error?.code)
}
isRetryable = true
errorMessage = error.message
}
if (!isRetryable) {
throw new Error(`Received non-retryable error: ${errorMessage}`)
}
if (attempt + 1 === this.maxAttempts) {
throw new Error(
`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
)
}
const retryTimeMilliseconds =
this.getExponentialRetryTimeMilliseconds(attempt)
info(
`Attempt ${attempt + 1} of ${
this.maxAttempts
} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
)
await this.sleep(retryTimeMilliseconds)
attempt++
}
throw new Error(`Request failed`)
}
isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) return false
return statusCode >= 200 && statusCode < 300
}
isRetryableHttpStatusCode(statusCode?: number): boolean {
if (!statusCode) return false
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.GatewayTimeout,
HttpCodes.InternalServerError,
HttpCodes.ServiceUnavailable,
HttpCodes.TooManyRequests
]
return retryableStatusCodes.includes(statusCode)
}
async sleep(milliseconds: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, milliseconds))
}
getExponentialRetryTimeMilliseconds(attempt: number): number {
if (attempt < 0) {
      throw new Error('attempt should be a non-negative integer')
}
if (attempt === 0) {
return this.baseRetryIntervalMilliseconds
}
const minTime =
this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
const maxTime = minTime * this.retryMultiplier
    // returns a random number in [minTime, maxTime)
return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}
}
export function internalArtifactTwirpClient(options?: {
maxAttempts?: number
retryIntervalMs?: number
retryMultiplier?: number
}): ArtifactServiceClientJSON {
const client = new ArtifactHttpClient(
getUserAgentString(),
options?.maxAttempts,
options?.retryIntervalMs,
options?.retryMultiplier
)
return new ArtifactServiceClientJSON(client)
}
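
With the defaults above (5 attempts, 3000 ms base interval, 1.5x multiplier), the retry window for each attempt follows directly from `getExponentialRetryTimeMilliseconds`; a sketch of the same arithmetic:

```typescript
const base = 3000
const multiplier = 1.5
const maxAttempts = 5

// Attempt 0 waits exactly the base interval; attempt n > 0 waits a random
// duration in [base * multiplier^n, base * multiplier^(n+1)).
for (let attempt = 0; attempt < maxAttempts - 1; attempt++) {
  const min = attempt === 0 ? base : base * multiplier ** attempt
  const max = attempt === 0 ? base : base * multiplier ** attempt * multiplier
  console.log(`retry after attempt ${attempt + 1}: ${min}..${max} ms`)
}
// retry after attempt 1: 3000..3000 ms
// retry after attempt 2: 4500..6750 ms
// retry after attempt 3: 6750..10125 ms
// retry after attempt 4: 10125..15187.5 ms
```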


@@ -1,99 +0,0 @@
import os from 'os'
import {info} from '@actions/core'
// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size during upload to blob storage
export function getUploadChunkSize(): number {
return 8 * 1024 * 1024 // 8 MB Chunks
}
export function getRuntimeToken(): string {
const token = process.env['ACTIONS_RUNTIME_TOKEN']
if (!token) {
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
}
return token
}
export function getResultsServiceUrl(): string {
const resultsUrl = process.env['ACTIONS_RESULTS_URL']
if (!resultsUrl) {
throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable')
}
return new URL(resultsUrl).origin
}
export function isGhes(): boolean {
const ghUrl = new URL(
process.env['GITHUB_SERVER_URL'] || 'https://github.com'
)
const hostname = ghUrl.hostname.trimEnd().toUpperCase()
const isGitHubHost = hostname === 'GITHUB.COM'
const isGheHost = hostname.endsWith('.GHE.COM')
const isLocalHost = hostname.endsWith('.LOCALHOST')
return !isGitHubHost && !isGheHost && !isLocalHost
}
export function getGitHubWorkspaceDir(): string {
const ghWorkspaceDir = process.env['GITHUB_WORKSPACE']
if (!ghWorkspaceDir) {
throw new Error('Unable to get the GITHUB_WORKSPACE env variable')
}
return ghWorkspaceDir
}
// Upload concurrency is capped at 300 and can be overridden with the
// ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable.
export function getConcurrency(): number {
const numCPUs = os.cpus().length
let concurrencyCap = 32
if (numCPUs > 4) {
const concurrency = 16 * numCPUs
concurrencyCap = concurrency > 300 ? 300 : concurrency
}
const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
if (concurrencyOverride) {
const concurrency = parseInt(concurrencyOverride)
if (isNaN(concurrency) || concurrency < 1) {
throw new Error(
'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
)
}
if (concurrency < concurrencyCap) {
info(
`Set concurrency based on the value set in ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY.`
)
return concurrency
}
info(
      `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Using the cap instead.`
)
return concurrencyCap
}
// default concurrency to 5
return 5
}
export function getUploadChunkTimeout(): number {
const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
if (!timeoutVar) {
return 300000 // 5 minutes
}
const timeout = parseInt(timeoutVar)
if (isNaN(timeout)) {
throw new Error(
'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
)
}
return timeout
}
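
The rules in `getConcurrency` reduce to: default 5 when no override is set; otherwise the override is honored up to a cap of 32 on machines with at most 4 CPUs, or min(16 × CPUs, 300) above that. A hypothetical helper reproducing just the cap computation:

```typescript
// Hypothetical helper mirroring the cap computation in getConcurrency.
function concurrencyCap(numCPUs: number): number {
  if (numCPUs <= 4) {
    return 32
  }
  return Math.min(16 * numCPUs, 300)
}

console.log(concurrencyCap(2)) // 32
console.log(concurrencyCap(8)) // 128
console.log(concurrencyCap(32)) // 300 (16 * 32 = 512, capped)
```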


@@ -1,72 +0,0 @@
export class FilesNotFoundError extends Error {
files: string[]
constructor(files: string[] = []) {
let message = 'No files were found to upload'
if (files.length > 0) {
message += `: ${files.join(', ')}`
}
super(message)
this.files = files
this.name = 'FilesNotFoundError'
}
}
export class InvalidResponseError extends Error {
constructor(message: string) {
super(message)
this.name = 'InvalidResponseError'
}
}
export class ArtifactNotFoundError extends Error {
constructor(message = 'Artifact not found') {
super(message)
this.name = 'ArtifactNotFoundError'
}
}
export class GHESNotSupportedError extends Error {
constructor(
message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'
) {
super(message)
this.name = 'GHESNotSupportedError'
}
}
export class NetworkError extends Error {
code: string
constructor(code: string) {
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
super(message)
this.code = code
this.name = 'NetworkError'
}
static isNetworkErrorCode = (code?: string): boolean => {
if (!code) return false
return [
'ECONNRESET',
'ENOTFOUND',
'ETIMEDOUT',
'ECONNREFUSED',
'EHOSTUNREACH'
].includes(code)
}
}
export class UsageError extends Error {
constructor() {
const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
super(message)
this.name = 'UsageError'
}
static isUsageErrorMessage = (msg?: string): boolean => {
if (!msg) return false
return msg.includes('insufficient usage')
}
}
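
Since each class sets a distinct name and carries typed fields, callers can branch on failure modes with `instanceof`. A handling sketch, assuming these classes are re-exported from the package entry point (the try body is illustrative):

```typescript
import {FilesNotFoundError, NetworkError, UsageError} from '@actions/artifact'

async function uploadSafely(doUpload: () => Promise<void>): Promise<void> {
  try {
    await doUpload()
  } catch (error) {
    if (error instanceof FilesNotFoundError) {
      console.warn(`Nothing to upload: ${error.files.join(', ')}`)
    } else if (error instanceof NetworkError) {
      console.error(`Network failure (${error.code}); check runner connectivity`)
    } else if (error instanceof UsageError) {
      console.error('Storage quota reached; usage is recalculated every 6-12 hours')
    } else {
      throw error
    }
  }
}
```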


@@ -1,188 +0,0 @@
/**
* Response from the server when an artifact is uploaded
*/
export interface UploadArtifactResponse {
/**
* Total size of the artifact in bytes. Not provided if no artifact was uploaded
*/
size?: number
/**
* The id of the artifact that was created. Not provided if no artifact was uploaded
* This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
*/
id?: number
/**
* The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
*/
digest?: string
}
/**
* Options for uploading an artifact
*/
export interface UploadArtifactOptions {
/**
* Duration after which artifact will expire in days.
*
* By default artifact expires after 90 days:
* https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
*
* Use this option to override the default expiry.
*
* Min value: 1
* Max value: 90 unless changed by repository setting
*
   * If this is set to a greater value than the retention settings allow, the retention period
   * will be reduced to match the maximum allowed on the server, and the upload process will continue. An
   * input of 0 assumes the default retention setting.
*/
retentionDays?: number
/**
* The level of compression for Zlib to be applied to the artifact archive.
* The value can range from 0 to 9:
* - 0: No compression
* - 1: Best speed
* - 6: Default compression (same as GNU Gzip)
* - 9: Best compression
* Higher levels will result in better compression, but will take longer to complete.
* For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
*/
compressionLevel?: number
}
/**
* Response from the server when getting an artifact
*/
export interface GetArtifactResponse {
/**
* Metadata about the artifact that was found
*/
artifact: Artifact
}
/**
* Options for listing artifacts
*/
export interface ListArtifactsOptions {
/**
* Filter the workflow run's artifacts to the latest by name
* In the case of reruns, this can be useful to avoid duplicates
*/
latest?: boolean
}
/**
* Response from the server when listing artifacts
*/
export interface ListArtifactsResponse {
/**
* A list of artifacts that were found
*/
artifacts: Artifact[]
}
/**
* Response from the server when downloading an artifact
*/
export interface DownloadArtifactResponse {
/**
* The path where the artifact was downloaded to
*/
downloadPath?: string
/**
   * True if the digest of the downloaded artifact does not match the expected hash
*/
digestMismatch?: boolean
}
/**
* Options for downloading an artifact
*/
export interface DownloadArtifactOptions {
/**
   * Denotes where the artifact will be downloaded to. If not specified, the artifact is downloaded to GITHUB_WORKSPACE
*/
path?: string
/**
* The hash that was computed for the artifact during upload. If provided, the outcome of the download
* will provide a digestMismatch property indicating whether the hash of the downloaded artifact
* matches the expected hash.
*/
expectedHash?: string
}
export interface StreamExtractResponse {
/**
* The SHA256 hash of the downloaded file
*/
sha256Digest?: string
}
/**
* An Actions Artifact
*/
export interface Artifact {
/**
* The name of the artifact
*/
name: string
/**
* The ID of the artifact
*/
id: number
/**
* The size of the artifact in bytes
*/
size: number
/**
* The time when the artifact was created
*/
createdAt?: Date
/**
* The digest of the artifact, computed at time of upload.
*/
digest?: string
}
// FindOptions are for fetching Artifact(s) out of the scope of the current run.
export interface FindOptions {
/**
* The criteria for finding Artifact(s) out of the scope of the current run.
*/
findBy?: {
/**
* Token with actions:read permissions
*/
token: string
/**
* WorkflowRun of the artifact(s) to lookup
*/
workflowRunId: number
/**
     * Repository owner (e.g. 'actions')
*/
repositoryOwner: string
/**
* Repository owner (eg. 'toolkit')
*/
repositoryName: string
}
}
/**
* Response from the server when deleting an artifact
*/
export interface DeleteArtifactResponse {
/**
* The id of the artifact that was deleted
*/
id: number
}
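
`FindOptions` is what lets these APIs reach outside the current run. A sketch of a cross-run get-and-download using the interfaces above, assuming the `DefaultArtifactClient` entry point (ids and paths are placeholders):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()
const findBy = {
  token: process.env.GITHUB_TOKEN ?? '', // needs actions:read
  workflowRunId: 123456789, // placeholder run id
  repositoryOwner: 'actions',
  repositoryName: 'toolkit'
}

async function restore(): Promise<void> {
  const {artifact} = await client.getArtifact('build-output', {findBy})
  const {downloadPath} = await client.downloadArtifact(artifact.id, {
    findBy,
    path: './restored' // placeholder target directory
  })
  console.log(`Downloaded ${artifact.name} to ${downloadPath}`)
}

restore().catch(err => console.error(err))
```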


@@ -1,9 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')
/**
 * Ensure that this user agent string is used in all HTTP calls so that we can monitor telemetry across different versions of this package
*/
export function getUserAgentString(): string {
return `@actions/artifact-${packageJson.version}`
}


@@ -1,145 +0,0 @@
import * as core from '@actions/core'
import {getRuntimeToken} from './config'
import jwt_decode from 'jwt-decode'
import {debug, setSecret} from '@actions/core'
export interface BackendIds {
workflowRunBackendId: string
workflowJobRunBackendId: string
}
interface ActionsToken {
scp: string
}
const InvalidJwtError = new Error(
'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
)
// uses the JWT token claims to get the
// workflow run and workflow job run backend ids
export function getBackendIdsFromToken(): BackendIds {
const token = getRuntimeToken()
const decoded = jwt_decode<ActionsToken>(token)
if (!decoded.scp) {
throw InvalidJwtError
}
/*
* example decoded:
* {
* scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
* }
*/
const scpParts = decoded.scp.split(' ')
if (scpParts.length === 0) {
throw InvalidJwtError
}
/*
* example scpParts:
* ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
*/
for (const scopes of scpParts) {
const scopeParts = scopes.split(':')
if (scopeParts?.[0] !== 'Actions.Results') {
// not the Actions.Results scope
continue
}
/*
* example scopeParts:
* ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
*/
if (scopeParts.length !== 3) {
// missing expected number of claims
throw InvalidJwtError
}
const ids = {
workflowRunBackendId: scopeParts[1],
workflowJobRunBackendId: scopeParts[2]
}
core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)
return ids
}
throw InvalidJwtError
}
/**
* Masks the `sig` parameter in a URL and sets it as a secret.
*
* @param url - The URL containing the signature parameter to mask
* @remarks
* This function attempts to parse the provided URL and identify the 'sig' query parameter.
* If found, it registers both the raw and URL-encoded signature values as secrets using
* the Actions `setSecret` API, which prevents them from being displayed in logs.
*
* The function handles errors gracefully if URL parsing fails, logging them as debug messages.
*
* @example
* ```typescript
* // Mask a signature in an Azure SAS token URL
* maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
* ```
*/
export function maskSigUrl(url: string): void {
if (!url) return
try {
const parsedUrl = new URL(url)
const signature = parsedUrl.searchParams.get('sig')
if (signature) {
setSecret(signature)
setSecret(encodeURIComponent(signature))
}
} catch (error) {
debug(
`Failed to parse URL: ${url} ${
error instanceof Error ? error.message : String(error)
}`
)
}
}
/**
* Masks sensitive information in URLs containing signature parameters.
 * Currently supports masking 'sig' parameters in the 'signed_upload_url'
 * and 'signed_url' properties of the provided object.
*
 * @param body - The response body object that may contain signed URLs
* @remarks
* This function extracts URLs from the object properties and calls maskSigUrl
* on each one to redact sensitive signature information. The function doesn't
* modify the original object; it only marks the signatures as secrets for
* logging purposes.
*
* @example
* ```typescript
* const responseBody = {
* signed_upload_url: 'https://example.com?sig=abc123',
* signed_download_url: 'https://example.com?sig=def456'
* };
* maskSecretUrls(responseBody);
* ```
*/
export function maskSecretUrls(body: Record<string, unknown> | null): void {
if (typeof body !== 'object' || body === null) {
debug('body is not an object or is null')
return
}
if (
'signed_upload_url' in body &&
typeof body.signed_upload_url === 'string'
) {
maskSigUrl(body.signed_upload_url)
}
if ('signed_url' in body && typeof body.signed_url === 'string') {
maskSigUrl(body.signed_url)
}
}
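
The scp-claim parsing in `getBackendIdsFromToken` can be exercised without a real runner token: `jwt-decode` only base64-decodes the payload, so a hand-built token with a fake signature suffices (all values are illustrative):

```typescript
import jwt_decode from 'jwt-decode'

const payload = {
  scp: 'Actions.ExampleScope Actions.Results:run-backend-id:job-backend-id'
}
// 'e30' is base64 for '{}' (the JWT header); the trailing signature is fake.
const token = `e30.${Buffer.from(JSON.stringify(payload)).toString(
  'base64url'
)}.sig`

// Same parsing steps as getBackendIdsFromToken above.
const decoded = jwt_decode<{scp: string}>(token)
const parts = decoded.scp
  .split(' ')
  .map(scope => scope.split(':'))
  .find(scope => scope[0] === 'Actions.Results')

console.log(parts?.[1]) // run-backend-id
console.log(parts?.[2]) // job-backend-id
```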

Some files were not shown because too many files have changed in this diff.