Compare commits: main...artifact-n (19 commits)
Commits:

- f236b725b0
- 771f20b061
- 12fa0be194
- 7e8f13b4e6
- 6a61612f93
- e5fc7ccac7
- 898dd8c2a1
- 39a7ba7bbd
- a3d4efa112
- 3ee6b4b169
- 68ccf88d4e
- d321d78c4b
- b4363a79ae
- 962fd91d75
- 4be2423559
- 95cf02468e
- 4dfce941f1
- 6e48b0cfed
- c24983bf08
```diff
@@ -1,5 +1,4 @@
 node_modules/
 packages/*/node_modules/
 packages/*/lib/
-packages/glob/__tests__/_temp
-packages/*/src/generated/*/
+packages/glob/__tests__/_temp
```
```diff
@@ -1,13 +1,6 @@
 {
-  "plugins": [
-    "jest",
-    "@typescript-eslint",
-    "prettier"
-  ],
-  "extends": [
-    "plugin:github/recommended",
-    "plugin:prettier/recommended"
-  ],
+  "plugins": ["jest", "@typescript-eslint"],
+  "extends": ["plugin:github/recommended"],
   "parser": "@typescript-eslint/parser",
   "parserOptions": {
     "ecmaVersion": 9,
@@ -15,33 +8,14 @@
     "project": "./tsconfig.eslint.json"
   },
   "rules": {
-    "prettier/prettier": [
-      "error",
-      {
-        "endOfLine": "auto"
-      }
-    ],
     "eslint-comments/no-use": "off",
     "no-constant-condition": ["error", { "checkLoops": false }],
     "github/no-then": "off",
     "import/no-namespace": "off",
     "no-shadow": "off",
     "no-unused-vars": "off",
-    "i18n-text/no-en": "off",
-    "filenames/match-regex": "off",
-    "import/no-commonjs": "off",
-    "import/named": "off",
-    "no-sequences": "off",
-    "import/no-unresolved": "off",
-    "no-undef": "off",
-    "no-only-tests/no-only-tests": "off",
     "@typescript-eslint/no-unused-vars": "error",
-    "@typescript-eslint/explicit-member-accessibility": [
-      "error",
-      {
-        "accessibility": "no-public"
-      }
-    ],
+    "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
     "@typescript-eslint/no-require-imports": "error",
     "@typescript-eslint/array-type": "error",
     "@typescript-eslint/await-thenable": "error",
@@ -49,16 +23,8 @@
     "camelcase": "off",
     "@typescript-eslint/camelcase": "off",
     "@typescript-eslint/consistent-type-assertions": "off",
-    "@typescript-eslint/explicit-function-return-type": [
-      "error",
-      {
-        "allowExpressions": true
-      }
-    ],
-    "@typescript-eslint/func-call-spacing": [
-      "error",
-      "never"
-    ],
+    "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
+    "@typescript-eslint/func-call-spacing": ["error", "never"],
     "@typescript-eslint/naming-convention": [
       "error",
       {
@@ -90,18 +56,15 @@
     "@typescript-eslint/prefer-string-starts-ends-with": "error",
     "@typescript-eslint/promise-function-async": "error",
     "@typescript-eslint/require-array-sort-compare": "error",
     "@typescript-eslint/restrict-plus-operands": "error",
     "semi": "off",
-    "@typescript-eslint/semi": [
-      "error",
-      "never"
-    ],
+    "@typescript-eslint/semi": ["error", "never"],
     "@typescript-eslint/type-annotation-spacing": "error",
     "@typescript-eslint/unbound-method": "error"
   },
-  "ignorePatterns": "packages/glob/__tests__/_temp/**/",
   "env": {
     "node": true,
     "es6": true,
     "jest/globals": true
   }
 }
```
````diff
@@ -28,7 +28,7 @@ Note that before a PR will be accepted, you must ensure:
 ### Useful Scripts
 
-- `npm run bootstrap` This runs `lerna exec -- npm install` which will install dependencies in this repository's packages and cross-link packages where necessary.
+- `npm run bootstrap` This runs `lerna bootstrap` which will install dependencies in this repository's packages and cross-link packages where necessary.
 - `npm run build` This compiles TypeScript code in each package (this is especially important if one package relies on changes in another when you're running tests). This is just an alias for `lerna run tsc`.
 - `npm run format` This checks that formatting has been applied with Prettier.
 - `npm test` This runs all Jest tests in all packages in this repository.
@@ -43,7 +43,7 @@ Note that before a PR will be accepted, you must ensure:
 1. In a new branch, create a new Lerna package:
 
    ```console
-   $ npm run new-package [name]
+   $ npm run create-package new-package
    ```
 
    This will ask you some questions about the new package. Start with `0.0.0` as the first version (look generally at some of the other packages for how the package.json is structured).
````
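As an aside for newcomers, here is a minimal sketch of the development loop those scripts support, assembled only from the script names shown in the hunks above; the ordering and comments are assumptions, not part of the diff:

```bash
# Minimal dev loop for a clone of actions/toolkit (assumed workflow).
npm install         # install root dev dependencies
npm run bootstrap   # install and cross-link package dependencies via lerna
npm run build       # compile TypeScript in every package (alias for `lerna run tsc`)
npm run format      # check that Prettier formatting has been applied
npm test            # run all Jest tests in all packages
```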
```diff
@@ -1,27 +0,0 @@
-# To get started with Dependabot version updates, you'll need to specify which
-# package ecosystems to update and where the package manifests are located.
-# Please see the documentation for all configuration options:
-# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
-
-version: 2
-updates:
-  - package-ecosystem: "npm"
-    directory: "/packages/artifact"
-    schedule:
-      interval: "daily"
-    groups:
-      # Group minor and patch updates together but keep major separate
-      artifact-minor-patch:
-        update-types:
-          - "minor"
-          - "patch"
-  - package-ecosystem: "npm"
-    directory: "/packages/cache"
-    schedule:
-      interval: "daily"
-    groups:
-      # Group minor and patch updates together but keep major separate
-      cache-minor-patch:
-        update-types:
-          - "minor"
-          - "patch"
```
```diff
@@ -10,8 +10,8 @@ on:
       - '**.md'
 
 jobs:
-  upload:
-    name: Upload
+  build:
+    name: Build
 
     strategy:
       matrix:
@@ -22,12 +22,17 @@ jobs:
 
     steps:
     - name: Checkout
-      uses: actions/checkout@v5
+      uses: actions/checkout@v3
 
-    - name: Set Node.js 24.x
-      uses: actions/setup-node@v5
+    - name: Set Node.js 16.x
+      uses: actions/setup-node@v3
       with:
-        node-version: 24.x
+        node-version: 16.x
 
+    # In order to upload & download artifacts from a shell script, certain env variables need to be set that are only available in the
+    # node context. This runs a local action that gets and sets the necessary env variables that are needed
+    - name: Set env variables
+      uses: ./packages/artifact/__tests__/ci-test-action/
+
     # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
     # without these to just compile the artifacts package
@@ -40,155 +45,51 @@ jobs:
         npm run tsc
       working-directory: packages/artifact
 
+    - name: Set artifact file contents
+      shell: bash
+      run: |
+        echo "non-gzip-artifact-content=hello" >> $GITHUB_ENV
+        echo "gzip-artifact-content=Some large amount of text that has a compression ratio that is greater than 100%. If greater than 100%, gzip is used to upload the file" >> $GITHUB_ENV
+        echo "empty-artifact-content=_EMPTY_" >> $GITHUB_ENV
+
     - name: Create files that will be uploaded
       run: |
         mkdir artifact-path
-        echo -n 'hello from file 1' > artifact-path/first.txt
-        echo -n 'hello from file 2' > artifact-path/second.txt
+        echo '${{ env.non-gzip-artifact-content }}' > artifact-path/world.txt
+        echo '${{ env.gzip-artifact-content }}' > artifact-path/gzip.txt
+        touch artifact-path/empty.txt
 
-    - name: Upload Artifacts
-      uses: actions/github-script@v8
-      with:
-        script: |
-          const {default: artifact} = require('./packages/artifact/lib/artifact')
-
-          const artifactName = 'my-artifact-${{ matrix.runs-on }}'
-          console.log('artifactName: ' + artifactName)
-
-          const fileContents = ['artifact-path/first.txt','artifact-path/second.txt']
-
-          const uploadResult = await artifact.uploadArtifact(artifactName, fileContents, './')
-          console.log(uploadResult)
-
-          const size = uploadResult.size
-          const id = uploadResult.id
-
-          console.log(`Successfully uploaded artifact ${id}`)
-
-          try {
-            await artifact.uploadArtifact(artifactName, fileContents, './')
-            throw new Error('should have failed second upload')
-          } catch (err) {
-            console.log('Successfully blocked second artifact upload')
-          }
-
-  verify:
-    name: Verify and Delete
-    runs-on: ubuntu-latest
-    needs: [upload]
-    steps:
-    - name: Checkout
-      uses: actions/checkout@v5
-
-    - name: Set Node.js 24.x
-      uses: actions/setup-node@v5
-      with:
-        node-version: 24.x
-
-    # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
-    # without these to just compile the artifacts package
-    - name: Install root npm packages
-      run: npm ci
-
-    - name: Compile artifact package
+    # We're using node -e to call the functions directly available in the @actions/artifact package
+    - name: Upload artifacts using uploadArtifact()
       run: |
-        npm ci
-        npm run tsc
-      working-directory: packages/artifact
-
-    - name: List and Download Artifacts
-      uses: actions/github-script@v8
-      with:
-        script: |
-          const {default: artifactClient} = require('./packages/artifact/lib/artifact')
-
-          const {readFile} = require('fs/promises')
-          const path = require('path')
-
-          const findBy = {
-            repositoryOwner: process.env.GITHUB_REPOSITORY.split('/')[0],
-            repositoryName: process.env.GITHUB_REPOSITORY.split('/')[1],
-            token: '${{ secrets.GITHUB_TOKEN }}',
-            workflowRunId: process.env.GITHUB_RUN_ID
-          }
-
-          const listResult = await artifactClient.listArtifacts({latest: true, findBy})
-          console.log(listResult)
-
-          const artifacts = listResult.artifacts
-          const expected = [
-            'my-artifact-ubuntu-latest',
-            'my-artifact-windows-latest',
-            'my-artifact-macos-latest'
-          ]
-
-          const foundArtifacts = artifacts.filter(artifact =>
-            expected.includes(artifact.name)
-          )
-
-          if (foundArtifacts.length !== 3) {
-            console.log('Unexpected length of found artifacts', foundArtifacts)
-            throw new Error(
-              `Expected 3 artifacts but found ${foundArtifacts.length} artifacts.`
-            )
-          }
-
-          console.log('Successfully listed artifacts that were uploaded')
-
-          const files = [
-            {name: 'artifact-path/first.txt', content: 'hello from file 1'},
-            {name: 'artifact-path/second.txt', content: 'hello from file 2'}
-          ]
-
-          for (const artifact of foundArtifacts) {
-            const {downloadPath} = await artifactClient.downloadArtifact(artifact.id, {
-              path: artifact.name,
-              findBy
-            })
-
-            console.log('Downloaded artifact to:', downloadPath)
-
-            for (const file of files) {
-              const filepath = path.join(
-                process.env.GITHUB_WORKSPACE,
-                downloadPath,
-                file.name
-              )
-
-              console.log('Checking file:', filepath)
-
-              const content = await readFile(filepath, 'utf8')
-              if (content.trim() !== file.content.trim()) {
-                throw new Error(
-                  `Expected file '${file.name}' to contain '${file.content}' but found '${content}'`
-                )
-              }
-            }
-          }
-
-    - name: Delete Artifacts
-      uses: actions/github-script@v8
-      with:
-        script: |
-          const {default: artifactClient} = require('./packages/artifact/lib/artifact')
-
-          const artifactsToDelete = [
-            'my-artifact-ubuntu-latest',
-            'my-artifact-windows-latest',
-            'my-artifact-macos-latest'
-          ]
-
-          for (const artifactName of artifactsToDelete) {
-            const {id} = await artifactClient.deleteArtifact(artifactName)
-          }
-
-          const {artifacts} = await artifactClient.listArtifacts({latest: true})
-          const foundArtifacts = artifacts.filter(artifact =>
-            artifactsToDelete.includes(artifact.name)
-          )
-
-          if (foundArtifacts.length !== 0) {
-            console.log('Unexpected length of found artifacts:', foundArtifacts)
-            throw new Error(
-              `Expected 0 artifacts but found ${foundArtifacts.length} artifacts.`
-            )
-          }
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-1',['artifact-path/world.txt'], process.argv[1]))" "${{ github.workspace }}"
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-2',['artifact-path/gzip.txt'], process.argv[1]))" "${{ github.workspace }}"
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().uploadArtifact('my-artifact-3',['artifact-path/empty.txt'], process.argv[1]))" "${{ github.workspace }}"
+
+    - name: Download artifacts using downloadArtifact()
+      run: |
+        mkdir artifact-1-directory
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-1','artifact-1-directory'))"
+        mkdir artifact-2-directory
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-2','artifact-2-directory'))"
+        mkdir artifact-3-directory
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadArtifact('my-artifact-3','artifact-3-directory'))"
+
+    - name: Verify downloadArtifact()
+      shell: bash
+      run: |
+        packages/artifact/__tests__/test-artifact-file.sh "artifact-1-directory/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "artifact-2-directory/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "artifact-3-directory/artifact-path/empty.txt" "${{ env.empty-artifact-content }}"
+
+    - name: Download artifacts using downloadAllArtifacts()
+      run: |
+        mkdir multi-artifact-directory
+        node -e "Promise.resolve(require('./packages/artifact/lib/artifact-client').create().downloadAllArtifacts('multi-artifact-directory'))"
+
+    - name: Verify downloadAllArtifacts()
+      shell: bash
+      run: |
+        packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-1/artifact-path/world.txt" "${{ env.non-gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-2/artifact-path/gzip.txt" "${{ env.gzip-artifact-content }}"
+        packages/artifact/__tests__/test-artifact-file.sh "multi-artifact-directory/my-artifact-3/artifact-path/empty.txt" "${{ env.empty-artifact-content }}"
```
```diff
@@ -18,12 +18,12 @@ jobs:
 
     steps:
     - name: Checkout
-      uses: actions/checkout@v5
+      uses: actions/checkout@v3
 
-    - name: Set Node.js 24.x
-      uses: actions/setup-node@v5
+    - name: Set Node.js 16.x
+      uses: actions/setup-node@v3
       with:
-        node-version: 24.x
+        node-version: 16.x
 
     - name: npm install
       run: npm install
@@ -32,7 +32,7 @@ jobs:
       run: npm run bootstrap
 
     - name: audit tools (without allow-list)
-      run: npm audit --audit-level=moderate --omit dev
+      run: npm audit --audit-level=moderate
 
     - name: audit packages
       run: npm run audit-all
```
```diff
@@ -22,12 +22,12 @@ jobs:
 
     steps:
     - name: Checkout
-      uses: actions/checkout@v5
+      uses: actions/checkout@v3
 
-    - name: Set Node.js 24.x
-      uses: actions/setup-node@v5
+    - name: Set Node.js 16.x
+      uses: actions/setup-node@v3
       with:
-        node-version: 24.x
+        node-version: 16.x
 
     # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
     # node context. This runs a local action that gets and sets the necessary env variables that are needed
@@ -39,11 +39,9 @@ jobs:
     - name: Install root npm packages
       run: npm ci
 
-    # We need to install only runtime dependencies (omit dev dependencies) to verify that what we're shipping is all
-    # that is needed
     - name: Compile cache package
       run: |
-        npm ci --omit=dev
+        npm ci
         npm run tsc
       working-directory: packages/cache
 
```
```diff
@@ -17,16 +17,16 @@
 
     steps:
     - name: Checkout
-      uses: actions/checkout@v5
+      uses: actions/checkout@v2
 
     - shell: bash
       run: |
        rm "C:\Program Files\Git\usr\bin\tar.exe"
 
-    - name: Set Node.js 24.x
-      uses: actions/setup-node@v5
+    - name: Set Node.js 12.x
+      uses: actions/setup-node@v1
       with:
-        node-version: 24.x
+        node-version: 12.x
 
     # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
     # node context. This runs a local action that gets and sets the necessary env variables that are needed
```
```diff
@@ -20,18 +20,18 @@
 
     steps:
     - name: Checkout repository
-      uses: actions/checkout@v5
+      uses: actions/checkout@v3
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
+      uses: github/codeql-action/init@v1
       with:
         languages: javascript
 
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+      uses: github/codeql-action/autobuild@v1
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v1
```
```diff
@@ -1,42 +1,27 @@
 name: Publish NPM
 
-run-name: Publish NPM - ${{ github.event.inputs.package }}
-
 on:
   workflow_dispatch:
     inputs:
       package:
-        type: choice
         required: true
-        description: 'Which package to release'
-        options:
-          - artifact
-          - attest
-          - cache
-          - core
-          - exec
-          - github
-          - glob
-          - http-client
-          - io
-          - tool-cache
+        description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache'
 
 jobs:
   test:
-    runs-on: macos-latest-large
+    runs-on: macos-latest
 
     steps:
     - name: setup repo
-      uses: actions/checkout@v5
+      uses: actions/checkout@v3
 
     - name: verify package exists
       run: ls packages/${{ github.event.inputs.package }}
 
-    - name: Set Node.js 24.x
-      uses: actions/setup-node@v5
+    - name: Set Node.js 16.x
+      uses: actions/setup-node@v3
       with:
-        node-version: 24.x
+        node-version: 16.x
 
     - name: npm install
       run: npm install
@@ -55,22 +40,19 @@ jobs:
       working-directory: packages/${{ github.event.inputs.package }}
 
     - name: upload artifact
-      uses: actions/upload-artifact@v4
+      uses: actions/upload-artifact@v3
       with:
        name: ${{ github.event.inputs.package }}
        path: packages/${{ github.event.inputs.package }}/*.tgz
 
   publish:
-    runs-on: macos-latest-large
+    runs-on: macos-latest
     needs: test
     environment: npm-publish
-    permissions:
-      contents: read
-      id-token: write
     steps:
 
     - name: download artifact
-      uses: actions/download-artifact@v4
+      uses: actions/download-artifact@v3
       with:
        name: ${{ github.event.inputs.package }}
 
@@ -80,7 +62,7 @@ jobs:
         NPM_TOKEN: ${{ secrets.TOKEN }}
 
     - name: publish
-      run: npm publish --provenance *.tgz
+      run: npm publish *.tgz
 
     - name: notify slack on failure
       if: failure()
```
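For orientation, both sides of this workflow implement the same pack-then-publish flow: the test job packs the chosen package into a tarball and uploads it as a workflow artifact, and the publish job downloads and publishes that tarball. A minimal local sketch of that sequence, assuming you are already logged in to npm and choosing `packages/core` purely for illustration:

```bash
# Pack one toolkit package and publish the resulting tarball (sketch).
cd packages/core
npm pack                         # produces actions-core-<version>.tgz
npm publish --provenance *.tgz   # --provenance matches what main's workflow passes
```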
```diff
@@ -16,23 +16,19 @@ jobs:
 
   strategy:
     matrix:
-      runs-on: [ubuntu-latest, macos-latest-large, windows-latest]
-
-      # Node 20 is the currently supported stable Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions
-      # Node 24 is the new version being added with support in actions runners
-      node-version: [20.x, 24.x]
+      runs-on: [ubuntu-latest, macos-latest, windows-latest]
     fail-fast: false
 
   runs-on: ${{ matrix.runs-on }}
 
   steps:
   - name: Checkout
-    uses: actions/checkout@v5
+    uses: actions/checkout@v3
 
-  - name: Set up Node ${{ matrix.node-version }}
-    uses: actions/setup-node@v5
+  - name: Set Node.js 16.x
+    uses: actions/setup-node@v3
     with:
-      node-version: ${{ matrix.node-version }}
+      node-version: 16.x
 
   - name: npm install
     run: npm install
@@ -44,7 +40,7 @@ jobs:
       run: npm run build
 
     - name: npm test
-      run: npm test -- --runInBand --forceExit
+      run: npm test -- --runInBand
       env:
         GITHUB_TOKEN: ${{ github.token }}
 
```
```diff
@@ -9,7 +9,7 @@ jobs:
     if: ${{ github.repository_owner == 'actions' }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v3
       - name: Update Octokit
         working-directory: packages/github
         run: |
```
```diff
@@ -1,5 +1,3 @@
 node_modules/
 packages/*/node_modules/
-packages/*/lib/
-packages/glob/__tests__/_temp/**/
-packages/*/src/generated/*/
+packages/*/lib/
```
```diff
@@ -7,6 +7,5 @@
   "trailingComma": "none",
   "bracketSpacing": false,
   "arrowParens": "avoid",
-  "parser": "typescript",
-  "endOfLine": "auto"
+  "parser": "typescript"
 }
```
```diff
@@ -2,4 +2,3 @@
 
 /packages/artifact/ @actions/artifacts-actions
 /packages/cache/ @actions/actions-cache
-/packages/attest/ @actions/package-security
```
README.md (45 changed lines)
````diff
@@ -24,7 +24,7 @@ The GitHub Actions ToolKit provides a set of packages to make creating actions easier.
 Provides functions for inputs, outputs, results, logging, secrets and variables. Read more [here](packages/core)
 
 ```bash
-npm install @actions/core
+$ npm install @actions/core
 ```
 <br/>
 
@@ -33,7 +33,7 @@ npm install @actions/core
 Provides functions to exec cli tools and process output. Read more [here](packages/exec)
 
 ```bash
-npm install @actions/exec
+$ npm install @actions/exec
 ```
 <br/>
 
@@ -42,7 +42,7 @@ npm install @actions/exec
 Provides functions to search for files matching glob patterns. Read more [here](packages/glob)
 
 ```bash
-npm install @actions/glob
+$ npm install @actions/glob
 ```
 <br/>
 
@@ -51,7 +51,7 @@ npm install @actions/glob
 A lightweight HTTP client optimized for building actions. Read more [here](packages/http-client)
 
 ```bash
-npm install @actions/http-client
+$ npm install @actions/http-client
 ```
 <br/>
 
@@ -60,7 +60,7 @@ npm install @actions/http-client
 Provides disk i/o functions like cp, mv, rmRF, which etc. Read more [here](packages/io)
 
 ```bash
-npm install @actions/io
+$ npm install @actions/io
 ```
 <br/>
 
@@ -71,7 +71,7 @@ Provides functions for downloading and caching tools. e.g. setup-* actions. Read more [here](packages/tool-cache)
 See @actions/cache for caching workflow dependencies.
 
 ```bash
-npm install @actions/tool-cache
+$ npm install @actions/tool-cache
 ```
 <br/>
 
@@ -80,7 +80,7 @@ npm install @actions/tool-cache
 Provides an Octokit client hydrated with the context that the current action is being run in. Read more [here](packages/github)
 
 ```bash
-npm install @actions/github
+$ npm install @actions/github
 ```
 <br/>
 
@@ -89,7 +89,7 @@ npm install @actions/github
 Provides functions to interact with actions artifacts. Read more [here](packages/artifact)
 
 ```bash
-npm install @actions/artifact
+$ npm install @actions/artifact
 ```
 <br/>
 
@@ -98,16 +98,7 @@ npm install @actions/artifact
 Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)
 
 ```bash
-npm install @actions/cache
+$ npm install @actions/cache
 ```
 <br/>
 
-:lock_with_ink_pen: [@actions/attest](packages/attest)
-
-Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)
-
-```bash
-npm install @actions/attest
-```
-<br/>
-
@@ -227,23 +218,9 @@ console.log(`We can even get context data, like the repo: ${context.repo.repo}`)
 ```
 <br/>
 
-## Note
+## Contributing
 
-Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.
-
-We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
-
-We are taking the following steps to better direct requests related to GitHub Actions, including:
-
-1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
-
-2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
-
-3. Security Issues should be handled as per our [security.md](SECURITY.md).
-
-We will still provide security updates for this project and fix major breaking changes during this time.
-
-You are welcome to still raise bugs in this repo.
+We welcome contributions. See [how to contribute](.github/CONTRIBUTING.md).
 
 ## Code of Conduct
 
````
```diff
@@ -32,7 +32,7 @@ jobs:
       os: [ubuntu-16.04, windows-2019]
     runs-on: ${{matrix.os}}
     actions:
-    - uses: actions/setup-node@v5
+    - uses: actions/setup-node@v3
       with:
         version: ${{matrix.node}}
     - run: |
```
````diff
@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:
 
 ```yaml
 steps:
-  using: actions/setup-node@v5
+  using: actions/setup-node@v3
 ```
 
 # Define Metadata
````
```diff
@@ -8,4 +8,4 @@ module.exports = {
     '^.+\\.ts$': 'ts-jest'
   },
-  verbose: true
+  }
 }
```
```diff
@@ -1,6 +1,6 @@
 {
   "packages": [
-    "packages/**/*"
+    "packages/*"
   ],
   "version": "independent"
 }
```
nx.json (24 changed lines)
```diff
@@ -1,24 +0,0 @@
-{
-  "tasksRunnerOptions": {
-    "default": {
-      "runner": "nx/tasks-runners/default",
-      "options": {
-        "cacheableOperations": []
-      }
-    }
-  },
-  "affected": {
-    "defaultBase": "master"
-  },
-  "$schema": "./node_modules/nx/schemas/nx-schema.json",
-  "namedInputs": {
-    "default": [
-      "{projectRoot}/**/*",
-      "sharedGlobals"
-    ],
-    "sharedGlobals": [],
-    "production": [
-      "default"
-    ]
-  }
-}
```
File diff suppressed because it is too large.

package.json (48 changed lines)
```diff
@@ -1,50 +1,32 @@
 {
   "name": "root",
   "private": true,
   "scripts": {
     "audit-all": "lerna run audit-moderate",
-    "bootstrap": "lerna exec -- npm install",
+    "bootstrap": "lerna bootstrap",
     "build": "lerna run tsc",
     "clean": "lerna clean",
-    "repair": "lerna repair",
     "check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build -- -- --noEmit\"",
     "format": "prettier --write packages/**/*.ts",
     "format-check": "prettier --check packages/**/*.ts",
     "lint": "eslint packages/**/*.ts",
     "lint-fix": "eslint packages/**/*.ts --fix",
-    "new-package": "scripts/create-package",
-    "test": "jest --testTimeout 70000"
+    "test": "jest --testTimeout 10000"
   },
   "devDependencies": {
-    "@types/jest": "^29.5.4",
-    "@types/node": "^24.1.0",
+    "@types/jest": "^27.0.2",
+    "@types/node": "^16.18.1",
     "@types/signale": "^1.4.1",
+    "@typescript-eslint/parser": "^4.0.0",
     "concurrently": "^6.1.0",
-    "eslint": "^8.0.1",
-    "eslint-config-prettier": "^8.9.0",
-    "eslint-plugin-github": "^4.9.2",
-    "eslint-plugin-jest": "^27.2.3",
-    "eslint-plugin-prettier": "^5.0.0",
+    "eslint": "^7.23.0",
+    "eslint-plugin-github": "^4.1.3",
+    "eslint-plugin-jest": "^22.21.0",
     "flow-bin": "^0.115.0",
-    "jest": "^29.6.4",
-    "lerna": "^6.4.1",
-    "nx": "16.6.0",
-    "prettier": "^3.0.0",
-    "ts-jest": "^29.1.1",
-    "typescript": "^5.2.2"
-  },
-  "overrides": {
-    "semver": "^7.6.0",
-    "tar": "^6.2.1",
-    "@octokit/plugin-paginate-rest": "^9.2.2",
-    "@octokit/request": "^8.4.1",
-    "@octokit/request-error": "^5.1.1",
-    "@octokit/core": "^5.0.3",
-    "tmp": "^0.2.4",
-    "@types/node": "^24.1.0",
-    "brace-expansion": "^2.0.2",
-    "form-data": "^4.0.4",
-    "uri-js": "npm:uri-js-replace@^1.0.1",
-    "node-fetch": "^3.3.2"
+    "jest": "^27.2.5",
+    "lerna": "^5.4.0",
+    "prettier": "^1.19.1",
+    "ts-jest": "^27.0.5",
+    "typescript": "^3.9.9"
   }
 }
```
```diff
@@ -1,44 +1,30 @@
 # Contributions
 
-This package is used internally by the v4 versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions so it is important that major changes or updates get properly tested.
+This package is used internally by the v2+ versions of [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact). This package can also be used by other actions to interact with artifacts. Any changes or updates to this package will propagate updates to these actions so it is important that major changes or updates get properly tested.
 
 Any issues or feature requests that are related to the artifact actions should be filled in the appropriate repo.
 
-A limited range of unit tests run as part of each PR when making changes to the artifact packages. For small contributions and fixes, they should be sufficient.
-
-If making large changes, there are a few scenarios that should be tested:
+If making large changes, there are a few scenarios that should be tested.
 
-- Uploading very large artifacts
-- Uploading artifacts with lots of small files
+- Uploading very large artifacts (large artifacts get compressed using gzip so compression/decompression must be tested)
+- Uploading artifacts with lots of small files (each file is uploaded with its own HTTP call, timeouts and non-success HTTP responses can be expected so they must be properly handled)
 - Uploading artifacts using a self-hosted runner (uploads and downloads behave differently due to extra latency)
 - Downloading a single artifact (large and small, if lots of small files are part of an artifact, timeouts and non-success HTTP responses can be expected)
 - Downloading all artifacts at once
 
 Large architectural changes can impact upload/download performance so it is important to separately run extra tests. We request that any large contributions/changes have extra detailed testing so we can verify performance and possible regressions.
 
-Tests will run for every push/pull_request [via Actions](https://github.com/actions/toolkit/blob/main/.github/workflows/artifact-tests.yml).
+It is not possible to run end-to-end tests for artifacts as part of a PR in this repo because certain env variables such as `ACTIONS_RUNTIME_URL` are only available from the context of an action as opposed to a shell script. These env variables are needed in order to make the necessary API calls.
 
 # Testing
 
-## Package tests
-
-To run unit tests for the `@actions/artifact` package:
-
-1. Clone `actions/toolkit` locally
-2. Install dependencies: `npm bootstrap`
-3. Change working directory to `packages/artifact`
-4. Run jest tests: `npm run test`
-
-## Within upload-artifact or download-artifact actions
-
-Any easy way to test changes for the official upload/download actions is to fork them, compile changes and run them.
-
-1. For your local `actions/toolkit` changes:
-   1. Change directory to `packages/artifact`
-   2. Compile the changes: `npm run tsc`
-   3. Symlink your package change: `npm link`
-2. Fork and clone either [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact)
-   1. In the locally cloned fork, link to your local toolkit changes: `npm link @actions/artifact`
-   2. Then, compile your changes with: `npm run release`. The local `dist/index.js` should be updated with your changes.
-   3. Commit and push to your fork, you can then test with a `uses:` in your workflow pointed at your fork.
-   4. The format for the above is `<username>/<repository-name>/@<ref>`, i.e. `me/myrepo/@HEAD`
+Any easy way to test changes is to fork the artifact actions and to use `npm link` to test your changes.
+
+1. Fork the [upload-artifact](https://github.com/actions/upload-artifact) and [download-artifact](https://github.com/actions/download-artifact) repos
+2. Clone the forks locally
+3. With your local changes to the toolkit repo, type `npm link` after ensuring there are no errors when running `tsc`
+4. In the locally cloned fork, type `npm link @actions/artifact`
+5. Create a new release for your local fork using `tsc` and `npm run release` (this will create a new `dist/index.js` file using `@vercel/ncc`)
+6. Commit and push your local changes, you will then be able to test your changes with your forked action
```
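A condensed sketch of the `npm link` testing flow that both sides of this diff describe; the directory layout is an illustrative assumption:

```bash
# In your local toolkit clone: compile and expose @actions/artifact.
cd toolkit/packages/artifact
npm run tsc                  # compile; fix any errors before linking
npm link                     # register the local package for linking

# In your fork of upload-artifact (or download-artifact):
cd ../../../upload-artifact
npm link @actions/artifact   # point the fork at the locally built package
npm run release              # rebuild dist/index.js with the linked code
```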
````diff
@@ -1,192 +1,95 @@
 # `@actions/artifact`
 
-Interact programmatically with [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts).
-
-This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact) and [`@actions/download-artifact`](https://github.com/actions/download-artifact) actions.
-
-- [`@actions/artifact`](#actionsartifact)
-  - [v2 - What's New](#v2---whats-new)
-    - [Improvements](#improvements)
-    - [Breaking changes](#breaking-changes)
-  - [Quick Start](#quick-start)
-  - [Examples](#examples)
-    - [Upload and Download](#upload-and-download)
-    - [Delete an Artifact](#delete-an-artifact)
-    - [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
-    - [Speeding up large uploads](#speeding-up-large-uploads)
-  - [Additional Resources](#additional-resources)
-
-## v2 - What's New
-
-> [!IMPORTANT]
-> @actions/artifact v2+, upload-artifact@v4+, and download-artifact@v4+ are not currently supported on GHES yet. The previous version of this package can be found at [this tag](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.2/packages/artifact) and [on npm](https://www.npmjs.com/package/@actions/artifact/v/1.1.2).
-
-The release of `@actions/artifact@v2` (including `upload-artifact@v4` and `download-artifact@v4`) are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.
-
-### Improvements
-
-1. All upload and download operations are much quicker, up to 80% faster download times and 96% faster upload times in worst case scenarios.
-2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
-3. Artifacts can now be downloaded and deleted from the UI _before_ the entire workflow run finishes.
-4. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs. Both of these factors help reduce the possibility of accidentally corrupting Artifact files. (Digest/integrity hash coming soon in the API!)
-5. This library (and `actions/download-artifact`) now support downloading Artifacts from _other_ repositories and runs if a `GITHUB_TOKEN` with sufficient `actions:read` permissions are provided.
-
-### Breaking changes
-
-1. Firewall rules required for self-hosted runners.
-
-   If you are using self-hosted runners behind a firewall, you must have flows open to [Actions endpoints](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github). If you cannot use wildcard rules for your firewall, see the GitHub [meta endpoint](https://api.github.com/meta) for specific endpoints.
-
-   e.g.
-
-   ```bash
-   curl https://api.github.com/meta | jq .domains.actions
-   ```
-
-2. Uploading to the same named Artifact multiple times.
-
-   Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once.
-
-3. Limit of Artifacts for an individual job.
-
-   Each job in a workflow run now has a limit of 10 artifacts.
-
-## Quick Start
-
-Install the package:
-
-```bash
-npm i @actions/artifact
-```
-
-Import the module:
-
-```js
-// ES6 module
-import {DefaultArtifactClient} from '@actions/artifact'
-
-// CommonJS
-const {DefaultArtifactClient} = require('@actions/artifact')
-```
-
-Then instantiate:
-
-```js
-const artifact = new DefaultArtifactClient()
-```
-
-ℹ️ For a comprehensive list of classes, interfaces, functions and more, see the [generated documentation](./docs/generated/README.md).
-
-## Examples
-
-### Upload and Download
-
-The most basic scenario is uploading one or more files to an Artifact, then downloading that Artifact. Downloads are based on the Artifact ID, which can be obtained in the response of `uploadArtifact`, `getArtifact`, `listArtifacts` or via the [REST API](https://docs.github.com/en/rest/actions/artifacts).
-
-```js
-const {id, size} = await artifact.uploadArtifact(
-  // name of the artifact
-  'my-artifact',
-  // files to include (supports absolute and relative paths)
-  ['/absolute/path/file1.txt', './relative/file2.txt'],
-  {
-    // optional: how long to retain the artifact
-    // if unspecified, defaults to repository/org retention settings (the limit of this value)
-    retentionDays: 10
-  }
-)
-
-console.log(`Created artifact with id: ${id} (bytes: ${size}`)
-
-const {downloadPath} = await artifact.downloadArtifact(id, {
-  // optional: download destination path. otherwise defaults to $GITHUB_WORKSPACE
-  path: '/tmp/dst/path',
-})
-
-console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
-```
-
-### Delete an Artifact
-
-To delete an artifact, all you need is the name.
-
-```js
-const {id} = await artifact.deleteArtifact(
-  // name of the artifact
-  'my-artifact'
-)
-
-console.log(`Deleted Artifact ID '${id}'`)
-```
-
-It also supports options to delete from other repos/runs given a github token with `actions:write` permissions on the target repository is supplied.
-
-```js
-const findBy = {
-  // must have actions:write permission on target repository
-  token: process.env['GITHUB_TOKEN'],
-  workflowRunId: 123,
-  repositoryOwner: 'actions',
-  repositoryName: 'toolkit'
-}
-
-const {id} = await artifact.deleteArtifact(
-  // name of the artifact
-  'my-artifact',
-  // options to find by other repo/owner
-  { findBy }
-)
-
-console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/ ${findBy.repositoryName}`)
-```
-
-### Downloading from other workflow runs or repos
-
-It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must specify `options.findBy` to `downloadArtifact`.
-
-```ts
-const findBy = {
-  // must have actions:read permission on target repository
-  token: process.env['GITHUB_TOKEN'],
-  workflowRunId: 123,
-  repositoryOwner: 'actions',
-  repositoryName: 'toolkit'
-}
-
-await artifact.downloadArtifact(1337, {
-  findBy
-})
-
-// can also be used in other methods
-
-await artifact.getArtifact('my-artifact', {
-  findBy
-})
-
-await artifact.listArtifacts({
-  findBy
-})
-```
-
-### Speeding up large uploads
-
-If you have large files that need to be uploaded (or file types that don't compress well), you may benefit from changing the compression level of the Artifact archive. NOTE: This is a tradeoff between artifact upload time and stored data size.
-
-```ts
-await artifact.uploadArtifact('my-massive-artifact', ['big_file.bin'], {
-  // The level of compression for Zlib to be applied to the artifact archive.
-  // - 0: No compression
-  // - 1: Best speed
-  // - 6: Default compression (same as GNU Gzip)
-  // - 9: Best compression
-  compressionLevel: 0
-})
-```
-
-## Additional Resources
-
-- [Releases](./RELEASES.md)
-- [Contribution Guide](./CONTRIBUTIONS.md)
-- [Frequently Asked Questions](./docs/faq.md)
+## Usage
+
+You can use this package to interact with the actions artifacts.
+- [Upload an Artifact](#Upload-an-Artifact)
+- [Download a Single Artifact](#Download-a-Single-Artifact)
+- [Download All Artifacts](#Download-all-Artifacts)
+- [Additional Documentation](#Additional-Documentation)
+- [Contributions](#Contributions)
+
+Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory.
+
+## Upload an Artifact
+
+Method Name: `uploadArtifact`
+
+#### Inputs
+- `name`
+  - The name of the artifact that is being uploaded
+  - Required
+- `files`
+  - A list of file paths that describe what should be uploaded as part of the artifact
+  - If a path is provided that does not exist, an error will be thrown
+  - Can be absolute or relative. Internally everything is normalized and resolved
+  - Required
+- `rootDirectory`
+  - A file path that denotes the root directory of the files being uploaded. This path is used to strip the paths provided in `files` to control how they are uploaded and structured
+  - If a file specified in `files` is not in the `rootDirectory`, an error will be thrown
+  - Required
+- `options`
+  - Extra options that allow for the customization of the upload behavior
+  - Optional
+
+#### Available Options
+
+- `retentionDays`
+  - Duration after which artifact will expire in days
+  - Minimum value: 1
+  - Maximum value: 90 unless changed by repository setting
+  - If this is set to a greater value than the retention settings allowed, the retention on artifacts will be reduced to match the max value allowed on the server, and the upload process will continue. An input of 0 assumes default retention value.
+
+#### Example using Absolute File Paths
+
+```js
+const artifact = require('@actions/artifact');
+const artifactClient = artifact.create()
+const artifactName = 'my-artifact';
+const files = [
+    '/home/user/files/plz-upload/file1.txt',
+    '/home/user/files/plz-upload/file2.txt',
+    '/home/user/files/plz-upload/dir/file3.txt'
+]
+const rootDirectory = '/home/user/files/plz-upload'
+const options = {
+    continueOnError: true
+}
+
+const uploadResult = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
+```
+
+#### Example using Relative File Paths
+
+```js
+// Assuming the current working directory is /home/user/files/plz-upload
+const artifact = require('@actions/artifact');
+const artifactClient = artifact.create()
+const artifactName = 'my-artifact';
+const files = [
+    'file1.txt',
+    'file2.txt',
+    'dir/file3.txt'
+]
+
+const rootDirectory = '.' // Also possible to use __dirname
+const options = {
+    continueOnError: false
+}
+
+const uploadResponse = await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options)
+```
+
+#### Upload Result
+
+The returned `UploadResponse` will contain the following information
+
+- `artifactName`
+  - The name of the artifact that was uploaded
+- `size`
+  - Total size of the artifact that was uploaded in bytes
+
+## Contributions
+
+See [contributor guidelines](https://github.com/actions/toolkit/blob/main/.github/CONTRIBUTING.md) for general guidelines and information about toolkit contributions.
+
+For contributions related to this package, see [artifact contributions](CONTRIBUTIONS.md) for more information.
````
@ -1,178 +1,15 @@
|
|||
# @actions/artifact Releases
|
||||
|
||||
### 4.0.0
|
||||
### 0.1.0
|
||||
|
||||
- Add support for Node 24 [#2110](https://github.com/actions/toolkit/pull/2110)
|
||||
- Fix: artifact pagination bugs and configurable artifact count limits [#2165](https://github.com/actions/toolkit/pull/2165)
|
||||
- Fix: reject the promise on timeout [#2124](https://github.com/actions/toolkit/pull/2124)
|
||||
- Update dependency versions
|
||||
- Initial release
|
||||
|
||||
### 2.3.3
|
||||
### 0.2.0
|
||||
|
||||
- Dependency updates [#2049](https://github.com/actions/toolkit/pull/2049)
|
||||
|
||||
### 2.3.2
|
||||
|
||||
- Added masking for Shared Access Signature (SAS) artifact URLs [#1982](https://github.com/actions/toolkit/pull/1982)
|
||||
- Change hash to digest for consistent terminology across runner logs [#1991](https://github.com/actions/toolkit/pull/1991)
|
||||
|
||||
### 2.3.1
|
||||
|
||||
- Fix comment typo on expectedHash. [#1986](https://github.com/actions/toolkit/pull/1986)
|
||||
|
||||
### 2.3.0
|
||||
|
||||
- Allow ArtifactClient to perform digest comparisons, if supplied. [#1975](https://github.com/actions/toolkit/pull/1975)
|
||||
|
||||
### 2.2.2
|
||||
|
||||
- Default concurrency to 5 for uploading artifacts [#1962](https://github.com/actions/toolkit/pull/1962)
|
||||
|
||||
### 2.2.1
|
||||
|
||||
- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)
|
||||
|
||||
### 2.2.0
|
||||
|
||||
- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)
|
||||
|
||||
### 2.1.11
|
||||
|
||||
- Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)
|
||||
- Use native `crypto` [#1815](https://github.com/actions/toolkit/pull/1815)
|
||||
|
||||
### 2.1.10
|
||||
|
||||
- Fixed a regression with symlinks not being automatically resolved [#1830](https://github.com/actions/toolkit/pull/1830)
|
||||
- Fixed a regression with chunk timeout [#1786](https://github.com/actions/toolkit/pull/1786)
|
||||
|
||||
### 2.1.9
|
||||
|
||||
- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
|
||||
- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)
|
||||
|
||||
### 2.1.8
|
||||
|
||||
- Allows `*.localhost` domains for hostname checks for local development.
|
||||
|
||||
### 2.1.7
|
||||
|
||||
- Update unzip-stream dependency and reverted to using `unzip.Extract()`
|
||||
|
||||
### 2.1.6
|
||||
|
||||
- Will retry on invalid request responses.
|
||||
|
||||
### 2.1.5
|
||||
|
||||
- Bumped `archiver` dependency to 7.0.1
|
||||
|
||||
### 2.1.4
|
||||
|
||||
- Adds info-level logging for zip extraction
|
||||
|
||||
### 2.1.3
|
||||
|
||||
- Fixes a bug in the extract logic updated in 2.1.2
|
||||
|
||||
### 2.1.2
|
||||
|
||||
- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts
|
||||
|
||||
### 2.1.1
|
||||
|
||||
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
|
||||
|
||||
### 2.1.0
|
||||
|
||||
- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
|
||||
- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)
|
||||
|
||||
### 2.0.1
|
||||
|
||||
- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
|
||||
|
||||
### 2.0.0
|
||||
|
||||
- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
|
||||
- Numerous API changes, [some breaking](./README.md#breaking-changes).
|
||||
|
||||
- [Blog post with more info](https://github.blog/2024-02-12-get-started-with-v4-of-github-actions-artifacts/)
|
||||
|
||||
### 1.1.1
|
||||
|
||||
- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
|
||||
|
||||
### 1.1.0
|
||||
|
||||
- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
|
||||
|
||||
### 1.0.2
|
||||
|
||||
- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
|
||||
|
||||
### 1.0.1
|
||||
|
||||
- Update to v2.0.0 of `@actions/http-client`
|
||||
|
||||
### 1.0.0
|
||||
|
||||
- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
|
||||
|
||||
### 0.6.1
|
||||
|
||||
- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
|
||||
|
||||
### 0.6.0
|
||||
|
||||
- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
|
||||
- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
|
||||
- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
|
||||
- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
|
||||
- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
|
||||
- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
|
||||
|
||||
### 0.5.2
|
||||
|
||||
- Add HTTP 500 as a retryable status code for artifact upload and download.
|
||||
|
||||
### 0.5.1
|
||||
|
||||
- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
|
||||
|
||||
### 0.5.0
|
||||
|
||||
- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
|
||||
|
||||
### 0.4.2
|
||||
|
||||
- Improved retry-ability when a partial artifact download is encountered
|
||||
|
||||
### 0.4.1
|
||||
|
||||
- Update to latest @actions/core version
|
||||
|
||||
### 0.4.0
|
||||
|
||||
- Add option to specify custom retentions on artifacts
|
||||
-
|
||||
### 0.3.5
|
||||
|
||||
- Retry in the event of a 413 response
|
||||
|
||||
### 0.3.3
|
||||
|
||||
- Increase chunk size during upload from 4MB to 8MB
|
||||
- Improve user-agent strings during API calls to help internally diagnose issues
|
||||
|
||||
### 0.3.2
|
||||
|
||||
- Fix to ensure readstreams get correctly reset in the event of a retry
|
||||
|
||||
### 0.3.1
|
||||
|
||||
- Fix to ensure temporary gzip files get correctly deleted during artifact upload
|
||||
- Remove spaces as a forbidden character during upload
|
||||
- Fixes to TCP connections not closing
|
||||
- GZip file compression to speed up downloads
|
||||
- Improved logging and output
|
||||
- Extra documentation
|
||||
|
||||
### 0.3.0
|
||||
|
||||
|
|
@ -183,13 +20,77 @@
- Clearer error message if storage quota has been reached
- Improved logging and output during artifact download

### 0.2.0

### 0.3.1

- Fixes to TCP connections not closing
- GZip file compression to speed up downloads
- Improved logging and output
- Extra documentation
- Fix to ensure temporary gzip files get correctly deleted during artifact upload
- Remove spaces as a forbidden character during upload

### 0.1.0

### 0.3.2

- Initial release
- Fix to ensure readstreams get correctly reset in the event of a retry

### 0.3.3

- Increase chunk size during upload from 4MB to 8MB
- Improve user-agent strings during API calls to help internally diagnose issues

### 0.3.5

- Retry in the event of a 413 response

### 0.4.0

- Add option to specify custom retentions on artifacts

### 0.4.1

- Update to latest @actions/core version

### 0.4.2

- Improved retry-ability when a partial artifact download is encountered

### 0.5.0

- Improved retry-ability for all http calls during artifact upload and download if an error is encountered

### 0.5.1

- Bump @actions/http-client to version 1.0.11 to fix proxy-related issues during artifact upload and download

### 0.5.2

- Add HTTP 500 as a retryable status code for artifact upload and download.

### 0.6.0

- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
- Improvements to client-side validation for certain invalid characters not allowed during upload [#951](https://github.com/actions/toolkit/pull/951)
- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)

### 0.6.1

- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)

### 1.0.0

- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)

### 1.0.1

- Update to v2.0.0 of `@actions/http-client`

### 1.0.2

- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

### 1.1.0

- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)

### 1.1.1

- Fixed a bug in Node16 where, if an HTTP download finished too quickly (<1ms, e.g. when it's mocked), we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
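Several of the entries above (the 413 retries in 0.3.5, the retry-ability work in 0.5.0, and HTTP 500 in 0.5.2) revolve around one question: which status codes are safe to retry. Below is a minimal sketch of such a gate; the changelog only confirms 413 and 500, so the remaining codes and the names used here are illustrative assumptions, not the toolkit's actual code.

// Illustrative sketch only. The changelog confirms 413 and 500 are retried;
// the other codes listed here are common choices and are assumed.
const RETRYABLE_STATUS_CODES = [408, 413, 429, 500, 502, 503, 504]

function isRetryableStatusCode(statusCode: number | undefined): boolean {
  if (statusCode === undefined) {
    return false
  }
  return RETRYABLE_STATUS_CODES.includes(statusCode)
}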
@ -1,348 +0,0 @@
import * as http from 'http'
import * as net from 'net'
import {HttpClient} from '@actions/http-client'
import * as config from '../src/internal/shared/config'
import {internalArtifactTwirpClient} from '../src/internal/shared/artifact-twirp-client'
import {noopLogs} from './common'
import {NetworkError, UsageError} from '../src/internal/shared/errors'

jest.mock('@actions/http-client')

const clientOptions = {
  maxAttempts: 5,
  retryIntervalMs: 1,
  retryMultiplier: 1.5
}

describe('artifact-http-client', () => {
  beforeAll(() => {
    noopLogs()
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('http://localhost:8080')
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('token')
  })

  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('should successfully create a client', () => {
    const client = internalArtifactTwirpClient()
    expect(client).toBeDefined()
  })

  it('should make a request', async () => {
    const mockPost = jest.fn(() => {
      const msg = new http.IncomingMessage(new net.Socket())
      msg.statusCode = 200
      return {
        message: msg,
        readBody: async () => {
          return Promise.resolve(
            `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
          )
        }
      }
    })
    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })

    const client = internalArtifactTwirpClient()
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
      name: 'artifact',
      version: 4
    })

    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
    expect(artifact).toBeDefined()
    expect(artifact.ok).toBe(true)
    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
  })

  it('should retry if the request fails', async () => {
    const mockPost = jest
      .fn(() => {
        const msgSucceeded = new http.IncomingMessage(new net.Socket())
        msgSucceeded.statusCode = 200
        return {
          message: msgSucceeded,
          readBody: async () => {
            return Promise.resolve(
              `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
            )
          }
        }
      })
      .mockImplementationOnce(() => {
        const msgFailed = new http.IncomingMessage(new net.Socket())
        msgFailed.statusCode = 500
        msgFailed.statusMessage = 'Internal Server Error'
        return {
          message: msgFailed,
          readBody: async () => {
            return Promise.resolve(`{"ok": false}`)
          }
        }
      })
    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })

    const client = internalArtifactTwirpClient(clientOptions)
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
      name: 'artifact',
      version: 4
    })

    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(artifact).toBeDefined()
    expect(artifact.ok).toBe(true)
    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
    expect(mockPost).toHaveBeenCalledTimes(2)
  })

  it('should retry if invalid body response', async () => {
    const mockPost = jest
      .fn(() => {
        const msgSucceeded = new http.IncomingMessage(new net.Socket())
        msgSucceeded.statusCode = 200
        return {
          message: msgSucceeded,
          readBody: async () => {
            return Promise.resolve(
              `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
            )
          }
        }
      })
      .mockImplementationOnce(() => {
        const msgFailed = new http.IncomingMessage(new net.Socket())
        msgFailed.statusCode = 502
        msgFailed.statusMessage = 'Bad Gateway'
        return {
          message: msgFailed,
          readBody: async () => {
            return Promise.resolve('💥')
          }
        }
      })
    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })

    const client = internalArtifactTwirpClient(clientOptions)
    const artifact = await client.CreateArtifact({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678',
      name: 'artifact',
      version: 4
    })

    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(artifact).toBeDefined()
    expect(artifact.ok).toBe(true)
    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
    expect(mockPost).toHaveBeenCalledTimes(2)
  })

  it('should fail if the request fails 5 times', async () => {
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 500
      msgFailed.statusMessage = 'Internal Server Error'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(`{"ok": false}`)
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(
      'Failed to make request after 5 attempts: Failed request: (500) Internal Server Error'
    )
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(5)
  })

  it('should fail immediately if there is a non-retryable error', async () => {
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 401
      msgFailed.statusMessage = 'Unauthorized'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(`{"ok": false}`)
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(
      'Received non-retryable error: Failed request: (401) Unauthorized'
    )
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

  it('should fail with a descriptive error', async () => {
    // 409 duplicate error
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 409
      msgFailed.statusMessage = 'Conflict'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(
            `{"msg": "an artifact with this name already exists on the workflow run"}`
          )
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient(clientOptions)
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(
      'Failed to CreateArtifact: Received non-retryable error: Failed request: (409) Conflict: an artifact with this name already exists on the workflow run'
    )
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

  it('should properly describe a network failure', async () => {
    class FakeNodeError extends Error {
      code: string
      constructor(code: string) {
        super()
        this.code = code
      }
    }

    const mockPost = jest.fn(() => {
      throw new FakeNodeError('ENOTFOUND')
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient()
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(new NetworkError('ENOTFOUND').message)
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })

  it('should properly describe a usage error', async () => {
    const mockPost = jest.fn(() => {
      const msgFailed = new http.IncomingMessage(new net.Socket())
      msgFailed.statusCode = 403
      msgFailed.statusMessage = 'Forbidden'
      return {
        message: msgFailed,
        readBody: async () => {
          return Promise.resolve(
            `{"msg": "insufficient usage to create artifact"}`
          )
        }
      }
    })

    const mockHttpClient = (
      HttpClient as unknown as jest.Mock
    ).mockImplementation(() => {
      return {
        post: mockPost
      }
    })
    const client = internalArtifactTwirpClient()
    await expect(async () => {
      await client.CreateArtifact({
        workflowRunBackendId: '1234',
        workflowJobRunBackendId: '5678',
        name: 'artifact',
        version: 4
      })
    }).rejects.toThrowError(new UsageError().message)
    expect(mockHttpClient).toHaveBeenCalledTimes(1)
    expect(mockPost).toHaveBeenCalledTimes(1)
  })
})
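The mocks in the deleted test file above rebuild the same {message, readBody} shape by hand for every case. A hypothetical helper (not part of the toolkit; shown only to make the repeated pattern explicit) could have condensed it:

// Hypothetical test helper: builds the {message, readBody} object that the
// mocked HttpClient.post implementations above construct inline each time.
import * as http from 'http'
import * as net from 'net'

function mockTwirpResponse(
  statusCode: number,
  body: string,
  statusMessage?: string
): {message: http.IncomingMessage; readBody: () => Promise<string>} {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = statusCode
  if (statusMessage) {
    message.statusMessage = statusMessage
  }
  return {message, readBody: async () => body}
}

With it, the 409 case above would reduce to a one-liner such as jest.fn(() => mockTwirpResponse(409, '{"msg": "..."}', 'Conflict')).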
@ -0,0 +1,5 @@
name: 'Set env variables'
description: 'Sets certain env variables so that e2e artifact upload and download can be tested in a shell'
runs:
  using: 'node12'
  main: 'index.js'
@ -0,0 +1,14 @@
// Certain env variables are not set by default in a shell context and are only available in a node context from a running action
// In order to be able to upload and download artifacts e2e in a shell when running CI tests, we need these env variables set
const fs = require('fs');
const os = require('os');
const filePath = process.env[`GITHUB_ENV`]
fs.appendFileSync(filePath, `ACTIONS_RUNTIME_URL=${process.env.ACTIONS_RUNTIME_URL}${os.EOL}`, {
  encoding: 'utf8'
})
fs.appendFileSync(filePath, `ACTIONS_RUNTIME_TOKEN=${process.env.ACTIONS_RUNTIME_TOKEN}${os.EOL}`, {
  encoding: 'utf8'
})
fs.appendFileSync(filePath, `GITHUB_RUN_ID=${process.env.GITHUB_RUN_ID}${os.EOL}`, {
  encoding: 'utf8'
})
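The three appends above differ only in the variable name; an equivalent loop form (a sketch with the same behavior, not a change the PR makes) would be:

// Sketch: same effect as the three appendFileSync calls above.
const exported = ['ACTIONS_RUNTIME_URL', 'ACTIONS_RUNTIME_TOKEN', 'GITHUB_RUN_ID']
for (const name of exported) {
  fs.appendFileSync(filePath, `${name}=${process.env[name]}${os.EOL}`, {
    encoding: 'utf8'
  })
}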
@ -1,9 +0,0 @@
import * as core from '@actions/core'

// noopLogs mocks the console.log and core.* functions to prevent output in the console while testing
export const noopLogs = (): void => {
  jest.spyOn(console, 'log').mockImplementation(() => {})
  jest.spyOn(core, 'debug').mockImplementation(() => {})
  jest.spyOn(core, 'info').mockImplementation(() => {})
  jest.spyOn(core, 'warning').mockImplementation(() => {})
}
@ -1,149 +0,0 @@
import * as config from '../src/internal/shared/config'
import os from 'os'

// Mock the `cpus()` function in the `os` module
jest.mock('os', () => {
  const osActual = jest.requireActual('os')
  return {
    ...osActual,
    cpus: jest.fn()
  }
})

beforeEach(() => {
  jest.resetModules()
})

describe('isGhes', () => {
  it('should return false when the request domain is github.com', () => {
    process.env.GITHUB_SERVER_URL = 'https://github.com'
    expect(config.isGhes()).toBe(false)
  })

  it('should return false when the request domain ends with ghe.com', () => {
    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.com'
    expect(config.isGhes()).toBe(false)
  })

  it('should return false when the request domain ends with ghe.localhost', () => {
    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
    expect(config.isGhes()).toBe(false)
  })

  it('should return false when the request domain ends with .localhost', () => {
    process.env.GITHUB_SERVER_URL = 'https://github.localhost'
    expect(config.isGhes()).toBe(false)
  })

  it('should return true when the request domain is specific to an enterprise', () => {
    process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
    expect(config.isGhes()).toBe(true)
  })
})

describe('uploadChunkTimeoutEnv', () => {
  it('should return default 300000 when no env set', () => {
    expect(config.getUploadChunkTimeout()).toBe(300000)
  })

  it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
    expect(config.getUploadChunkTimeout()).toBe(150000)
  })

  it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
    expect(() => {
      config.getUploadChunkTimeout()
    }).toThrow()
  })
})

describe('uploadConcurrencyEnv', () => {
  it('Concurrency defaults to 5', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    expect(config.getConcurrency()).toBe(5)
  })

  it('Concurrency maxes out at 300 on systems with many CPUs', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '301'
    expect(config.getConcurrency()).toBe(300)
  })

  it('Concurrency can be set to 32 when CPU count is <= 4', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '32'
    expect(config.getConcurrency()).toBe(32)
  })

  it('Concurrency can be set to 16 * CPU count when CPU count is > 4', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '96'
    expect(config.getConcurrency()).toBe(96)
  })

  it('Concurrency can be overridden by env var ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
    expect(config.getConcurrency()).toBe(10)
  })

  it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
    expect(() => {
      config.getConcurrency()
    }).toThrow()
  })

  it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
    expect(() => {
      config.getConcurrency()
    }).toThrow()
  })
})

describe('getMaxArtifactListCount', () => {
  beforeEach(() => {
    delete process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
  })

  it('should return default 1000 when no env set', () => {
    expect(config.getMaxArtifactListCount()).toBe(1000)
  })

  it('should return value set in ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '2000'
    expect(config.getMaxArtifactListCount()).toBe(2000)
  })

  it('should throw if value set in ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is invalid', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = 'abc'
    expect(() => {
      config.getMaxArtifactListCount()
    }).toThrow(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  })

  it('should throw if ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is < 1', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '0'
    expect(() => {
      config.getMaxArtifactListCount()
    }).toThrow(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  })

  it('should throw if ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is negative', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '-100'
    expect(() => {
      config.getMaxArtifactListCount()
    }).toThrow(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  })
})
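Read together, the uploadConcurrencyEnv tests pin down a clamping rule: default 5 with no override, overrides capped at 32 on machines with 4 or fewer CPUs, 16 per CPU above that, and a hard ceiling of 300. The sketch below is reconstructed from those assertions alone; the real src/internal/shared/config.ts may be structured differently.

import os from 'os'

// Reconstructed from the test assertions above; not the actual implementation.
export function getConcurrency(): number {
  const numCPUs = os.cpus().length
  const override = process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY
  if (override === undefined) {
    return 5 // default when no env var is set
  }
  const concurrency = parseInt(override, 10)
  if (Number.isNaN(concurrency) || concurrency < 1) {
    throw new Error('Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY')
  }
  // cap: 32 on small machines, 16 per CPU on larger ones, never above 300
  const cap = Math.min(300, numCPUs > 4 ? 16 * numCPUs : 32)
  return Math.min(concurrency, cap)
}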
@ -1,192 +0,0 @@
import * as github from '@actions/github'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import type {RequestInterface} from '@octokit/types'
import {
  deleteArtifactInternal,
  deleteArtifactPublic
} from '../src/internal/delete/delete-artifact'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'

type MockedRequest = jest.MockedFunction<RequestInterface<object>>

type MockedDeleteArtifact = jest.MockedFunction<
  RestEndpointMethods['actions']['deleteArtifact']
>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    request: jest.fn(),
    rest: {
      actions: {
        deleteArtifact: jest.fn()
      }
    }
  })
}))

const fixtures = {
  repo: 'toolkit',
  owner: 'actions',
  token: 'ghp_1234567890',
  runId: 123,
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  },
  artifacts: [
    {
      id: 1,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-01')
    },
    {
      id: 2,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-02')
    }
  ]
}

describe('delete-artifact', () => {
  beforeAll(() => {
    noopLogs()
  })

  describe('public', () => {
    it('should delete an artifact', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: [
            {
              name: fixtures.artifacts[0].name,
              id: fixtures.artifacts[0].id,
              size_in_bytes: fixtures.artifacts[0].size,
              created_at: fixtures.artifacts[0].createdAt.toISOString()
            }
          ]
        }
      })

      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
        .deleteArtifact as MockedDeleteArtifact
      mockDeleteArtifact.mockResolvedValueOnce({
        status: 204,
        headers: {},
        url: '',
        data: null as never
      })

      const response = await deleteArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      expect(response).toEqual({
        id: fixtures.artifacts[0].id
      })
    })

    it('should fail if non-200 response', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: [
            {
              name: fixtures.artifacts[0].name,
              id: fixtures.artifacts[0].id,
              size_in_bytes: fixtures.artifacts[0].size,
              created_at: fixtures.artifacts[0].createdAt.toISOString()
            }
          ]
        }
      })

      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
        .deleteArtifact as MockedDeleteArtifact
      mockDeleteArtifact.mockRejectedValue(new Error('boom'))

      await expect(
        deleteArtifactPublic(
          fixtures.artifacts[0].name,
          fixtures.runId,
          fixtures.owner,
          fixtures.repo,
          fixtures.token
        )
      ).rejects.toThrow('boom')
    })
  })

  describe('internal', () => {
    beforeEach(() => {
      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)
      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })

    it('should delete an artifact', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
        .mockResolvedValue({
          ok: true,
          artifactId: fixtures.artifacts[0].id.toString()
        })
      const response = await deleteArtifactInternal(fixtures.artifacts[0].name)
      expect(response).toEqual({
        id: fixtures.artifacts[0].id
      })
    })

    it('should fail if non-200 response', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
        .mockRejectedValue(new Error('boom'))
      await expect(
        deleteArtifactInternal(fixtures.artifacts[0].id)
      ).rejects.toThrow('boom')
    })
  })
})
@ -1,652 +0,0 @@
import fs from 'fs'
import * as http from 'http'
import * as net from 'net'
import * as path from 'path'
import * as github from '@actions/github'
import {HttpClient} from '@actions/http-client'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import archiver from 'archiver'

import {
  downloadArtifactInternal,
  downloadArtifactPublic,
  streamExtractExternal
} from '../src/internal/download/download-artifact'
import {getUserAgentString} from '../src/internal/shared/user-agent'
import {noopLogs} from './common'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as util from '../src/internal/shared/util'

type MockedDownloadArtifact = jest.MockedFunction<
  RestEndpointMethods['actions']['downloadArtifact']
>

const testDir = path.join(__dirname, '_temp', 'download-artifact')
const fixtures = {
  workspaceDir: path.join(testDir, 'workspace'),
  exampleArtifact: {
    path: path.join(testDir, 'artifact.zip'),
    files: [
      {
        path: 'hello.txt',
        content: 'Hello World!'
      },
      {
        path: 'goodbye.txt',
        content: 'Goodbye World!'
      }
    ]
  },
  artifactID: 1234,
  artifactName: 'my-artifact',
  artifactSize: 123456,
  repositoryOwner: 'actions',
  repositoryName: 'toolkit',
  token: 'ghp_1234567890',
  blobStorageUrl: 'https://blob-storage.local?signed=true',
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  }
}

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    rest: {
      actions: {
        downloadArtifact: jest.fn()
      }
    }
  })
}))

jest.mock('@actions/http-client')

// Create a zip archive with the contents of the example artifact
const createTestArchive = async (): Promise<void> => {
  const archive = archiver('zip', {
    zlib: {level: 9}
  })
  for (const file of fixtures.exampleArtifact.files) {
    archive.append(file.content, {name: file.path})
  }
  archive.finalize()

  return new Promise((resolve, reject) => {
    archive.pipe(fs.createWriteStream(fixtures.exampleArtifact.path))
    archive.on('error', reject)
    archive.on('finish', resolve)
  })
}

const expectExtractedArchive = async (dir: string): Promise<void> => {
  for (const file of fixtures.exampleArtifact.files) {
    const filePath = path.join(dir, file.path)
    expect(fs.readFileSync(filePath, 'utf8')).toEqual(file.content)
  }
}

const setup = async (): Promise<void> => {
  noopLogs()
  await fs.promises.mkdir(testDir, {recursive: true})
  await createTestArchive()

  process.env['GITHUB_WORKSPACE'] = fixtures.workspaceDir
}

const cleanup = async (): Promise<void> => {
  jest.restoreAllMocks()
  await fs.promises.rm(testDir, {recursive: true})
  delete process.env['GITHUB_WORKSPACE']
}

const mockGetArtifactSuccess = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 200
  message.push(fs.readFileSync(fixtures.exampleArtifact.path))
  message.push(null)
  return {
    message
  }
})

const mockGetArtifactHung = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 200
  // Don't push any data or call push(null) to end the stream
  // This creates a stream that hangs and never completes
  return {
    message
  }
})

const mockGetArtifactFailure = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 500
  message.push('Internal Server Error')
  message.push(null)
  return {
    message
  }
})

const mockGetArtifactMalicious = jest.fn(() => {
  const message = new http.IncomingMessage(new net.Socket())
  message.statusCode = 200
  message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains files that are formatted x/../../etc/hosts
  message.push(null)
  return {
    message
  }
})

describe('download-artifact', () => {
  describe('public', () => {
    beforeEach(setup)
    afterEach(cleanup)

    it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      await expectExtractedArchive(fixtures.workspaceDir)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    })

    it('should not allow path traversal from malicious artifacts', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactMalicious
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })

      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )

      // ensure path traversal was not possible
      expect(
        fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
      ).toBe(true)
      expect(
        fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
      ).toBe(true)

      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    })

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token,
        {
          path: customPath
        }
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      await expectExtractedArchive(customPath)
      expect(response.downloadPath).toBe(customPath)
    })

    it('should fail if download artifact API does not respond with location', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {},
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
    })

    it('should fail if blob storage chunk does not respond within 30s', async () => {
      // mock http client to delay response data by 30s
      const msg = new http.IncomingMessage(new net.Socket())
      msg.statusCode = 200

      const mockGet = jest.fn(async () => {
        return new Promise((resolve, reject) => {
          // Reject with an error after 31 seconds
          setTimeout(() => {
            reject(new Error('Request timeout'))
          }, 31000) // Timeout after 31 seconds
        })
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGet
          }
        }
      )

      await expect(
        streamExtractExternal(fixtures.blobStorageUrl, fixtures.workspaceDir)
      ).rejects.toBeInstanceOf(Error)

      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
    }, 35000) // add longer timeout to allow for timer to run out

    it('should fail if blob storage response is non-200 after 5 retries', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactFailure
          }
        }
      )

      await expect(
        downloadArtifactPublic(
          fixtures.artifactID,
          fixtures.repositoryOwner,
          fixtures.repositoryName,
          fixtures.token
        )
      ).rejects.toBeInstanceOf(Error)

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactFailure).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactFailure).toHaveBeenCalledTimes(5)
    }, 38000)

    it('should retry if blob storage response is non-200 and then succeed with a 200', async () => {
      const downloadArtifactMock = github.getOctokit(fixtures.token).rest
        .actions.downloadArtifact as MockedDownloadArtifact
      downloadArtifactMock.mockResolvedValueOnce({
        headers: {
          location: fixtures.blobStorageUrl
        },
        status: 302,
        url: '',
        data: Buffer.from('')
      })

      const mockGetArtifact = jest
        .fn(mockGetArtifactSuccess)
        .mockImplementationOnce(mockGetArtifactFailure)

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifact
          }
        }
      )

      const response = await downloadArtifactPublic(
        fixtures.artifactID,
        fixtures.repositoryOwner,
        fixtures.repositoryName,
        fixtures.token
      )

      expect(downloadArtifactMock).toHaveBeenCalledWith({
        owner: fixtures.repositoryOwner,
        repo: fixtures.repositoryName,
        artifact_id: fixtures.artifactID,
        archive_format: 'zip',
        request: {
          redirect: 'manual'
        }
      })
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockGetArtifactFailure).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactFailure).toHaveBeenCalledTimes(1)
      expect(mockGetArtifactSuccess).toHaveBeenCalledWith(
        fixtures.blobStorageUrl
      )
      expect(mockGetArtifactSuccess).toHaveBeenCalledTimes(1)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
    }, 28000)
  })

  describe('internal', () => {
    beforeEach(async () => {
      await setup()

      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)

      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })
    afterEach(async () => {
      await cleanup()
    })

    it('should successfully download an artifact to $GITHUB_WORKSPACE', async () => {
      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactInternal(fixtures.artifactID)

      await expectExtractedArchive(fixtures.workspaceDir)
      expect(response.downloadPath).toBe(fixtures.workspaceDir)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactSuccess
          }
        }
      )

      const response = await downloadArtifactInternal(fixtures.artifactID, {
        path: customPath
      })

      await expectExtractedArchive(customPath)
      expect(response.downloadPath).toBe(customPath)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })

    it('should fail if download artifact API does not respond with location', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockRejectedValue(new Error('boom'))

      await expect(
        downloadArtifactInternal(fixtures.artifactID)
      ).rejects.toBeInstanceOf(Error)
    })

    it('should fail if blob storage response is non-200', async () => {
      const mockListArtifacts = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifactID.toString(),
              name: fixtures.artifactName,
              size: fixtures.artifactSize.toString()
            }
          ]
        })

      const mockGetSignedArtifactURL = jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'GetSignedArtifactURL')
        .mockReturnValue(
          Promise.resolve({
            signedUrl: fixtures.blobStorageUrl
          })
        )

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockGetArtifactFailure
          }
        }
      )

      await expect(
        downloadArtifactInternal(fixtures.artifactID)
      ).rejects.toBeInstanceOf(Error)
      expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
      expect(mockListArtifacts).toHaveBeenCalledWith({
        idFilter: {
          value: fixtures.artifactID.toString()
        },
        ...fixtures.backendIds
      })
      expect(mockGetSignedArtifactURL).toHaveBeenCalledWith({
        ...fixtures.backendIds,
        name: fixtures.artifactName
      })
    })
  })

  describe('streamExtractExternal', () => {
    it('should fail if the timeout is exceeded', async () => {
      const mockSlowGetArtifact = jest.fn(mockGetArtifactHung)

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockSlowGetArtifact
          }
        }
      )

      try {
        await streamExtractExternal(
          fixtures.blobStorageUrl,
          fixtures.workspaceDir,
          {timeout: 2}
        )
        expect(true).toBe(false) // should not be called
      } catch (e) {
        expect(e).toBeInstanceOf(Error)
        expect(e.message).toContain('did not respond in 2ms')
        expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
        expect(mockSlowGetArtifact).toHaveBeenCalledTimes(1)
      }
    })
  })
})
Binary file not shown.
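The malicious-artifact test above expects entries named x/../../etc/hosts to land at x/etc/hosts inside the workspace, i.e. traversal segments are dropped rather than the whole entry rejected. A sketch of that normalization follows; the toolkit's actual extraction goes through a streaming unzip and may handle this differently.

// Drops '..', '.' and empty segments so 'x/../../etc/hosts' maps to
// 'x/etc/hosts'. Sketch consistent with the test's expectations only;
// not the toolkit's code.
function sanitizeEntryPath(entryName: string): string {
  return entryName
    .split('/')
    .filter(segment => segment !== '..' && segment !== '.' && segment !== '')
    .join('/')
}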
@ -1,239 +0,0 @@
import * as github from '@actions/github'
import type {RequestInterface} from '@octokit/types'
import {
  getArtifactInternal,
  getArtifactPublic
} from '../src/internal/find/get-artifact'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {
  ArtifactNotFoundError,
  InvalidResponseError
} from '../src/internal/shared/errors'

type MockedRequest = jest.MockedFunction<RequestInterface<object>>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    request: jest.fn()
  })
}))

const fixtures = {
  repo: 'toolkit',
  owner: 'actions',
  token: 'ghp_1234567890',
  runId: 123,
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  },
  artifacts: [
    {
      id: 1,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-01')
    },
    {
      id: 2,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-02')
    }
  ]
}

describe('get-artifact', () => {
  beforeAll(() => {
    noopLogs()
  })

  describe('public', () => {
    it('should return the artifact if it is found', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: [
            {
              name: fixtures.artifacts[0].name,
              id: fixtures.artifacts[0].id,
              size_in_bytes: fixtures.artifacts[0].size,
              created_at: fixtures.artifacts[0].createdAt.toISOString()
            }
          ]
        }
      })

      const response = await getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      expect(response).toEqual({
        artifact: fixtures.artifacts[0]
      })
    })

    it('should return the latest artifact if multiple are found', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: fixtures.artifacts.map(artifact => ({
            name: artifact.name,
            id: artifact.id,
            size_in_bytes: artifact.size,
            created_at: artifact.createdAt.toISOString()
          }))
        }
      })

      const response = await getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      expect(response).toEqual({
        artifact: fixtures.artifacts[1]
      })
    })

    it('should fail if no artifacts are found', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          artifacts: []
        }
      })

      const response = getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      await expect(response).rejects.toThrowError(ArtifactNotFoundError)
    })

    it('should fail if non-200 response', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest
      mockRequest.mockResolvedValueOnce({
        status: 404,
        headers: {},
        url: '',
        data: {}
      })

      const response = getArtifactPublic(
        fixtures.artifacts[0].name,
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token
      )

      await expect(response).rejects.toThrowError(InvalidResponseError)
    })
  })

  describe('internal', () => {
    beforeEach(() => {
      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')

      jest
        .spyOn(util, 'getBackendIdsFromToken')
        .mockReturnValue(fixtures.backendIds)

      jest
        .spyOn(config, 'getResultsServiceUrl')
        .mockReturnValue('https://results.local')
    })

    it('should return the artifact if it is found', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: [
            {
              ...fixtures.backendIds,
              databaseId: fixtures.artifacts[0].id.toString(),
              name: fixtures.artifacts[0].name,
              size: fixtures.artifacts[0].size.toString(),
              createdAt: Timestamp.fromDate(fixtures.artifacts[0].createdAt)
            }
          ]
        })

      const response = await getArtifactInternal(fixtures.artifacts[0].name)

      expect(response).toEqual({
        artifact: fixtures.artifacts[0]
      })
    })

    it('should return the latest artifact if multiple are found', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: fixtures.artifacts.map(artifact => ({
            ...fixtures.backendIds,
            databaseId: artifact.id.toString(),
            name: artifact.name,
            size: artifact.size.toString(),
            createdAt: Timestamp.fromDate(artifact.createdAt)
          }))
        })

      const response = await getArtifactInternal(fixtures.artifacts[0].name)

      expect(response).toEqual({
        artifact: fixtures.artifacts[1]
      })
    })

    it('should fail if no artifacts are found', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockResolvedValue({
          artifacts: []
        })

      const response = getArtifactInternal(fixtures.artifacts[0].name)

      await expect(response).rejects.toThrowError(ArtifactNotFoundError)
    })

    it('should fail if non-200 response', async () => {
      jest
        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
        .mockRejectedValue(new Error('boom'))

      const response = getArtifactInternal(fixtures.artifacts[0].name)

      await expect(response).rejects.toThrow()
    })
  })
})
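Both 'latest' tests above assert the same selection rule: when several artifacts share a name, the one with the newest createdAt wins. A sketch of that reduction follows; the names and the missing-date fallback are assumptions for illustration, not the toolkit's find code.

interface ArtifactLike {
  id: number
  createdAt?: Date
}

// Returns the artifact with the newest createdAt from a non-empty list;
// artifacts without a date sort first. Sketch only.
function pickLatest<T extends ArtifactLike>(artifacts: T[]): T {
  return artifacts.reduce((latest, candidate) =>
    (candidate.createdAt?.getTime() ?? 0) > (latest.createdAt?.getTime() ?? 0)
      ? candidate
      : latest
  )
}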
@ -1,361 +0,0 @@
import * as github from '@actions/github'
import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
import {
  listArtifactsInternal,
  listArtifactsPublic
} from '../src/internal/find/list-artifacts'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {Artifact} from '../src/internal/shared/interfaces'
import {RequestInterface} from '@octokit/types'

type MockedRequest = jest.MockedFunction<RequestInterface<object>>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    request: jest.fn(),
    rest: {
      actions: {
        listWorkflowRunArtifacts: jest.fn()
      }
    }
  })
}))

const artifactsToListResponse = (
  artifacts: Artifact[]
): RestEndpointMethodTypes['actions']['listWorkflowRunArtifacts']['response']['data'] => {
  return {
    total_count: artifacts.length,
    artifacts: artifacts.map(artifact => ({
      name: artifact.name,
      id: artifact.id,
      size_in_bytes: artifact.size,
      created_at: artifact.createdAt?.toISOString() || '',
      run_id: fixtures.runId,
      // unused fields for tests
      url: '',
      archive_download_url: '',
      expired: false,
      expires_at: '',
      node_id: '',
      run_url: '',
      type: '',
      updated_at: ''
    }))
  }
}

const fixtures = {
  repo: 'toolkit',
  owner: 'actions',
  token: 'ghp_1234567890',
  runId: 123,
  backendIds: {
    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
  },
  artifacts: [
    {
      id: 1,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-01')
    },
    {
      id: 2,
      name: 'my-artifact',
      size: 456,
      createdAt: new Date('2023-12-02')
    }
  ]
}

describe('list-artifact', () => {
  beforeAll(() => {
    noopLogs()
  })

  describe('public', () => {
    it('should return a list of artifacts', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: artifactsToListResponse(fixtures.artifacts)
      })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        false
      )

      expect(response).toEqual({
        artifacts: fixtures.artifacts
      })
    })

    it('should return the latest artifact when latest is specified', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: artifactsToListResponse(fixtures.artifacts)
      })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        true
      )

      expect(response).toEqual({
        artifacts: [fixtures.artifacts[1]]
      })
    })

    it('can return empty artifacts', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',
        data: {
          total_count: 0,
          artifacts: []
        }
      })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        true
      )

      expect(response).toEqual({
        artifacts: []
      })
    })

    it('should fail if non-200 response', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockRequest.mockRejectedValueOnce(new Error('boom'))

      await expect(
        listArtifactsPublic(
          fixtures.runId,
          fixtures.owner,
          fixtures.repo,
          fixtures.token,
          false
        )
      ).rejects.toThrow('boom')
    })

    it('should handle pagination correctly when fetching multiple pages', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      const manyArtifacts = Array.from({length: 150}, (_, i) => ({
        id: i + 1,
        name: `artifact-${i + 1}`,
        size: 100,
        createdAt: new Date('2023-12-01')
      }))

      mockRequest
        .mockResolvedValueOnce({
          status: 200,
          headers: {},
          url: '',
          data: {
            ...artifactsToListResponse(manyArtifacts.slice(0, 100)),
            total_count: 150
          }
        })
        .mockResolvedValueOnce({
          status: 200,
          headers: {},
          url: '',
          data: {
            ...artifactsToListResponse(manyArtifacts.slice(100, 150)),
            total_count: 150
          }
        })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        false
      )

      // Verify that both API calls were made
      expect(mockRequest).toHaveBeenCalledTimes(2)

      // Should return all 150 artifacts across both pages
      expect(response.artifacts).toHaveLength(150)

      // Verify we got artifacts from both pages
      expect(response.artifacts[0].name).toBe('artifact-1')
      expect(response.artifacts[99].name).toBe('artifact-100')
      expect(response.artifacts[100].name).toBe('artifact-101')
      expect(response.artifacts[149].name).toBe('artifact-150')
    })
|
||||
it('should respect ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT environment variable', async () => {
|
||||
const originalEnv = process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
|
||||
process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '150'
|
||||
|
||||
jest.resetModules()
|
||||
|
||||
try {
|
||||
const {listArtifactsPublic: listArtifactsPublicReloaded} = await import(
|
||||
'../src/internal/find/list-artifacts'
|
||||
)
|
||||
const githubReloaded = await import('@actions/github')
|
||||
|
||||
const mockRequest = (githubReloaded.getOctokit as jest.Mock)(
|
||||
fixtures.token
|
||||
).request as MockedRequest
|
||||
|
||||
const manyArtifacts = Array.from({length: 200}, (_, i) => ({
|
||||
id: i + 1,
|
||||
name: `artifact-${i + 1}`,
|
||||
size: 100,
|
||||
createdAt: new Date('2023-12-01')
|
||||
}))
|
||||
|
||||
mockRequest
|
||||
.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
...artifactsToListResponse(manyArtifacts.slice(0, 100)),
|
||||
total_count: 200
|
||||
}
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
...artifactsToListResponse(manyArtifacts.slice(100, 150)),
|
||||
total_count: 200
|
||||
}
|
||||
})
|
||||
|
||||
const response = await listArtifactsPublicReloaded(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
false
|
||||
)
|
||||
|
||||
// Should only return 150 artifacts due to the limit
|
||||
expect(response.artifacts).toHaveLength(150)
|
||||
expect(response.artifacts[0].name).toBe('artifact-1')
|
||||
expect(response.artifacts[149].name).toBe('artifact-150')
|
||||
} finally {
|
||||
// Restore original environment variable
|
||||
if (originalEnv !== undefined) {
|
||||
process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = originalEnv
|
||||
} else {
|
||||
delete process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
|
||||
}
|
||||
|
||||
// Reset modules again to restore original state
|
||||
jest.resetModules()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('internal', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
|
||||
jest
|
||||
.spyOn(util, 'getBackendIdsFromToken')
|
||||
.mockReturnValue(fixtures.backendIds)
|
||||
jest
|
||||
.spyOn(config, 'getResultsServiceUrl')
|
||||
.mockReturnValue('https://results.local')
|
||||
})
|
||||
|
||||
it('should return a list of artifacts', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: fixtures.artifacts.map(artifact => ({
|
||||
...fixtures.backendIds,
|
||||
databaseId: artifact.id.toString(),
|
||||
name: artifact.name,
|
||||
size: artifact.size.toString(),
|
||||
createdAt: Timestamp.fromDate(artifact.createdAt)
|
||||
}))
|
||||
})
|
||||
const response = await listArtifactsInternal(false)
|
||||
expect(response).toEqual({
|
||||
artifacts: fixtures.artifacts
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the latest artifact when latest is specified', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: fixtures.artifacts.map(artifact => ({
|
||||
...fixtures.backendIds,
|
||||
databaseId: artifact.id.toString(),
|
||||
name: artifact.name,
|
||||
size: artifact.size.toString(),
|
||||
createdAt: Timestamp.fromDate(artifact.createdAt)
|
||||
}))
|
||||
})
|
||||
const response = await listArtifactsInternal(true)
|
||||
expect(response).toEqual({
|
||||
artifacts: [fixtures.artifacts[1]]
|
||||
})
|
||||
})
|
||||
|
||||
it('can return empty artifacts', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockResolvedValue({
|
||||
artifacts: []
|
||||
})
|
||||
const response = await listArtifactsInternal(false)
|
||||
expect(response).toEqual({
|
||||
artifacts: []
|
||||
})
|
||||
})
|
||||
|
||||
it('should fail if non-200 response', async () => {
|
||||
jest
|
||||
.spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
|
||||
.mockRejectedValue(new Error('boom'))
|
||||
await expect(listArtifactsInternal(false)).rejects.toThrow('boom')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
@ -1,75 +0,0 @@
import {
  validateArtifactName,
  validateFilePath
} from '../src/internal/upload/path-and-artifact-name-validation'

import {noopLogs} from './common'

describe('Path and artifact name validation', () => {
  beforeAll(() => {
    noopLogs()
  })

  it('Check Artifact Name for any invalid characters', () => {
    const invalidNames = [
      'my\\artifact',
      'my/artifact',
      'my"artifact',
      'my:artifact',
      'my<artifact',
      'my>artifact',
      'my|artifact',
      'my*artifact',
      'my?artifact',
      ''
    ]
    for (const invalidName of invalidNames) {
      expect(() => {
        validateArtifactName(invalidName)
      }).toThrow()
    }

    const validNames = [
      'my-normal-artifact',
      'myNormalArtifact',
      'm¥ñðrmålÄr†ï£å¢†'
    ]
    for (const validName of validNames) {
      expect(() => {
        validateArtifactName(validName)
      }).not.toThrow()
    }
  })

  it('Check Artifact File Path for any invalid characters', () => {
    const invalidNames = [
      'some/invalid"artifact/path',
      'some/invalid:artifact/path',
      'some/invalid<artifact/path',
      'some/invalid>artifact/path',
      'some/invalid|artifact/path',
      'some/invalid*artifact/path',
      'some/invalid?artifact/path',
      'some/invalid\rartifact/path',
      'some/invalid\nartifact/path',
      'some/invalid\r\nartifact/path',
      ''
    ]
    for (const invalidName of invalidNames) {
      expect(() => {
        validateFilePath(invalidName)
      }).toThrow()
    }

    const validNames = [
      'my/perfectly-normal/artifact-path',
      'my/perfectly\\Normal/Artifact-path',
      'm¥/ñðrmål/Är†ï£å¢†'
    ]
    for (const validName of validNames) {
      expect(() => {
        validateFilePath(validName)
      }).not.toThrow()
    }
  })
})

@ -1,65 +0,0 @@
import {Timestamp} from '../src/generated'
import * as retention from '../src/internal/upload/retention'

describe('retention', () => {
  beforeEach(() => {
    delete process.env['GITHUB_RETENTION_DAYS']
  })
  it('should return the inputted retention days if it is less than the max retention days', () => {
    // setup
    const mockDate = new Date('2020-01-01')
    jest.useFakeTimers().setSystemTime(mockDate)
    process.env['GITHUB_RETENTION_DAYS'] = '90'

    const exp = retention.getExpiration(30)

    expect(exp).toBeDefined()
    if (exp) {
      const expDate = Timestamp.toDate(exp)
      const expected = new Date()
      expected.setDate(expected.getDate() + 30)

      expect(expDate).toEqual(expected)
    }
  })

  it('should return the max retention days if the inputted retention days is greater than the max retention days', () => {
    // setup
    const mockDate = new Date('2020-01-01')
    jest.useFakeTimers().setSystemTime(mockDate)
    process.env['GITHUB_RETENTION_DAYS'] = '90'

    const exp = retention.getExpiration(120)

    expect(exp).toBeDefined()
    if (exp) {
      const expDate = Timestamp.toDate(exp) // we check whether exp is defined above
      const expected = new Date()
      expected.setDate(expected.getDate() + 90)

      expect(expDate).toEqual(expected)
    }
  })

  it('should return undefined if the inputted retention days is undefined', () => {
    const exp = retention.getExpiration()
    expect(exp).toBeUndefined()
  })

  it('should return the inputted retention days if there is no max retention days', () => {
    // setup
    const mockDate = new Date('2020-01-01')
    jest.useFakeTimers().setSystemTime(mockDate)

    const exp = retention.getExpiration(30)

    expect(exp).toBeDefined()
    if (exp) {
      const expDate = Timestamp.toDate(exp) // we check whether exp is defined above
      const expected = new Date()
      expected.setDate(expected.getDate() + 30)

      expect(expDate).toEqual(expected)
    }
  })
})

@ -0,0 +1,27 @@
#!/bin/bash

path="$1"
expectedContent="$2"

if [ "$path" == "" ]; then
  echo "File path not provided"
  exit 1
fi

if [ "$expectedContent" == "" ]; then
  echo "Expected file contents not provided"
  exit 1
fi

if [ ! -f "$path" ]; then
  echo "Expected file $path does not exist"
  exit 1
fi

actualContent=$(cat "$path")
if [ "$expectedContent" == "_EMPTY_" ] && [ ! -s "$path" ]; then
  exit 0
elif [ "$actualContent" != "$expectedContent" ]; then
  echo "File contents are not correct, expected $expectedContent, received $actualContent"
  exit 1
fi

@ -1,373 +0,0 @@
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'
import * as config from '../src/internal/shared/config'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'
import {FilesNotFoundError} from '../src/internal/shared/errors'
import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import * as fs from 'fs'
import * as path from 'path'
import unzip from 'unzip-stream'

const uploadStreamMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
  uploadStream: uploadStreamMock
}))

jest.mock('@azure/storage-blob', () => ({
  BlobClient: jest.fn().mockImplementation(() => {
    return {
      getBlockBlobClient: blockBlobClientMock
    }
  })
}))

const fixtures = {
  uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
  files: [
    {name: 'file1.txt', content: 'test 1 file content'},
    {name: 'file2.txt', content: 'test 2 file content'},
    {name: 'file3.txt', content: 'test 3 file content'},
    {
      name: 'real.txt',
      content: 'from a symlink'
    },
    {
      name: 'relative.txt',
      content: 'from a symlink',
      symlink: 'real.txt',
      relative: true
    },
    {
      name: 'absolute.txt',
      content: 'from a symlink',
      symlink: 'real.txt',
      relative: false
    }
  ],
  backendIDs: {
    workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
    workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
  },
  runtimeToken: 'test-token',
  resultsServiceURL: 'http://results.local',
  inputs: {
    artifactName: 'test-artifact',
    files: [
      '/home/user/files/plz-upload/file1.txt',
      '/home/user/files/plz-upload/file2.txt',
      '/home/user/files/plz-upload/dir/file3.txt'
    ],
    rootDirectory: '/home/user/files/plz-upload'
  }
}

describe('upload-artifact', () => {
  beforeAll(() => {
    fs.mkdirSync(fixtures.uploadDirectory, {
      recursive: true
    })

    for (const file of fixtures.files) {
      if (file.symlink) {
        let symlinkPath = file.symlink
        if (!file.relative) {
          symlinkPath = path.join(fixtures.uploadDirectory, file.symlink)
        }

        if (!fs.existsSync(path.join(fixtures.uploadDirectory, file.name))) {
          fs.symlinkSync(
            symlinkPath,
            path.join(fixtures.uploadDirectory, file.name),
            'file'
          )
        }
      } else {
        fs.writeFileSync(
          path.join(fixtures.uploadDirectory, file.name),
          file.content
        )
      }
    }
  })

  beforeEach(() => {
    noopLogs()
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(util, 'getBackendIdsFromToken')
      .mockReturnValue(fixtures.backendIDs)
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue(
        fixtures.files.map(file => ({
          sourcePath: path.join(fixtures.uploadDirectory, file.name),
          destinationPath: file.name,
          stats: fs.statSync(path.join(fixtures.uploadDirectory, file.name))
        }))
      )
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue(fixtures.resultsServiceURL)
  })

  afterEach(() => {
    jest.restoreAllMocks()
  })

  it('should reject if there are no files to upload', async () => {
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockClear()
      .mockReturnValue([])

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )
    await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
  })

  it('should reject if no backend IDs are found', async () => {
    jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    await expect(uploadResp).rejects.toThrow()
  })

  it('should return false if the creation request fails', async () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    await expect(uploadResp).rejects.toThrow()
  })

  it('should return false if blob storage upload is unsuccessful', async () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.com'
        })
      )
    jest
      .spyOn(blobUpload, 'uploadZipToBlobStorage')
      .mockReturnValue(Promise.reject(new Error('boom')))

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    await expect(uploadResp).rejects.toThrow()
  })

  it('should reject if finalize artifact fails', async () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.com'
        })
      )
    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
      Promise.resolve({
        uploadSize: 1234,
        sha256Hash: 'test-sha256-hash'
      })
    )
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    await expect(uploadResp).rejects.toThrow()
  })

  it('should successfully upload an artifact', async () => {
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockRestore()

    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.local'
        })
      )
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          artifactId: '1'
        })
      )

    let loadedBytes = 0
    const uploadedZip = path.join(
      fixtures.uploadDirectory,
      '..',
      'uploaded.zip'
    )
    uploadStreamMock.mockImplementation(
      async (
        stream: NodeJS.ReadableStream,
        bufferSize?: number,
        maxConcurrency?: number,
        options?: BlockBlobUploadStreamOptions
      ) => {
        const {onProgress} = options || {}

        if (fs.existsSync(uploadedZip)) {
          fs.unlinkSync(uploadedZip)
        }
        const uploadedZipStream = fs.createWriteStream(uploadedZip)

        onProgress?.({loadedBytes: 0})
        return new Promise((resolve, reject) => {
          stream.on('data', chunk => {
            loadedBytes += chunk.length
            uploadedZipStream.write(chunk)
            onProgress?.({loadedBytes})
          })
          stream.on('end', () => {
            onProgress?.({loadedBytes})
            uploadedZipStream.end()
            resolve({})
          })
          stream.on('error', err => {
            reject(err)
          })
        })
      }
    )

    const {id, size, digest} = await uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.files.map(file =>
        path.join(fixtures.uploadDirectory, file.name)
      ),
      fixtures.uploadDirectory
    )

    expect(id).toBe(1)
    expect(size).toBe(loadedBytes)
    expect(digest).toBeDefined()
    expect(digest).toHaveLength(64)

    const extractedDirectory = path.join(
      fixtures.uploadDirectory,
      '..',
      'extracted'
    )
    if (fs.existsSync(extractedDirectory)) {
      fs.rmdirSync(extractedDirectory, {recursive: true})
    }

    const extract = new Promise((resolve, reject) => {
      fs.createReadStream(uploadedZip)
        .pipe(unzip.Extract({path: extractedDirectory}))
        .on('close', () => {
          resolve(true)
        })
        .on('error', err => {
          reject(err)
        })
    })

    await expect(extract).resolves.toBe(true)
    for (const file of fixtures.files) {
      const filePath = path.join(extractedDirectory, file.name)
      expect(fs.existsSync(filePath)).toBe(true)
      expect(fs.readFileSync(filePath, 'utf8')).toBe(file.content)
    }
  })

  it('should throw an error when uploading blob chunks get delayed', async () => {
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.local'
        })
      )
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          artifactId: '1'
        })
      )
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://results.local')

    jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)

    uploadStreamMock.mockImplementation(
      async (
        stream: NodeJS.ReadableStream,
        bufferSize?: number,
        maxConcurrency?: number,
        options?: BlockBlobUploadStreamOptions
      ) => {
        const {onProgress, abortSignal} = options || {}
        onProgress?.({loadedBytes: 0})
        return new Promise(resolve => {
          abortSignal?.addEventListener('abort', () => {
            resolve({})
          })
        })
      }
    )

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
  })
})

@ -1,326 +0,0 @@
import * as io from '../../io/src/io'
import * as path from 'path'
import {promises as fs} from 'fs'
import {
  getUploadZipSpecification,
  validateRootDirectory
} from '../src/internal/upload/upload-zip-specification'
import {noopLogs} from './common'

const root = path.join(__dirname, '_temp', 'upload-specification')
const goodItem1Path = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'good-item1.txt'
)
const goodItem2Path = path.join(root, 'folder-d', 'good-item2.txt')
const goodItem3Path = path.join(root, 'folder-d', 'good-item3.txt')
const goodItem4Path = path.join(root, 'folder-d', 'good-item4.txt')
const goodItem5Path = path.join(root, 'good-item5.txt')
const badItem1Path = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'bad-item1.txt'
)
const badItem2Path = path.join(root, 'folder-d', 'bad-item2.txt')
const badItem3Path = path.join(root, 'folder-f', 'bad-item3.txt')
const badItem4Path = path.join(root, 'folder-h', 'folder-i', 'bad-item4.txt')
const badItem5Path = path.join(root, 'folder-h', 'folder-i', 'bad-item5.txt')
const extraFileInFolderCPath = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'extra-file-in-folder-c.txt'
)
const amazingFileInFolderHPath = path.join(root, 'folder-h', 'amazing-item.txt')

const artifactFilesToUpload = [
  goodItem1Path,
  goodItem2Path,
  goodItem3Path,
  goodItem4Path,
  goodItem5Path,
  extraFileInFolderCPath,
  amazingFileInFolderHPath
]

describe('Search', () => {
  beforeAll(async () => {
    noopLogs()

    // clear temp directory
    await io.rmRF(root)
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-c'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-e'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-d'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-f'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-g'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-h', 'folder-i'), {
      recursive: true
    })

    await fs.writeFile(goodItem1Path, 'good item1 file')
    await fs.writeFile(goodItem2Path, 'good item2 file')
    await fs.writeFile(goodItem3Path, 'good item3 file')
    await fs.writeFile(goodItem4Path, 'good item4 file')
    await fs.writeFile(goodItem5Path, 'good item5 file')

    await fs.writeFile(badItem1Path, 'bad item1 file')
    await fs.writeFile(badItem2Path, 'bad item2 file')
    await fs.writeFile(badItem3Path, 'bad item3 file')
    await fs.writeFile(badItem4Path, 'bad item4 file')
    await fs.writeFile(badItem5Path, 'bad item5 file')

    await fs.writeFile(extraFileInFolderCPath, 'extra file')

    await fs.writeFile(amazingFileInFolderHPath, 'amazing file')
    /*
      Directory structure of files that get created:
      root/
        folder-a/
          folder-b/
            folder-c/
              good-item1.txt
              bad-item1.txt
              extra-file-in-folder-c.txt
            folder-e/
        folder-d/
          good-item2.txt
          good-item3.txt
          good-item4.txt
          bad-item2.txt
        folder-f/
          bad-item3.txt
        folder-g/
        folder-h/
          amazing-item.txt
          folder-i/
            bad-item4.txt
            bad-item5.txt
        good-item5.txt
    */
  })

  it('Upload Specification - Fail non-existent rootDirectory', async () => {
    const invalidRootDirectory = path.join(
      __dirname,
      '_temp',
      'upload-specification-invalid'
    )
    expect(() => {
      validateRootDirectory(invalidRootDirectory)
    }).toThrow(
      `The provided rootDirectory ${invalidRootDirectory} does not exist`
    )
  })

  it('Upload Specification - Fail invalid rootDirectory', async () => {
    expect(() => {
      validateRootDirectory(goodItem1Path)
    }).toThrow(
      `The provided rootDirectory ${goodItem1Path} is not a valid directory`
    )
  })

  it('Upload Specification - File does not exist', async () => {
    const fakeFilePath = path.join(
      'folder-a',
      'folder-b',
      'non-existent-file.txt'
    )
    expect(() => {
      getUploadZipSpecification([fakeFilePath], root)
    }).toThrow(`File ${fakeFilePath} does not exist`)
  })

  it('Upload Specification - Non parent directory', async () => {
    const folderADirectory = path.join(root, 'folder-a')
    const artifactFiles = [
      goodItem1Path,
      badItem1Path,
      extraFileInFolderCPath,
      goodItem5Path
    ]
    expect(() => {
      getUploadZipSpecification(artifactFiles, folderADirectory)
    }).toThrow(
      `The rootDirectory: ${folderADirectory} is not a parent directory of the file: ${goodItem5Path}`
    )
  })

  it('Upload Specification - Success', async () => {
    const specifications = getUploadZipSpecification(
      artifactFilesToUpload,
      root
    )
    expect(specifications.length).toEqual(7)

    const absolutePaths = specifications.map(item => item.sourcePath)
    expect(absolutePaths).toContain(goodItem1Path)
    expect(absolutePaths).toContain(goodItem2Path)
    expect(absolutePaths).toContain(goodItem3Path)
    expect(absolutePaths).toContain(goodItem4Path)
    expect(absolutePaths).toContain(goodItem5Path)
    expect(absolutePaths).toContain(extraFileInFolderCPath)
    expect(absolutePaths).toContain(amazingFileInFolderHPath)

    for (const specification of specifications) {
      if (specification.sourcePath === goodItem1Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
        )
      } else if (specification.sourcePath === goodItem2Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-d', 'good-item2.txt')
        )
      } else if (specification.sourcePath === goodItem3Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-d', 'good-item3.txt')
        )
      } else if (specification.sourcePath === goodItem4Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-d', 'good-item4.txt')
        )
      } else if (specification.sourcePath === goodItem5Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/good-item5.txt')
        )
      } else if (specification.sourcePath === extraFileInFolderCPath) {
        expect(specification.destinationPath).toEqual(
          path.join(
            '/folder-a',
            'folder-b',
            'folder-c',
            'extra-file-in-folder-c.txt'
          )
        )
      } else if (specification.sourcePath === amazingFileInFolderHPath) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-h', 'amazing-item.txt')
        )
      } else {
        throw new Error(
          'Invalid specification found. This should never be reached'
        )
      }
    }
  })

  it('Upload Specification - Success with extra slash', async () => {
    const rootWithSlash = `${root}/`
    const specifications = getUploadZipSpecification(
      artifactFilesToUpload,
      rootWithSlash
    )
    expect(specifications.length).toEqual(7)

    const absolutePaths = specifications.map(item => item.sourcePath)
    expect(absolutePaths).toContain(goodItem1Path)
    expect(absolutePaths).toContain(goodItem2Path)
    expect(absolutePaths).toContain(goodItem3Path)
    expect(absolutePaths).toContain(goodItem4Path)
    expect(absolutePaths).toContain(goodItem5Path)
    expect(absolutePaths).toContain(extraFileInFolderCPath)
    expect(absolutePaths).toContain(amazingFileInFolderHPath)

    for (const specification of specifications) {
      if (specification.sourcePath === goodItem1Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
        )
      } else if (specification.sourcePath === goodItem2Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-d', 'good-item2.txt')
        )
      } else if (specification.sourcePath === goodItem3Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-d', 'good-item3.txt')
        )
      } else if (specification.sourcePath === goodItem4Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-d', 'good-item4.txt')
        )
      } else if (specification.sourcePath === goodItem5Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/good-item5.txt')
        )
      } else if (specification.sourcePath === extraFileInFolderCPath) {
        expect(specification.destinationPath).toEqual(
          path.join(
            '/folder-a',
            'folder-b',
            'folder-c',
            'extra-file-in-folder-c.txt'
          )
        )
      } else if (specification.sourcePath === amazingFileInFolderHPath) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-h', 'amazing-item.txt')
        )
      } else {
        throw new Error(
          'Invalid specification found. This should never be reached'
        )
      }
    }
  })

  it('Upload Specification - Empty Directories are included', async () => {
    const folderEPath = path.join(root, 'folder-a', 'folder-b', 'folder-e')
    const filesWithDirectory = [goodItem1Path, folderEPath]
    const specifications = getUploadZipSpecification(filesWithDirectory, root)
    expect(specifications.length).toEqual(2)
    const absolutePaths = specifications.map(item => item.sourcePath)
    expect(absolutePaths).toContain(goodItem1Path)
    expect(absolutePaths).toContain(null)

    for (const specification of specifications) {
      if (specification.sourcePath === goodItem1Path) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-a', 'folder-b', 'folder-c', 'good-item1.txt')
        )
      } else if (specification.sourcePath === null) {
        expect(specification.destinationPath).toEqual(
          path.join('/folder-a', 'folder-b', 'folder-e')
        )
      } else {
        throw new Error(
          'Invalid specification found. This should never be reached'
        )
      }
    }
  })

  it('Upload Specification - Includes symlinks', async () => {
    const targetPath = path.join(root, 'link-dir', 'symlink-me.txt')
    await fs.mkdir(path.dirname(targetPath), {recursive: true})
    await fs.writeFile(targetPath, 'symlink file content')

    const uploadPath = path.join(root, 'upload-dir', 'symlink.txt')
    await fs.mkdir(path.dirname(uploadPath), {recursive: true})
    await fs.symlink(targetPath, uploadPath, 'file')

    const specifications = getUploadZipSpecification([uploadPath], root)
    expect(specifications.length).toEqual(1)
    expect(specifications[0].sourcePath).toEqual(uploadPath)
    expect(specifications[0].destinationPath).toEqual(
      path.join('/upload-dir', 'symlink.txt')
    )
    expect(specifications[0].stats.isSymbolicLink()).toBe(true)
  })
})

@ -1,219 +0,0 @@
import * as config from '../src/internal/shared/config'
import * as util from '../src/internal/shared/util'
import {maskSigUrl, maskSecretUrls} from '../src/internal/shared/util'
import {setSecret, debug} from '@actions/core'

export const testRuntimeToken =
  'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'

describe('get-backend-ids-from-token', () => {
  it('should return backend ids when the token is valid', () => {
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(testRuntimeToken)

    const backendIds = util.getBackendIdsFromToken()
    expect(backendIds.workflowRunBackendId).toBe(
      'ce7f54c7-61c7-4aae-887f-30da475f5f1a'
    )
    expect(backendIds.workflowJobRunBackendId).toBe(
      'ca395085-040a-526b-2ce8-bdc85f692774'
    )
  })

  it("should throw an error when the token doesn't have the right scope", () => {
    jest
      .spyOn(config, 'getRuntimeToken')
      .mockReturnValue(
        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCIsImlhdCI6MTUxNjIzOTAyMn0.K0IEoULZteGevF38G94xiaA8zcZ5UlKWfGfqE6q3dhw'
      )

    expect(util.getBackendIdsFromToken).toThrowError(
      'Failed to get backend IDs: The provided JWT token is invalid'
    )
  })

  it('should throw an error when the token has a malformed scope', () => {
    jest
      .spyOn(config, 'getRuntimeToken')
      .mockReturnValue(
        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhIiwiaWF0IjoxNTE2MjM5MDIyfQ.7D0_LRfRFRZFImHQ7GxH2S6ZyFjjZ5U0ujjGCfle1XE'
      )

    expect(util.getBackendIdsFromToken).toThrowError(
      'Failed to get backend IDs: The provided JWT token is invalid'
    )
  })

  it('should throw an error when the token is in an invalid format', () => {
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('token')

    expect(util.getBackendIdsFromToken).toThrowError('Invalid token specified')
  })

  it("should throw an error when the token doesn't have the right field", () => {
    jest
      .spyOn(config, 'getRuntimeToken')
      .mockReturnValue(
        'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c'
      )

    expect(util.getBackendIdsFromToken).toThrowError(
      'Failed to get backend IDs: The provided JWT token is invalid'
    )
  })
})

jest.mock('@actions/core')

describe('maskSigUrl', () => {
  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('does nothing if no sig parameter is present', () => {
    const url = 'https://example.com'
    maskSigUrl(url)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('masks the sig parameter in the middle of the URL and sets it as a secret', () => {
    const url = 'https://example.com/?param1=value1&sig=12345&param2=value2'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('12345')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
  })

  it('does nothing if the URL is empty', () => {
    const url = ''
    maskSigUrl(url)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('handles URLs with fragments', () => {
    const url = 'https://example.com?sig=12345#fragment'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('12345')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
  })
})

describe('maskSigUrl handles special characters in signatures', () => {
  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('handles signatures with slashes', () => {
    const url = 'https://example.com/?sig=abc/123'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc/123')
    expect(setSecret).toHaveBeenCalledWith('abc%2F123')
  })

  it('handles signatures with plus signs', () => {
    const url = 'https://example.com/?sig=abc+123'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc 123')
    expect(setSecret).toHaveBeenCalledWith('abc%20123')
  })

  it('handles signatures with equals signs', () => {
    const url = 'https://example.com/?sig=abc=123'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc=123')
    expect(setSecret).toHaveBeenCalledWith('abc%3D123')
  })

  it('handles already percent-encoded signatures', () => {
    const url = 'https://example.com/?sig=abc%2F123%3D'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc/123=')
    expect(setSecret).toHaveBeenCalledWith('abc%2F123%3D')
  })

  it('handles complex Azure SAS signatures', () => {
    const url =
      'https://example.com/container/file.txt?sig=nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D&se=2023-12-31'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith(
      'nXyQIUj//06Cxt80pBRYiiJlYqtPYg5sz/vEh5iHAhw='
    )
    expect(setSecret).toHaveBeenCalledWith(
      'nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D'
    )
  })

  it('handles signatures with multiple special characters', () => {
    const url = 'https://example.com/?sig=a/b+c=d&e=f'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('a/b c=d')
    expect(setSecret).toHaveBeenCalledWith('a%2Fb%20c%3Dd')
  })
})

describe('maskSecretUrls', () => {
  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('masks sig parameters in signed_upload_url and signed_url', () => {
    const body = {
      signed_upload_url: 'https://upload.com?sig=upload123',
      signed_url: 'https://download.com?sig=download123'
    }
    maskSecretUrls(body)
    expect(setSecret).toHaveBeenCalledWith('upload123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
    expect(setSecret).toHaveBeenCalledWith('download123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
  })

  it('handles case where only upload_url is present', () => {
    const body = {
      signed_upload_url: 'https://upload.com?sig=upload123'
    }
    maskSecretUrls(body)
    expect(setSecret).toHaveBeenCalledWith('upload123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
  })

  it('handles case where only download_url is present', () => {
    const body = {
      signed_url: 'https://download.com?sig=download123'
    }
    maskSecretUrls(body)
    expect(setSecret).toHaveBeenCalledWith('download123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
  })

  it('handles case where URLs do not contain sig parameters', () => {
    const body = {
      signed_upload_url: 'https://upload.com?token=abc',
      signed_url: 'https://download.com?token=xyz'
    }
    maskSecretUrls(body)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('handles empty string URLs', () => {
    const body = {
      signed_upload_url: '',
      signed_url: ''
    }
    maskSecretUrls(body)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('does nothing if body is not an object or is null', () => {
    maskSecretUrls(null)
    expect(debug).toHaveBeenCalledWith('body is not an object or is null')
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('does nothing if signed_upload_url and signed_url are not strings', () => {
    const body = {
      signed_upload_url: 123,
      signed_url: 456
    }
    maskSecretUrls(body)
    expect(setSecret).not.toHaveBeenCalled()
  })
})

@ -1,62 +0,0 @@
# Frequently Asked Questions

- [Frequently Asked Questions](#frequently-asked-questions)
  - [Supported Characters](#supported-characters)
  - [Compression? ZIP? How is my artifact stored?](#compression-zip-how-is-my-artifact-stored)
  - [Which versions of the artifacts packages are compatible?](#which-versions-of-the-artifacts-packages-are-compatible)
  - [How long will my artifact be available?](#how-long-will-my-artifact-be-available)

## Supported Characters

When uploading an artifact, the `name` parameter and the paths specified in `files` cannot contain any of the following characters. If any are present, the artifact will be rejected by the server and the upload will fail. These characters are not allowed due to limitations of certain file systems such as NTFS; to keep behavior platform-agnostic, a character that is unsupported on any one filesystem/platform is disallowed on all of them.

- "
- :
- <
- \>
- |
- \*
- ?

In addition to the characters above, the `name` also cannot include the following:

- \
- /
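
If a desired name contains any of these characters, it must be changed before the upload will succeed. Below is a purely illustrative sketch; this helper is not part of the library, and the underscore replacement is an arbitrary choice:

```typescript
// Hypothetical helper: strips every character the artifact service
// rejects in names. The character set mirrors the two lists above.
function sanitizeArtifactName(name: string): string {
  return name.replace(/[":<>|*?\\\/]/g, '_')
}

console.log(sanitizeArtifactName('report:2023/12')) // => 'report_2023_12'
```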

## Compression? ZIP? How is my artifact stored?

When creating an artifact, the files are dynamically compressed and streamed into a ZIP archive. Since they are stored in a ZIP, they can be compressed at varying Zlib levels.

The value can range from 0 to 9:

- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression

Higher levels result in better compression, but take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
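
For example, here is a minimal sketch that disables compression for media files, assuming the v2 client's `compressionLevel` upload option and run inside an async context:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

// Video is already compressed, so re-compressing wastes time for little gain.
await client.uploadArtifact('recordings', ['recording.mp4'], '.', {
  compressionLevel: 0 // no compression, fastest upload
})
```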

## Which versions of the artifacts packages are compatible?

[actions/upload-artifact](https://github.com/actions/upload-artifact) and [actions/download-artifact](https://github.com/actions/download-artifact) leverage the [GitHub Actions toolkit](https://github.com/actions/toolkit) and are typically used together to upload and download artifacts in your workflows.

| upload-artifact | download-artifact | toolkit |
|---|---|---|
| v4 | v4 | v2 |
| < v3 | < v3 | < v1 |

Use matching versions of `actions/upload-artifact` and `actions/download-artifact` to ensure compatibility.

In your GitHub Actions workflow YAML file, you specify the version of the actions you want to use. For example:

```yaml
uses: actions/upload-artifact@v4
# ...
uses: actions/download-artifact@v4
# ...
```

**Release Notes:**
Check the release notes for each repository to see if there are any specific notes about compatibility or changes in behavior.

## How long will my artifact be available?

The default retention period is **90 days**. For more information, visit: https://github.com/actions/upload-artifact?tab=readme-ov-file#retention-period

@ -1,43 +0,0 @@
@actions/artifact

# @actions/artifact

## Table of contents

### Classes

- [ArtifactNotFoundError](classes/ArtifactNotFoundError.md)
- [DefaultArtifactClient](classes/DefaultArtifactClient.md)
- [FilesNotFoundError](classes/FilesNotFoundError.md)
- [GHESNotSupportedError](classes/GHESNotSupportedError.md)
- [InvalidResponseError](classes/InvalidResponseError.md)
- [NetworkError](classes/NetworkError.md)
- [UsageError](classes/UsageError.md)

### Interfaces

- [Artifact](interfaces/Artifact.md)
- [ArtifactClient](interfaces/ArtifactClient.md)
- [DeleteArtifactResponse](interfaces/DeleteArtifactResponse.md)
- [DownloadArtifactOptions](interfaces/DownloadArtifactOptions.md)
- [DownloadArtifactResponse](interfaces/DownloadArtifactResponse.md)
- [FindOptions](interfaces/FindOptions.md)
- [GetArtifactResponse](interfaces/GetArtifactResponse.md)
- [ListArtifactsOptions](interfaces/ListArtifactsOptions.md)
- [ListArtifactsResponse](interfaces/ListArtifactsResponse.md)
- [UploadArtifactOptions](interfaces/UploadArtifactOptions.md)
- [UploadArtifactResponse](interfaces/UploadArtifactResponse.md)

### Variables

- [default](README.md#default)

## Variables

### default

• `Const` **default**: [`ArtifactClient`](interfaces/ArtifactClient.md)

#### Defined in

[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7)

@ -1,169 +0,0 @@
[@actions/artifact](../README.md) / ArtifactNotFoundError

# Class: ArtifactNotFoundError

## Hierarchy

- `Error`

  ↳ **`ArtifactNotFoundError`**

## Table of contents

### Constructors

- [constructor](ArtifactNotFoundError.md#constructor)

### Properties

- [message](ArtifactNotFoundError.md#message)
- [name](ArtifactNotFoundError.md#name)
- [stack](ArtifactNotFoundError.md#stack)
- [prepareStackTrace](ArtifactNotFoundError.md#preparestacktrace)
- [stackTraceLimit](ArtifactNotFoundError.md#stacktracelimit)

### Methods

- [captureStackTrace](ArtifactNotFoundError.md#capturestacktrace)

## Constructors

### constructor

• **new ArtifactNotFoundError**(`message?`): [`ArtifactNotFoundError`](ArtifactNotFoundError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'Artifact not found'` |

#### Returns

[`ArtifactNotFoundError`](ArtifactNotFoundError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24)
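
A minimal handling sketch. This assumes the error class is exported from the package root and that lookups such as `getArtifact` throw it when nothing matches; both are assumptions based on the listings in this documentation:

```typescript
import artifactClient, {ArtifactNotFoundError} from '@actions/artifact'

try {
  await artifactClient.getArtifact('missing-artifact')
} catch (error) {
  if (error instanceof ArtifactNotFoundError) {
    console.log(error.message) // 'Artifact not found' by default
  } else {
    throw error
  }
}
```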

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

@ -1,193 +0,0 @@
[@actions/artifact](../README.md) / DefaultArtifactClient

# Class: DefaultArtifactClient

The default artifact client that is used by the artifact action(s).

## Implements

- [`ArtifactClient`](../interfaces/ArtifactClient.md)

## Table of contents

### Constructors

- [constructor](DefaultArtifactClient.md#constructor)

### Methods

- [deleteArtifact](DefaultArtifactClient.md#deleteartifact)
- [downloadArtifact](DefaultArtifactClient.md#downloadartifact)
- [getArtifact](DefaultArtifactClient.md#getartifact)
- [listArtifacts](DefaultArtifactClient.md#listartifacts)
- [uploadArtifact](DefaultArtifactClient.md#uploadartifact)

## Constructors

### constructor

• **new DefaultArtifactClient**(): [`DefaultArtifactClient`](DefaultArtifactClient.md)

#### Returns

[`DefaultArtifactClient`](DefaultArtifactClient.md)

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>

Delete an Artifact

If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>

single DeleteArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[deleteArtifact](../interfaces/ArtifactClient.md#deleteartifact)

#### Defined in

[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248)
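
A minimal usage sketch (run inside an async context; the `id` field on the response is assumed from `DeleteArtifactResponse`):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

// Delete an artifact by name from the current workflow run.
const {id} = await client.deleteArtifact('my-artifact')
console.log(`Deleted artifact ${id}`)
```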

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>

Downloads an artifact and unzips the content.

If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](../interfaces/DownloadArtifactOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

`Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>

single DownloadArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[downloadArtifact](../interfaces/ArtifactClient.md#downloadartifact)

#### Defined in

[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138)
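
A minimal usage sketch (the `path` option and the `downloadPath` response field are assumptions based on `DownloadArtifactOptions` and `DownloadArtifactResponse`):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

// Download artifact 1234 and unzip it into a target directory.
const {downloadPath} = await client.downloadArtifact(1234, {
  path: '/tmp/my-artifact' // illustrative destination
})
console.log(`Extracted to ${downloadPath}`)
```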
___

### getArtifact

▸ **getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>

Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.

If `options.findBy` is specified, this will use the public List Artifacts API with a name filter, which can get artifacts from other runs:
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow creating multiple artifacts with the same name in the same workflow run.
Multiple artifacts with the same name can still exist in a workflow run that used old versions of upload-artifact (v1, v2 and v3) or `@actions/artifact` < v2, or when the run is a rerun.
In that case, this function will return the first artifact that matches the name.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the get behavior |

#### Returns

`Promise`\<[`GetArtifactResponse`](../interfaces/GetArtifactResponse.md)\>

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[getArtifact](../interfaces/ArtifactClient.md#getartifact)

#### Defined in

[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212)
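
A minimal sketch (the artifact name is illustrative):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  // Throws if no artifact with this name exists in the current run.
  const {artifact: found} = await artifact.getArtifact('my-artifact')
  console.log(`Found ${found.name} (id: ${found.id}, ${found.size} bytes)`)
}
```
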
___

### listArtifacts

▸ **listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>

Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.

If `options.findBy` is specified, this will call the public List Artifacts API, which can list from other runs:
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](../interfaces/ListArtifactsOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the list behavior |

#### Returns

`Promise`\<[`ListArtifactsResponse`](../interfaces/ListArtifactsResponse.md)\>

A single `ListArtifactsResponse` object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[listArtifacts](../interfaces/ArtifactClient.md#listartifacts)

#### Defined in

[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176)
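
A minimal sketch using the documented `latest` option:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  // `latest: true` keeps only the newest artifact for each name,
  // which avoids duplicates on reruns.
  const {artifacts} = await artifact.listArtifacts({latest: true})
  for (const a of artifacts) {
    console.log(`${a.name} (id: ${a.id}, ${a.size} bytes)`)
  }
}
```
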
___

### uploadArtifact

▸ **uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>

Uploads an artifact.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](../interfaces/UploadArtifactOptions.md) | Extra options for customizing the upload behavior |

#### Returns

`Promise`\<[`UploadArtifactResponse`](../interfaces/UploadArtifactResponse.md)\>

A single `UploadArtifactResponse` object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[uploadArtifact](../interfaces/ArtifactClient.md#uploadartifact)

#### Defined in

[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113)
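
A minimal sketch; the file paths are illustrative:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  // Files are stored relative to rootDirectory: here they are archived
  // as app.js and app.js.map, without the leading ./dist.
  const {id, size} = await artifact.uploadArtifact(
    'my-artifact',
    ['./dist/app.js', './dist/app.js.map'],
    './dist'
  )
  console.log(`Uploaded artifact ${id} (${size} bytes)`)
}
```
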
@@ -1,180 +0,0 @@
[@actions/artifact](../README.md) / FilesNotFoundError

# Class: FilesNotFoundError

## Hierarchy

- `Error`

  ↳ **`FilesNotFoundError`**

## Table of contents

### Constructors

- [constructor](FilesNotFoundError.md#constructor)

### Properties

- [files](FilesNotFoundError.md#files)
- [message](FilesNotFoundError.md#message)
- [name](FilesNotFoundError.md#name)
- [stack](FilesNotFoundError.md#stack)
- [prepareStackTrace](FilesNotFoundError.md#preparestacktrace)
- [stackTraceLimit](FilesNotFoundError.md#stacktracelimit)

### Methods

- [captureStackTrace](FilesNotFoundError.md#capturestacktrace)

## Constructors

### constructor

• **new FilesNotFoundError**(`files?`): [`FilesNotFoundError`](FilesNotFoundError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `files` | `string`[] | `[]` |

#### Returns

[`FilesNotFoundError`](FilesNotFoundError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4)

## Properties

### files

• **files**: `string`[]

#### Defined in

[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2)

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
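
A sketch of catching this error during an upload (assuming `FilesNotFoundError` is re-exported from the package root, as the `export * from './internal/shared/errors'` line later in this diff suggests):

```typescript
import {DefaultArtifactClient, FilesNotFoundError} from '@actions/artifact'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  try {
    await artifact.uploadArtifact('my-artifact', ['./missing.txt'], '.')
  } catch (err) {
    if (err instanceof FilesNotFoundError) {
      // `files` lists the paths that could not be found on disk.
      console.error(`No files found for: ${err.files.join(', ')}`)
      return
    }
    throw err
  }
}
```
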
@@ -1,169 +0,0 @@
[@actions/artifact](../README.md) / GHESNotSupportedError

# Class: GHESNotSupportedError

## Hierarchy

- `Error`

  ↳ **`GHESNotSupportedError`**

## Table of contents

### Constructors

- [constructor](GHESNotSupportedError.md#constructor)

### Properties

- [message](GHESNotSupportedError.md#message)
- [name](GHESNotSupportedError.md#name)
- [stack](GHESNotSupportedError.md#stack)
- [prepareStackTrace](GHESNotSupportedError.md#preparestacktrace)
- [stackTraceLimit](GHESNotSupportedError.md#stacktracelimit)

### Methods

- [captureStackTrace](GHESNotSupportedError.md#capturestacktrace)

## Constructors

### constructor

• **new GHESNotSupportedError**(`message?`): [`GHESNotSupportedError`](GHESNotSupportedError.md)

#### Parameters

| Name | Type | Default value |
| :------ | :------ | :------ |
| `message` | `string` | `'@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'` |

#### Returns

[`GHESNotSupportedError`](GHESNotSupportedError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
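
A sketch of handling this error; it assumes the client raises it up front when the runner targets a GitHub Enterprise Server host:

```typescript
import {DefaultArtifactClient, GHESNotSupportedError} from '@actions/artifact'

async function run(): Promise<void> {
  try {
    await new DefaultArtifactClient().listArtifacts()
  } catch (err) {
    if (err instanceof GHESNotSupportedError) {
      console.error(err.message) // explains that v2+ is not supported on GHES
      return
    }
    throw err
  }
}
```
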
@@ -1,169 +0,0 @@
[@actions/artifact](../README.md) / InvalidResponseError

# Class: InvalidResponseError

## Hierarchy

- `Error`

  ↳ **`InvalidResponseError`**

## Table of contents

### Constructors

- [constructor](InvalidResponseError.md#constructor)

### Properties

- [message](InvalidResponseError.md#message)
- [name](InvalidResponseError.md#name)
- [stack](InvalidResponseError.md#stack)
- [prepareStackTrace](InvalidResponseError.md#preparestacktrace)
- [stackTraceLimit](InvalidResponseError.md#stacktracelimit)

### Methods

- [captureStackTrace](InvalidResponseError.md#capturestacktrace)

## Constructors

### constructor

• **new InvalidResponseError**(`message`): [`InvalidResponseError`](InvalidResponseError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `message` | `string` |

#### Returns

[`InvalidResponseError`](InvalidResponseError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4
@@ -1,201 +0,0 @@
[@actions/artifact](../README.md) / NetworkError

# Class: NetworkError

## Hierarchy

- `Error`

  ↳ **`NetworkError`**

## Table of contents

### Constructors

- [constructor](NetworkError.md#constructor)

### Properties

- [code](NetworkError.md#code)
- [message](NetworkError.md#message)
- [name](NetworkError.md#name)
- [stack](NetworkError.md#stack)
- [prepareStackTrace](NetworkError.md#preparestacktrace)
- [stackTraceLimit](NetworkError.md#stacktracelimit)

### Methods

- [captureStackTrace](NetworkError.md#capturestacktrace)
- [isNetworkErrorCode](NetworkError.md#isnetworkerrorcode)

## Constructors

### constructor

• **new NetworkError**(`code`): [`NetworkError`](NetworkError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `code` | `string` |

#### Returns

[`NetworkError`](NetworkError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42)

## Properties

### code

• **code**: `string`

#### Defined in

[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40)

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isNetworkErrorCode

▸ **isNetworkErrorCode**(`code?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `code?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49)
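
A sketch of the static helper; the exact set of codes it recognizes is an assumption, but Node.js socket codes such as `ECONNRESET` are the intended input:

```typescript
import {NetworkError} from '@actions/artifact'

// `code` carries the underlying Node.js error code.
const err = new NetworkError('ECONNRESET')
console.log(err.code) // 'ECONNRESET'

// Static helper for deciding whether a caught error code is network-related.
console.log(NetworkError.isNetworkErrorCode(err.code))
console.log(NetworkError.isNetworkErrorCode(undefined)) // false for missing codes
```
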
@@ -1,184 +0,0 @@
[@actions/artifact](../README.md) / UsageError

# Class: UsageError

## Hierarchy

- `Error`

  ↳ **`UsageError`**

## Table of contents

### Constructors

- [constructor](UsageError.md#constructor)

### Properties

- [message](UsageError.md#message)
- [name](UsageError.md#name)
- [stack](UsageError.md#stack)
- [prepareStackTrace](UsageError.md#preparestacktrace)
- [stackTraceLimit](UsageError.md#stacktracelimit)

### Methods

- [captureStackTrace](UsageError.md#capturestacktrace)
- [isUsageErrorMessage](UsageError.md#isusageerrormessage)

## Constructors

### constructor

• **new UsageError**(): [`UsageError`](UsageError.md)

#### Returns

[`UsageError`](UsageError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isUsageErrorMessage

▸ **isUsageErrorMessage**(`msg?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `msg?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68)
@@ -1,62 +0,0 @@
[@actions/artifact](../README.md) / Artifact

# Interface: Artifact

An Actions Artifact

## Table of contents

### Properties

- [createdAt](Artifact.md#createdat)
- [id](Artifact.md#id)
- [name](Artifact.md#name)
- [size](Artifact.md#size)

## Properties

### createdAt

• `Optional` **createdAt**: `Date`

The time when the artifact was created

#### Defined in

[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128)

___

### id

• **id**: `number`

The ID of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118)

___

### name

• **name**: `string`

The name of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113)

___

### size

• **size**: `number`

The size of the artifact in bytes

#### Defined in

[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123)
@@ -1,159 +0,0 @@
[@actions/artifact](../README.md) / ArtifactClient

# Interface: ArtifactClient

Generic interface for the artifact client.

## Implemented by

- [`DefaultArtifactClient`](../classes/DefaultArtifactClient.md)

## Table of contents

### Methods

- [deleteArtifact](ArtifactClient.md#deleteartifact)
- [downloadArtifact](ArtifactClient.md#downloadartifact)
- [getArtifact](ArtifactClient.md#getartifact)
- [listArtifacts](ArtifactClient.md#listartifacts)
- [uploadArtifact](ArtifactClient.md#uploadartifact)

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

Deletes an artifact.

If `options.findBy` is specified, this will use the public Delete Artifact API: https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

A single `DeleteArtifactResponse` object

#### Defined in

[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103)

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

Downloads an artifact and unzips the content.

If `options.findBy` is specified, this will use the public Download Artifact API: https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](DownloadArtifactOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

`Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

A single `DownloadArtifactResponse` object

#### Defined in

[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89)

___

### getArtifact

▸ **getArtifact**(`artifactName`, `options?`): `Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>

Finds an artifact by name.
If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
If the artifact is not found, it will throw.

If `options.findBy` is specified, this will use the public List Artifacts API with a name filter, which can get artifacts from other runs:
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
`@actions/artifact` v2+ does not allow creating multiple artifacts with the same name in the same workflow run.
Multiple artifacts with the same name can still exist in a workflow run that used old versions of upload-artifact (v1, v2 and v3) or `@actions/artifact` < v2, or when the run is a rerun.
In that case, this function will return the first artifact that matches the name.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to find |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the get behavior |

#### Returns

`Promise`\<[`GetArtifactResponse`](GetArtifactResponse.md)\>

#### Defined in

[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75)

___

### listArtifacts

▸ **listArtifacts**(`options?`): `Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>

Lists all artifacts that are part of the current workflow run.
This function will return at most 1000 artifacts per workflow run.

If `options.findBy` is specified, this will call the public List Artifacts API, which can list from other runs:
https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `options?` | [`ListArtifactsOptions`](ListArtifactsOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the list behavior |

#### Returns

`Promise`\<[`ListArtifactsResponse`](ListArtifactsResponse.md)\>

A single `ListArtifactsResponse` object

#### Defined in

[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57)

___

### uploadArtifact

▸ **uploadArtifact**(`name`, `files`, `rootDirectory`, `options?`): `Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>

Uploads an artifact.

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | The name of the artifact, required |
| `files` | `string`[] | A list of absolute or relative paths that denote what files should be uploaded |
| `rootDirectory` | `string` | An absolute or relative file path that denotes the root parent directory of the files being uploaded |
| `options?` | [`UploadArtifactOptions`](UploadArtifactOptions.md) | Extra options for customizing the upload behavior |

#### Returns

`Promise`\<[`UploadArtifactResponse`](UploadArtifactResponse.md)\>

A single `UploadArtifactResponse` object

#### Defined in

[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40)
@@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / DeleteArtifactResponse

# Interface: DeleteArtifactResponse

Response from the server when deleting an artifact

## Table of contents

### Properties

- [id](DeleteArtifactResponse.md#id)

## Properties

### id

• **id**: `number`

The id of the artifact that was deleted

#### Defined in

[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163)
@@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / DownloadArtifactOptions

# Interface: DownloadArtifactOptions

Options for downloading an artifact

## Table of contents

### Properties

- [path](DownloadArtifactOptions.md#path)

## Properties

### path

• `Optional` **path**: `string`

Denotes where the artifact will be downloaded to. If not specified, the artifact is downloaded to `$GITHUB_WORKSPACE`.

#### Defined in

[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103)
@@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / DownloadArtifactResponse

# Interface: DownloadArtifactResponse

Response from the server when downloading an artifact

## Table of contents

### Properties

- [downloadPath](DownloadArtifactResponse.md#downloadpath)

## Properties

### downloadPath

• `Optional` **downloadPath**: `string`

The path where the artifact was downloaded to

#### Defined in

[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93)
@@ -1,30 +0,0 @@
[@actions/artifact](../README.md) / FindOptions

# Interface: FindOptions

## Table of contents

### Properties

- [findBy](FindOptions.md#findby)

## Properties

### findBy

• `Optional` **findBy**: `Object`

The criteria for finding Artifact(s) out of the scope of the current run.

#### Type declaration

| Name | Type | Description |
| :------ | :------ | :------ |
| `repositoryName` | `string` | Repository name (eg. 'toolkit') |
| `repositoryOwner` | `string` | Repository owner (eg. 'actions') |
| `token` | `string` | Token with actions:read permissions |
| `workflowRunId` | `number` | The id of the workflow run containing the artifact(s) to look up |

#### Defined in

[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136)
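
A sketch of listing artifacts from a different workflow run; all values are illustrative:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  const {artifacts} = await artifact.listArtifacts({
    findBy: {
      token: process.env.GITHUB_TOKEN as string, // token with actions:read
      workflowRunId: 123456789,
      repositoryOwner: 'actions',
      repositoryName: 'toolkit'
    }
  })
  console.log(`Found ${artifacts.length} artifacts`)
}
```
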
@@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / GetArtifactResponse

# Interface: GetArtifactResponse

Response from the server when getting an artifact

## Table of contents

### Properties

- [artifact](GetArtifactResponse.md#artifact)

## Properties

### artifact

• **artifact**: [`Artifact`](Artifact.md)

Metadata about the artifact that was found

#### Defined in

[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62)
@@ -1,24 +0,0 @@
[@actions/artifact](../README.md) / ListArtifactsOptions

# Interface: ListArtifactsOptions

Options for listing artifacts

## Table of contents

### Properties

- [latest](ListArtifactsOptions.md#latest)

## Properties

### latest

• `Optional` **latest**: `boolean`

Filter the workflow run's artifacts to the latest by name.
In the case of reruns, this can be useful to avoid duplicates.

#### Defined in

[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73)
@@ -1,23 +0,0 @@
[@actions/artifact](../README.md) / ListArtifactsResponse

# Interface: ListArtifactsResponse

Response from the server when listing artifacts

## Table of contents

### Properties

- [artifacts](ListArtifactsResponse.md#artifacts)

## Properties

### artifacts

• **artifacts**: [`Artifact`](Artifact.md)[]

A list of artifacts that were found

#### Defined in

[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83)
@@ -1,55 +0,0 @@
[@actions/artifact](../README.md) / UploadArtifactOptions

# Interface: UploadArtifactOptions

Options for uploading an artifact

## Table of contents

### Properties

- [compressionLevel](UploadArtifactOptions.md#compressionlevel)
- [retentionDays](UploadArtifactOptions.md#retentiondays)

## Properties

### compressionLevel

• `Optional` **compressionLevel**: `number`

The level of zlib compression to be applied to the artifact archive.
The value can range from 0 to 9:
- 0: No compression
- 1: Best speed
- 6: Default compression (same as GNU Gzip)
- 9: Best compression
Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.

#### Defined in

[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52)

___

### retentionDays

• `Optional` **retentionDays**: `number`

Duration, in days, after which the artifact will expire.

By default an artifact expires after 90 days:
https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete

Use this option to override the default expiry.

Min value: 1
Max value: 90 unless changed by repository setting

If this is set to a value greater than the retention settings allow, the retention period
will be reduced to the maximum allowed on the server and the upload process will continue. An
input of 0 assumes the default retention setting.

#### Defined in

[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41)
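
A sketch combining both options; the artifact name and file path are illustrative:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  // For an already-compressed archive, skip zlib compression entirely.
  await artifact.uploadArtifact('build-image', ['./image.tar.gz'], '.', {
    compressionLevel: 0, // 0 = store only, fastest for incompressible data
    retentionDays: 1 // expire quickly; max is 90 unless the repo allows more
  })
}
```
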
@@ -1,50 +0,0 @@
[@actions/artifact](../README.md) / UploadArtifactResponse

# Interface: UploadArtifactResponse

Response from the server when an artifact is uploaded

## Table of contents

### Properties

- [digest](UploadArtifactResponse.md#digest)
- [id](UploadArtifactResponse.md#id)
- [size](UploadArtifactResponse.md#size)

## Properties

### digest

• `Optional` **digest**: `string`

The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded

#### Defined in

[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19)

___

### id

• `Optional` **id**: `number`

The id of the artifact that was created. Not provided if no artifact was uploaded
This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts

#### Defined in

[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14)

___

### size

• `Optional` **size**: `number`

Total size of the artifact in bytes. Not provided if no artifact was uploaded

#### Defined in

[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8)
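
A sketch of consuming the response fields; all three are optional, so guard before use:

```typescript
import {DefaultArtifactClient} from '@actions/artifact'
import * as core from '@actions/core'

async function run(): Promise<void> {
  const artifact = new DefaultArtifactClient()
  const {id, size, digest} = await artifact.uploadArtifact('logs', ['./run.log'], '.')
  // Each field is undefined when nothing was uploaded.
  if (id !== undefined) {
    core.setOutput('artifact-id', id) // feed the id to later API calls
    core.info(`Uploaded ${size} bytes, sha256: ${digest}`)
  }
}
```
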
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "@actions/artifact",
  "version": "4.0.0",
  "version": "1.1.1",
  "preview": true,
  "description": "Actions artifact lib",
  "keywords": [
@@ -30,40 +30,23 @@
  },
  "scripts": {
    "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json",
    "test": "cd ../../ && npm run test ./packages/artifact",
    "bootstrap": "cd ../../ && npm run bootstrap",
    "tsc-run": "tsc",
    "tsc": "npm run bootstrap && npm run tsc-run",
    "gen:docs": "typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.10.0",
    "@actions/github": "^6.0.1",
    "@actions/http-client": "^2.1.0",
    "@azure/core-http": "^3.0.5",
    "@azure/storage-blob": "^12.15.0",
    "@octokit/core": "^5.2.1",
    "@octokit/plugin-request-log": "^1.0.4",
    "@octokit/plugin-retry": "^3.0.9",
    "@octokit/request": "^8.4.1",
    "@octokit/request-error": "^5.1.1",
    "@protobuf-ts/plugin": "^2.2.3-alpha.1",
    "archiver": "^7.0.1",
    "jwt-decode": "^3.1.2",
    "unzip-stream": "^0.3.1"
    "@types/node": "^20.4.5",
    "archiver": "^5.3.1"
  },
  "devDependencies": {
    "@protobuf-ts/plugin": "^2.2.3-alpha.1",
    "@types/archiver": "^5.3.2",
    "@types/unzip-stream": "^0.3.4",
    "typedoc": "^0.28.13",
    "typedoc-plugin-markdown": "^3.17.1",
    "typescript": "^5.2.2"
  },
  "overrides": {
    "uri-js": "npm:uri-js-replace@^1.0.1",
    "node-fetch": "^3.3.2"
    "twirp-ts": "^2.5.0",
    "typescript": "^3.9.10"
  }
}
@@ -1,8 +1,16 @@
import {ArtifactClient, DefaultArtifactClient} from './internal/client'
import { ArtifactClient, Client} from './internal/client'
import { UploadOptions } from './internal/upload/upload-options'
import { UploadResponse } from './internal/upload/upload-response'

export * from './internal/shared/interfaces'
export * from './internal/shared/errors'
export * from './internal/client'
/**
 * Exported functionality that we want to expose for any users of @actions/artifact
 */
export {
  ArtifactClient,
  UploadOptions,
  UploadResponse,
}

const client: ArtifactClient = new DefaultArtifactClient()
export default client
export function create(): ArtifactClient {
  return Client.create()
}
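
The two sides of this hunk expose different entry points: one a `create()` factory, the other a `DefaultArtifactClient` class plus a ready-made default-export instance. A usage sketch of the class-based surface:

```typescript
import artifactClient, {DefaultArtifactClient} from '@actions/artifact'

// The default export is a pre-constructed client...
const viaDefault = artifactClient
// ...and constructing one directly is equivalent.
const viaClass = new DefaultArtifactClient()
void viaDefault
void viaClass
```
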
@@ -1,4 +1,4 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated by protobuf-ts 2.9.1 with parameter client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
@@ -139,7 +139,7 @@ export interface Timestamp {
     *
     * @generated from protobuf field: int64 seconds = 1;
     */
    seconds: string;
    seconds: bigint;
    /**
     * Non-negative fractions of a second at nanosecond resolution. Negative
     * second values with fractions must still have non-negative nanos values
@@ -154,7 +154,7 @@ export interface Timestamp {
class Timestamp$Type extends MessageType<Timestamp> {
    constructor() {
        super("google.protobuf.Timestamp", [
            { no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
@@ -164,7 +164,7 @@ class Timestamp$Type extends MessageType<Timestamp> {
    now(): Timestamp {
        const msg = this.create();
        const ms = Date.now();
        msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
        msg.seconds = PbLong.from(Math.floor(ms / 1000)).toBigInt();
        msg.nanos = (ms % 1000) * 1000000;
        return msg;
    }
@@ -180,7 +180,7 @@ class Timestamp$Type extends MessageType<Timestamp> {
    fromDate(date: Date): Timestamp {
        const msg = this.create();
        const ms = date.getTime();
        msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
        msg.seconds = PbLong.from(Math.floor(ms / 1000)).toBigInt();
        msg.nanos = (ms % 1000) * 1000000;
        return msg;
    }
@@ -223,14 +223,14 @@ class Timestamp$Type extends MessageType<Timestamp> {
            throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
        if (!target)
            target = this.create();
        target.seconds = PbLong.from(ms / 1000).toString();
        target.seconds = PbLong.from(ms / 1000).toBigInt();
        target.nanos = 0;
        if (matches[7])
            target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
        return target;
    }
    create(value?: PartialMessage<Timestamp>): Timestamp {
        const message = { seconds: "0", nanos: 0 };
        const message = { seconds: 0n, nanos: 0 };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Timestamp>(this, message, value);
@@ -242,7 +242,7 @@ class Timestamp$Type extends MessageType<Timestamp> {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 seconds */ 1:
                    message.seconds = reader.int64().toString();
                    message.seconds = reader.int64().toBigInt();
                    break;
                case /* int32 nanos */ 2:
                    message.nanos = reader.int32();
@@ -260,7 +260,7 @@ class Timestamp$Type extends MessageType<Timestamp> {
    }
    internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 seconds = 1; */
        if (message.seconds !== "0")
        if (message.seconds !== 0n)
            writer.tag(1, WireType.Varint).int64(message.seconds);
        /* int32 nanos = 2; */
        if (message.nanos !== 0)
@@ -1,4 +1,4 @@
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated by protobuf-ts 2.9.1 with parameter client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
@@ -96,7 +96,7 @@ export interface Int64Value {
     *
     * @generated from protobuf field: int64 value = 1;
     */
    value: string;
    value: bigint;
}
/**
 * Wrapper message for `uint64`.
@@ -111,7 +111,7 @@ export interface UInt64Value {
     *
     * @generated from protobuf field: uint64 value = 1;
     */
    value: string;
    value: bigint;
}
/**
 * Wrapper message for `int32`.
@@ -316,7 +316,7 @@ export const FloatValue = new FloatValue$Type();
class Int64Value$Type extends MessageType<Int64Value> {
    constructor() {
        super("google.protobuf.Int64Value", [
            { no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
            { no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    /**
@@ -331,11 +331,11 @@ class Int64Value$Type extends MessageType<Int64Value> {
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
        target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.BIGINT, "value") as any;
        return target;
    }
    create(value?: PartialMessage<Int64Value>): Int64Value {
        const message = { value: "0" };
        const message = { value: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<Int64Value>(this, message, value);
@@ -347,7 +347,7 @@ class Int64Value$Type extends MessageType<Int64Value> {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* int64 value */ 1:
                    message.value = reader.int64().toString();
                    message.value = reader.int64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
@@ -362,7 +362,7 @@ class Int64Value$Type extends MessageType<Int64Value> {
    }
    internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* int64 value = 1; */
        if (message.value !== "0")
        if (message.value !== 0n)
            writer.tag(1, WireType.Varint).int64(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
@@ -378,7 +378,7 @@ export const Int64Value = new Int64Value$Type();
class UInt64Value$Type extends MessageType<UInt64Value> {
    constructor() {
        super("google.protobuf.UInt64Value", [
            { no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
            { no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/, L: 0 /*LongType.BIGINT*/ }
        ]);
    }
    /**
@@ -393,11 +393,11 @@ class UInt64Value$Type extends MessageType<UInt64Value> {
    internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
        if (!target)
            target = this.create();
        target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
        target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.BIGINT, "value") as any;
        return target;
    }
    create(value?: PartialMessage<UInt64Value>): UInt64Value {
        const message = { value: "0" };
        const message = { value: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<UInt64Value>(this, message, value);
@@ -409,7 +409,7 @@ class UInt64Value$Type extends MessageType<UInt64Value> {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* uint64 value */ 1:
                    message.value = reader.uint64().toString();
                    message.value = reader.uint64().toBigInt();
                    break;
                default:
                    let u = options.readUnknownField;
@@ -424,7 +424,7 @@ class UInt64Value$Type extends MessageType<UInt64Value> {
    }
    internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* uint64 value = 1; */
        if (message.value !== "0")
        if (message.value !== 0n)
            writer.tag(1, WireType.Varint).uint64(message.value);
        let u = options.writeUnknownFields;
        if (u !== false)
@@ -1,4 +1,4 @@
export * from './google/protobuf/timestamp'
export * from './google/protobuf/wrappers'
export * from './results/api/v1/artifact'
export * from './results/api/v1/artifact.twirp-client'
export * from "../generated/google/protobuf/timestamp";
export * from "../generated/google/protobuf/wrappers";
export * from "../generated/results/api/v1/artifact";
export * from "../generated/results/api/v1/artifact.twirp";
@ -1,4 +1,4 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated by protobuf-ts 2.9.1 with parameter client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||
|
|
@ -12,69 +12,8 @@ import type { PartialMessage } from "@protobuf-ts/runtime";
|
|||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { Int64Value } from "../../../google/protobuf/wrappers";
|
||||
import { StringValue } from "../../../google/protobuf/wrappers";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.MigrateArtifactRequest
|
||||
*/
|
||||
export interface MigrateArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 2;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 3;
|
||||
*/
|
||||
expiresAt?: Timestamp;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.MigrateArtifactResponse
|
||||
*/
|
||||
export interface MigrateArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: string signed_upload_url = 2;
|
||||
*/
|
||||
signedUploadUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
|
||||
*/
|
||||
export interface FinalizeMigratedArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 2;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: int64 size = 3;
|
||||
*/
|
||||
size: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
|
||||
*/
|
||||
export interface FinalizeMigratedArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
|
|
@ -132,7 +71,7 @@ export interface FinalizeArtifactRequest {
|
|||
/**
|
||||
* @generated from protobuf field: int64 size = 4;
|
||||
*/
|
||||
size: string;
|
||||
size: bigint;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.StringValue hash = 5;
|
||||
*/
|
||||
|
|

@ -146,382 +85,7 @@ export interface FinalizeArtifactResponse {
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: int64 artifact_id = 2;
     */
    artifactId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
 */
export interface ListArtifactsRequest {
    /**
     * The backend plan ID
     *
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * The backend job ID
     *
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * Name of the artifact to filter on
     *
     * @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
     */
    nameFilter?: StringValue; // optional
    /**
     * Monolith Database ID of the artifact to filter on
     *
     * @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
     */
    idFilter?: Int64Value; // optional
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
 */
export interface ListArtifactsResponse {
    /**
     * @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
     */
    artifacts: ListArtifactsResponse_MonolithArtifact[];
}
/**
 * @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export interface ListArtifactsResponse_MonolithArtifact {
    /**
     * The backend plan ID
     *
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * The backend job ID
     *
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * Monolith database ID of the artifact
     *
     * @generated from protobuf field: int64 database_id = 3;
     */
    databaseId: string;
    /**
     * Name of the artifact
     *
     * @generated from protobuf field: string name = 4;
     */
    name: string;
    /**
     * Size of the artifact in bytes
     *
     * @generated from protobuf field: int64 size = 5;
     */
    size: string;
    /**
     * When the artifact was created in the monolith
     *
     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
     */
    createdAt?: Timestamp;
    /**
     * The SHA-256 digest of the artifact, calculated on upload for upload-artifact v4 & newer
     *
     * @generated from protobuf field: google.protobuf.StringValue digest = 7;
     */
    digest?: StringValue;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export interface GetSignedArtifactURLRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export interface GetSignedArtifactURLResponse {
    /**
     * @generated from protobuf field: string signed_url = 1;
     */
    signedUrl: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export interface DeleteArtifactRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string workflow_job_run_backend_id = 2;
     */
    workflowJobRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 3;
     */
    name: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export interface DeleteArtifactResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: int64 artifact_id = 2;
     */
    artifactId: string;
}
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactRequest$Type extends MessageType<MigrateArtifactRequest> {
    constructor() {
        super("github.actions.results.api.v1.MigrateArtifactRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "expires_at", kind: "message", T: () => Timestamp }
        ]);
    }
    create(value?: PartialMessage<MigrateArtifactRequest>): MigrateArtifactRequest {
        const message = { workflowRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<MigrateArtifactRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactRequest): MigrateArtifactRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string name */ 2:
                    message.name = reader.string();
                    break;
                case /* google.protobuf.Timestamp expires_at */ 3:
                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: MigrateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string name = 2; */
        if (message.name !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.name);
        /* google.protobuf.Timestamp expires_at = 3; */
        if (message.expiresAt)
            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
 */
export const MigrateArtifactRequest = new MigrateArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactResponse$Type extends MessageType<MigrateArtifactResponse> {
    constructor() {
        super("github.actions.results.api.v1.MigrateArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<MigrateArtifactResponse>): MigrateArtifactResponse {
        const message = { ok: false, signedUploadUrl: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<MigrateArtifactResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactResponse): MigrateArtifactResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* string signed_upload_url */ 2:
                    message.signedUploadUrl = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: MigrateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, WireType.Varint).bool(message.ok);
        /* string signed_upload_url = 2; */
        if (message.signedUploadUrl !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
 */
export const MigrateArtifactResponse = new MigrateArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactRequest$Type extends MessageType<FinalizeMigratedArtifactRequest> {
    constructor() {
        super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value?: PartialMessage<FinalizeMigratedArtifactRequest>): FinalizeMigratedArtifactRequest {
        const message = { workflowRunBackendId: "", name: "", size: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<FinalizeMigratedArtifactRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactRequest): FinalizeMigratedArtifactRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string name */ 2:
                    message.name = reader.string();
                    break;
                case /* int64 size */ 3:
                    message.size = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: FinalizeMigratedArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string name = 2; */
        if (message.name !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.name);
        /* int64 size = 3; */
        if (message.size !== "0")
            writer.tag(3, WireType.Varint).int64(message.size);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
 */
export const FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactResponse$Type extends MessageType<FinalizeMigratedArtifactResponse> {
    constructor() {
        super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value?: PartialMessage<FinalizeMigratedArtifactResponse>): FinalizeMigratedArtifactResponse {
        const message = { ok: false, artifactId: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<FinalizeMigratedArtifactResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactResponse): FinalizeMigratedArtifactResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 artifact_id */ 2:
                    message.artifactId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: FinalizeMigratedArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, WireType.Varint).bool(message.ok);
        /* int64 artifact_id = 2; */
        if (message.artifactId !== "0")
            writer.tag(2, WireType.Varint).int64(message.artifactId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
 */
export const FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
    constructor() {

@ -658,12 +222,12 @@ class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest>
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ },
            { no: 5, name: "hash", kind: "message", T: () => StringValue }
        ]);
    }
    create(value?: PartialMessage<FinalizeArtifactRequest>): FinalizeArtifactRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: "0" };
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: 0n };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<FinalizeArtifactRequest>(this, message, value);

@ -684,7 +248,7 @@ class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest>
                    message.name = reader.string();
                    break;
                case /* int64 size */ 4:
                    message.size = reader.int64().toString();
                    message.size = reader.int64().toBigInt();
                    break;
                case /* google.protobuf.StringValue hash */ 5:
                    message.hash = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.hash);

@ -711,7 +275,7 @@ class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest>
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        /* int64 size = 4; */
        if (message.size !== "0")
        if (message.size !== 0n)
            writer.tag(4, WireType.Varint).int64(message.size);
        /* google.protobuf.StringValue hash = 5; */
        if (message.hash)

@ -730,12 +294,11 @@ export const FinalizeArtifactRequest = new FinalizeArtifactRequest$Type();
class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse> {
    constructor() {
        super("github.actions.results.api.v1.FinalizeArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    create(value?: PartialMessage<FinalizeArtifactResponse>): FinalizeArtifactResponse {
        const message = { ok: false, artifactId: "0" };
        const message = { ok: false };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<FinalizeArtifactResponse>(this, message, value);

@ -749,9 +312,6 @@ class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 artifact_id */ 2:
                    message.artifactId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")

@ -767,9 +327,6 @@ class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, WireType.Varint).bool(message.ok);
        /* int64 artifact_id = 2; */
        if (message.artifactId !== "0")
            writer.tag(2, WireType.Varint).int64(message.artifactId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);

@ -780,442 +337,10 @@ class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse
 * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
 */
export const FinalizeArtifactResponse = new FinalizeArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name_filter", kind: "message", T: () => StringValue },
            { no: 4, name: "id_filter", kind: "message", T: () => Int64Value }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* google.protobuf.StringValue name_filter */ 3:
                    message.nameFilter = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter);
                    break;
                case /* google.protobuf.Int64Value id_filter */ 4:
                    message.idFilter = Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* google.protobuf.StringValue name_filter = 3; */
        if (message.nameFilter)
            StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.Int64Value id_filter = 4; */
        if (message.idFilter)
            Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
 */
export const ListArtifactsRequest = new ListArtifactsRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsResponse", [
            { no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListArtifactsResponse_MonolithArtifact }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse {
        const message = { artifacts: [] };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ 1:
                    message.artifacts.push(ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1; */
        for (let i = 0; i < message.artifacts.length; i++)
            ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
 */
export const ListArtifactsResponse = new ListArtifactsResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
    constructor() {
        super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
            { no: 7, name: "digest", kind: "message", T: () => StringValue }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<ListArtifactsResponse_MonolithArtifact>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* int64 database_id */ 3:
                    message.databaseId = reader.int64().toString();
                    break;
                case /* string name */ 4:
                    message.name = reader.string();
                    break;
                case /* int64 size */ 5:
                    message.size = reader.int64().toString();
                    break;
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                case /* google.protobuf.StringValue digest */ 7:
                    message.digest = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* int64 database_id = 3; */
        if (message.databaseId !== "0")
            writer.tag(3, WireType.Varint).int64(message.databaseId);
        /* string name = 4; */
        if (message.name !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.name);
        /* int64 size = 5; */
        if (message.size !== "0")
            writer.tag(5, WireType.Varint).int64(message.size);
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.StringValue digest = 7; */
        if (message.digest)
            StringValue.internalBinaryWrite(message.digest, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export const ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
    constructor() {
        super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetSignedArtifactURLRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export const GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
    constructor() {
        super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [
            { no: 1, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse {
        const message = { signedUrl: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<GetSignedArtifactURLResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string signed_url */ 1:
                    message.signedUrl = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string signed_url = 1; */
        if (message.signedUrl !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.signedUrl);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export const GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
    constructor() {
        super("github.actions.results.api.v1.DeleteArtifactRequest", [
            { no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest {
        const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<DeleteArtifactRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string workflow_run_backend_id */ 1:
                    message.workflowRunBackendId = reader.string();
                    break;
                case /* string workflow_job_run_backend_id */ 2:
                    message.workflowJobRunBackendId = reader.string();
                    break;
                case /* string name */ 3:
                    message.name = reader.string();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string workflow_run_backend_id = 1; */
        if (message.workflowRunBackendId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
        /* string workflow_job_run_backend_id = 2; */
        if (message.workflowJobRunBackendId !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
        /* string name = 3; */
        if (message.name !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.name);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export const DeleteArtifactRequest = new DeleteArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
    constructor() {
        super("github.actions.results.api.v1.DeleteArtifactResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
        ]);
    }
    create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse {
        const message = { ok: false, artifactId: "0" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<DeleteArtifactResponse>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool ok */ 1:
                    message.ok = reader.bool();
                    break;
                case /* int64 artifact_id */ 2:
                    message.artifactId = reader.int64().toString();
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool ok = 1; */
        if (message.ok !== false)
            writer.tag(1, WireType.Varint).bool(message.ok);
        /* int64 artifact_id = 2; */
        if (message.artifactId !== "0")
            writer.tag(2, WireType.Varint).int64(message.artifactId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export const DeleteArtifactResponse = new DeleteArtifactResponse$Type();
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
export const ArtifactService = new ServiceType("github.actions.results.api.v1.ArtifactService", [
    { name: "CreateArtifact", options: {}, I: CreateArtifactRequest, O: CreateArtifactResponse },
    { name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
    { name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
    { name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse },
    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse },
    { name: "MigrateArtifact", options: {}, I: MigrateArtifactRequest, O: MigrateArtifactResponse },
    { name: "FinalizeMigratedArtifact", options: {}, I: FinalizeMigratedArtifactRequest, O: FinalizeMigratedArtifactResponse }
]);
    { name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse }
]);
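
Earlier hunks in this file switch the int64 `size` field on FinalizeArtifactRequest from a decimal string to a native bigint (via LongType.BIGINT in the field descriptor). A minimal sketch of what that means for callers, assuming the generated module above is importable as "./artifact"; all values are illustrative:

import { FinalizeArtifactRequest } from "./artifact";

// `size` is now a JavaScript bigint rather than a string like "1024".
const req = FinalizeArtifactRequest.create({
    workflowRunBackendId: "run-backend-id",
    workflowJobRunBackendId: "job-backend-id",
    name: "my-artifact",
    size: 1024n
});

// Round-tripping through the generated binary codec preserves the bigint.
const decoded = FinalizeArtifactRequest.fromBinary(FinalizeArtifactRequest.toBinary(req));
console.assert(decoded.size === 1024n);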

@ -1,232 +0,0 @@
import {
  CreateArtifactRequest,
  CreateArtifactResponse,
  FinalizeArtifactRequest,
  FinalizeArtifactResponse,
  ListArtifactsRequest,
  ListArtifactsResponse,
  GetSignedArtifactURLRequest,
  GetSignedArtifactURLResponse,
  DeleteArtifactRequest,
  DeleteArtifactResponse,
} from "./artifact";

//==================================//
//          Client Code             //
//==================================//

interface Rpc {
  request(
    service: string,
    method: string,
    contentType: "application/json" | "application/protobuf",
    data: object | Uint8Array
  ): Promise<object | Uint8Array>;
}

export interface ArtifactServiceClient {
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse>;
  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse>;
  ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
  GetSignedArtifactURL(
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse>;
  DeleteArtifact(
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse>;
}

export class ArtifactServiceClientJSON implements ArtifactServiceClient {
  private readonly rpc: Rpc;
  constructor(rpc: Rpc) {
    this.rpc = rpc;
    this.CreateArtifact.bind(this);
    this.FinalizeArtifact.bind(this);
    this.ListArtifacts.bind(this);
    this.GetSignedArtifactURL.bind(this);
    this.DeleteArtifact.bind(this);
  }
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse> {
    const data = CreateArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "CreateArtifact",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      CreateArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }

  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse> {
    const data = FinalizeArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "FinalizeArtifact",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      FinalizeArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }

  ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
    const data = ListArtifactsRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "ListArtifacts",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
    );
  }

  GetSignedArtifactURL(
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse> {
    const data = GetSignedArtifactURLRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "GetSignedArtifactURL",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      GetSignedArtifactURLResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }

  DeleteArtifact(
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse> {
    const data = DeleteArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    });
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "DeleteArtifact",
      "application/json",
      data as object
    );
    return promise.then((data) =>
      DeleteArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true,
      })
    );
  }
}

export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
  private readonly rpc: Rpc;
  constructor(rpc: Rpc) {
    this.rpc = rpc;
    this.CreateArtifact.bind(this);
    this.FinalizeArtifact.bind(this);
    this.ListArtifacts.bind(this);
    this.GetSignedArtifactURL.bind(this);
    this.DeleteArtifact.bind(this);
  }
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse> {
    const data = CreateArtifactRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "CreateArtifact",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      CreateArtifactResponse.fromBinary(data as Uint8Array)
    );
  }

  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse> {
    const data = FinalizeArtifactRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "FinalizeArtifact",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      FinalizeArtifactResponse.fromBinary(data as Uint8Array)
    );
  }

  ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
    const data = ListArtifactsRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "ListArtifacts",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      ListArtifactsResponse.fromBinary(data as Uint8Array)
    );
  }

  GetSignedArtifactURL(
    request: GetSignedArtifactURLRequest
  ): Promise<GetSignedArtifactURLResponse> {
    const data = GetSignedArtifactURLRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "GetSignedArtifactURL",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
    );
  }

  DeleteArtifact(
    request: DeleteArtifactRequest
  ): Promise<DeleteArtifactResponse> {
    const data = DeleteArtifactRequest.toBinary(request);
    const promise = this.rpc.request(
      "github.actions.results.api.v1.ArtifactService",
      "DeleteArtifact",
      "application/protobuf",
      data
    );
    return promise.then((data) =>
      DeleteArtifactResponse.fromBinary(data as Uint8Array)
    );
  }
}

@ -0,0 +1,437 @@
import {
  TwirpContext,
  TwirpServer,
  RouterEvents,
  TwirpError,
  TwirpErrorCode,
  Interceptor,
  TwirpContentType,
  chainInterceptors
} from 'twirp-ts'
import {
  CreateArtifactRequest,
  CreateArtifactResponse,
  FinalizeArtifactRequest,
  FinalizeArtifactResponse
} from './artifact'

//==================================//
//          Client Code             //
//==================================//

interface Rpc {
  request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array>
}

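One plausible way to satisfy the Rpc interface above (not part of the generated file): a small fetch-based transport. The POST /twirp/<service>/<method> routing is the standard Twirp convention, but the FetchRpc name and baseUrl parameter are illustrative assumptions.

class FetchRpc implements Rpc {
  constructor(private readonly baseUrl: string) {}
  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const res = await fetch(`${this.baseUrl}/twirp/${service}/${method}`, {
      method: 'POST',
      headers: {'Content-Type': contentType},
      body:
        contentType === 'application/json'
          ? JSON.stringify(data)
          : (data as Uint8Array)
    })
    // JSON endpoints return a plain object; protobuf endpoints return bytes.
    return contentType === 'application/json'
      ? ((await res.json()) as object)
      : new Uint8Array(await res.arrayBuffer())
  }
}
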
export interface ArtifactServiceClient {
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse>
  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse>
}

export class ArtifactServiceClientJSON implements ArtifactServiceClient {
  private readonly rpc: Rpc
  constructor(rpc: Rpc) {
    this.rpc = rpc
    this.CreateArtifact.bind(this)
    this.FinalizeArtifact.bind(this)
  }
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse> {
    const data = CreateArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false
    })
    const promise = this.rpc.request(
      'github.actions.results.api.v1.ArtifactService',
      'CreateArtifact',
      'application/json',
      data as object
    )
    return promise.then(data =>
      CreateArtifactResponse.fromJson(data as any, {ignoreUnknownFields: true})
    )
  }

  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse> {
    const data = FinalizeArtifactRequest.toJson(request, {
      useProtoFieldName: true,
      emitDefaultValues: false
    })
    const promise = this.rpc.request(
      'github.actions.results.api.v1.ArtifactService',
      'FinalizeArtifact',
      'application/json',
      data as object
    )
    return promise.then(data =>
      FinalizeArtifactResponse.fromJson(data as any, {
        ignoreUnknownFields: true
      })
    )
  }
}

export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
  private readonly rpc: Rpc
  constructor(rpc: Rpc) {
    this.rpc = rpc
    this.CreateArtifact.bind(this)
    this.FinalizeArtifact.bind(this)
  }
  CreateArtifact(
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse> {
    const data = CreateArtifactRequest.toBinary(request)
    const promise = this.rpc.request(
      'github.actions.results.api.v1.ArtifactService',
      'CreateArtifact',
      'application/protobuf',
      data
    )
    return promise.then(data =>
      CreateArtifactResponse.fromBinary(data as Uint8Array)
    )
  }

  FinalizeArtifact(
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse> {
    const data = FinalizeArtifactRequest.toBinary(request)
    const promise = this.rpc.request(
      'github.actions.results.api.v1.ArtifactService',
      'FinalizeArtifact',
      'application/protobuf',
      data
    )
    return promise.then(data =>
      FinalizeArtifactResponse.fromBinary(data as Uint8Array)
    )
  }
}

//==================================//
//          Server Code             //
//==================================//

export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
  CreateArtifact(
    ctx: T,
    request: CreateArtifactRequest
  ): Promise<CreateArtifactResponse>
  FinalizeArtifact(
    ctx: T,
    request: FinalizeArtifactRequest
  ): Promise<FinalizeArtifactResponse>
}

export enum ArtifactServiceMethod {
  CreateArtifact = 'CreateArtifact',
  FinalizeArtifact = 'FinalizeArtifact'
}

export const ArtifactServiceMethodList = [
  ArtifactServiceMethod.CreateArtifact,
  ArtifactServiceMethod.FinalizeArtifact
]

export function createArtifactServiceServer<
  T extends TwirpContext = TwirpContext
>(service: ArtifactServiceTwirp<T>) {
  return new TwirpServer<ArtifactServiceTwirp, T>({
    service,
    packageName: 'github.actions.results.api.v1',
    serviceName: 'ArtifactService',
    methodList: ArtifactServiceMethodList,
    matchRoute: matchArtifactServiceRoute
  })
}

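For context, a server built with createArtifactServiceServer is typically mounted on Node's http module following the twirp-ts pattern. A sketch with placeholder handler implementations; the ok-only responses and port are illustrative:

import * as http from 'http'

const server = createArtifactServiceServer({
  async CreateArtifact(ctx, request) {
    // placeholder: issue an upload location for the artifact here
    return CreateArtifactResponse.create({ok: true})
  },
  async FinalizeArtifact(ctx, request) {
    // placeholder: seal the uploaded artifact here
    return FinalizeArtifactResponse.create({ok: true})
  }
})

http.createServer(server.httpHandler()).listen(8080)
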
function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
  method: string,
  events: RouterEvents<T>
) {
  switch (method) {
    case 'CreateArtifact':
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          CreateArtifactRequest,
          CreateArtifactResponse
        >[]
      ) => {
        ctx = {...ctx, methodName: 'CreateArtifact'}
        await events.onMatch(ctx)
        return handleArtifactServiceCreateArtifactRequest(
          ctx,
          service,
          data,
          interceptors
        )
      }
    case 'FinalizeArtifact':
      return async (
        ctx: T,
        service: ArtifactServiceTwirp,
        data: Buffer,
        interceptors?: Interceptor<
          T,
          FinalizeArtifactRequest,
          FinalizeArtifactResponse
        >[]
      ) => {
        ctx = {...ctx, methodName: 'FinalizeArtifact'}
        await events.onMatch(ctx)
        return handleArtifactServiceFinalizeArtifactRequest(
          ctx,
          service,
          data,
          interceptors
        )
      }
    default:
      events.onNotFound()
      const msg = `no handler found`
      throw new TwirpError(TwirpErrorCode.BadRoute, msg)
  }
}

function handleArtifactServiceCreateArtifactRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceCreateArtifactJSON<T>(
        ctx,
        service,
        data,
        interceptors
      )
    case TwirpContentType.Protobuf:
      return handleArtifactServiceCreateArtifactProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      )
    default:
      const msg = 'unexpected Content-Type'
      throw new TwirpError(TwirpErrorCode.BadRoute, msg)
  }
}

function handleArtifactServiceFinalizeArtifactRequest<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceFinalizeArtifactJSON<T>(
        ctx,
        service,
        data,
        interceptors
      )
    case TwirpContentType.Protobuf:
      return handleArtifactServiceFinalizeArtifactProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      )
    default:
      const msg = 'unexpected Content-Type'
      throw new TwirpError(TwirpErrorCode.BadRoute, msg)
  }
}
async function handleArtifactServiceCreateArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
  let request: CreateArtifactRequest
  let response: CreateArtifactResponse

  try {
    const body = JSON.parse(data.toString() || '{}')
    request = CreateArtifactRequest.fromJson(body, {ignoreUnknownFields: true})
  } catch (e) {
    if (e instanceof Error) {
      const msg = 'the json request could not be decoded'
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      CreateArtifactRequest,
      CreateArtifactResponse
    >
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.CreateArtifact(ctx, inputReq)
    })
  } else {
    response = await service.CreateArtifact(ctx, request!)
  }

  return JSON.stringify(
    CreateArtifactResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false
    }) as string
  )
}

async function handleArtifactServiceFinalizeArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
) {
  let request: FinalizeArtifactRequest
|
||||
let response: FinalizeArtifactResponse
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || '{}')
|
||||
request = FinalizeArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true
|
||||
})
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the json request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq)
|
||||
})
|
||||
} else {
|
||||
response = await service.FinalizeArtifact(ctx, request!)
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
FinalizeArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false
|
||||
}) as string
|
||||
)
|
||||
}
|
||||
async function handleArtifactServiceCreateArtifactProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
) {
|
||||
let request: CreateArtifactRequest
|
||||
let response: CreateArtifactResponse
|
||||
|
||||
try {
|
||||
request = CreateArtifactRequest.fromBinary(data)
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the protobuf request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq)
|
||||
})
|
||||
} else {
|
||||
response = await service.CreateArtifact(ctx, request!)
|
||||
}
|
||||
|
||||
return Buffer.from(CreateArtifactResponse.toBinary(response))
|
||||
}
|
||||
|
||||
async function handleArtifactServiceFinalizeArtifactProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) {
|
||||
let request: FinalizeArtifactRequest
|
||||
let response: FinalizeArtifactResponse
|
||||
|
||||
try {
|
||||
request = FinalizeArtifactRequest.fromBinary(data)
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = 'the protobuf request could not be decoded'
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true)
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq)
|
||||
})
|
||||
} else {
|
||||
response = await service.FinalizeArtifact(ctx, request!)
|
||||
}
|
||||
|
||||
return Buffer.from(FinalizeArtifactResponse.toBinary(response))
|
||||
}
|
||||
|
|
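For orientation, the generated client above only needs something that satisfies its `Rpc` interface. A minimal sketch, assuming a Node 18+ global `fetch`; the `FetchRpc` class and `RESULTS_URL` are hypothetical illustrations, not part of this diff (the package's real transport is the `ArtifactHttpClient` in the next file):

```typescript
// Minimal sketch of plugging a transport into the generated client.
// FetchRpc and RESULTS_URL are invented for illustration.
class FetchRpc {
  constructor(private readonly baseUrl: string) {}

  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    // Twirp routes are always POST <base>/twirp/<package.Service>/<Method>
    const res = await fetch(`${this.baseUrl}/twirp/${service}/${method}`, {
      method: 'POST',
      headers: {'Content-Type': contentType},
      body: data instanceof Uint8Array ? data : JSON.stringify(data)
    })
    return contentType === 'application/protobuf'
      ? new Uint8Array(await res.arrayBuffer())
      : ((await res.json()) as object)
  }
}

// const client = new ArtifactServiceClientProtobuf(new FetchRpc(RESULTS_URL))
// const resp = await client.CreateArtifact(req) // req: CreateArtifactRequest
```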
@@ -0,0 +1,140 @@
import { HttpCodes, HttpClient, HttpClientResponse } from '@actions/http-client'
import { BearerCredentialHandler } from '@actions/http-client/lib/auth'
import { info } from '@actions/core'
import { getRuntimeToken, getResultsServiceUrl, getRetryMultiplier, getInitialRetryIntervalInMilliseconds, getRetryLimit } from './config'

interface Rpc {
  request(
    service: string,
    method: string,
    contentType: "application/json" | "application/protobuf",
    data: object | Uint8Array
  ): Promise<object | Uint8Array>
}

export class ArtifactHttpClient implements Rpc {
  private httpClient: HttpClient
  private baseUrl: string

  constructor(userAgent: string) {
    const token = getRuntimeToken()
    this.httpClient = new HttpClient(userAgent, [
      new BearerCredentialHandler(token)
    ])
    this.baseUrl = getResultsServiceUrl()
  }

  async request(
    service: string,
    method: string,
    contentType: "application/json" | "application/protobuf",
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    let url = `${this.baseUrl}/twirp/${service}/${method}`
    let headers = {
      "Content-Type": contentType
    }

    const resp = await this.retry(
      `${method}`,
      this.httpClient.post(url, JSON.stringify(data), headers),
    )
    const body = await resp.readBody()
    return JSON.parse(body)
  }

  async retry(
    name: string,
    operation: Promise<HttpClientResponse>
  ): Promise<HttpClientResponse> {
    let response: HttpClientResponse | undefined = undefined
    let statusCode: number | undefined = undefined
    let isRetryable = false
    let errorMessage = ''
    let attempt = 1
    const maxAttempts = getRetryLimit()

    while (attempt <= maxAttempts) {
      try {
        response = await operation
        statusCode = response.message.statusCode

        if (this.isSuccessStatusCode(statusCode)) {
          return response
        }

        isRetryable = this.isRetryableStatusCode(statusCode)
        errorMessage = `Artifact service responded with ${statusCode}`
      } catch (error: any) {
        isRetryable = true
        errorMessage = error.message
      }

      if (!isRetryable) {
        info(`${name} - Error is not retryable`)
        if (response) {
          this.displayHttpDiagnostics(response)
        }
        break
      }

      info(
        `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
      )

      await this.sleep(this.getExponentialRetryTimeInMilliseconds(attempt))
      attempt++
    }

    if (response) {
      this.displayHttpDiagnostics(response)
    }

    throw Error(`${name} failed: ${errorMessage}`)
  }

  isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
      return false
    }
    return statusCode >= 200 && statusCode < 300
  }

  isRetryableStatusCode(statusCode: number | undefined): boolean {
    if (!statusCode) {
      return false
    }

    const retryableStatusCodes = [
      HttpCodes.BadGateway,
      HttpCodes.GatewayTimeout,
      HttpCodes.InternalServerError,
      HttpCodes.ServiceUnavailable,
      HttpCodes.TooManyRequests,
      413 // Payload Too Large
    ]
    return retryableStatusCodes.includes(statusCode)
  }

  displayHttpDiagnostics(response: HttpClientResponse): void {
    info(
      `##### Begin Diagnostic HTTP information #####
Status Code: ${response.message.statusCode}
Status Message: ${response.message.statusMessage}
Header Information: ${JSON.stringify(response.message.headers, undefined, 2)}
###### End Diagnostic HTTP information ######`
    )
  }

  getExponentialRetryTimeInMilliseconds(retryCount: number): number {
    if (retryCount < 0) {
      throw new Error('RetryCount should not be negative')
    } else if (retryCount === 0) {
      return getInitialRetryIntervalInMilliseconds()
    }

    const minTime =
      getInitialRetryIntervalInMilliseconds() * getRetryMultiplier() * retryCount
    const maxTime = minTime * getRetryMultiplier()

    // returns a random number between the minTime (inclusive) and the maxTime (exclusive)
    return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
  }

  async sleep(milliseconds: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, milliseconds))
  }
}
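One thing worth flagging in this early client: `retry` receives an already-started `Promise`, so re-awaiting it on later attempts replays the same settled result rather than re-issuing the HTTP call. A sketch of a thunk-based variant that actually retries (the name is mine, and it reuses this file's imports; the original's status-code checks are elided for brevity):

```typescript
// Sketch only: taking a () => Promise factory means every attempt makes
// a fresh request; awaiting one shared Promise cannot.
async function retryWithThunk(
  name: string,
  operation: () => Promise<HttpClientResponse>,
  maxAttempts = getRetryLimit()
): Promise<HttpClientResponse> {
  let errorMessage = ''
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await operation() // new request on each iteration
    } catch (error: any) {
      errorMessage = error.message
    }
  }
  throw new Error(`${name} failed after ${maxAttempts} attempts: ${errorMessage}`)
}

// usage: await retryWithThunk(method, () => httpClient.post(url, body, headers))
```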
@@ -1,284 +1,44 @@
import {warning} from '@actions/core'
import {isGhes} from './shared/config'
import {
  UploadArtifactOptions,
  UploadArtifactResponse,
  DownloadArtifactOptions,
  GetArtifactResponse,
  ListArtifactsOptions,
  ListArtifactsResponse,
  DownloadArtifactResponse,
  FindOptions,
  DeleteArtifactResponse
} from './shared/interfaces'
import {uploadArtifact} from './upload/upload-artifact'
import {
  downloadArtifactPublic,
  downloadArtifactInternal
} from './download/download-artifact'
import {
  deleteArtifactPublic,
  deleteArtifactInternal
} from './delete/delete-artifact'
import {getArtifactPublic, getArtifactInternal} from './find/get-artifact'
import {listArtifactsPublic, listArtifactsInternal} from './find/list-artifacts'
import {GHESNotSupportedError} from './shared/errors'
import { UploadOptions } from './upload/upload-options'
import { UploadResponse } from './upload/upload-response'
import { uploadArtifact } from './upload/upload-artifact'

/**
 * Generic interface for the artifact client.
 */
export interface ArtifactClient {
  /**
   * Uploads an artifact.
   * Uploads an artifact
   *
   * @param name The name of the artifact, required
   * @param files A list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options Extra options for customizing the upload behavior
   * @returns single UploadArtifactResponse object
   * @param name the name of the artifact, required
   * @param files a list of absolute or relative paths that denote what files should be uploaded
   * @param rootDirectory an absolute or relative file path that denotes the root parent directory of the files being uploaded
   * @param options extra options for customizing the upload behavior
   * @returns single UploadInfo object
   */
  uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadArtifactOptions
  ): Promise<UploadArtifactResponse>
    options?: UploadOptions
  ): Promise<UploadResponse>

  /**
   * Lists all artifacts that are part of the current workflow run.
   * This function will return at most 1000 artifacts per workflow run.
   *
   * If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
   * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
   *
   * @param options Extra options that allow for the customization of the list behavior
   * @returns ListArtifactResponse object
   */
  listArtifacts(
    options?: ListArtifactsOptions & FindOptions
  ): Promise<ListArtifactsResponse>

  /**
   * Finds an artifact by name.
   * If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
   * If the artifact is not found, it will throw.
   *
   * If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
   * https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
   * `@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
   * It is possible to have multiple artifacts with the same name in the same workflow run by using old versions of upload-artifact (v1, v2 and v3), @actions/artifact < v2, or if it is a rerun.
   * If there are multiple artifacts with the same name in the same workflow run this function will return the first artifact that matches the name.
   *
   * @param artifactName The name of the artifact to find
   * @param options Extra options that allow for the customization of the get behavior
   */
  getArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<GetArtifactResponse>

  /**
   * Downloads an artifact and unzips the content.
   *
   * If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
   *
   * @param artifactId The id of the artifact to download
   * @param options Extra options that allow for the customization of the download behavior
   * @returns single DownloadArtifactResponse object
   */
  downloadArtifact(
    artifactId: number,
    options?: DownloadArtifactOptions & FindOptions
  ): Promise<DownloadArtifactResponse>

  /**
   * Delete an Artifact
   *
   * If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
   *
   * @param artifactName The name of the artifact to delete
   * @param options Extra options that allow for the customization of the delete behavior
   * @returns single DeleteArtifactResponse object
   */
  deleteArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<DeleteArtifactResponse>
  // TODO Download functionality
}

/**
 * The default artifact client that is used by the artifact action(s).
 */
export class DefaultArtifactClient implements ArtifactClient {
export class Client implements ArtifactClient {
  /**
   * Constructs a Client
   */
  static create(): Client {
    return new Client()
  }

  /**
   * Uploads an artifact
   */
  async uploadArtifact(
    name: string,
    files: string[],
    rootDirectory: string,
    options?: UploadArtifactOptions
  ): Promise<UploadArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      return uploadArtifact(name, files, rootDirectory, options)
    } catch (error) {
      warning(
        `Artifact upload failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
    options?: UploadOptions | undefined
  ): Promise<UploadResponse> {
    return uploadArtifact(name, files, rootDirectory, options)
  }

  async downloadArtifact(
    artifactId: number,
    options?: DownloadArtifactOptions & FindOptions
  ): Promise<DownloadArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {repositoryOwner, repositoryName, token},
          ...downloadOptions
        } = options

        return downloadArtifactPublic(
          artifactId,
          repositoryOwner,
          repositoryName,
          token,
          downloadOptions
        )
      }

      return downloadArtifactInternal(artifactId, options)
    } catch (error) {
      warning(
        `Download Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async listArtifacts(
    options?: ListArtifactsOptions & FindOptions
  ): Promise<ListArtifactsResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {workflowRunId, repositoryOwner, repositoryName, token}
        } = options

        return listArtifactsPublic(
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token,
          options?.latest
        )
      }

      return listArtifactsInternal(options?.latest)
    } catch (error: unknown) {
      warning(
        `Listing Artifacts failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }

  async getArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<GetArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {workflowRunId, repositoryOwner, repositoryName, token}
        } = options

        return getArtifactPublic(
          artifactName,
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token
        )
      }

      return getArtifactInternal(artifactName)
    } catch (error: unknown) {
      warning(
        `Get Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )
      throw error
    }
  }

  async deleteArtifact(
    artifactName: string,
    options?: FindOptions
  ): Promise<DeleteArtifactResponse> {
    try {
      if (isGhes()) {
        throw new GHESNotSupportedError()
      }

      if (options?.findBy) {
        const {
          findBy: {repositoryOwner, repositoryName, workflowRunId, token}
        } = options

        return deleteArtifactPublic(
          artifactName,
          workflowRunId,
          repositoryOwner,
          repositoryName,
          token
        )
      }

      return deleteArtifactInternal(artifactName)
    } catch (error) {
      warning(
        `Delete Artifact failed with error: ${error}.

Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.

If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
      )

      throw error
    }
  }
}
}
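Both sides of this diff expose the same call shape; a hedged usage sketch of the newer facade, with the artifact name, file paths, and option values invented for illustration:

```typescript
// Hypothetical usage of DefaultArtifactClient; names and paths invented.
import {DefaultArtifactClient} from '@actions/artifact'

async function run(): Promise<void> {
  const client = new DefaultArtifactClient()

  // Upload: files are resolved relative to the given root directory.
  const upload = await client.uploadArtifact(
    'build-output',
    ['dist/app.js', 'dist/app.js.map'],
    process.cwd(),
    {retentionDays: 7}
  )

  // Round-trip: look the artifact back up by name, then download by id.
  const {artifact} = await client.getArtifact('build-output')
  const {downloadPath} = await client.downloadArtifact(artifact.id, {
    path: 'restored'
  })
  console.log(`uploaded id ${upload.id}, restored to ${downloadPath}`)
}
```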
@@ -0,0 +1,50 @@
export function getRuntimeToken(): string {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN environment variable which is required')
  }
  return token
}

export function getResultsServiceUrl(): string {
  const resultsUrl = process.env['ACTIONS_RESULTS_URL']
  if (!resultsUrl) {
    throw new Error('Unable to get the ACTIONS_RESULTS_URL environment variable which is required')
  }
  return resultsUrl
}

export function getWorkFlowRunId(): string {
  const workFlowRunId = process.env['GITHUB_RUN_ID']
  if (!workFlowRunId) {
    throw new Error('Unable to get the GITHUB_RUN_ID environment variable which is required')
  }
  return workFlowRunId
}

export function getWorkSpaceDirectory(): string {
  const workspaceDirectory = process.env['GITHUB_WORKSPACE']
  if (!workspaceDirectory) {
    throw new Error('Unable to get the GITHUB_WORKSPACE environment variable which is required')
  }
  return workspaceDirectory
}

export function getRetentionDays(): string | undefined {
  return process.env['GITHUB_RETENTION_DAYS']
}

export function getInitialRetryIntervalInMilliseconds(): number {
  return 3000
}

// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
// The retry multiplier controls by how much the backOff time increases depending on the number of retries
export function getRetryMultiplier(): number {
  return 1.5
}

// The maximum number of retries that can be attempted before an upload or download fails
export function getRetryLimit(): number {
  return 5
}
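Taken together these defaults give a 3000 ms base, a 1.5x multiplier, and 5 attempts. A small sketch of the [min, max) sleep window that the HTTP client's `getExponentialRetryTimeInMilliseconds` draws from at each retry count; note the growth here is linear in `retryCount`, unlike the `multiplier ** attempt` form used by the newer twirp client later in this comparison:

```typescript
// Sketch: the sleep window implied by the config above.
// retry 0 -> exactly 3000 ms
// retry 1 -> [4500, 6750) ms
// retry 2 -> [9000, 13500) ms
// retry 3 -> [13500, 20250) ms
// retry 4 -> [18000, 27000) ms
function backoffWindow(retryCount: number): [number, number] {
  const base = getInitialRetryIntervalInMilliseconds() // 3000
  const multiplier = getRetryMultiplier() // 1.5
  if (retryCount === 0) return [base, base]
  const min = base * multiplier * retryCount
  return [min, min * multiplier]
}
```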
@@ -1,109 +0,0 @@
import {info, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {DeleteArtifactResponse} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from '../find/retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {
  DeleteArtifactRequest,
  ListArtifactsRequest,
  StringValue
} from '../../generated'
import {getArtifactPublic} from '../find/get-artifact'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function deleteArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string
): Promise<DeleteArtifactResponse> {
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  const getArtifactResp = await getArtifactPublic(
    artifactName,
    workflowRunId,
    repositoryOwner,
    repositoryName,
    token
  )

  const deleteArtifactResp = await github.rest.actions.deleteArtifact({
    owner: repositoryOwner,
    repo: repositoryName,
    artifact_id: getArtifactResp.artifact.id
  })

  if (deleteArtifactResp.status !== 204) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${deleteArtifactResp.status} (${deleteArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  return {
    id: getArtifactResp.artifact.id
  }
}

export async function deleteArtifactInternal(
  artifactName: string
): Promise<DeleteArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const listRes = await artifactClient.ListArtifacts(listReq)

  if (listRes.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}`
    )
  }

  let artifact = listRes.artifacts[0]
  if (listRes.artifacts.length > 1) {
    artifact = listRes.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  const req: DeleteArtifactRequest = {
    workflowRunBackendId: artifact.workflowRunBackendId,
    workflowJobRunBackendId: artifact.workflowJobRunBackendId,
    name: artifact.name
  }

  const res = await artifactClient.DeleteArtifact(req)
  info(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`)

  return {
    id: Number(res.artifactId)
  }
}
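The name-collision handling above is easy to miss: when ListArtifacts returns several matches, the sort on `databaseId` picks the newest before deleting. In miniature, with invented sample data:

```typescript
// Sketch of the newest-first selection used by deleteArtifactInternal.
const candidates = [
  {name: 'logs', databaseId: '101'},
  {name: 'logs', databaseId: '205'},
  {name: 'logs', databaseId: '158'}
]
const newest = candidates.sort(
  (a, b) => Number(b.databaseId) - Number(a.databaseId)
)[0]
// newest.databaseId === '205'; only this artifact would be deleted
```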
@@ -1,256 +0,0 @@
import fs from 'fs/promises'
import * as crypto from 'crypto'
import * as stream from 'stream'

import * as github from '@actions/github'
import * as core from '@actions/core'
import * as httpClient from '@actions/http-client'
import unzip from 'unzip-stream'
import {
  DownloadArtifactOptions,
  DownloadArtifactResponse,
  StreamExtractResponse
} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getGitHubWorkspaceDir} from '../shared/config'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {
  GetSignedArtifactURLRequest,
  Int64Value,
  ListArtifactsRequest
} from '../../generated'
import {getBackendIdsFromToken} from '../shared/util'
import {ArtifactNotFoundError} from '../shared/errors'

const scrubQueryParameters = (url: string): string => {
  const parsed = new URL(url)
  parsed.search = ''
  return parsed.toString()
}

async function exists(path: string): Promise<boolean> {
  try {
    await fs.access(path)
    return true
  } catch (error) {
    if (error.code === 'ENOENT') {
      return false
    } else {
      throw error
    }
  }
}

async function streamExtract(
  url: string,
  directory: string
): Promise<StreamExtractResponse> {
  let retryCount = 0
  while (retryCount < 5) {
    try {
      return await streamExtractExternal(url, directory)
    } catch (error) {
      retryCount++
      core.debug(
        `Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`
      )
      // wait 5 seconds before retrying
      await new Promise(resolve => setTimeout(resolve, 5000))
    }
  }

  throw new Error(`Artifact download failed after ${retryCount} retries.`)
}

export async function streamExtractExternal(
  url: string,
  directory: string,
  opts: {timeout: number} = {timeout: 30 * 1000}
): Promise<StreamExtractResponse> {
  const client = new httpClient.HttpClient(getUserAgentString())
  const response = await client.get(url)
  if (response.message.statusCode !== 200) {
    throw new Error(
      `Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`
    )
  }

  let sha256Digest: string | undefined = undefined

  return new Promise((resolve, reject) => {
    const timerFn = (): void => {
      const timeoutError = new Error(
        `Blob storage chunk did not respond in ${opts.timeout}ms`
      )
      response.message.destroy(timeoutError)
      reject(timeoutError)
    }
    const timer = setTimeout(timerFn, opts.timeout)

    const hashStream = crypto.createHash('sha256').setEncoding('hex')
    const passThrough = new stream.PassThrough()

    response.message.pipe(passThrough)
    passThrough.pipe(hashStream)
    const extractStream = passThrough

    extractStream
      .on('data', () => {
        timer.refresh()
      })
      .on('error', (error: Error) => {
        core.debug(
          `response.message: Artifact download failed: ${error.message}`
        )
        clearTimeout(timer)
        reject(error)
      })
      .pipe(unzip.Extract({path: directory}))
      .on('close', () => {
        clearTimeout(timer)
        if (hashStream) {
          hashStream.end()
          sha256Digest = hashStream.read() as string
          core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`)
        }
        resolve({sha256Digest: `sha256:${sha256Digest}`})
      })
      .on('error', (error: Error) => {
        reject(error)
      })
  })
}

export async function downloadArtifactPublic(
  artifactId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string,
  options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
  const downloadPath = await resolveOrCreateDirectory(options?.path)

  const api = github.getOctokit(token)

  let digestMismatch = false

  core.info(
    `Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`
  )

  const {headers, status} = await api.rest.actions.downloadArtifact({
    owner: repositoryOwner,
    repo: repositoryName,
    artifact_id: artifactId,
    archive_format: 'zip',
    request: {
      redirect: 'manual'
    }
  })

  if (status !== 302) {
    throw new Error(`Unable to download artifact. Unexpected status: ${status}`)
  }

  const {location} = headers
  if (!location) {
    throw new Error(`Unable to redirect to artifact download url`)
  }

  core.info(
    `Redirecting to blob download url: ${scrubQueryParameters(location)}`
  )

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    const extractResponse = await streamExtract(location, downloadPath)
    core.info(`Artifact download completed successfully.`)
    if (options?.expectedHash) {
      if (options?.expectedHash !== extractResponse.sha256Digest) {
        digestMismatch = true
        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
        core.debug(`Expected digest: ${options.expectedHash}`)
      }
    }
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath, digestMismatch}
}

export async function downloadArtifactInternal(
  artifactId: number,
  options?: DownloadArtifactOptions
): Promise<DownloadArtifactResponse> {
  const downloadPath = await resolveOrCreateDirectory(options?.path)

  const artifactClient = internalArtifactTwirpClient()

  let digestMismatch = false

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    idFilter: Int64Value.create({value: artifactId.toString()})
  }

  const {artifacts} = await artifactClient.ListArtifacts(listReq)

  if (artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`
    )
  }

  if (artifacts.length > 1) {
    core.warning('Multiple artifacts found, defaulting to first.')
  }

  const signedReq: GetSignedArtifactURLRequest = {
    workflowRunBackendId: artifacts[0].workflowRunBackendId,
    workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
    name: artifacts[0].name
  }

  const {signedUrl} = await artifactClient.GetSignedArtifactURL(signedReq)

  core.info(
    `Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`
  )

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    const extractResponse = await streamExtract(signedUrl, downloadPath)
    core.info(`Artifact download completed successfully.`)
    if (options?.expectedHash) {
      if (options?.expectedHash !== extractResponse.sha256Digest) {
        digestMismatch = true
        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
        core.debug(`Expected digest: ${options.expectedHash}`)
      }
    }
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath, digestMismatch}
}

async function resolveOrCreateDirectory(
  downloadPath = getGitHubWorkspaceDir()
): Promise<string> {
  if (!(await exists(downloadPath))) {
    core.debug(
      `Artifact destination folder does not exist, creating: ${downloadPath}`
    )
    await fs.mkdir(downloadPath, {recursive: true})
  } else {
    core.debug(`Artifact destination folder already exists: ${downloadPath}`)
  }

  return downloadPath
}
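The download path reports rather than enforces integrity: `digestMismatch` is returned, not thrown. A hedged sketch of a caller that treats a mismatch as fatal; the artifact id, repository, and expected digest are invented:

```typescript
// Hypothetical caller of downloadArtifactPublic; values invented.
// The expected hash uses the `sha256:<hex>` form produced by streamExtract.
async function downloadAndVerify(): Promise<void> {
  const {downloadPath, digestMismatch} = await downloadArtifactPublic(
    12345, // artifact id
    'octocat',
    'example-repo',
    process.env.GITHUB_TOKEN as string,
    {path: 'restored', expectedHash: 'sha256:<hex digest from upload>'}
  )
  if (digestMismatch) {
    throw new Error(`Digest mismatch for artifact extracted to ${downloadPath}`)
  }
}
```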
@@ -1,125 +0,0 @@
import {getOctokit} from '@actions/github'
import {retry} from '@octokit/plugin-retry'
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {getRetryOptions} from './retry-options'
import {requestLog} from '@octokit/plugin-request-log'
import {GetArtifactResponse} from '../shared/interfaces'
import {getBackendIdsFromToken} from '../shared/util'
import {getUserAgentString} from '../shared/user-agent'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {ListArtifactsRequest, StringValue, Timestamp} from '../../generated'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function getArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string
): Promise<GetArtifactResponse> {
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  const getArtifactResp = await github.request(
    'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}',
    {
      owner: repositoryOwner,
      repo: repositoryName,
      run_id: workflowRunId,
      name: artifactName
    }
  )

  if (getArtifactResp.status !== 200) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${getArtifactResp.status} (${getArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  if (getArtifactResp.data.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}
        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
    )
  }

  let artifact = getArtifactResp.data.artifacts[0]
  if (getArtifactResp.data.artifacts.length > 1) {
    artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]
    core.debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.id})`
    )
  }

  return {
    artifact: {
      name: artifact.name,
      id: artifact.id,
      size: artifact.size_in_bytes,
      createdAt: artifact.created_at
        ? new Date(artifact.created_at)
        : undefined,
      digest: artifact.digest
    }
  }
}

export async function getArtifactInternal(
  artifactName: string
): Promise<GetArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const req: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const res = await artifactClient.ListArtifacts(req)

  if (res.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}
        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
    )
  }

  let artifact = res.artifacts[0]
  if (res.artifacts.length > 1) {
    artifact = res.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    core.debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  return {
    artifact: {
      name: artifact.name,
      id: Number(artifact.databaseId),
      size: Number(artifact.size),
      createdAt: artifact.createdAt
        ? Timestamp.toDate(artifact.createdAt)
        : undefined,
      digest: artifact.digest?.value
    }
  }
}
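The public lookup leans on the server-side name filter: the `{?name}` segment in the route template expands into a query parameter, so the client itself does no pagination here. A usage sketch with owner, repository, and run id invented:

```typescript
// Hypothetical call; the name filter is applied by the API itself.
async function findBuildOutput(): Promise<void> {
  const {artifact} = await getArtifactPublic(
    'build-output', // artifactName
    987654321, // workflowRunId
    'octocat', // repositoryOwner
    'example-repo', // repositoryName
    process.env.GITHUB_TOKEN as string
  )
  console.log(`found '${artifact.name}' (id ${artifact.id}, ${artifact.size} bytes)`)
}
```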
@@ -1,187 +0,0 @@
import {info, warning, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {ListArtifactsResponse, Artifact} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from './retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {getMaxArtifactListCount} from '../shared/config'
import {ListArtifactsRequest, Timestamp} from '../../generated'

const maximumArtifactCount = getMaxArtifactListCount()
const paginationCount = 100
const maxNumberOfPages = Math.ceil(maximumArtifactCount / paginationCount)

export async function listArtifactsPublic(
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string,
  latest = false
): Promise<ListArtifactsResponse> {
  info(
    `Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`
  )

  let artifacts: Artifact[] = []
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  let currentPageNumber = 1

  const {data: listArtifactResponse} = await github.request(
    'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
    {
      owner: repositoryOwner,
      repo: repositoryName,
      run_id: workflowRunId,
      per_page: paginationCount,
      page: currentPageNumber
    }
  )

  let numberOfPages = Math.ceil(
    listArtifactResponse.total_count / paginationCount
  )
  const totalArtifactCount = listArtifactResponse.total_count
  if (totalArtifactCount > maximumArtifactCount) {
    warning(
      `Workflow run ${workflowRunId} has ${totalArtifactCount} artifacts, exceeding the limit of ${maximumArtifactCount}. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
    )
    numberOfPages = maxNumberOfPages
  }

  // Iterate over the first page
  for (const artifact of listArtifactResponse.artifacts) {
    artifacts.push({
      name: artifact.name,
      id: artifact.id,
      size: artifact.size_in_bytes,
      createdAt: artifact.created_at
        ? new Date(artifact.created_at)
        : undefined,
      digest: (artifact as ArtifactResponse).digest
    })
  }
  // Move to the next page
  currentPageNumber++
  // Iterate over any remaining pages
  for (
    currentPageNumber;
    currentPageNumber <= numberOfPages;
    currentPageNumber++
  ) {
    debug(`Fetching page ${currentPageNumber} of artifact list`)

    const {data: listArtifactResponse} = await github.request(
      'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
      {
        owner: repositoryOwner,
        repo: repositoryName,
        run_id: workflowRunId,
        per_page: paginationCount,
        page: currentPageNumber
      }
    )

    for (const artifact of listArtifactResponse.artifacts) {
      artifacts.push({
        name: artifact.name,
        id: artifact.id,
        size: artifact.size_in_bytes,
        createdAt: artifact.created_at
          ? new Date(artifact.created_at)
          : undefined,
        digest: (artifact as ArtifactResponse).digest
      })
    }
  }

  if (latest) {
    artifacts = filterLatest(artifacts)
  }

  info(`Found ${artifacts.length} artifact(s)`)

  return {
    artifacts
  }
}

export async function listArtifactsInternal(
  latest = false
): Promise<ListArtifactsResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const req: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId
  }

  const res = await artifactClient.ListArtifacts(req)
  let artifacts: Artifact[] = res.artifacts.map(artifact => ({
    name: artifact.name,
    id: Number(artifact.databaseId),
    size: Number(artifact.size),
    createdAt: artifact.createdAt
      ? Timestamp.toDate(artifact.createdAt)
      : undefined,
    digest: artifact.digest?.value
  }))

  if (latest) {
    artifacts = filterLatest(artifacts)
  }

  info(`Found ${artifacts.length} artifact(s)`)

  return {
    artifacts
  }
}

/**
 * This exists so that we don't have to use 'any' when receiving the artifact list from the GitHub API.
 * The digest field is not present in OpenAPI/types at time of writing, which necessitates this change.
 */
interface ArtifactResponse {
  name: string
  id: number
  size_in_bytes: number
  created_at?: string
  digest?: string
}

/**
 * Filters a list of artifacts to only include the latest artifact for each name
 * @param artifacts The artifacts to filter
 * @returns The filtered list of artifacts
 */
function filterLatest(artifacts: Artifact[]): Artifact[] {
  artifacts.sort((a, b) => b.id - a.id)
  const latestArtifacts: Artifact[] = []
  const seenArtifactNames = new Set<string>()
  for (const artifact of artifacts) {
    if (!seenArtifactNames.has(artifact.name)) {
      latestArtifacts.push(artifact)
      seenArtifactNames.add(artifact.name)
    }
  }
  return latestArtifacts
}
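`filterLatest` depends on the descending id sort: after sorting, the first occurrence of each name is the newest one, and the `Set` drops the rest. In miniature, with invented sample data:

```typescript
// Sketch of filterLatest on invented data.
const sample = [
  {name: 'logs', id: 1, size: 10},
  {name: 'logs', id: 3, size: 12},
  {name: 'coverage', id: 2, size: 99}
]
// After the descending sort the ids are [3, 2, 1]; the second 'logs'
// entry (id 1) is skipped because its name was already seen.
// filterLatest(sample) keeps: logs (id 3) and coverage (id 2).
```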
@@ -1,48 +0,0 @@
import * as core from '@actions/core'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {RequestRequestOptions} from '@octokit/types'

export type RetryOptions = {
  doNotRetry?: number[]
  enabled?: boolean
}

// Defaults for fetching artifacts
const defaultMaxRetryNumber = 5
const defaultExemptStatusCodes = [400, 401, 403, 404, 422] // https://github.com/octokit/plugin-retry.js/blob/9a2443746c350b3beedec35cf26e197ea318a261/src/index.ts#L14

export function getRetryOptions(
  defaultOptions: OctokitOptions,
  retries: number = defaultMaxRetryNumber,
  exemptStatusCodes: number[] = defaultExemptStatusCodes
): [RetryOptions, RequestRequestOptions | undefined] {
  if (retries <= 0) {
    return [{enabled: false}, defaultOptions.request]
  }

  const retryOptions: RetryOptions = {
    enabled: true
  }

  if (exemptStatusCodes.length > 0) {
    retryOptions.doNotRetry = exemptStatusCodes
  }

  // The GitHub type has some defaults for `options.request`
  // see: https://github.com/actions/toolkit/blob/4fbc5c941a57249b19562015edbd72add14be93d/packages/github/src/utils.ts#L15
  // We pass these in here so they are not overridden.
  const requestOptions: RequestRequestOptions = {
    ...defaultOptions.request,
    retries
  }

  core.debug(
    `GitHub client configured with: (retries: ${
      requestOptions.retries
    }, retry-exempt-status-code: ${
      retryOptions.doNotRetry ?? 'octokit default: [400, 401, 403, 404, 422]'
    })`
  )

  return [retryOptions, requestOptions]
}
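Concretely, the defaults above produce a tuple that can be spread straight into the Octokit options, as the callers earlier in this comparison do. A sketch of what a call returns:

```typescript
// Sketch: with the defaults, getRetryOptions(defaultGitHubOptions) yields
//   retryOptions   -> {enabled: true, doNotRetry: [400, 401, 403, 404, 422]}
//   requestOptions -> {...defaultGitHubOptions.request, retries: 5}
// and passing retries = 0 disables retries entirely:
//   getRetryOptions(opts, 0) -> [{enabled: false}, opts.request]
const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)
```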
@@ -1,198 +0,0 @@
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
import {info, debug} from '@actions/core'
import {ArtifactServiceClientJSON} from '../../generated'
import {getResultsServiceUrl, getRuntimeToken} from './config'
import {getUserAgentString} from './user-agent'
import {NetworkError, UsageError} from './errors'
import {maskSecretUrls} from './util'

// The twirp http client must implement this interface
interface Rpc {
  request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array>
}

class ArtifactHttpClient implements Rpc {
  private httpClient: HttpClient
  private baseUrl: string
  private maxAttempts = 5
  private baseRetryIntervalMilliseconds = 3000
  private retryMultiplier = 1.5

  constructor(
    userAgent: string,
    maxAttempts?: number,
    baseRetryIntervalMilliseconds?: number,
    retryMultiplier?: number
  ) {
    const token = getRuntimeToken()
    this.baseUrl = getResultsServiceUrl()
    if (maxAttempts) {
      this.maxAttempts = maxAttempts
    }
    if (baseRetryIntervalMilliseconds) {
      this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
    }
    if (retryMultiplier) {
      this.retryMultiplier = retryMultiplier
    }

    this.httpClient = new HttpClient(userAgent, [
      new BearerCredentialHandler(token)
    ])
  }

  // This function satisfies the Rpc interface. It is compatible with the
  // JSON generated client.
  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
    debug(`[Request] ${method} ${url}`)
    const headers = {
      'Content-Type': contentType
    }
    try {
      const {body} = await this.retryableRequest(async () =>
        this.httpClient.post(url, JSON.stringify(data), headers)
      )

      return body
    } catch (error) {
      throw new Error(`Failed to ${method}: ${error.message}`)
    }
  }

  async retryableRequest(
    operation: () => Promise<HttpClientResponse>
  ): Promise<{response: HttpClientResponse; body: object}> {
    let attempt = 0
    let errorMessage = ''
    let rawBody = ''
    while (attempt < this.maxAttempts) {
      let isRetryable = false

      try {
        const response = await operation()
        const statusCode = response.message.statusCode
        rawBody = await response.readBody()
        debug(`[Response] - ${response.message.statusCode}`)
        debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
        const body = JSON.parse(rawBody)
        maskSecretUrls(body)
        debug(`Body: ${JSON.stringify(body, null, 2)}`)
        if (this.isSuccessStatusCode(statusCode)) {
          return {response, body}
        }
        isRetryable = this.isRetryableHttpStatusCode(statusCode)
        errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
        if (body.msg) {
          if (UsageError.isUsageErrorMessage(body.msg)) {
            throw new UsageError()
          }

          errorMessage = `${errorMessage}: ${body.msg}`
        }
      } catch (error) {
        if (error instanceof SyntaxError) {
          debug(`Raw Body: ${rawBody}`)
        }

        if (error instanceof UsageError) {
          throw error
        }

        if (NetworkError.isNetworkErrorCode(error?.code)) {
          throw new NetworkError(error?.code)
        }

        isRetryable = true
        errorMessage = error.message
      }

      if (!isRetryable) {
        throw new Error(`Received non-retryable error: ${errorMessage}`)
      }

      if (attempt + 1 === this.maxAttempts) {
        throw new Error(
          `Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
        )
      }

      const retryTimeMilliseconds =
        this.getExponentialRetryTimeMilliseconds(attempt)
      info(
        `Attempt ${attempt + 1} of ${
          this.maxAttempts
        } failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
      )
      await this.sleep(retryTimeMilliseconds)
      attempt++
    }

    throw new Error(`Request failed`)
  }

  isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) return false
    return statusCode >= 200 && statusCode < 300
  }

  isRetryableHttpStatusCode(statusCode?: number): boolean {
    if (!statusCode) return false

    const retryableStatusCodes = [
      HttpCodes.BadGateway,
      HttpCodes.GatewayTimeout,
      HttpCodes.InternalServerError,
      HttpCodes.ServiceUnavailable,
      HttpCodes.TooManyRequests
    ]

    return retryableStatusCodes.includes(statusCode)
  }

  async sleep(milliseconds: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, milliseconds))
  }

  getExponentialRetryTimeMilliseconds(attempt: number): number {
    if (attempt < 0) {
      throw new Error('attempt should be a positive integer')
    }

    if (attempt === 0) {
      return this.baseRetryIntervalMilliseconds
    }

    const minTime =
      this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
    const maxTime = minTime * this.retryMultiplier

    // returns a random number between minTime and maxTime (exclusive)
    return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
  }
}

export function internalArtifactTwirpClient(options?: {
  maxAttempts?: number
  retryIntervalMs?: number
  retryMultiplier?: number
}): ArtifactServiceClientJSON {
  const client = new ArtifactHttpClient(
    getUserAgentString(),
    options?.maxAttempts,
    options?.retryIntervalMs,
    options?.retryMultiplier
  )
  return new ArtifactServiceClientJSON(client)
}
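The factory hides the transport entirely; callers only see the JSON client's typed methods, with auth, retries, and URL masking handled by the `ArtifactHttpClient` above. A hedged usage sketch, with the option values invented and the backend ids supplied by `getBackendIdsFromToken()` as in the files above:

```typescript
// Hypothetical caller; option values invented for illustration.
async function countRunArtifacts(
  workflowRunBackendId: string,
  workflowJobRunBackendId: string
): Promise<number> {
  const client = internalArtifactTwirpClient({
    maxAttempts: 3,
    retryIntervalMs: 1000,
    retryMultiplier: 2
  })
  const res = await client.ListArtifacts({
    workflowRunBackendId,
    workflowJobRunBackendId
  })
  return res.artifacts.length
}
```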
@ -1,115 +0,0 @@
import os from 'os'
import {info} from '@actions/core'

// Used for controlling the highWaterMark value of the zip that is being streamed
// The same value is used as the chunk size that is used during upload to blob storage
export function getUploadChunkSize(): number {
  return 8 * 1024 * 1024 // 8 MB Chunks
}

export function getRuntimeToken(): string {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
  }
  return token
}

export function getResultsServiceUrl(): string {
  const resultsUrl = process.env['ACTIONS_RESULTS_URL']
  if (!resultsUrl) {
    throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable')
  }

  return new URL(resultsUrl).origin
}

export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')

  return !isGitHubHost && !isGheHost && !isLocalHost
}

export function getGitHubWorkspaceDir(): string {
  const ghWorkspaceDir = process.env['GITHUB_WORKSPACE']
  if (!ghWorkspaceDir) {
    throw new Error('Unable to get the GITHUB_WORKSPACE env variable')
  }
  return ghWorkspaceDir
}

// The maximum value of concurrency is 300.
// This value can be changed with ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY variable.
export function getConcurrency(): number {
  const numCPUs = os.cpus().length
  let concurrencyCap = 32

  if (numCPUs > 4) {
    const concurrency = 16 * numCPUs
    concurrencyCap = concurrency > 300 ? 300 : concurrency
  }

  const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
  if (concurrencyOverride) {
    const concurrency = parseInt(concurrencyOverride)
    if (isNaN(concurrency) || concurrency < 1) {
      throw new Error(
        'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
      )
    }

    if (concurrency < concurrencyCap) {
      info(
        `Set concurrency based on the value set in ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY.`
      )
      return concurrency
    }

    info(
      `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Set it to the maximum value allowed.`
    )
    return concurrencyCap
  }

  // default concurrency to 5
  return 5
}

export function getUploadChunkTimeout(): number {
  const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
  if (!timeoutVar) {
    return 300000 // 5 minutes
  }

  const timeout = parseInt(timeoutVar)
  if (isNaN(timeout)) {
    throw new Error(
      'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
    )
  }

  return timeout
}

// This value can be changed with ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT variable.
// Defaults to 1000 as a safeguard for rate limiting.
export function getMaxArtifactListCount(): number {
  const maxCountVar =
    process.env['ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT'] || '1000'

  const maxCount = parseInt(maxCountVar)
  if (isNaN(maxCount) || maxCount < 1) {
    throw new Error(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  }

  return maxCount
}
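To summarize getConcurrency: without an override it returns 5; a valid override is honored up to a cap of 32 on 4 or fewer CPUs, or min(16 × numCPUs, 300) on larger machines. A hypothetical usage sketch (the relative import path and core count are assumptions):

// Hypothetical: on a 16-core runner the cap is min(16 * 16, 300) = 256.
import {getConcurrency} from './config' // path assumed

process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY'] = '64'
console.log(getConcurrency()) // 64: the override is valid and below the cap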
@ -1,72 +0,0 @@
export class FilesNotFoundError extends Error {
  files: string[]

  constructor(files: string[] = []) {
    let message = 'No files were found to upload'
    if (files.length > 0) {
      message += `: ${files.join(', ')}`
    }

    super(message)
    this.files = files
    this.name = 'FilesNotFoundError'
  }
}

export class InvalidResponseError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'InvalidResponseError'
  }
}

export class ArtifactNotFoundError extends Error {
  constructor(message = 'Artifact not found') {
    super(message)
    this.name = 'ArtifactNotFoundError'
  }
}

export class GHESNotSupportedError extends Error {
  constructor(
    message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.'
  ) {
    super(message)
    this.name = 'GHESNotSupportedError'
  }
}

export class NetworkError extends Error {
  code: string

  constructor(code: string) {
    const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
    super(message)
    this.code = code
    this.name = 'NetworkError'
  }

  static isNetworkErrorCode = (code?: string): boolean => {
    if (!code) return false
    return [
      'ECONNRESET',
      'ENOTFOUND',
      'ETIMEDOUT',
      'ECONNREFUSED',
      'EHOSTUNREACH'
    ].includes(code)
  }
}

export class UsageError extends Error {
  constructor() {
    const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
    super(message)
    this.name = 'UsageError'
  }

  static isUsageErrorMessage = (msg?: string): boolean => {
    if (!msg) return false
    return msg.includes('insufficient usage')
  }
}
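Because each class sets a distinct name and type, callers can branch on them with instanceof. A minimal sketch (the import path and the wrapped operation are assumptions):

import {ArtifactNotFoundError, NetworkError, UsageError} from './errors' // path assumed

async function handle(run: () => Promise<void>): Promise<void> {
  try {
    await run()
  } catch (error) {
    if (error instanceof UsageError) {
      // storage quota exhausted: retrying will not help
    } else if (error instanceof NetworkError) {
      // error.code carries the originating ECONNRESET/ENOTFOUND/... code
    } else if (error instanceof ArtifactNotFoundError) {
      // lookup failures are distinguishable from transport failures
    } else {
      throw error
    }
  }
}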
@ -1,188 +0,0 @@
/**
 * Response from the server when an artifact is uploaded
 */
export interface UploadArtifactResponse {
  /**
   * Total size of the artifact in bytes. Not provided if no artifact was uploaded
   */
  size?: number

  /**
   * The id of the artifact that was created. Not provided if no artifact was uploaded
   * This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
   */
  id?: number

  /**
   * The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
   */
  digest?: string
}

/**
 * Options for uploading an artifact
 */
export interface UploadArtifactOptions {
  /**
   * Duration after which artifact will expire in days.
   *
   * By default artifact expires after 90 days:
   * https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
   *
   * Use this option to override the default expiry.
   *
   * Min value: 1
   * Max value: 90 unless changed by repository setting
   *
   * If this is set to a greater value than the retention settings allowed, the retention on artifacts
   * will be reduced to match the max value allowed on server, and the upload process will continue. An
   * input of 0 assumes default retention setting.
   */
  retentionDays?: number
  /**
   * The level of compression for Zlib to be applied to the artifact archive.
   * The value can range from 0 to 9:
   * - 0: No compression
   * - 1: Best speed
   * - 6: Default compression (same as GNU Gzip)
   * - 9: Best compression
   * Higher levels will result in better compression, but will take longer to complete.
   * For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
   */
  compressionLevel?: number
}

/**
 * Response from the server when getting an artifact
 */
export interface GetArtifactResponse {
  /**
   * Metadata about the artifact that was found
   */
  artifact: Artifact
}

/**
 * Options for listing artifacts
 */
export interface ListArtifactsOptions {
  /**
   * Filter the workflow run's artifacts to the latest by name
   * In the case of reruns, this can be useful to avoid duplicates
   */
  latest?: boolean
}

/**
 * Response from the server when listing artifacts
 */
export interface ListArtifactsResponse {
  /**
   * A list of artifacts that were found
   */
  artifacts: Artifact[]
}

/**
 * Response from the server when downloading an artifact
 */
export interface DownloadArtifactResponse {
  /**
   * The path where the artifact was downloaded to
   */
  downloadPath?: string

  /**
   * Returns true if the digest of the downloaded artifact does not match the expected hash
   */
  digestMismatch?: boolean
}

/**
 * Options for downloading an artifact
 */
export interface DownloadArtifactOptions {
  /**
   * Denotes where the artifact will be downloaded to. If not specified then the artifact is downloaded to GITHUB_WORKSPACE
   */
  path?: string

  /**
   * The hash that was computed for the artifact during upload. If provided, the outcome of the download
   * will provide a digestMismatch property indicating whether the hash of the downloaded artifact
   * matches the expected hash.
   */
  expectedHash?: string
}

export interface StreamExtractResponse {
  /**
   * The SHA256 hash of the downloaded file
   */
  sha256Digest?: string
}

/**
 * An Actions Artifact
 */
export interface Artifact {
  /**
   * The name of the artifact
   */
  name: string

  /**
   * The ID of the artifact
   */
  id: number

  /**
   * The size of the artifact in bytes
   */
  size: number

  /**
   * The time when the artifact was created
   */
  createdAt?: Date

  /**
   * The digest of the artifact, computed at time of upload.
   */
  digest?: string
}

// FindOptions are for fetching Artifact(s) out of the scope of the current run.
export interface FindOptions {
  /**
   * The criteria for finding Artifact(s) out of the scope of the current run.
   */
  findBy?: {
    /**
     * Token with actions:read permissions
     */
    token: string
    /**
     * WorkflowRun of the artifact(s) to lookup
     */
    workflowRunId: number
    /**
     * Repository owner (e.g. 'actions')
     */
    repositoryOwner: string
    /**
     * Repository name (e.g. 'toolkit')
     */
    repositoryName: string
  }
}

/**
 * Response from the server when deleting an artifact
 */
export interface DeleteArtifactResponse {
  /**
   * The id of the artifact that was deleted
   */
  id: number
}
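A minimal sketch of how these option shapes are meant to be populated, assuming the interfaces above are in scope; all values are illustrative:

// Illustrative values only.
const uploadOptions: UploadArtifactOptions = {
  retentionDays: 7, // expire after a week instead of the 90-day default
  compressionLevel: 0 // store-only: fastest for already-compressed content
}

const findOptions: FindOptions = {
  findBy: {
    token: process.env.GITHUB_TOKEN ?? '', // token with actions:read
    workflowRunId: 123456789, // hypothetical run id
    repositoryOwner: 'actions',
    repositoryName: 'toolkit'
  }
}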
@ -1,9 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')

/**
 * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
 */
export function getUserAgentString(): string {
  return `@actions/artifact-${packageJson.version}`
}
@ -1,145 +0,0 @@
import * as core from '@actions/core'
import {getRuntimeToken} from './config'
import jwt_decode from 'jwt-decode'
import {debug, setSecret} from '@actions/core'

export interface BackendIds {
  workflowRunBackendId: string
  workflowJobRunBackendId: string
}

interface ActionsToken {
  scp: string
}

const InvalidJwtError = new Error(
  'Failed to get backend IDs: The provided JWT token is invalid and/or missing claims'
)

// uses the JWT token claims to get the
// workflow run and workflow job run backend ids
export function getBackendIdsFromToken(): BackendIds {
  const token = getRuntimeToken()
  const decoded = jwt_decode<ActionsToken>(token)
  if (!decoded.scp) {
    throw InvalidJwtError
  }

  /*
   * example decoded:
   * {
   *   scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
   * }
   */

  const scpParts = decoded.scp.split(' ')
  if (scpParts.length === 0) {
    throw InvalidJwtError
  }
  /*
   * example scpParts:
   * ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
   */

  for (const scopes of scpParts) {
    const scopeParts = scopes.split(':')
    if (scopeParts?.[0] !== 'Actions.Results') {
      // not the Actions.Results scope
      continue
    }

    /*
     * example scopeParts:
     * ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
     */
    if (scopeParts.length !== 3) {
      // missing expected number of claims
      throw InvalidJwtError
    }

    const ids = {
      workflowRunBackendId: scopeParts[1],
      workflowJobRunBackendId: scopeParts[2]
    }

    core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`)
    core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`)

    return ids
  }

  throw InvalidJwtError
}

/**
 * Masks the `sig` parameter in a URL and sets it as a secret.
 *
 * @param url - The URL containing the signature parameter to mask
 * @remarks
 * This function attempts to parse the provided URL and identify the 'sig' query parameter.
 * If found, it registers both the raw and URL-encoded signature values as secrets using
 * the Actions `setSecret` API, which prevents them from being displayed in logs.
 *
 * The function handles errors gracefully if URL parsing fails, logging them as debug messages.
 *
 * @example
 * ```typescript
 * // Mask a signature in an Azure SAS token URL
 * maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
 * ```
 */
export function maskSigUrl(url: string): void {
  if (!url) return
  try {
    const parsedUrl = new URL(url)
    const signature = parsedUrl.searchParams.get('sig')
    if (signature) {
      setSecret(signature)
      setSecret(encodeURIComponent(signature))
    }
  } catch (error) {
    debug(
      `Failed to parse URL: ${url} ${
        error instanceof Error ? error.message : String(error)
      }`
    )
  }
}

/**
 * Masks sensitive information in URLs containing signature parameters.
 * Currently supports masking 'sig' parameters in the 'signed_upload_url'
 * and 'signed_url' properties of the provided object.
 *
 * @param body - The object that may contain signed URLs to mask
 * @remarks
 * This function extracts URLs from the object properties and calls maskSigUrl
 * on each one to redact sensitive signature information. The function doesn't
 * modify the original object; it only marks the signatures as secrets for
 * logging purposes.
 *
 * @example
 * ```typescript
 * const responseBody = {
 *   signed_upload_url: 'https://example.com?sig=abc123',
 *   signed_url: 'https://example.com?sig=def456'
 * };
 * maskSecretUrls(responseBody);
 * ```
 */
export function maskSecretUrls(body: Record<string, unknown> | null): void {
  if (typeof body !== 'object' || body === null) {
    debug('body is not an object or is null')
    return
  }

  if (
    'signed_upload_url' in body &&
    typeof body.signed_upload_url === 'string'
  ) {
    maskSigUrl(body.signed_upload_url)
  }
  if ('signed_url' in body && typeof body.signed_url === 'string') {
    maskSigUrl(body.signed_url)
  }
}
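The scp-claim parsing above reduces to a couple of string splits. A standalone sketch, using the example claim from the comments in the source:

// Standalone sketch of the scp parsing above, with the documented example claim.
const scp =
  'Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774'

const resultsScope = scp
  .split(' ')
  .map(scope => scope.split(':'))
  .find(parts => parts[0] === 'Actions.Results' && parts.length === 3)

if (resultsScope) {
  const [, workflowRunBackendId, workflowJobRunBackendId] = resultsScope
  console.log(workflowRunBackendId, workflowJobRunBackendId)
}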
@ -1,112 +0,0 @@
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import {TransferProgressEvent} from '@azure/core-http-compat'
import {ZipUploadStream} from './zip'
import {
  getUploadChunkSize,
  getConcurrency,
  getUploadChunkTimeout
} from '../shared/config'
import * as core from '@actions/core'
import * as crypto from 'crypto'
import * as stream from 'stream'
import {NetworkError} from '../shared/errors'

export interface BlobUploadResponse {
  /**
   * The total reported upload size in bytes. Empty if the upload failed
   */
  uploadSize?: number

  /**
   * The SHA256 hash of the uploaded file. Empty if the upload failed
   */
  sha256Hash?: string
}

export async function uploadZipToBlobStorage(
  authenticatedUploadURL: string,
  zipUploadStream: ZipUploadStream
): Promise<BlobUploadResponse> {
  let uploadByteCount = 0
  let lastProgressTime = Date.now()
  const abortController = new AbortController()

  const chunkTimer = async (interval: number): Promise<void> =>
    new Promise((resolve, reject) => {
      const timer = setInterval(() => {
        if (Date.now() - lastProgressTime > interval) {
          reject(new Error('Upload progress stalled.'))
        }
      }, interval)

      abortController.signal.addEventListener('abort', () => {
        clearInterval(timer)
        resolve()
      })
    })

  const maxConcurrency = getConcurrency()
  const bufferSize = getUploadChunkSize()
  const blobClient = new BlobClient(authenticatedUploadURL)
  const blockBlobClient = blobClient.getBlockBlobClient()

  core.debug(
    `Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`
  )

  const uploadCallback = (progress: TransferProgressEvent): void => {
    core.info(`Uploaded bytes ${progress.loadedBytes}`)
    uploadByteCount = progress.loadedBytes
    lastProgressTime = Date.now()
  }

  const options: BlockBlobUploadStreamOptions = {
    blobHTTPHeaders: {blobContentType: 'zip'},
    onProgress: uploadCallback,
    abortSignal: abortController.signal
  }

  let sha256Hash: string | undefined = undefined
  const uploadStream = new stream.PassThrough()
  const hashStream = crypto.createHash('sha256')

  zipUploadStream.pipe(uploadStream) // This stream is used for the upload
  zipUploadStream.pipe(hashStream).setEncoding('hex') // This stream is used to compute the SHA256 hash of the zip content for an integrity check

  core.info('Beginning upload of artifact content to blob storage')

  try {
    await Promise.race([
      blockBlobClient.uploadStream(
        uploadStream,
        bufferSize,
        maxConcurrency,
        options
      ),
      chunkTimer(getUploadChunkTimeout())
    ])
  } catch (error) {
    if (NetworkError.isNetworkErrorCode(error?.code)) {
      throw new NetworkError(error?.code)
    }
    throw error
  } finally {
    abortController.abort()
  }

  core.info('Finished uploading artifact content to blob storage!')

  hashStream.end()
  sha256Hash = hashStream.read() as string
  core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`)

  if (uploadByteCount === 0) {
    core.warning(
      `No data was uploaded to blob storage. Reported upload byte count is 0.`
    )
  }
  return {
    uploadSize: uploadByteCount,
    sha256Hash
  }
}
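The Promise.race/chunkTimer construction above is a generic stall watchdog: the real work races a timer that rejects once no progress callback has fired within the interval. A minimal generic sketch of the same pattern, written here as an assumption-free standalone helper rather than the function above:

// Generic sketch of the stall-watchdog pattern used above.
async function withStallTimeout<T>(
  work: Promise<T>,
  intervalMs: number,
  lastProgress: () => number // returns the timestamp of the last progress event
): Promise<T> {
  const abort = new AbortController()
  const watchdog = new Promise<never>((_, reject) => {
    const timer = setInterval(() => {
      if (Date.now() - lastProgress() > intervalMs) {
        clearInterval(timer)
        reject(new Error('Upload progress stalled.'))
      }
    }, intervalMs)
    abort.signal.addEventListener('abort', () => clearInterval(timer))
  })
  try {
    return await Promise.race([work, watchdog])
  } finally {
    abort.abort() // stop the timer whether work resolved or the watchdog fired
  }
}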
@ -27,11 +27,11 @@ const invalidArtifactNameCharacters = new Map<string, string>([
])

/**
 * Validates the name of the artifact to check to make sure there are no illegal characters
 * Scans the name of the artifact to make sure there are no illegal characters
 */
export function validateArtifactName(name: string): void {
export function checkArtifactName(name: string): void {
  if (!name) {
    throw new Error(`Provided artifact name input during validation is empty`)
    throw new Error(`Artifact name: ${name}, is incorrectly provided`)
  }

  for (const [
@ -40,7 +40,7 @@ export function validateArtifactName(name: string): void {
  ] of invalidArtifactNameCharacters) {
    if (name.includes(invalidCharacterKey)) {
      throw new Error(
        `The artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}
        `Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}

Invalid characters include: ${Array.from(
          invalidArtifactNameCharacters.values()
@ -55,11 +55,11 @@ These characters are not allowed in the artifact name due to limitations with ce
}

/**
 * Validates file paths to check for any illegal characters that can cause problems on different file systems
 * Scans the name of the filePath used to make sure there are no illegal characters
 */
export function validateFilePath(path: string): void {
export function checkArtifactFilePath(path: string): void {
  if (!path) {
    throw new Error(`Provided file path input during validation is empty`)
    throw new Error(`Artifact path: ${path}, is incorrectly provided`)
  }

  for (const [
@ -68,7 +68,7 @@ export function validateFilePath(path: string): void {
  ] of invalidArtifactFilePathCharacters) {
    if (path.includes(invalidCharacterKey)) {
      throw new Error(
        `The path for one of the files in artifact is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}
        `Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}

Invalid characters include: ${Array.from(
          invalidArtifactFilePathCharacters.values()
@ -79,4 +79,4 @@ The following characters are not allowed in files that are uploaded due to limit
      )
    }
  }
}
}
@ -1,34 +0,0 @@
import {Timestamp} from '../../generated'
import * as core from '@actions/core'

export function getExpiration(retentionDays?: number): Timestamp | undefined {
  if (!retentionDays) {
    return undefined
  }

  const maxRetentionDays = getRetentionDays()
  if (maxRetentionDays && maxRetentionDays < retentionDays) {
    core.warning(
      `Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`
    )
    retentionDays = maxRetentionDays
  }

  const expirationDate = new Date()
  expirationDate.setDate(expirationDate.getDate() + retentionDays)

  return Timestamp.fromDate(expirationDate)
}

function getRetentionDays(): number | undefined {
  const retentionDays = process.env['GITHUB_RETENTION_DAYS']
  if (!retentionDays) {
    return undefined
  }
  const days = parseInt(retentionDays)
  if (isNaN(days)) {
    return undefined
  }

  return days
}
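The expiration computation above is plain Date arithmetic; a tiny sketch with an assumed 30-day retention, before conversion to the generated Timestamp type:

// Sketch: a date 30 days out, as getExpiration computes before Timestamp.fromDate.
const retentionDays = 30
const expirationDate = new Date()
expirationDate.setDate(expirationDate.getDate() + retentionDays)
console.log(expirationDate.toISOString()) // 30 days from now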
@ -0,0 +1 @@
hello there! This is from a.txt
@ -0,0 +1 @@
This is from b.txt
@ -0,0 +1,123 @@
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import { TransferProgressEvent } from '@azure/core-http';
import * as a from 'archiver'
import * as fs from 'fs'
import * as stream from 'stream'

const bufferSize = 1024 * 1024 * 8 // 8 MB

// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
export class ZipUploadStream extends stream.Transform {
  constructor(bufferSize: number) {
    super({
      highWaterMark: bufferSize
    })
  }

  _transform(chunk:any, enc:any, cb:any) {
    cb(null, chunk)
  }
}


// for local testing, run this using ts-node testing.ts
export async function test(){
  let sasURL = "paste here"
  sasURL = sasURL.replace("http://devstoreaccount1.blob.codedev.localhost", "http://127.0.0.1:11000/devstoreaccount1")

  const blobClient = new BlobClient(sasURL);
  const zip = a.create('zip', {
    zlib: { level: 9 } // Sets the compression level.
                       // Available options are 0-9
                       // 0 => no compression
                       // 1 => fastest with low compression
                       // 9 => highest compression ratio but the slowest
  });

  // append files that are going to be part of the final zip
  zip.append('this is file 1', { name: 'file1.txt' });
  zip.append('this is file 2', { name: 'file2.txt' });
  zip.append('this is file 1 in a directory', { name: 'dir/file1.txt' });
  zip.append('this is file 2 in a directory', { name: 'dir/file2.txt' });
  zip.append('this is a live demo!!!', { name: 'dir/alive.txt' });
  zip.append(fs.createReadStream('a.txt'), { name: 'dir2/a.txt' })
  zip.append(fs.createReadStream('b.txt'), { name: 'dir2/b.txt' })

  const zipUploadStream = new ZipUploadStream(bufferSize)
  zip.pipe(zipUploadStream)
  zip.finalize();

  console.log("Write high watermark value " + zipUploadStream.writableHighWaterMark)
  console.log("Read high watermark value " + zipUploadStream.readableHighWaterMark)

  // good practice to catch warnings (ie stat failures and other non-blocking errors)
  zip.on('warning', function(err) {
    if (err.code === 'ENOENT') {
      console.log("zip error ENOENT")
    } else {
      console.log("some other warning ")
      console.log(err)
    }
  });

  // good practice to catch this error explicitly
  zip.on('error', function(err) {
    console.log("some error with zip ")
    console.log(err)
  });

  zip.on("progress", function(progress: a.ProgressData) {
    console.log(progress)

    /* This outputs data like this, we could potentially do something with this for even more logging to show the status of the zip creation
    {
      entries: { total: 7, processed: 1 },
      fs: { totalBytes: 0, processedBytes: 0 }
    }
    {
      entries: { total: 7, processed: 2 },
      fs: { totalBytes: 0, processedBytes: 0 }
    }
    */
  })


  // We can add these to debug logging
  zip.on('end', function() {
    console.log("zip ending")
  });
  zip.on('finish', function() {
    console.log("zip finished")
  });

  // Upload options
  const maxBuffers = 5
  const blockBlobClient = blobClient.getBlockBlobClient()

  let uploadByteCount = 0
  var myCallback = function(progress: TransferProgressEvent) {
    console.log("Byte upload count " + progress.loadedBytes)
    uploadByteCount = progress.loadedBytes
  };

  const options: BlockBlobUploadStreamOptions = {
    blobHTTPHeaders: { "blobContentType": "zip" },
    onProgress: myCallback
  }

  // Upload!
  try {
    const aa = await blockBlobClient.uploadStream(
      zipUploadStream,
      bufferSize,
      maxBuffers,
      options
    );
  } catch (error){
    console.log(error)
  }
  console.log("final upload size in bytes is " + uploadByteCount)
}

test()
@ -0,0 +1,32 @@
import { ArtifactHttpClient } from '../../artifact-http-client'
import { ArtifactServiceClientJSON } from '../../../generated/results/api/v1/artifact.twirp'

export async function twirpTest(){
  const artifactClient = new ArtifactHttpClient('@actions/artifact-upload')
  const jsonClient = new ArtifactServiceClientJSON(artifactClient)

  try {
    const createResp = await jsonClient.CreateArtifact({workflowRunBackendId: "ce7f54c7-61c7-4aae-887f-30da475f5f1a", workflowJobRunBackendId: "ca395085-040a-526b-2ce8-bdc85f692774", name: Math.random().toString(), version: 4})

    if (!createResp.ok) {
      console.log("CreateArtifact failed")
      return
    }

    console.log(createResp.signedUploadUrl)

    const finalizeResp = await jsonClient.FinalizeArtifact({workflowRunBackendId: "ce7f54c7-61c7-4aae-887f-30da475f5f1a", workflowJobRunBackendId: "ca395085-040a-526b-2ce8-bdc85f692774", name: Math.random().toString(), size: BigInt(5)})

    if (!finalizeResp.ok) {
      console.log("FinalizeArtifact failed")
      return
    }
  } catch (e) {
    console.log(e)
    return
  }

  console.log("FinalizeArtifact succeeded")
}

twirpTest()
@ -1,116 +1,190 @@
import * as core from '@actions/core'
import {
  UploadArtifactOptions,
  UploadArtifactResponse
} from '../shared/interfaces'
import {getExpiration} from './retention'
import {validateArtifactName} from './path-and-artifact-name-validation'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {
  UploadZipSpecification,
  getUploadZipSpecification,
  validateRootDirectory
} from './upload-zip-specification'
import {getBackendIdsFromToken} from '../shared/util'
import {uploadZipToBlobStorage} from './blob-upload'
import {createZipUploadStream} from './zip'
import {
  CreateArtifactRequest,
  FinalizeArtifactRequest,
  StringValue
} from '../../generated'
import {FilesNotFoundError, InvalidResponseError} from '../shared/errors'
import {checkArtifactName} from './path-and-artifact-name-validation'
import {UploadOptions} from './upload-options'
import {UploadResponse} from './upload-response'
import { UploadSpecification, getUploadSpecification } from './upload-specification'
import { ArtifactHttpClient } from '../artifact-http-client'
import { ArtifactServiceClientJSON } from '../../generated/results/api/v1/artifact.twirp'

import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import { TransferProgressEvent } from '@azure/core-http';
import * as a from 'archiver'
import * as fs from 'fs'
import * as stream from 'stream'

import {getBackendIds, BackendIds} from '../util'

const bufferSize = 1024 * 1024 * 8 // 8 MB

// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
export class ZipUploadStream extends stream.Transform {
  constructor(bufferSize: number) {
    super({
      highWaterMark: bufferSize
    })
  }

  _transform(chunk:any, enc:any, cb:any) {
    cb(null, chunk)
  }
}

export async function uploadArtifact(
  name: string,
  files: string[],
  rootDirectory: string,
  options?: UploadArtifactOptions | undefined
): Promise<UploadArtifactResponse> {
  validateArtifactName(name)
  validateRootDirectory(rootDirectory)
  name: string,
  files: string[],
  rootDirectory: string,
  options?: UploadOptions | undefined
): Promise<UploadResponse> {

  const zipSpecification: UploadZipSpecification[] = getUploadZipSpecification(
    files,
    rootDirectory
  )
  if (zipSpecification.length === 0) {
    throw new FilesNotFoundError(
      zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : []))
  let uploadByteCount = 0

  // Need to keep checking the artifact name
  checkArtifactName(name)

  // Get specification for the files being uploaded
  const uploadSpecification: UploadSpecification[] = getUploadSpecification(
    name,
    rootDirectory,
    files
  )
  }

  // get the IDs needed for the artifact creation
  const backendIds = getBackendIdsFromToken()
  if (uploadSpecification.length === 0) {
    core.warning(`No files found that can be uploaded`)
  } else {
    const artifactClient = new ArtifactHttpClient('@actions/artifact-upload')
    const jsonClient = new ArtifactServiceClientJSON(artifactClient)

  // create the artifact client
  const artifactClient = internalArtifactTwirpClient()
    const backendIDs: BackendIds = getBackendIds()

  // create the artifact
  const createArtifactReq: CreateArtifactRequest = {
    workflowRunBackendId: backendIds.workflowRunBackendId,
    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
    name,
    version: 4
  }
    console.log("workflow Run Backend ID " + backendIDs.workflowRunBackendId)
    console.log("workflow Job Run Backend ID " + backendIDs.workflowJobRunBackendId)

  // if there is a retention period, add it to the request
  const expiresAt = getExpiration(options?.retentionDays)
  if (expiresAt) {
    createArtifactReq.expiresAt = expiresAt
  }
    console.log("hello Rob!!")

  const createArtifactResp =
    await artifactClient.CreateArtifact(createArtifactReq)
  if (!createArtifactResp.ok) {
    throw new InvalidResponseError(
      'CreateArtifact: response from backend was not ok'
    )
  }
    try {

  const zipUploadStream = await createZipUploadStream(
    zipSpecification,
    options?.compressionLevel
  )

  // Upload zip to blob storage
  const uploadResult = await uploadZipToBlobStorage(
    createArtifactResp.signedUploadUrl,
    zipUploadStream
  )

  // finalize the artifact
  const finalizeArtifactReq: FinalizeArtifactRequest = {
    workflowRunBackendId: backendIds.workflowRunBackendId,
    workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
    name,
    size: uploadResult.uploadSize ? uploadResult.uploadSize.toString() : '0'
  }

  if (uploadResult.sha256Hash) {
    finalizeArtifactReq.hash = StringValue.create({
      value: `sha256:${uploadResult.sha256Hash}`
    })
  }
      const createResp = await jsonClient.CreateArtifact({workflowRunBackendId: backendIDs.workflowRunBackendId, workflowJobRunBackendId: backendIDs.workflowJobRunBackendId, name: name, version: 4})

      if (!createResp.ok) {
        core.error("CreateArtifact failed")
      }

      console.log(createResp.signedUploadUrl)

      // Blob upload start

  core.info(`Finalizing artifact upload`)
      const blobClient = new BlobClient(createResp.signedUploadUrl);
      const zip = a.create('zip', {
        zlib: { level: 9 } // Sets the compression level.
                           // Available options are 0-9
                           // 0 => no compression
                           // 1 => fastest with low compression
                           // 9 => highest compression ratio but the slowest
      });

  const finalizeArtifactResp =
    await artifactClient.FinalizeArtifact(finalizeArtifactReq)
  if (!finalizeArtifactResp.ok) {
    throw new InvalidResponseError(
      'FinalizeArtifact: response from backend was not ok'
    )
  }
      console.log("file specification")
      for (const file of uploadSpecification) {
        console.log("uploadPath:" + file.uploadFilePath + " absolute:" + file.absoluteFilePath)
        zip.append(fs.createReadStream(file.absoluteFilePath), {name: file.uploadFilePath})
      }

  const artifactId = BigInt(finalizeArtifactResp.artifactId)
  core.info(
    `Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`
  )
      const zipUploadStream = new ZipUploadStream(bufferSize)
      zip.pipe(zipUploadStream)
      zip.finalize();

  return {
    size: uploadResult.uploadSize,
    digest: uploadResult.sha256Hash,
    id: Number(artifactId)
  }
}
      console.log("Write high watermark value " + zipUploadStream.writableHighWaterMark)
      console.log("Read high watermark value " + zipUploadStream.readableHighWaterMark)

      // good practice to catch warnings (ie stat failures and other non-blocking errors)
      zip.on('warning', function(err) {
        if (err.code === 'ENOENT') {
          console.log("zip error ENOENT")
        } else {
          console.log("some other warning ")
          console.log(err)
        }
      });

      // good practice to catch this error explicitly
      zip.on('error', function(err) {
        console.log("some error with zip ")
        console.log(err)
      });

      zip.on("progress", function(progress: a.ProgressData) {
        console.log(progress)

        /* This outputs data like this, we could potentially do something with this for even more logging to show the status of the zip creation
        {
          entries: { total: 7, processed: 1 },
          fs: { totalBytes: 0, processedBytes: 0 }
        }
        {
          entries: { total: 7, processed: 2 },
          fs: { totalBytes: 0, processedBytes: 0 }
        }
        */
      })


      // We can add these to debug logging
      zip.on('end', function() {
        console.log("zip ending")
      });
      zip.on('finish', function() {
        console.log("zip finished")
      });

      // Upload options
      const maxBuffers = 5
      const blockBlobClient = blobClient.getBlockBlobClient()

      var myCallback = function(progress: TransferProgressEvent) {
        console.log("Byte upload count " + progress.loadedBytes)
        uploadByteCount = progress.loadedBytes
      };

      const options: BlockBlobUploadStreamOptions = {
        blobHTTPHeaders: { "blobContentType": "zip" },
        onProgress: myCallback
      }

      // Upload!
      try {
        await blockBlobClient.uploadStream(
          zipUploadStream,
          bufferSize,
          maxBuffers,
          options
        );
      } catch (error){
        console.log(error)
      }
      console.log("final upload size in bytes is " + uploadByteCount)

      console.log("we are done with the blob upload!")
      // Blob upload end

      const finalizeResp = await jsonClient.FinalizeArtifact({workflowRunBackendId: backendIDs.workflowRunBackendId, workflowJobRunBackendId: backendIDs.workflowJobRunBackendId, name: name, size: BigInt(5)})

      if (!finalizeResp.ok) {
        core.error("FinalizeArtifact failed")
      }
    } catch (e) {
      console.log(e)
    }

    console.log("FinalizeArtifact succeeded")
  }

  const uploadResponse: UploadResponse = {
    artifactName: name,
    size: uploadByteCount
  }

  return uploadResponse
}
@ -0,0 +1,18 @@
export interface UploadOptions {
  /**
   * Duration after which artifact will expire in days.
   *
   * By default artifact expires after 90 days:
   * https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
   *
   * Use this option to override the default expiry.
   *
   * Min value: 1
   * Max value: 90 unless changed by repository setting
   *
   * If this is set to a greater value than the retention settings allowed, the retention on artifacts
   * will be reduced to match the max value allowed on server, and the upload process will continue. An
   * input of 0 assumes default retention setting.
   */
  retentionDays?: number
}
@ -0,0 +1,11 @@
export interface UploadResponse {
  /**
   * The name of the artifact that was uploaded
   */
  artifactName: string

  /**
   * Total size of the artifact that was uploaded in bytes
   */
  size: number
}
@ -0,0 +1,94 @@
import * as fs from 'fs'
import {debug} from '@actions/core'
import {join, normalize, resolve} from 'path'
import {checkArtifactFilePath} from './path-and-artifact-name-validation'

export interface UploadSpecification {
  absoluteFilePath: string
  uploadFilePath: string
}

/**
 * Creates a specification that describes how each file that is part of the artifact will be uploaded
 * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
 * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
 * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
 */
export function getUploadSpecification(
  artifactName: string,
  rootDirectory: string,
  artifactFiles: string[]
): UploadSpecification[] {
  // artifact name was checked earlier on, no need to check again
  const specifications: UploadSpecification[] = []

  if (!fs.existsSync(rootDirectory)) {
    throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`)
  }
  if (!fs.statSync(rootDirectory).isDirectory()) {
    throw new Error(
      `Provided rootDirectory ${rootDirectory} is not a valid directory`
    )
  }
  // Normalize and resolve, this allows for either absolute or relative paths to be used
  rootDirectory = normalize(rootDirectory)
  rootDirectory = resolve(rootDirectory)

  /*
    Example to demonstrate behavior

    Input:
      artifactName: my-artifact
      rootDirectory: '/home/user/files/plz-upload'
      artifactFiles: [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ]

    Output:
      specifications: [
        ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'],
        ['/home/user/files/plz-upload/file2.txt', 'my-artifact/file2.txt'],
        ['/home/user/files/plz-upload/dir/file3.txt', 'my-artifact/dir/file3.txt']
      ]
  */
  for (let file of artifactFiles) {
    if (!fs.existsSync(file)) {
      throw new Error(`File ${file} does not exist`)
    }
    if (!fs.statSync(file).isDirectory()) {
      // Normalize and resolve, this allows for either absolute or relative paths to be used
      file = normalize(file)
      file = resolve(file)
      if (!file.startsWith(rootDirectory)) {
        throw new Error(
          `The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`
        )
      }

      // Check for forbidden characters in file paths that will be rejected during upload
      const uploadPath = file.replace(rootDirectory, '')
      checkArtifactFilePath(uploadPath)

      /*
        uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
        be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts

        path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt'
          join('artifact-name/', 'file-to-upload.txt')
          join('artifact-name/', '/file-to-upload.txt')
          join('artifact-name', 'file-to-upload.txt')
          join('artifact-name', '/file-to-upload.txt')
      */
      specifications.push({
        absoluteFilePath: file,
        uploadFilePath: join(artifactName, uploadPath)
      })
    } else {
      // Directories are rejected by the server during upload
      debug(`Removing ${file} from rawSearchResults because it is a directory`)
    }
  }
  return specifications
}
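The path.join normalization the comment above relies on can be checked directly; each of these forms yields the same container path (with the platform's separator):

import {join} from 'path'

// All four produce 'artifact-name/file-to-upload.txt' on POSIX systems.
console.log(join('artifact-name/', 'file-to-upload.txt'))
console.log(join('artifact-name/', '/file-to-upload.txt'))
console.log(join('artifact-name', 'file-to-upload.txt'))
console.log(join('artifact-name', '/file-to-upload.txt'))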
@ -1,120 +0,0 @@
import * as fs from 'fs'
import {info} from '@actions/core'
import {normalize, resolve} from 'path'
import {validateFilePath} from './path-and-artifact-name-validation'

export interface UploadZipSpecification {
  /**
   * An absolute source path that points to a file that will be added to a zip. Null if creating a new directory
   */
  sourcePath: string | null

  /**
   * The destination path in a zip for a file
   */
  destinationPath: string

  /**
   * Information about the file
   * https://nodejs.org/api/fs.html#class-fsstats
   */
  stats: fs.Stats
}

/**
 * Checks if a root directory exists and is valid
 * @param rootDirectory an absolute root directory path common to all input files that will be trimmed from the final zip structure
 */
export function validateRootDirectory(rootDirectory: string): void {
  if (!fs.existsSync(rootDirectory)) {
    throw new Error(
      `The provided rootDirectory ${rootDirectory} does not exist`
    )
  }
  if (!fs.statSync(rootDirectory).isDirectory()) {
    throw new Error(
      `The provided rootDirectory ${rootDirectory} is not a valid directory`
    )
  }
  info(`Root directory input is valid!`)
}

/**
 * Creates a specification that describes how a zip file will be created for a set of input files
 * @param filesToZip a list of files that should be included in the zip
 * @param rootDirectory an absolute root directory path common to all input files that will be trimmed from the final zip structure
 */
export function getUploadZipSpecification(
  filesToZip: string[],
  rootDirectory: string
): UploadZipSpecification[] {
  const specification: UploadZipSpecification[] = []

  // Normalize and resolve, this allows for either absolute or relative paths to be used
  rootDirectory = normalize(rootDirectory)
  rootDirectory = resolve(rootDirectory)

  /*
    Example

    Input:
      rootDirectory: '/home/user/files/plz-upload'
      artifactFiles: [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ]

    Output:
      specifications: [
        ['/home/user/files/plz-upload/file1.txt', '/file1.txt'],
        ['/home/user/files/plz-upload/file2.txt', '/file2.txt'],
        ['/home/user/files/plz-upload/dir/file3.txt', '/dir/file3.txt']
      ]

    The final zip that is later uploaded will look like this:

    my-artifact.zip
      - file1.txt
      - file2.txt
      - dir/
        - file3.txt
  */
  for (let file of filesToZip) {
    const stats = fs.lstatSync(file, {throwIfNoEntry: false})
    if (!stats) {
      throw new Error(`File ${file} does not exist`)
    }
    if (!stats.isDirectory()) {
      // Normalize and resolve, this allows for either absolute or relative paths to be used
      file = normalize(file)
      file = resolve(file)
      if (!file.startsWith(rootDirectory)) {
        throw new Error(
          `The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`
        )
      }

      // Check for forbidden characters in file paths that may cause ambiguous behavior if downloaded on different file systems
      const uploadPath = file.replace(rootDirectory, '')
      validateFilePath(uploadPath)

      specification.push({
        sourcePath: file,
        destinationPath: uploadPath,
        stats
      })
    } else {
      // Empty directory
      const directoryPath = file.replace(rootDirectory, '')
      validateFilePath(directoryPath)

      specification.push({
        sourcePath: null,
        destinationPath: directoryPath,
        stats
      })
    }
  }
  return specification
}
@ -1,107 +0,0 @@
import * as stream from 'stream'
import {realpath} from 'fs/promises'
import * as archiver from 'archiver'
import * as core from '@actions/core'
import {UploadZipSpecification} from './upload-zip-specification'
import {getUploadChunkSize} from '../shared/config'

export const DEFAULT_COMPRESSION_LEVEL = 6

// Custom stream transformer so we can set the highWaterMark property
// See https://github.com/nodejs/node/issues/8855
export class ZipUploadStream extends stream.Transform {
  constructor(bufferSize: number) {
    super({
      highWaterMark: bufferSize
    })
  }

  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  _transform(chunk: any, enc: any, cb: any): void {
    cb(null, chunk)
  }
}

export async function createZipUploadStream(
  uploadSpecification: UploadZipSpecification[],
  compressionLevel: number = DEFAULT_COMPRESSION_LEVEL
): Promise<ZipUploadStream> {
  core.debug(
    `Creating Artifact archive with compressionLevel: ${compressionLevel}`
  )

  const zip = archiver.create('zip', {
    highWaterMark: getUploadChunkSize(),
    zlib: {level: compressionLevel}
  })

  // register callbacks for various events during the zip lifecycle
  zip.on('error', zipErrorCallback)
  zip.on('warning', zipWarningCallback)
  zip.on('finish', zipFinishCallback)
  zip.on('end', zipEndCallback)

  for (const file of uploadSpecification) {
    if (file.sourcePath !== null) {
      // Check if symlink and resolve the source path
      let sourcePath = file.sourcePath
      if (file.stats.isSymbolicLink()) {
        sourcePath = await realpath(file.sourcePath)
      }

      // Add the file to the zip
      zip.file(sourcePath, {
        name: file.destinationPath
      })
    } else {
      // Add a directory to the zip
      zip.append('', {name: file.destinationPath})
    }
  }

  const bufferSize = getUploadChunkSize()
  const zipUploadStream = new ZipUploadStream(bufferSize)

  core.debug(
    `Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`
  )
  core.debug(
    `Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`
  )

  zip.pipe(zipUploadStream)
  zip.finalize()

  return zipUploadStream
}

// eslint-disable-next-line @typescript-eslint/no-explicit-any
const zipErrorCallback = (error: any): void => {
  core.error('An error has occurred while creating the zip file for upload')
  core.info(error)

  throw new Error('An error has occurred during zip creation for the artifact')
}

// eslint-disable-next-line @typescript-eslint/no-explicit-any
const zipWarningCallback = (error: any): void => {
  if (error.code === 'ENOENT') {
    core.warning(
      'ENOENT warning during artifact zip creation. No such file or directory'
    )
    core.info(error)
  } else {
    core.warning(
      `A non-blocking warning has occurred during artifact zip creation: ${error.code}`
    )
    core.info(error)
  }
}

const zipFinishCallback = (): void => {
  core.debug('Zip stream for upload has finished.')
}

const zipEndCallback = (): void => {
  core.debug('Zip stream for upload has ended.')
}
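For local inspection, the stream returned by createZipUploadStream can be piped into a file instead of blob storage. A minimal sketch; the file names and import path are assumptions:

import * as fs from 'fs'
import {createZipUploadStream} from './zip' // path assumed

async function zipToFile(): Promise<void> {
  const specification = [
    {
      sourcePath: 'a.txt', // hypothetical input file
      destinationPath: 'dir2/a.txt',
      stats: fs.lstatSync('a.txt')
    }
  ]
  const zipStream = await createZipUploadStream(specification, 0) // 0 = store only
  zipStream.pipe(fs.createWriteStream('out.zip'))
}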
@ -0,0 +1,36 @@
import { getRuntimeToken } from './config';

export interface BackendIds {
  workflowRunBackendId: string;
  workflowJobRunBackendId: string;
}

export function getBackendIds(): BackendIds {
  const token = getRuntimeToken();
  const parsedToken = JSON.parse(Buffer.from(token.split('.')[1], 'base64').toString())
  if (!parsedToken["scp"]) {
    throw new Error('Unable to get scp from token')
  }

  const scp = parsedToken["scp"]
  const scpParts = scp.split(' ')
  if (scpParts.length == 0) {
    throw new Error('No scp parts found')
  }

  for (const part of scpParts) {
    const partParts = part.split(':')
    if(partParts.length == 0) {
      continue
    }

    if (partParts[0] == "Actions.Results") {
      if (partParts.length == 3) {
        return {workflowRunBackendId: partParts[1], workflowJobRunBackendId: partParts[2]}
      }
      throw new Error('Unable to parse Actions.Results scp part')
    }
  }

  throw new Error('Unable to find ids')
}
Some files were not shown because too many files have changed in this diff.