Compare commits
No commits in common. "main" and "robherley/update-release-notes" have entirely different histories.
main...robherley/update-release-notes
@@ -1,27 +0,0 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
  - package-ecosystem: "npm"
    directory: "/packages/artifact"
    schedule:
      interval: "daily"
    groups:
      # Group minor and patch updates together but keep major separate
      artifact-minor-patch:
        update-types:
          - "minor"
          - "patch"
  - package-ecosystem: "npm"
    directory: "/packages/cache"
    schedule:
      interval: "daily"
    groups:
      # Group minor and patch updates together but keep major separate
      cache-minor-patch:
        update-types:
          - "minor"
          - "patch"
@@ -22,12 +22,12 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      - name: Set Node.js 24.x
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 24.x
          node-version: 20.x

      # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
      # without these to just compile the artifacts package

@@ -47,7 +47,7 @@ jobs:
          echo -n 'hello from file 2' > artifact-path/second.txt

      - name: Upload Artifacts
        uses: actions/github-script@v8
        uses: actions/github-script@v7
        with:
          script: |
            const {default: artifact} = require('./packages/artifact/lib/artifact')

@@ -77,12 +77,12 @@ jobs:
    needs: [upload]
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      - name: Set Node.js 24.x
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 24.x
          node-version: 20.x

      # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
      # without these to just compile the artifacts package

@@ -96,7 +96,7 @@ jobs:
        working-directory: packages/artifact

      - name: List and Download Artifacts
        uses: actions/github-script@v8
        uses: actions/github-script@v7
        with:
          script: |
            const {default: artifactClient} = require('./packages/artifact/lib/artifact')

@@ -165,7 +165,7 @@ jobs:
            }
          }
      - name: Delete Artifacts
        uses: actions/github-script@v8
        uses: actions/github-script@v7
        with:
          script: |
            const {default: artifactClient} = require('./packages/artifact/lib/artifact')
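The upload, list/download, and delete steps above drive the artifact client from actions/github-script. For context only (not part of this diff), here is a minimal sketch of what such a script might do, assuming the v2-style default client exported from `./packages/artifact/lib/artifact` and its `uploadArtifact`, `listArtifacts`, `getArtifact`, `downloadArtifact`, and `deleteArtifact` methods:

```typescript
// Illustrative sketch only; the artifact name and file paths mirror the workflow above but are assumptions.
const {default: artifactClient} = require('./packages/artifact/lib/artifact')

async function run() {
  // Upload two files rooted at artifact-path
  const {id, size} = await artifactClient.uploadArtifact(
    'my-artifact',
    ['artifact-path/first.txt', 'artifact-path/second.txt'],
    'artifact-path'
  )
  console.log(`uploaded artifact ${id} (${size} bytes)`)

  // List artifacts for this run, then download and finally delete by name
  const {artifacts} = await artifactClient.listArtifacts()
  console.log(`found ${artifacts.length} artifact(s)`)

  const {artifact} = await artifactClient.getArtifact('my-artifact')
  await artifactClient.downloadArtifact(artifact.id, {path: 'downloaded'})
  await artifactClient.deleteArtifact('my-artifact')
}

run()
```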
@@ -18,12 +18,12 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      - name: Set Node.js 24.x
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 24.x
          node-version: 20.x

      - name: npm install
        run: npm install

@@ -22,12 +22,12 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      - name: Set Node.js 24.x
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 24.x
          node-version: 20.x

      # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
      # node context. This runs a local action that gets and sets the necessary env variables that are needed

@@ -39,11 +39,9 @@ jobs:
      - name: Install root npm packages
        run: npm ci

      # We need to install only runtime dependencies (omit dev dependencies) to verify that what we're shipping is all
      # that is needed
      - name: Compile cache package
        run: |
          npm ci --omit=dev
          npm ci
          npm run tsc
        working-directory: packages/cache
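The hunks above touch the cache end-to-end workflow, whose comments note that saving and restoring a cache from a shell script needs environment variables that only exist in the Node context. For orientation only (not part of this diff), a minimal sketch of the equivalent programmatic calls, assuming the public @actions/cache API:

```typescript
// Illustrative sketch only: restore on a key, and save after a miss.
import * as cache from '@actions/cache'

async function run() {
  const paths = ['node_modules']
  const key = 'npm-cache-example-key' // assumed key name, for illustration

  // restoreCache resolves to the matched key on a hit, or undefined on a miss
  const restoredKey = await cache.restoreCache(paths, key)
  if (!restoredKey) {
    // ...install dependencies here, then persist them for later runs
    await cache.saveCache(paths, key)
  }
}

run()
```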
@@ -17,16 +16,16 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v2

      - shell: bash
        run: |
          rm "C:\Program Files\Git\usr\bin\tar.exe"

      - name: Set Node.js 24.x
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v1
        with:
          node-version: 24.x
          node-version: 20.x

      # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
      # node context. This runs a local action that gets and sets the necessary env variables that are needed
@@ -20,7 +20,7 @@ jobs:

    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
@@ -6,37 +6,24 @@ on:
  workflow_dispatch:
    inputs:
      package:
        type: choice
        required: true
        description: 'Which package to release'
        options:
          - artifact
          - attest
          - cache
          - core
          - exec
          - github
          - glob
          - http-client
          - io
          - tool-cache

        description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache, attest'

jobs:
  test:
    runs-on: macos-latest-large
    runs-on: macos-latest

    steps:
      - name: setup repo
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      - name: verify package exists
        run: ls packages/${{ github.event.inputs.package }}

      - name: Set Node.js 24.x
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 24.x
          node-version: 20.x

      - name: npm install
        run: npm install

@@ -61,7 +48,7 @@ jobs:
          path: packages/${{ github.event.inputs.package }}/*.tgz

  publish:
    runs-on: macos-latest-large
    runs-on: macos-latest
    needs: test
    environment: npm-publish
    permissions:
@@ -16,23 +16,19 @@ jobs:

    strategy:
      matrix:
        runs-on: [ubuntu-latest, macos-latest-large, windows-latest]

        # Node 20 is the currently supported stable Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions
        # Node 24 is the new version being added with support in actions runners
        node-version: [20.x, 24.x]
        runs-on: [ubuntu-latest, macos-latest, windows-latest]
      fail-fast: false

    runs-on: ${{ matrix.runs-on }}

    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v4

      - name: Set up Node ${{ matrix.node-version }}
        uses: actions/setup-node@v5
      - name: Set Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
          node-version: 20.x

      - name: npm install
        run: npm install
@@ -9,7 +9,7 @@ jobs:
    if: ${{ github.repository_owner == 'actions' }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        uses: actions/checkout@v4
      - name: Update Octokit
        working-directory: packages/github
        run: |
README.md
@@ -24,7 +24,7 @@ The GitHub Actions ToolKit provides a set of packages to make creating actions e
Provides functions for inputs, outputs, results, logging, secrets and variables. Read more [here](packages/core)

```bash
npm install @actions/core
$ npm install @actions/core
```
<br/>
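The section above describes @actions/core (inputs, outputs, results, logging, secrets and variables). As an illustrative aside rather than part of the README diff, a minimal TypeScript usage sketch of that API; the input and output names are made up for the example:

```typescript
// Illustrative sketch only; 'name' and 'greeting' are hypothetical action inputs/outputs.
import * as core from '@actions/core'

try {
  const name = core.getInput('name') || 'world'
  core.info(`Hello, ${name}`)
  core.setOutput('greeting', `Hello, ${name}`)
} catch (error) {
  core.setFailed(error instanceof Error ? error.message : String(error))
}
```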
@@ -33,7 +33,7 @@ npm install @actions/core
Provides functions to exec cli tools and process output. Read more [here](packages/exec)

```bash
npm install @actions/exec
$ npm install @actions/exec
```
<br/>

@@ -42,7 +42,7 @@ npm install @actions/exec
Provides functions to search for files matching glob patterns. Read more [here](packages/glob)

```bash
npm install @actions/glob
$ npm install @actions/glob
```
<br/>

@@ -51,7 +51,7 @@ npm install @actions/glob
A lightweight HTTP client optimized for building actions. Read more [here](packages/http-client)

```bash
npm install @actions/http-client
$ npm install @actions/http-client
```
<br/>

@@ -60,7 +60,7 @@ npm install @actions/http-client
Provides disk i/o functions like cp, mv, rmRF, which etc. Read more [here](packages/io)

```bash
npm install @actions/io
$ npm install @actions/io
```
<br/>

@@ -71,7 +71,7 @@ Provides functions for downloading and caching tools. e.g. setup-* actions. Rea
See @actions/cache for caching workflow dependencies.

```bash
npm install @actions/tool-cache
$ npm install @actions/tool-cache
```
<br/>

@@ -80,7 +80,7 @@ npm install @actions/tool-cache
Provides an Octokit client hydrated with the context that the current action is being run in. Read more [here](packages/github)

```bash
npm install @actions/github
$ npm install @actions/github
```
<br/>

@@ -89,7 +89,7 @@ npm install @actions/github
Provides functions to interact with actions artifacts. Read more [here](packages/artifact)

```bash
npm install @actions/artifact
$ npm install @actions/artifact
```
<br/>

@@ -98,7 +98,7 @@ npm install @actions/artifact
Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)

```bash
npm install @actions/cache
$ npm install @actions/cache
```
<br/>

@@ -107,7 +107,7 @@ npm install @actions/cache
Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)

```bash
npm install @actions/attest
$ npm install @actions/attest
```
<br/>

@@ -227,23 +227,9 @@ console.log(`We can even get context data, like the repo: ${context.repo.repo}`)
```
<br/>

## Note
## Contributing

Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.

We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in.

We are taking the following steps to better direct requests related to GitHub Actions, including:

1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)

2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.

3. Security Issues should be handled as per our [security.md](SECURITY.md).

We will still provide security updates for this project and fix major breaking changes during this time.

You are welcome to still raise bugs in this repo.
We welcome contributions. See [how to contribute](.github/CONTRIBUTING.md).

## Code of Conduct
@@ -32,7 +32,7 @@ jobs:
      os: [ubuntu-16.04, windows-2019]
    runs-on: ${{matrix.os}}
    actions:
      - uses: actions/setup-node@v5
      - uses: actions/setup-node@v4
        with:
          version: ${{matrix.node}}
      - run: |
@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:

```yaml
steps:
    using: actions/setup-node@v5
    using: actions/setup-node@v4
```

# Define Metadata
File diff suppressed because it is too large

package.json
@@ -1,6 +1,6 @@
{
  "name": "root",
  "private": true,
  "private": true,
  "scripts": {
    "audit-all": "lerna run audit-moderate",
    "bootstrap": "lerna exec -- npm install",

@@ -17,7 +17,7 @@
  },
  "devDependencies": {
    "@types/jest": "^29.5.4",
    "@types/node": "^24.1.0",
    "@types/node": "^20.5.7",
    "@types/signale": "^1.4.1",
    "concurrently": "^6.1.0",
    "eslint": "^8.0.1",

@@ -32,19 +32,5 @@
    "prettier": "^3.0.0",
    "ts-jest": "^29.1.1",
    "typescript": "^5.2.2"
  },
  "overrides": {
    "semver": "^7.6.0",
    "tar": "^6.2.1",
    "@octokit/plugin-paginate-rest": "^9.2.2",
    "@octokit/request": "^8.4.1",
    "@octokit/request-error": "^5.1.1",
    "@octokit/core": "^5.0.3",
    "tmp": "^0.2.4",
    "@types/node": "^24.1.0",
    "brace-expansion": "^2.0.2",
    "form-data": "^4.0.4",
    "uri-js": "npm:uri-js-replace@^1.0.1",
    "node-fetch": "^3.3.2"
  }
}
@@ -41,4 +41,3 @@ Any easy way to test changes for the official upload/download actions is to fork
1. In the locally cloned fork, link to your local toolkit changes: `npm link @actions/artifact`
2. Then, compile your changes with: `npm run release`. The local `dist/index.js` should be updated with your changes.
3. Commit and push to your fork, you can then test with a `uses:` in your workflow pointed at your fork.
4. The format for the above is `<username>/<repository-name>/@<ref>`, i.e. `me/myrepo/@HEAD`
@@ -1,41 +1,5 @@
# @actions/artifact Releases

### 4.0.0

- Add support for Node 24 [#2110](https://github.com/actions/toolkit/pull/2110)
- Fix: artifact pagination bugs and configurable artifact count limits [#2165](https://github.com/actions/toolkit/pull/2165)
- Fix: reject the promise on timeout [#2124](https://github.com/actions/toolkit/pull/2124)
- Update dependency versions

### 2.3.3

- Dependency updates [#2049](https://github.com/actions/toolkit/pull/2049)

### 2.3.2

- Added masking for Shared Access Signature (SAS) artifact URLs [#1982](https://github.com/actions/toolkit/pull/1982)
- Change hash to digest for consistent terminology across runner logs [#1991](https://github.com/actions/toolkit/pull/1991)

### 2.3.1

- Fix comment typo on expectedHash. [#1986](https://github.com/actions/toolkit/pull/1986)

### 2.3.0

- Allow ArtifactClient to perform digest comparisons, if supplied. [#1975](https://github.com/actions/toolkit/pull/1975)

### 2.2.2

- Default concurrency to 5 for uploading artifacts [#1962](https://github.com/actions/toolkit/pull/1962)

### 2.2.1

- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)

### 2.2.0

- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)

### 2.1.11

- Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)
@@ -1,14 +1,4 @@
import * as config from '../src/internal/shared/config'
import os from 'os'

// Mock the `cpus()` function in the `os` module
jest.mock('os', () => {
  const osActual = jest.requireActual('os')
  return {
    ...osActual,
    cpus: jest.fn()
  }
})

beforeEach(() => {
  jest.resetModules()

@@ -40,110 +30,3 @@ describe('isGhes', () => {
    expect(config.isGhes()).toBe(true)
  })
})

describe('uploadChunkTimeoutEnv', () => {
  it('should return default 300000 when no env set', () => {
    expect(config.getUploadChunkTimeout()).toBe(300000)
  })

  it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
    expect(config.getUploadChunkTimeout()).toBe(150000)
  })

  it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
    expect(() => {
      config.getUploadChunkTimeout()
    }).toThrow()
  })
})

describe('uploadConcurrencyEnv', () => {
  it('Concurrency default to 5', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    expect(config.getConcurrency()).toBe(5)
  })

  it('Concurrency max out at 300 on systems with many CPUs', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '301'
    expect(config.getConcurrency()).toBe(300)
  })

  it('Concurrency can be set to 32 when cpu num is <= 4', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '32'
    expect(config.getConcurrency()).toBe(32)
  })

  it('Concurrency can be set 16 * num of cpu when cpu num is > 4', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '96'
    expect(config.getConcurrency()).toBe(96)
  })

  it('Concurrency can be overridden by env var ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
    expect(config.getConcurrency()).toBe(10)
  })

  it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
    expect(() => {
      config.getConcurrency()
    }).toThrow()
  })

  it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
    expect(() => {
      config.getConcurrency()
    }).toThrow()
  })
})

describe('getMaxArtifactListCount', () => {
  beforeEach(() => {
    delete process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
  })

  it('should return default 1000 when no env set', () => {
    expect(config.getMaxArtifactListCount()).toBe(1000)
  })

  it('should return value set in ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '2000'
    expect(config.getMaxArtifactListCount()).toBe(2000)
  })

  it('should throw if value set in ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is invalid', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = 'abc'
    expect(() => {
      config.getMaxArtifactListCount()
    }).toThrow(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  })

  it('should throw if ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is < 1', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '0'
    expect(() => {
      config.getMaxArtifactListCount()
    }).toThrow(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  })

  it('should throw if ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is negative', () => {
    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '-100'
    expect(() => {
      config.getMaxArtifactListCount()
    }).toThrow(
      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
    )
  })
})
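The tests above document three environment variables that tune the artifact client. As an aside (not part of this diff), a small sketch of how a consumer might set them before calling the internal config helpers exercised by these tests; the values are examples only:

```typescript
// Illustrative sketch only; values mirror the tests above, the import path matches the test file's context.
import * as config from '../src/internal/shared/config'

process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10' // cap on concurrent upload requests
process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000' // per-chunk upload timeout in milliseconds
process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '2000' // upper bound on artifacts returned when listing

console.log(config.getConcurrency()) // 10
console.log(config.getUploadChunkTimeout()) // 150000
console.log(config.getMaxArtifactListCount()) // 2000
```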
@ -111,16 +111,6 @@ const mockGetArtifactSuccess = jest.fn(() => {
|
|||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactHung = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 200
|
||||
// Don't push any data or call push(null) to end the stream
|
||||
// This creates a stream that hangs and never completes
|
||||
return {
|
||||
message
|
||||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactFailure = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 500
|
||||
|
|
@ -329,6 +319,14 @@ describe('download-artifact', () => {
|
|||
|
||||
const mockGet = jest.fn(async () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
// Resolve with a 200 status code immediately
|
||||
resolve({
|
||||
message: msg,
|
||||
readBody: async () => {
|
||||
return Promise.resolve(`{"ok": true}`)
|
||||
}
|
||||
})
|
||||
|
||||
// Reject with an error after 31 seconds
|
||||
setTimeout(() => {
|
||||
reject(new Error('Request timeout'))
|
||||
|
|
@ -621,32 +619,4 @@ describe('download-artifact', () => {
|
|||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('streamExtractExternal', () => {
|
||||
it('should fail if the timeout is exceeded', async () => {
|
||||
const mockSlowGetArtifact = jest.fn(mockGetArtifactHung)
|
||||
|
||||
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
|
||||
() => {
|
||||
return {
|
||||
get: mockSlowGetArtifact
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
try {
|
||||
await streamExtractExternal(
|
||||
fixtures.blobStorageUrl,
|
||||
fixtures.workspaceDir,
|
||||
{timeout: 2}
|
||||
)
|
||||
expect(true).toBe(false) // should not be called
|
||||
} catch (e) {
|
||||
expect(e).toBeInstanceOf(Error)
|
||||
expect(e.message).toContain('did not respond in 2ms')
|
||||
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
|
||||
expect(mockSlowGetArtifact).toHaveBeenCalledTimes(1)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import * as github from '@actions/github'
|
||||
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
|
||||
import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
|
||||
import {
|
||||
listArtifactsInternal,
|
||||
|
|
@ -9,13 +10,13 @@ import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
|
|||
import * as util from '../src/internal/shared/util'
|
||||
import {noopLogs} from './common'
|
||||
import {Artifact} from '../src/internal/shared/interfaces'
|
||||
import {RequestInterface} from '@octokit/types'
|
||||
|
||||
type MockedRequest = jest.MockedFunction<RequestInterface<object>>
|
||||
type MockedListWorkflowRunArtifacts = jest.MockedFunction<
|
||||
RestEndpointMethods['actions']['listWorkflowRunArtifacts']
|
||||
>
|
||||
|
||||
jest.mock('@actions/github', () => ({
|
||||
getOctokit: jest.fn().mockReturnValue({
|
||||
request: jest.fn(),
|
||||
rest: {
|
||||
actions: {
|
||||
listWorkflowRunArtifacts: jest.fn()
|
||||
|
|
@ -80,10 +81,10 @@ describe('list-artifact', () => {
|
|||
|
||||
describe('public', () => {
|
||||
it('should return a list of artifacts', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
mockListArtifacts.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
|
|
@ -104,10 +105,10 @@ describe('list-artifact', () => {
|
|||
})
|
||||
|
||||
it('should return the latest artifact when latest is specified', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
mockListArtifacts.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
|
|
@ -128,10 +129,10 @@ describe('list-artifact', () => {
|
|||
})
|
||||
|
||||
it('can return empty artifacts', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockRequest.mockResolvedValueOnce({
|
||||
mockListArtifacts.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
|
|
@ -155,10 +156,10 @@ describe('list-artifact', () => {
|
|||
})
|
||||
|
||||
it('should fail if non-200 response', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
|
||||
.listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
|
||||
|
||||
mockRequest.mockRejectedValueOnce(new Error('boom'))
|
||||
mockListArtifacts.mockRejectedValue(new Error('boom'))
|
||||
|
||||
await expect(
|
||||
listArtifactsPublic(
|
||||
|
|
@ -170,126 +171,6 @@ describe('list-artifact', () => {
|
|||
)
|
||||
).rejects.toThrow('boom')
|
||||
})
|
||||
|
||||
it('should handle pagination correctly when fetching multiple pages', async () => {
|
||||
const mockRequest = github.getOctokit(fixtures.token)
|
||||
.request as MockedRequest
|
||||
|
||||
const manyArtifacts = Array.from({length: 150}, (_, i) => ({
|
||||
id: i + 1,
|
||||
name: `artifact-${i + 1}`,
|
||||
size: 100,
|
||||
createdAt: new Date('2023-12-01')
|
||||
}))
|
||||
|
||||
mockRequest
|
||||
.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
...artifactsToListResponse(manyArtifacts.slice(0, 100)),
|
||||
total_count: 150
|
||||
}
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
...artifactsToListResponse(manyArtifacts.slice(100, 150)),
|
||||
total_count: 150
|
||||
}
|
||||
})
|
||||
|
||||
const response = await listArtifactsPublic(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
false
|
||||
)
|
||||
|
||||
// Verify that both API calls were made
|
||||
expect(mockRequest).toHaveBeenCalledTimes(2)
|
||||
|
||||
// Should return all 150 artifacts across both pages
|
||||
expect(response.artifacts).toHaveLength(150)
|
||||
|
||||
// Verify we got artifacts from both pages
|
||||
expect(response.artifacts[0].name).toBe('artifact-1')
|
||||
expect(response.artifacts[99].name).toBe('artifact-100')
|
||||
expect(response.artifacts[100].name).toBe('artifact-101')
|
||||
expect(response.artifacts[149].name).toBe('artifact-150')
|
||||
})
|
||||
|
||||
it('should respect ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT environment variable', async () => {
|
||||
const originalEnv = process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
|
||||
process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '150'
|
||||
|
||||
jest.resetModules()
|
||||
|
||||
try {
|
||||
const {listArtifactsPublic: listArtifactsPublicReloaded} = await import(
|
||||
'../src/internal/find/list-artifacts'
|
||||
)
|
||||
const githubReloaded = await import('@actions/github')
|
||||
|
||||
const mockRequest = (githubReloaded.getOctokit as jest.Mock)(
|
||||
fixtures.token
|
||||
).request as MockedRequest
|
||||
|
||||
const manyArtifacts = Array.from({length: 200}, (_, i) => ({
|
||||
id: i + 1,
|
||||
name: `artifact-${i + 1}`,
|
||||
size: 100,
|
||||
createdAt: new Date('2023-12-01')
|
||||
}))
|
||||
|
||||
mockRequest
|
||||
.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
...artifactsToListResponse(manyArtifacts.slice(0, 100)),
|
||||
total_count: 200
|
||||
}
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
status: 200,
|
||||
headers: {},
|
||||
url: '',
|
||||
data: {
|
||||
...artifactsToListResponse(manyArtifacts.slice(100, 150)),
|
||||
total_count: 200
|
||||
}
|
||||
})
|
||||
|
||||
const response = await listArtifactsPublicReloaded(
|
||||
fixtures.runId,
|
||||
fixtures.owner,
|
||||
fixtures.repo,
|
||||
fixtures.token,
|
||||
false
|
||||
)
|
||||
|
||||
// Should only return 150 artifacts due to the limit
|
||||
expect(response.artifacts).toHaveLength(150)
|
||||
expect(response.artifacts[0].name).toBe('artifact-1')
|
||||
expect(response.artifacts[149].name).toBe('artifact-150')
|
||||
} finally {
|
||||
// Restore original environment variable
|
||||
if (originalEnv !== undefined) {
|
||||
process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = originalEnv
|
||||
} else {
|
||||
delete process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
|
||||
}
|
||||
|
||||
// Reset modules again to restore original state
|
||||
jest.resetModules()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('internal', () => {
|
||||
|
|
|
|||
|
|
@ -108,7 +108,7 @@ describe('upload-artifact', () => {
|
|||
fixtures.files.map(file => ({
|
||||
sourcePath: path.join(fixtures.uploadDirectory, file.name),
|
||||
destinationPath: file.name,
|
||||
stats: fs.statSync(path.join(fixtures.uploadDirectory, file.name))
|
||||
stats: new fs.Stats()
|
||||
}))
|
||||
)
|
||||
jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
|
||||
|
|
@ -281,7 +281,7 @@ describe('upload-artifact', () => {
|
|||
}
|
||||
)
|
||||
|
||||
const {id, size, digest} = await uploadArtifact(
|
||||
const {id, size} = await uploadArtifact(
|
||||
fixtures.inputs.artifactName,
|
||||
fixtures.files.map(file =>
|
||||
path.join(fixtures.uploadDirectory, file.name)
|
||||
|
|
@ -291,8 +291,6 @@ describe('upload-artifact', () => {
|
|||
|
||||
expect(id).toBe(1)
|
||||
expect(size).toBe(loadedBytes)
|
||||
expect(digest).toBeDefined()
|
||||
expect(digest).toHaveLength(64)
|
||||
|
||||
const extractedDirectory = path.join(
|
||||
fixtures.uploadDirectory,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
import * as config from '../src/internal/shared/config'
|
||||
import * as util from '../src/internal/shared/util'
|
||||
import {maskSigUrl, maskSecretUrls} from '../src/internal/shared/util'
|
||||
import {setSecret, debug} from '@actions/core'
|
||||
|
||||
export const testRuntimeToken =
|
||||
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'
|
||||
|
|
@ -61,159 +59,3 @@ describe('get-backend-ids-from-token', () => {
|
|||
)
|
||||
})
|
||||
})
|
||||
|
||||
jest.mock('@actions/core')
|
||||
|
||||
describe('maskSigUrl', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('does nothing if no sig parameter is present', () => {
|
||||
const url = 'https://example.com'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('masks the sig parameter in the middle of the URL and sets it as a secret', () => {
|
||||
const url = 'https://example.com/?param1=value1&sig=12345¶m2=value2'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('12345')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
|
||||
})
|
||||
|
||||
it('does nothing if the URL is empty', () => {
|
||||
const url = ''
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('handles URLs with fragments', () => {
|
||||
const url = 'https://example.com?sig=12345#fragment'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('12345')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('maskSigUrl handles special characters in signatures', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('handles signatures with slashes', () => {
|
||||
const url = 'https://example.com/?sig=abc/123'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc/123')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%2F123')
|
||||
})
|
||||
|
||||
it('handles signatures with plus signs', () => {
|
||||
const url = 'https://example.com/?sig=abc+123'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc 123')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%20123')
|
||||
})
|
||||
|
||||
it('handles signatures with equals signs', () => {
|
||||
const url = 'https://example.com/?sig=abc=123'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc=123')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%3D123')
|
||||
})
|
||||
|
||||
it('handles already percent-encoded signatures', () => {
|
||||
const url = 'https://example.com/?sig=abc%2F123%3D'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc/123=')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%2F123%3D')
|
||||
})
|
||||
|
||||
it('handles complex Azure SAS signatures', () => {
|
||||
const url =
|
||||
'https://example.com/container/file.txt?sig=nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D&se=2023-12-31'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith(
|
||||
'nXyQIUj//06Cxt80pBRYiiJlYqtPYg5sz/vEh5iHAhw='
|
||||
)
|
||||
expect(setSecret).toHaveBeenCalledWith(
|
||||
'nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D'
|
||||
)
|
||||
})
|
||||
|
||||
it('handles signatures with multiple special characters', () => {
|
||||
const url = 'https://example.com/?sig=a/b+c=d&e=f'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('a/b c=d')
|
||||
expect(setSecret).toHaveBeenCalledWith('a%2Fb%20c%3Dd')
|
||||
})
|
||||
})
|
||||
|
||||
describe('maskSecretUrls', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('masks sig parameters in signed_upload_url and signed_url', () => {
|
||||
const body = {
|
||||
signed_upload_url: 'https://upload.com?sig=upload123',
|
||||
signed_url: 'https://download.com?sig=download123'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).toHaveBeenCalledWith('upload123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
|
||||
expect(setSecret).toHaveBeenCalledWith('download123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
|
||||
})
|
||||
|
||||
it('handles case where only upload_url is present', () => {
|
||||
const body = {
|
||||
signed_upload_url: 'https://upload.com?sig=upload123'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).toHaveBeenCalledWith('upload123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
|
||||
})
|
||||
|
||||
it('handles case where only download_url is present', () => {
|
||||
const body = {
|
||||
signed_url: 'https://download.com?sig=download123'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).toHaveBeenCalledWith('download123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
|
||||
})
|
||||
|
||||
it('handles case where URLs do not contain sig parameters', () => {
|
||||
const body = {
|
||||
signed_upload_url: 'https://upload.com?token=abc',
|
||||
signed_url: 'https://download.com?token=xyz'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('handles empty string URLs', () => {
|
||||
const body = {
|
||||
signed_upload_url: '',
|
||||
signed_url: ''
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does nothing if body is not an object or is null', () => {
|
||||
maskSecretUrls(null)
|
||||
expect(debug).toHaveBeenCalledWith('body is not an object or is null')
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does nothing if signed_upload_url and signed_url are not strings', () => {
|
||||
const body = {
|
||||
signed_upload_url: 123,
|
||||
signed_url: 456
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -40,4 +40,4 @@
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7)
|
||||
[src/artifact.ts:7](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/artifact.ts#L7)
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24)
|
||||
[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L24)
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ single DeleteArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248)
|
||||
[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L248)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -92,7 +92,7 @@ single DownloadArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138)
|
||||
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L138)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -127,7 +127,7 @@ If there are multiple artifacts with the same name in the same workflow run this
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212)
|
||||
[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L212)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -159,7 +159,7 @@ ListArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176)
|
||||
[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L176)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -190,4 +190,4 @@ single UploadArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113)
|
||||
[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L113)
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4)
|
||||
[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L4)
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
@ -59,7 +59,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2)
|
||||
[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L2)
|
||||
|
||||
___
|
||||
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31)
|
||||
[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L31)
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17)
|
||||
[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L17)
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42)
|
||||
[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L42)
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40)
|
||||
[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L40)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -198,4 +198,4 @@ ___
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49)
|
||||
[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L49)
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ Error.constructor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62)
|
||||
[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L62)
|
||||
|
||||
## Properties
|
||||
|
||||
|
|
@ -181,4 +181,4 @@ ___
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68)
|
||||
[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/errors.ts#L68)
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ The time when the artifact was created
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128)
|
||||
[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L123)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -35,7 +35,7 @@ The ID of the artifact
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118)
|
||||
[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L113)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -47,7 +47,7 @@ The name of the artifact
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113)
|
||||
[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L108)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -59,4 +59,4 @@ The size of the artifact in bytes
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123)
|
||||
[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L118)
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ single DeleteArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103)
|
||||
[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L103)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -70,7 +70,7 @@ single DownloadArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89)
|
||||
[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L89)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -101,7 +101,7 @@ If there are multiple artifacts with the same name in the same workflow run this
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75)
|
||||
[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L75)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -129,7 +129,7 @@ ListArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57)
|
||||
[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L57)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -156,4 +156,4 @@ single UploadArtifactResponse object
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40)
|
||||
[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/client.ts#L40)
|
||||
|
|
|
|||
|
|
@ -20,4 +20,4 @@ The id of the artifact that was deleted
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163)
|
||||
[src/internal/shared/interfaces.ts:158](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L158)
|
||||
|
|
|
|||
|
|
@ -20,4 +20,4 @@ Denotes where the artifact will be downloaded to. If not specified then the arti
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103)
|
||||
[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L98)
|
||||
|
|
|
|||
|
|
@ -20,4 +20,4 @@ The path where the artifact was downloaded to
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93)
|
||||
[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L88)
|
||||
|
|
|
|||
|
|
@ -27,4 +27,4 @@ The criteria for finding Artifact(s) out of the scope of the current run.
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136)
|
||||
[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L131)
|
||||
|
|
|
|||
|
|
@ -20,4 +20,4 @@ Metadata about the artifact that was found
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62)
|
||||
[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L57)
|
||||
|
|
|
|||
|
|
@ -21,4 +21,4 @@ In the case of reruns, this can be useful to avoid duplicates
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73)
|
||||
[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L68)
|
||||
|
|
|
|||
|
|
@ -20,4 +20,4 @@ A list of artifacts that were found
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83)
|
||||
[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L78)
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ For large files that are not easily compressed, a value of 0 is recommended for
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52)
|
||||
[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L47)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -52,4 +52,4 @@ input of 0 assumes default retention setting.
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41)
|
||||
[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L36)
|
||||
|
|
|
|||
|
|
@ -8,24 +8,11 @@ Response from the server when an artifact is uploaded
|
|||
|
||||
### Properties
|
||||
|
||||
- [digest](UploadArtifactResponse.md#digest)
|
||||
- [id](UploadArtifactResponse.md#id)
|
||||
- [size](UploadArtifactResponse.md#size)
|
||||
|
||||
## Properties
|
||||
|
||||
### digest
|
||||
|
||||
• `Optional` **digest**: `string`
|
||||
|
||||
The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
|
||||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19)
|
||||
|
||||
___
|
||||
|
||||
### id
|
||||
|
||||
• `Optional` **id**: `number`
|
||||
|
|
@ -35,7 +22,7 @@ This ID can be used as input to other APIs to download, delete or get more infor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14)
|
||||
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L14)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -47,4 +34,4 @@ Total size of the artifact in bytes. Not provided if no artifact was uploaded
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8)
|
||||
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/daf23ba/packages/artifact/src/internal/shared/interfaces.ts#L8)
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@actions/artifact",
|
||||
"version": "4.0.0",
|
||||
"version": "2.1.11",
|
||||
"preview": true,
|
||||
"description": "Actions artifact lib",
|
||||
"keywords": [
|
||||
|
|
@ -41,29 +41,24 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/github": "^6.0.1",
|
||||
"@actions/github": "^5.1.1",
|
||||
"@actions/http-client": "^2.1.0",
|
||||
"@azure/core-http": "^3.0.5",
|
||||
"@azure/storage-blob": "^12.15.0",
|
||||
"@octokit/core": "^5.2.1",
|
||||
"@octokit/core": "^3.5.1",
|
||||
"@octokit/plugin-request-log": "^1.0.4",
|
||||
"@octokit/plugin-retry": "^3.0.9",
|
||||
"@octokit/request": "^8.4.1",
|
||||
"@octokit/request-error": "^5.1.1",
|
||||
"@octokit/request-error": "^5.0.0",
|
||||
"@protobuf-ts/plugin": "^2.2.3-alpha.1",
|
||||
"archiver": "^7.0.1",
|
||||
"jwt-decode": "^3.1.2",
|
||||
"twirp-ts": "^2.5.0",
|
||||
"unzip-stream": "^0.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/archiver": "^5.3.2",
|
||||
"@types/unzip-stream": "^0.3.4",
|
||||
"typedoc": "^0.28.13",
|
||||
"typedoc": "^0.25.4",
|
||||
"typedoc-plugin-markdown": "^3.17.1",
|
||||
"typescript": "^5.2.2"
|
||||
},
|
||||
"overrides": {
|
||||
"uri-js": "npm:uri-js-replace@^1.0.1",
|
||||
"node-fetch": "^3.3.2"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
export * from './google/protobuf/timestamp'
|
||||
export * from './google/protobuf/wrappers'
|
||||
export * from './results/api/v1/artifact'
|
||||
export * from './results/api/v1/artifact.twirp-client'
|
||||
export * from './results/api/v1/artifact.twirp'
|
||||
|
|
|
|||
|
|
@@ -15,66 +15,6 @@ import { MessageType } from "@protobuf-ts/runtime";
import { Int64Value } from "../../../google/protobuf/wrappers";
import { StringValue } from "../../../google/protobuf/wrappers";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
 * @generated from protobuf message github.actions.results.api.v1.MigrateArtifactRequest
 */
export interface MigrateArtifactRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 2;
     */
    name: string;
    /**
     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 3;
     */
    expiresAt?: Timestamp;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.MigrateArtifactResponse
 */
export interface MigrateArtifactResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: string signed_upload_url = 2;
     */
    signedUploadUrl: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
 */
export interface FinalizeMigratedArtifactRequest {
    /**
     * @generated from protobuf field: string workflow_run_backend_id = 1;
     */
    workflowRunBackendId: string;
    /**
     * @generated from protobuf field: string name = 2;
     */
    name: string;
    /**
     * @generated from protobuf field: int64 size = 3;
     */
    size: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
 */
export interface FinalizeMigratedArtifactResponse {
    /**
     * @generated from protobuf field: bool ok = 1;
     */
    ok: boolean;
    /**
     * @generated from protobuf field: int64 artifact_id = 2;
     */
    artifactId: string;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
 */
@@ -229,12 +169,6 @@ export interface ListArtifactsResponse_MonolithArtifact {
     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
     */
    createdAt?: Timestamp;
    /**
     * The SHA-256 digest of the artifact, calculated on upload for upload-artifact v4 & newer
     *
     * @generated from protobuf field: google.protobuf.StringValue digest = 7;
     */
    digest?: StringValue;
}
/**
 * @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
@ -293,236 +227,6 @@ export interface DeleteArtifactResponse {
|
|||
artifactId: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class MigrateArtifactRequest$Type extends MessageType<MigrateArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.MigrateArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "expires_at", kind: "message", T: () => Timestamp }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<MigrateArtifactRequest>): MigrateArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<MigrateArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactRequest): MigrateArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 2:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp expires_at */ 3:
|
||||
message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: MigrateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string name = 2; */
|
||||
if (message.name !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.name);
|
||||
/* google.protobuf.Timestamp expires_at = 3; */
|
||||
if (message.expiresAt)
|
||||
Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
|
||||
*/
|
||||
export const MigrateArtifactRequest = new MigrateArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class MigrateArtifactResponse$Type extends MessageType<MigrateArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.MigrateArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<MigrateArtifactResponse>): MigrateArtifactResponse {
|
||||
const message = { ok: false, signedUploadUrl: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<MigrateArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactResponse): MigrateArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_upload_url */ 2:
|
||||
message.signedUploadUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: MigrateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* string signed_upload_url = 2; */
|
||||
if (message.signedUploadUrl !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
|
||||
*/
|
||||
export const MigrateArtifactResponse = new MigrateArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeMigratedArtifactRequest$Type extends MessageType<FinalizeMigratedArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeMigratedArtifactRequest>): FinalizeMigratedArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeMigratedArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactRequest): FinalizeMigratedArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 2:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 3:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeMigratedArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string name = 2; */
|
||||
if (message.name !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 3; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(3, WireType.Varint).int64(message.size);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
|
||||
*/
|
||||
export const FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeMigratedArtifactResponse$Type extends MessageType<FinalizeMigratedArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeMigratedArtifactResponse>): FinalizeMigratedArtifactResponse {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeMigratedArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactResponse): FinalizeMigratedArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeMigratedArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
|
||||
*/
|
||||
export const FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactRequest", [
|
||||
|
|
@@ -904,8 +608,7 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
            { no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
            { no: 7, name: "digest", kind: "message", T: () => StringValue }
            { no: 6, name: "created_at", kind: "message", T: () => Timestamp }
        ]);
    }
    create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
@@ -938,9 +641,6 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
                case /* google.protobuf.Timestamp created_at */ 6:
                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
                    break;
                case /* google.protobuf.StringValue digest */ 7:
                    message.digest = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
                    break;
                default:
                    let u = options.readUnknownField;
                    if (u === "throw")
@@ -971,9 +671,6 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
        /* google.protobuf.Timestamp created_at = 6; */
        if (message.createdAt)
            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
        /* google.protobuf.StringValue digest = 7; */
        if (message.digest)
            StringValue.internalBinaryWrite(message.digest, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -1215,7 +912,5 @@ export const ArtifactService = new ServiceType("github.actions.results.api.v1.Ar
    { name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
    { name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
    { name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse },
    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse },
    { name: "MigrateArtifact", options: {}, I: MigrateArtifactRequest, O: MigrateArtifactResponse },
    { name: "FinalizeMigratedArtifact", options: {}, I: FinalizeMigratedArtifactRequest, O: FinalizeMigratedArtifactResponse }
]);
    { name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse }
]);
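The generated clients in the twirp-client diffs that follow depend only on the small Rpc interface shown there, so they are transport-agnostic. A minimal sketch of wiring the JSON client to a transport; the import path, endpoint origin, and fetch-based transport are assumptions for illustration, not the library's real artifact-twirp-client:

// Path is an assumption; in the package the client is re-exported through '../../generated'.
import {ArtifactServiceClientJSON} from './artifact.twirp-client'

// Hypothetical Rpc implementation: Twirp routes requests to
// <origin>/twirp/<service>/<method>; the origin below is a placeholder.
const rpc = {
  async request(
    service: string,
    method: string,
    contentType: 'application/json' | 'application/protobuf',
    data: object | Uint8Array
  ): Promise<object | Uint8Array> {
    const res = await fetch(`https://results.example.invalid/twirp/${service}/${method}`, {
      method: 'POST',
      headers: {'Content-Type': contentType},
      // The JSON client always passes a plain object here.
      body: JSON.stringify(data)
    })
    return (await res.json()) as object
  }
}

// The generated methods handle toJson/fromJson, so callers only deal in typed messages.
const client = new ArtifactServiceClientJSON(rpc)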
@ -1,232 +0,0 @@
|
|||
import {
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse,
|
||||
DeleteArtifactRequest,
|
||||
DeleteArtifactResponse,
|
||||
} from "./artifact";
|
||||
|
||||
//==================================//
|
||||
// Client Code //
|
||||
//==================================//
|
||||
|
||||
interface Rpc {
|
||||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: "application/json" | "application/protobuf",
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>;
|
||||
}
|
||||
|
||||
export interface ArtifactServiceClient {
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse>;
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse> {
|
||||
const data = DeleteArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"DeleteArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
DeleteArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse> {
|
||||
const data = DeleteArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"DeleteArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
DeleteArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,976 @@
|
|||
import {
|
||||
TwirpContext,
|
||||
TwirpServer,
|
||||
RouterEvents,
|
||||
TwirpError,
|
||||
TwirpErrorCode,
|
||||
Interceptor,
|
||||
TwirpContentType,
|
||||
chainInterceptors,
|
||||
} from "twirp-ts";
|
||||
import {
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse,
|
||||
DeleteArtifactRequest,
|
||||
DeleteArtifactResponse,
|
||||
} from "./artifact";
|
||||
|
||||
//==================================//
|
||||
// Client Code //
|
||||
//==================================//
|
||||
|
||||
interface Rpc {
|
||||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: "application/json" | "application/protobuf",
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>;
|
||||
}
|
||||
|
||||
export interface ArtifactServiceClient {
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse>;
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse> {
|
||||
const data = DeleteArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"DeleteArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
DeleteArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse> {
|
||||
const data = DeleteArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"DeleteArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
DeleteArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
//==================================//
|
||||
// Server Code //
|
||||
//==================================//
|
||||
|
||||
export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
|
||||
CreateArtifact(
|
||||
ctx: T,
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
ctx: T,
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(
|
||||
ctx: T,
|
||||
request: ListArtifactsRequest
|
||||
): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
ctx: T,
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
DeleteArtifact(
|
||||
ctx: T,
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse>;
|
||||
}
|
||||
|
||||
export enum ArtifactServiceMethod {
|
||||
CreateArtifact = "CreateArtifact",
|
||||
FinalizeArtifact = "FinalizeArtifact",
|
||||
ListArtifacts = "ListArtifacts",
|
||||
GetSignedArtifactURL = "GetSignedArtifactURL",
|
||||
DeleteArtifact = "DeleteArtifact",
|
||||
}
|
||||
|
||||
export const ArtifactServiceMethodList = [
|
||||
ArtifactServiceMethod.CreateArtifact,
|
||||
ArtifactServiceMethod.FinalizeArtifact,
|
||||
ArtifactServiceMethod.ListArtifacts,
|
||||
ArtifactServiceMethod.GetSignedArtifactURL,
|
||||
ArtifactServiceMethod.DeleteArtifact,
|
||||
];
|
||||
|
||||
export function createArtifactServiceServer<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(service: ArtifactServiceTwirp<T>) {
|
||||
return new TwirpServer<ArtifactServiceTwirp, T>({
|
||||
service,
|
||||
packageName: "github.actions.results.api.v1",
|
||||
serviceName: "ArtifactService",
|
||||
methodList: ArtifactServiceMethodList,
|
||||
matchRoute: matchArtifactServiceRoute,
|
||||
});
|
||||
}
|
||||
|
||||
function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
|
||||
method: string,
|
||||
events: RouterEvents<T>
|
||||
) {
|
||||
switch (method) {
|
||||
case "CreateArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "CreateArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceCreateArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "FinalizeArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "FinalizeArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceFinalizeArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "ListArtifacts":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "ListArtifacts" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceListArtifactsRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "GetSignedArtifactURL":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "GetSignedArtifactURL" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceGetSignedArtifactURLRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "DeleteArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
DeleteArtifactRequest,
|
||||
DeleteArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "DeleteArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceDeleteArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
default:
|
||||
events.onNotFound();
|
||||
const msg = `no handler found`;
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceCreateArtifactRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceCreateArtifactJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceCreateArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceFinalizeArtifactRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceFinalizeArtifactJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceFinalizeArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceListArtifactsRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceListArtifactsJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceListArtifactsProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceGetSignedArtifactURLRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceGetSignedArtifactURLJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceGetSignedArtifactURLProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceDeleteArtifactRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceDeleteArtifactJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceDeleteArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
async function handleArtifactServiceCreateArtifactJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
) {
|
||||
let request: CreateArtifactRequest;
|
||||
let response: CreateArtifactResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = CreateArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.CreateArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
CreateArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceFinalizeArtifactJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) {
|
||||
let request: FinalizeArtifactRequest;
|
||||
let response: FinalizeArtifactResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = FinalizeArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.FinalizeArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
FinalizeArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceListArtifactsJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
) {
|
||||
let request: ListArtifactsRequest;
|
||||
let response: ListArtifactsResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = ListArtifactsRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.ListArtifacts(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
ListArtifactsResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceGetSignedArtifactURLJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) {
|
||||
let request: GetSignedArtifactURLRequest;
|
||||
let response: GetSignedArtifactURLResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = GetSignedArtifactURLRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.GetSignedArtifactURL(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
GetSignedArtifactURLResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
|
||||
async function handleArtifactServiceDeleteArtifactJSON<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
|
||||
) {
|
||||
let request: DeleteArtifactRequest;
|
||||
let response: DeleteArtifactResponse;
|
||||
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = DeleteArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
DeleteArtifactRequest,
|
||||
DeleteArtifactResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.DeleteArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.DeleteArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return JSON.stringify(
|
||||
DeleteArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}) as string
|
||||
);
|
||||
}
|
||||
async function handleArtifactServiceCreateArtifactProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
) {
|
||||
let request: CreateArtifactRequest;
|
||||
let response: CreateArtifactResponse;
|
||||
|
||||
try {
|
||||
request = CreateArtifactRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.CreateArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(CreateArtifactResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceFinalizeArtifactProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) {
|
||||
let request: FinalizeArtifactRequest;
|
||||
let response: FinalizeArtifactResponse;
|
||||
|
||||
try {
|
||||
request = FinalizeArtifactRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.FinalizeArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(FinalizeArtifactResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceListArtifactsProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
) {
|
||||
let request: ListArtifactsRequest;
|
||||
let response: ListArtifactsResponse;
|
||||
|
||||
try {
|
||||
request = ListArtifactsRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.ListArtifacts(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(ListArtifactsResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceGetSignedArtifactURLProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) {
|
||||
let request: GetSignedArtifactURLRequest;
|
||||
let response: GetSignedArtifactURLResponse;
|
||||
|
||||
try {
|
||||
request = GetSignedArtifactURLRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.GetSignedArtifactURL(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(GetSignedArtifactURLResponse.toBinary(response));
|
||||
}
|
||||
|
||||
async function handleArtifactServiceDeleteArtifactProtobuf<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, DeleteArtifactRequest, DeleteArtifactResponse>[]
|
||||
) {
|
||||
let request: DeleteArtifactRequest;
|
||||
let response: DeleteArtifactResponse;
|
||||
|
||||
try {
|
||||
request = DeleteArtifactRequest.fromBinary(data);
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = chainInterceptors(...interceptors) as Interceptor<
|
||||
T,
|
||||
DeleteArtifactRequest,
|
||||
DeleteArtifactResponse
|
||||
>;
|
||||
response = await interceptor(ctx, request!, (ctx, inputReq) => {
|
||||
return service.DeleteArtifact(ctx, inputReq);
|
||||
});
|
||||
} else {
|
||||
response = await service.DeleteArtifact(ctx, request!);
|
||||
}
|
||||
|
||||
return Buffer.from(DeleteArtifactResponse.toBinary(response));
|
||||
}
|
||||
|
|
@@ -1,15 +1,11 @@
import fs from 'fs/promises'
import * as crypto from 'crypto'
import * as stream from 'stream'

import * as github from '@actions/github'
import * as core from '@actions/core'
import * as httpClient from '@actions/http-client'
import unzip from 'unzip-stream'
import {
  DownloadArtifactOptions,
  DownloadArtifactResponse,
  StreamExtractResponse
  DownloadArtifactResponse
} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getGitHubWorkspaceDir} from '../shared/config'
@@ -41,14 +37,12 @@ async function exists(path: string): Promise<boolean> {
  }
}

async function streamExtract(
  url: string,
  directory: string
): Promise<StreamExtractResponse> {
async function streamExtract(url: string, directory: string): Promise<void> {
  let retryCount = 0
  while (retryCount < 5) {
    try {
      return await streamExtractExternal(url, directory)
      await streamExtractExternal(url, directory)
      return
    } catch (error) {
      retryCount++
      core.debug(
@@ -64,9 +58,8 @@ async function streamExtract(

export async function streamExtractExternal(
  url: string,
  directory: string,
  opts: {timeout: number} = {timeout: 30 * 1000}
): Promise<StreamExtractResponse> {
  directory: string
): Promise<void> {
  const client = new httpClient.HttpClient(getUserAgentString())
  const response = await client.get(url)
  if (response.message.statusCode !== 200) {
@@ -75,26 +68,17 @@ export async function streamExtractExternal(
    )
  }

  let sha256Digest: string | undefined = undefined
  const timeout = 30 * 1000 // 30 seconds

  return new Promise((resolve, reject) => {
    const timerFn = (): void => {
      const timeoutError = new Error(
        `Blob storage chunk did not respond in ${opts.timeout}ms`
      response.message.destroy(
        new Error(`Blob storage chunk did not respond in ${timeout}ms`)
      )
      response.message.destroy(timeoutError)
      reject(timeoutError)
    }
    const timer = setTimeout(timerFn, opts.timeout)
    const timer = setTimeout(timerFn, timeout)

    const hashStream = crypto.createHash('sha256').setEncoding('hex')
    const passThrough = new stream.PassThrough()

    response.message.pipe(passThrough)
    passThrough.pipe(hashStream)
    const extractStream = passThrough

    extractStream
    response.message
      .on('data', () => {
        timer.refresh()
      })
@@ -108,12 +92,7 @@ export async function streamExtractExternal(
      .pipe(unzip.Extract({path: directory}))
      .on('close', () => {
        clearTimeout(timer)
        if (hashStream) {
          hashStream.end()
          sha256Digest = hashStream.read() as string
          core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`)
        }
        resolve({sha256Digest: `sha256:${sha256Digest}`})
        resolve()
      })
      .on('error', (error: Error) => {
        reject(error)
@@ -132,8 +111,6 @@ export async function downloadArtifactPublic(

  const api = github.getOctokit(token)

  let digestMismatch = false

  core.info(
    `Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`
  )
@@ -163,20 +140,13 @@ export async function downloadArtifactPublic(

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    const extractResponse = await streamExtract(location, downloadPath)
    await streamExtract(location, downloadPath)
    core.info(`Artifact download completed successfully.`)
    if (options?.expectedHash) {
      if (options?.expectedHash !== extractResponse.sha256Digest) {
        digestMismatch = true
        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
        core.debug(`Expected digest: ${options.expectedHash}`)
      }
    }
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath, digestMismatch}
  return {downloadPath}
}

export async function downloadArtifactInternal(
@@ -187,8 +157,6 @@ export async function downloadArtifactInternal(

  const artifactClient = internalArtifactTwirpClient()

  let digestMismatch = false

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

@@ -224,20 +192,13 @@ export async function downloadArtifactInternal(

  try {
    core.info(`Starting download of artifact to: ${downloadPath}`)
    const extractResponse = await streamExtract(signedUrl, downloadPath)
    await streamExtract(signedUrl, downloadPath)
    core.info(`Artifact download completed successfully.`)
    if (options?.expectedHash) {
      if (options?.expectedHash !== extractResponse.sha256Digest) {
        digestMismatch = true
        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
        core.debug(`Expected digest: ${options.expectedHash}`)
      }
    }
  } catch (error) {
    throw new Error(`Unable to download and extract artifact: ${error.message}`)
  }

  return {downloadPath, digestMismatch}
  return {downloadPath}
}

async function resolveOrCreateDirectory(
@ -68,10 +68,7 @@ export async function getArtifactPublic(
|
|||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size: artifact.size_in_bytes,
|
||||
createdAt: artifact.created_at
|
||||
? new Date(artifact.created_at)
|
||||
: undefined,
|
||||
digest: artifact.digest
|
||||
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -118,8 +115,7 @@ export async function getArtifactInternal(
|
|||
size: Number(artifact.size),
|
||||
createdAt: artifact.createdAt
|
||||
? Timestamp.toDate(artifact.createdAt)
|
||||
: undefined,
|
||||
digest: artifact.digest?.value
|
||||
: undefined
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,12 +9,12 @@ import {retry} from '@octokit/plugin-retry'
|
|||
import {OctokitOptions} from '@octokit/core/dist-types/types'
|
||||
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
|
||||
import {getBackendIdsFromToken} from '../shared/util'
|
||||
import {getMaxArtifactListCount} from '../shared/config'
|
||||
import {ListArtifactsRequest, Timestamp} from '../../generated'
|
||||
|
||||
const maximumArtifactCount = getMaxArtifactListCount()
|
||||
// Limiting to 1000 for perf reasons
|
||||
const maximumArtifactCount = 1000
|
||||
const paginationCount = 100
|
||||
const maxNumberOfPages = Math.ceil(maximumArtifactCount / paginationCount)
|
||||
const maxNumberOfPages = maximumArtifactCount / paginationCount
|
||||
|
||||
export async function listArtifactsPublic(
|
||||
workflowRunId: number,
|
||||
|
|
@ -41,17 +41,14 @@ export async function listArtifactsPublic(
|
|||
const github = getOctokit(token, opts, retry, requestLog)
|
||||
|
||||
let currentPageNumber = 1
|
||||
|
||||
const {data: listArtifactResponse} = await github.request(
|
||||
'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
|
||||
{
|
||||
const {data: listArtifactResponse} =
|
||||
await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
run_id: workflowRunId,
|
||||
per_page: paginationCount,
|
||||
page: currentPageNumber
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
let numberOfPages = Math.ceil(
|
||||
listArtifactResponse.total_count / paginationCount
|
||||
|
|
@ -59,7 +56,7 @@ export async function listArtifactsPublic(
|
|||
const totalArtifactCount = listArtifactResponse.total_count
|
||||
if (totalArtifactCount > maximumArtifactCount) {
|
||||
warning(
|
||||
`Workflow run ${workflowRunId} has ${totalArtifactCount} artifacts, exceeding the limit of ${maximumArtifactCount}. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
|
||||
`Workflow run ${workflowRunId} has more than 1000 artifacts. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
|
||||
)
|
||||
numberOfPages = maxNumberOfPages
|
||||
}
|
||||
|
|
@ -70,32 +67,27 @@ export async function listArtifactsPublic(
|
|||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size: artifact.size_in_bytes,
|
||||
createdAt: artifact.created_at
|
||||
? new Date(artifact.created_at)
|
||||
: undefined,
|
||||
digest: (artifact as ArtifactResponse).digest
|
||||
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
|
||||
})
|
||||
}
|
||||
// Move to the next page
|
||||
currentPageNumber++
|
||||
|
||||
// Iterate over any remaining pages
|
||||
for (
|
||||
currentPageNumber;
|
||||
currentPageNumber <= numberOfPages;
|
||||
currentPageNumber < numberOfPages;
|
||||
currentPageNumber++
|
||||
) {
|
||||
currentPageNumber++
|
||||
debug(`Fetching page ${currentPageNumber} of artifact list`)
|
||||
|
||||
const {data: listArtifactResponse} = await github.request(
|
||||
'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
|
||||
{
|
||||
const {data: listArtifactResponse} =
|
||||
await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
run_id: workflowRunId,
|
||||
per_page: paginationCount,
|
||||
page: currentPageNumber
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
for (const artifact of listArtifactResponse.artifacts) {
|
||||
artifacts.push({
|
||||
|
|
@ -104,8 +96,7 @@ export async function listArtifactsPublic(
|
|||
size: artifact.size_in_bytes,
|
||||
createdAt: artifact.created_at
|
||||
? new Date(artifact.created_at)
|
||||
: undefined,
|
||||
digest: (artifact as ArtifactResponse).digest
|
||||
: undefined
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -141,8 +132,7 @@ export async function listArtifactsInternal(
|
|||
size: Number(artifact.size),
|
||||
createdAt: artifact.createdAt
|
||||
? Timestamp.toDate(artifact.createdAt)
|
||||
: undefined,
|
||||
digest: artifact.digest?.value
|
||||
: undefined
|
||||
}))
|
||||
|
||||
if (latest) {
|
||||
|
|
@ -156,18 +146,6 @@ export async function listArtifactsInternal(
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This exists so that we don't have to use 'any' when receiving the artifact list from the GitHub API.
|
||||
* The digest field is not present in OpenAPI/types at time of writing, which necessitates this change.
|
||||
*/
|
||||
interface ArtifactResponse {
|
||||
name: string
|
||||
id: number
|
||||
size_in_bytes: number
|
||||
created_at?: string
|
||||
digest?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters a list of artifacts to only include the latest artifact for each name
|
||||
* @param artifacts The artifacts to filter
|
||||
|
|
|
|||
|
|
@ -5,7 +5,6 @@ import {ArtifactServiceClientJSON} from '../../generated'
|
|||
import {getResultsServiceUrl, getRuntimeToken} from './config'
|
||||
import {getUserAgentString} from './user-agent'
|
||||
import {NetworkError, UsageError} from './errors'
|
||||
import {maskSecretUrls} from './util'
|
||||
|
||||
// The twirp http client must implement this interface
|
||||
interface Rpc {
|
||||
|
|
@ -87,7 +86,6 @@ class ArtifactHttpClient implements Rpc {
|
|||
debug(`[Response] - ${response.message.statusCode}`)
|
||||
debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
|
||||
const body = JSON.parse(rawBody)
|
||||
maskSecretUrls(body)
|
||||
debug(`Body: ${JSON.stringify(body, null, 2)}`)
|
||||
if (this.isSuccessStatusCode(statusCode)) {
|
||||
return {response, body}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import os from 'os'
|
||||
import {info} from '@actions/core'
|
||||
|
||||
// Used for controlling the highWaterMark value of the zip that is being streamed
|
||||
// The same value is used as the chunk size that is use during upload to blob storage
|
||||
|
|
@ -45,71 +44,20 @@ export function getGitHubWorkspaceDir(): string {
|
|||
return ghWorkspaceDir
|
||||
}
|
||||
|
||||
// The maximum value of concurrency is 300.
|
||||
// This value can be changed with ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY variable.
|
||||
// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
|
||||
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
|
||||
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
|
||||
export function getConcurrency(): number {
|
||||
const numCPUs = os.cpus().length
|
||||
let concurrencyCap = 32
|
||||
|
||||
if (numCPUs > 4) {
|
||||
const concurrency = 16 * numCPUs
|
||||
concurrencyCap = concurrency > 300 ? 300 : concurrency
|
||||
if (numCPUs <= 4) {
|
||||
return 32
|
||||
}
|
||||
|
||||
const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
|
||||
if (concurrencyOverride) {
|
||||
const concurrency = parseInt(concurrencyOverride)
|
||||
if (isNaN(concurrency) || concurrency < 1) {
|
||||
throw new Error(
|
||||
'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
|
||||
)
|
||||
}
|
||||
|
||||
if (concurrency < concurrencyCap) {
|
||||
info(
|
||||
`Set concurrency based on the value set in ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY.`
|
||||
)
|
||||
return concurrency
|
||||
}
|
||||
|
||||
info(
|
||||
`ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Set it to the maximum value allowed.`
|
||||
)
|
||||
return concurrencyCap
|
||||
}
|
||||
|
||||
// default concurrency to 5
|
||||
return 5
|
||||
const concurrency = 16 * numCPUs
|
||||
return concurrency > 300 ? 300 : concurrency
|
||||
}
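
For reference, a small hedged sketch of how a caller could exercise the override described in the comments above; the import path into the package's compiled `lib` output is an assumption, and the cap values simply restate the main-branch comments.

```typescript
// Sketch only: ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is read by getConcurrency()
// at call time and is honored only up to the CPU-based cap (16 * CPUs, max 300;
// 32 on runners with 4 or fewer CPUs).
import {getConcurrency} from './packages/artifact/lib/internal/shared/config' // assumed path

process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY'] = '64'
console.log(`effective upload concurrency: ${getConcurrency()}`)
```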
|
||||
|
||||
export function getUploadChunkTimeout(): number {
|
||||
const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
|
||||
if (!timeoutVar) {
|
||||
return 300000 // 5 minutes
|
||||
}
|
||||
|
||||
const timeout = parseInt(timeoutVar)
|
||||
if (isNaN(timeout)) {
|
||||
throw new Error(
|
||||
'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
|
||||
)
|
||||
}
|
||||
|
||||
return timeout
|
||||
}
|
||||
|
||||
// This value can be changed with ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT variable.
|
||||
// Defaults to 1000 as a safeguard for rate limiting.
|
||||
export function getMaxArtifactListCount(): number {
|
||||
const maxCountVar =
|
||||
process.env['ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT'] || '1000'
|
||||
|
||||
const maxCount = parseInt(maxCountVar)
|
||||
if (isNaN(maxCount) || maxCount < 1) {
|
||||
throw new Error(
|
||||
'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
|
||||
)
|
||||
}
|
||||
|
||||
return maxCount
|
||||
return 300_000 // 5 minutes
|
||||
}
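
A similarly hedged sketch of the new listing limit in use (run inside an async function); `DefaultArtifactClient` and `listArtifacts()` are the public client API, while the exact effect of the variable is whatever `getMaxArtifactListCount()` above returns.

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

// Allow up to 2000 artifacts to be listed instead of the default cap of 1000.
// Non-numeric or non-positive values make getMaxArtifactListCount() throw.
process.env['ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT'] = '2000'

const client = new DefaultArtifactClient()
const {artifacts} = await client.listArtifacts()
console.log(`listed ${artifacts.length} artifacts`)
```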
|
||||
|
|
|
|||
|
|
@ -12,11 +12,6 @@ export interface UploadArtifactResponse {
|
|||
* This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
|
||||
*/
|
||||
id?: number
|
||||
|
||||
/**
|
||||
* The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
|
||||
*/
|
||||
digest?: string
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -91,11 +86,6 @@ export interface DownloadArtifactResponse {
|
|||
* The path where the artifact was downloaded to
|
||||
*/
|
||||
downloadPath?: string
|
||||
|
||||
/**
|
||||
* Returns true if the digest of the downloaded artifact does not match the expected hash
|
||||
*/
|
||||
digestMismatch?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@@ -106,20 +96,6 @@ export interface DownloadArtifactOptions {
   * Denotes where the artifact will be downloaded to. If not specified, the artifact is downloaded to GITHUB_WORKSPACE
   */
  path?: string

  /**
   * The hash that was computed for the artifact during upload. If provided, the outcome of the download
   * will provide a digestMismatch property indicating whether the hash of the downloaded artifact
   * matches the expected hash.
   */
  expectedHash?: string
}
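
To illustrate how the two fields above are meant to be used together on the main side of this diff, a hedged sketch (the artifact id and digest are placeholders, and `downloadArtifact` is assumed to accept these options):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const client = new DefaultArtifactClient()

// `expectedHash` would normally be the digest returned by uploadArtifact().
const {downloadPath, digestMismatch} = await client.downloadArtifact(12345, {
  path: '/tmp/my-artifact',
  expectedHash: 'sha256:36ab4667...' // placeholder digest
})

if (digestMismatch) {
  throw new Error(`artifact extracted to ${downloadPath} failed digest verification`)
}
```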
|
||||
|
||||
export interface StreamExtractResponse {
|
||||
/**
|
||||
* The SHA256 hash of the downloaded file
|
||||
*/
|
||||
sha256Digest?: string
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -145,11 +121,6 @@ export interface Artifact {
|
|||
* The time when the artifact was created
|
||||
*/
|
||||
createdAt?: Date
|
||||
|
||||
/**
|
||||
* The digest of the artifact, computed at time of upload.
|
||||
*/
|
||||
digest?: string
|
||||
}
|
||||
|
||||
// FindOptions are for fetching Artifact(s) out of the scope of the current run.
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import * as core from '@actions/core'
|
||||
import {getRuntimeToken} from './config'
|
||||
import jwt_decode from 'jwt-decode'
|
||||
import {debug, setSecret} from '@actions/core'
|
||||
|
||||
export interface BackendIds {
|
||||
workflowRunBackendId: string
|
||||
|
|
@ -70,76 +69,3 @@ export function getBackendIdsFromToken(): BackendIds {
|
|||
|
||||
throw InvalidJwtError
|
||||
}
|
||||
|
||||
/**
|
||||
* Masks the `sig` parameter in a URL and sets it as a secret.
|
||||
*
|
||||
* @param url - The URL containing the signature parameter to mask
|
||||
* @remarks
|
||||
* This function attempts to parse the provided URL and identify the 'sig' query parameter.
|
||||
* If found, it registers both the raw and URL-encoded signature values as secrets using
|
||||
* the Actions `setSecret` API, which prevents them from being displayed in logs.
|
||||
*
|
||||
* The function handles errors gracefully if URL parsing fails, logging them as debug messages.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // Mask a signature in an Azure SAS token URL
|
||||
* maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
|
||||
* ```
|
||||
*/
|
||||
export function maskSigUrl(url: string): void {
|
||||
if (!url) return
|
||||
try {
|
||||
const parsedUrl = new URL(url)
|
||||
const signature = parsedUrl.searchParams.get('sig')
|
||||
if (signature) {
|
||||
setSecret(signature)
|
||||
setSecret(encodeURIComponent(signature))
|
||||
}
|
||||
} catch (error) {
|
||||
debug(
|
||||
`Failed to parse URL: ${url} ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Masks sensitive information in URLs containing signature parameters.
|
||||
* Currently supports masking 'sig' parameters in the 'signed_upload_url'
|
||||
* and 'signed_download_url' properties of the provided object.
|
||||
*
|
||||
* @param body - The object should contain a signature
|
||||
* @remarks
|
||||
* This function extracts URLs from the object properties and calls maskSigUrl
|
||||
* on each one to redact sensitive signature information. The function doesn't
|
||||
* modify the original object; it only marks the signatures as secrets for
|
||||
* logging purposes.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const responseBody = {
|
||||
* signed_upload_url: 'https://example.com?sig=abc123',
|
||||
* signed_download_url: 'https://example.com?sig=def456'
|
||||
* };
|
||||
* maskSecretUrls(responseBody);
|
||||
* ```
|
||||
*/
|
||||
export function maskSecretUrls(body: Record<string, unknown> | null): void {
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
debug('body is not an object or is null')
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
'signed_upload_url' in body &&
|
||||
typeof body.signed_upload_url === 'string'
|
||||
) {
|
||||
maskSigUrl(body.signed_upload_url)
|
||||
}
|
||||
if ('signed_url' in body && typeof body.signed_url === 'string') {
|
||||
maskSigUrl(body.signed_url)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
|
||||
import {TransferProgressEvent} from '@azure/core-http-compat'
|
||||
import {TransferProgressEvent} from '@azure/core-http'
|
||||
import {ZipUploadStream} from './zip'
|
||||
import {
|
||||
getUploadChunkSize,
|
||||
|
|
@ -98,7 +98,7 @@ export async function uploadZipToBlobStorage(
|
|||
|
||||
hashStream.end()
|
||||
sha256Hash = hashStream.read() as string
|
||||
core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`)
|
||||
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`)
|
||||
|
||||
if (uploadByteCount === 0) {
|
||||
core.warning(
|
||||
|
|
|
|||
|
|
@ -110,7 +110,6 @@ export async function uploadArtifact(
|
|||
|
||||
return {
|
||||
size: uploadResult.uploadSize,
|
||||
digest: uploadResult.sha256Hash,
|
||||
id: Number(artifactId)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -32,7 +32,8 @@ async function run() {
|
|||
const ghToken = core.getInput('gh-token');
|
||||
|
||||
const attestation = await attest({
|
||||
subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}],
|
||||
subjectName: 'my-artifact-name',
|
||||
subjectDigest: { 'sha256': '36ab4667...'},
|
||||
predicateType: 'https://in-toto.io/attestation/release',
|
||||
predicate: { . . . },
|
||||
token: ghToken
|
||||
|
|
@ -48,12 +49,11 @@ The `attest` function supports the following options:
|
|||
|
||||
```typescript
|
||||
export type AttestOptions = {
|
||||
// Deprecated. Use 'subjects' instead.
|
||||
subjectName?: string
|
||||
// Deprecated. Use 'subjects' instead.
|
||||
subjectDigest?: Record<string, string>
|
||||
// Collection of subjects to be attested
|
||||
subjects?: Subject[]
|
||||
// The name of the subject to be attested.
|
||||
subjectName: string
|
||||
// The digest of the subject to be attested. Should be a map of digest
|
||||
// algorithms to their hex-encoded values.
|
||||
subjectDigest: Record<string, string>
|
||||
// URI identifying the content type of the predicate being attested.
|
||||
predicateType: string
|
||||
// Predicate to be attested.
|
||||
|
|
@ -68,13 +68,6 @@ export type AttestOptions = {
|
|||
// Whether to skip writing the attestation to the GH attestations API.
|
||||
skipWrite?: boolean
|
||||
}
|
||||
|
||||
export type Subject = {
|
||||
// Name of the subject.
|
||||
name: string
|
||||
// Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
|
||||
digest: Record<string, string>
|
||||
}
|
||||
```
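
For completeness, a hedged sketch of the multi-subject form that the main side of this diff documents (names and digests are placeholders, run inside an async entry point):

```typescript
import {attest} from '@actions/attest'
import * as core from '@actions/core'

const attestation = await attest({
  subjects: [
    {name: 'my-cli-linux-x64', digest: {sha256: 'aaaa...'}}, // placeholder digest
    {name: 'my-cli-darwin-arm64', digest: {sha256: 'bbbb...'}} // placeholder digest
  ],
  predicateType: 'https://in-toto.io/attestation/release',
  predicate: {version: '1.2.3'},
  token: core.getInput('gh-token')
})
core.info(`attestation written: ${attestation.attestationID}`)
```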
|
||||
|
||||
### `attestProvenance`
|
||||
|
|
@ -112,13 +105,12 @@ The `attestProvenance` function supports the following options:
|
|||
|
||||
```typescript
|
||||
export type AttestProvenanceOptions = {
|
||||
// Deprecated. Use 'subjects' instead.
|
||||
subjectName?: string
|
||||
// Deprecated. Use 'subjects' instead.
|
||||
subjectDigest?: Record<string, string>
|
||||
// Collection of subjects to be attested
|
||||
subjects?: Subject[]
|
||||
// URI identifying the content type of the predicate being attested.
|
||||
// The name of the subject to be attested.
|
||||
subjectName: string
|
||||
// The digest of the subject to be attested. Should be a map of digest
|
||||
// algorithms to their hex-encoded values.
|
||||
subjectDigest: Record<string, string>
|
||||
// GitHub token for writing attestations.
|
||||
token: string
|
||||
// Sigstore instance to use for signing. Must be one of "public-good" or
|
||||
// "github".
|
||||
|
|
|
|||
|
|
@ -1,28 +1,8 @@
|
|||
# @actions/attest Releases
|
||||
|
||||
### 2.0.0
|
||||
|
||||
- Add support for Node 24 [#2110](https://github.com/actions/toolkit/pull/2110)
|
||||
- Bump @sigstore/bundle from 3.0.0 to 3.1.0
|
||||
- Bump @sigstore/sign from 3.0.0 to 3.1.0
|
||||
- Bump jose from 5.2.3 to 5.10.0
|
||||
|
||||
### 1.6.0
|
||||
|
||||
- Update `buildSLSAProvenancePredicate` to populate `workflow.ref` field from the `ref` claim in the OIDC token [#1969](https://github.com/actions/toolkit/pull/1969)
|
||||
|
||||
### 1.5.0
|
||||
|
||||
- Bump @actions/core from 1.10.1 to 1.11.1 [#1847](https://github.com/actions/toolkit/pull/1847)
|
||||
- Bump @sigstore/bundle from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
|
||||
- Bump @sigstore/sign from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
|
||||
- Support for generating multi-subject attestations [#1864](https://github.com/actions/toolkit/pull/1865)
|
||||
- Fix bug in `buildSLSAProvenancePredicate` related to `workflow_ref` OIDC token claims containing the "@" symbol in the tag name [#1863](https://github.com/actions/toolkit/pull/1863)
|
||||
|
||||
### 1.4.2
|
||||
|
||||
- Fix bug in `buildSLSAProvenancePredicate`/`attestProvenance` when generating provenance statement for enterprise account using customized OIDC issuer value [#1823](https://github.com/actions/toolkit/pull/1823)
|
||||
|
||||
### 1.4.1
|
||||
|
||||
- Bump @actions/http-client from 2.2.1 to 2.2.3 [#1805](https://github.com/actions/toolkit/pull/1805)
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ exports[`provenance functions buildSLSAProvenancePredicate returns a provenance
|
|||
"externalParameters": {
|
||||
"workflow": {
|
||||
"path": ".github/workflows/main.yml",
|
||||
"ref": "refs/heads/main",
|
||||
"ref": "main",
|
||||
"repository": "https://foo.ghe.com/owner/repo",
|
||||
},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -1,16 +0,0 @@
|
|||
import {attest} from '../src/attest'
|
||||
|
||||
describe('attest', () => {
|
||||
describe('when no subject information is provided', () => {
|
||||
it('throws an error', async () => {
|
||||
const options = {
|
||||
predicateType: 'foo',
|
||||
predicate: {bar: 'baz'},
|
||||
token: 'token'
|
||||
}
|
||||
expect(attest(options)).rejects.toThrowError(
|
||||
'Must provide either subjectName and subjectDigest or subjects'
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
@ -17,7 +17,7 @@ describe('buildIntotoStatement', () => {
|
|||
}
|
||||
|
||||
it('returns an intoto statement', () => {
|
||||
const statement = buildIntotoStatement([subject], predicate)
|
||||
const statement = buildIntotoStatement(subject, predicate)
|
||||
expect(statement).toMatchSnapshot()
|
||||
})
|
||||
})
|
||||
|
|
|
|||
|
|
@ -33,7 +33,15 @@ describe('provenance functions', () => {
|
|||
runner_environment: 'github-hosted'
|
||||
}
|
||||
|
||||
const mockIssuer = async (claims: jose.JWTPayload): Promise<void> => {
|
||||
beforeEach(async () => {
|
||||
process.env = {
|
||||
...originalEnv,
|
||||
ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
|
||||
ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
|
||||
GITHUB_SERVER_URL: 'https://foo.ghe.com',
|
||||
GITHUB_REPOSITORY: claims.repository
|
||||
}
|
||||
|
||||
// Generate JWT signing key
|
||||
const key = await jose.generateKeyPair('PS256')
|
||||
|
||||
|
|
@ -52,18 +60,6 @@ describe('provenance functions', () => {
|
|||
|
||||
// Mock OIDC token endpoint for populating the provenance
|
||||
nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
|
||||
}
|
||||
|
||||
beforeEach(async () => {
|
||||
process.env = {
|
||||
...originalEnv,
|
||||
ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
|
||||
ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
|
||||
GITHUB_SERVER_URL: 'https://foo.ghe.com',
|
||||
GITHUB_REPOSITORY: claims.repository
|
||||
}
|
||||
|
||||
await mockIssuer(claims)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -119,7 +115,8 @@ describe('provenance functions', () => {
|
|||
describe('when the sigstore instance is explicitly set', () => {
|
||||
it('attests provenance', async () => {
|
||||
const attestation = await attestProvenance({
|
||||
subjects: [{name: subjectName, digest: subjectDigest}],
|
||||
subjectName,
|
||||
subjectDigest,
|
||||
token: 'token',
|
||||
sigstore: 'github'
|
||||
})
|
||||
|
|
@ -146,7 +143,8 @@ describe('provenance functions', () => {
|
|||
|
||||
it('attests provenance', async () => {
|
||||
const attestation = await attestProvenance({
|
||||
subjects: [{name: subjectName, digest: subjectDigest}],
|
||||
subjectName,
|
||||
subjectDigest,
|
||||
token: 'token'
|
||||
})
|
||||
|
||||
|
|
@ -180,7 +178,8 @@ describe('provenance functions', () => {
|
|||
describe('when the sigstore instance is explicitly set', () => {
|
||||
it('attests provenance', async () => {
|
||||
const attestation = await attestProvenance({
|
||||
subjects: [{name: subjectName, digest: subjectDigest}],
|
||||
subjectName,
|
||||
subjectDigest,
|
||||
token: 'token',
|
||||
sigstore: 'public-good'
|
||||
})
|
||||
|
|
@ -207,7 +206,8 @@ describe('provenance functions', () => {
|
|||
|
||||
it('attests provenance', async () => {
|
||||
const attestation = await attestProvenance({
|
||||
subjects: [{name: subjectName, digest: subjectDigest}],
|
||||
subjectName,
|
||||
subjectDigest,
|
||||
token: 'token'
|
||||
})
|
||||
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@actions/attest",
|
||||
"version": "2.0.0",
|
||||
"version": "1.4.2",
|
||||
"description": "Actions attestation lib",
|
||||
"keywords": [
|
||||
"github",
|
||||
|
|
@ -35,24 +35,24 @@
|
|||
"url": "https://github.com/actions/toolkit/issues"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sigstore/mock": "^0.10.0",
|
||||
"@sigstore/rekor-types": "^3.0.0",
|
||||
"@sigstore/mock": "^0.7.4",
|
||||
"@sigstore/rekor-types": "^2.0.0",
|
||||
"@types/jsonwebtoken": "^9.0.6",
|
||||
"nock": "^13.5.1",
|
||||
"undici": "^6.20.0"
|
||||
"undici": "^5.28.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.11.1",
|
||||
"@actions/core": "^1.10.1",
|
||||
"@actions/github": "^6.0.0",
|
||||
"@actions/http-client": "^2.2.3",
|
||||
"@octokit/plugin-retry": "^6.0.1",
|
||||
"@sigstore/bundle": "^3.1.0",
|
||||
"@sigstore/sign": "^3.1.0",
|
||||
"jose": "^5.10.0"
|
||||
"@sigstore/bundle": "^2.3.2",
|
||||
"@sigstore/sign": "^2.3.2",
|
||||
"jose": "^5.2.3"
|
||||
},
|
||||
"overrides": {
|
||||
"@octokit/plugin-retry": {
|
||||
"@octokit/core": "^5.2.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,16 +14,11 @@ const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
|
|||
* Options for attesting a subject / predicate.
|
||||
*/
|
||||
export type AttestOptions = {
|
||||
/**
|
||||
* @deprecated Use `subjects` instead.
|
||||
**/
|
||||
subjectName?: string
|
||||
/**
|
||||
* @deprecated Use `subjects` instead.
|
||||
**/
|
||||
subjectDigest?: Record<string, string>
|
||||
// Subjects to be attested.
|
||||
subjects?: Subject[]
|
||||
// The name of the subject to be attested.
|
||||
subjectName: string
|
||||
// The digest of the subject to be attested. Should be a map of digest
|
||||
// algorithms to their hex-encoded values.
|
||||
subjectDigest: Record<string, string>
|
||||
// Content type of the predicate being attested.
|
||||
predicateType: string
|
||||
// Predicate to be attested.
|
||||
|
|
@ -47,24 +42,15 @@ export type AttestOptions = {
|
|||
* @returns A promise that resolves to the attestation.
|
||||
*/
|
||||
export async function attest(options: AttestOptions): Promise<Attestation> {
|
||||
let subjects: Subject[]
|
||||
|
||||
if (options.subjects) {
|
||||
subjects = options.subjects
|
||||
} else if (options.subjectName && options.subjectDigest) {
|
||||
subjects = [{name: options.subjectName, digest: options.subjectDigest}]
|
||||
} else {
|
||||
throw new Error(
|
||||
'Must provide either subjectName and subjectDigest or subjects'
|
||||
)
|
||||
const subject: Subject = {
|
||||
name: options.subjectName,
|
||||
digest: options.subjectDigest
|
||||
}
|
||||
|
||||
const predicate: Predicate = {
|
||||
type: options.predicateType,
|
||||
params: options.predicate
|
||||
}
|
||||
|
||||
const statement = buildIntotoStatement(subjects, predicate)
|
||||
const statement = buildIntotoStatement(subject, predicate)
|
||||
|
||||
// Sign the provenance statement
|
||||
const payload: Payload = {
|
||||
|
|
|
|||
|
|
@ -20,12 +20,12 @@ export type InTotoStatement = {
|
|||
* @returns The constructed in-toto statement.
|
||||
*/
|
||||
export const buildIntotoStatement = (
|
||||
subjects: Subject[],
|
||||
subject: Subject,
|
||||
predicate: Predicate
|
||||
): InTotoStatement => {
|
||||
return {
|
||||
_type: INTOTO_STATEMENT_V1_TYPE,
|
||||
subject: subjects,
|
||||
subject: [subject],
|
||||
predicateType: predicate.type,
|
||||
predicate: predicate.params
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -30,7 +30,7 @@ export const buildSLSAProvenancePredicate = async (
  // Split just the path and ref from the workflow string.
  // owner/repo/.github/workflows/main.yml@main =>
  //   .github/workflows/main.yml, main
  const [workflowPath] = claims.workflow_ref
  const [workflowPath, workflowRef] = claims.workflow_ref
    .replace(`${claims.repository}/`, '')
    .split('@')

@@ -41,7 +41,7 @@ export const buildSLSAProvenancePredicate = async (
      buildType: GITHUB_BUILD_TYPE,
      externalParameters: {
        workflow: {
          ref: claims.ref,
          ref: workflowRef,
          repository: `${serverURL}/${claims.repository}`,
          path: workflowPath
        }
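
To make the split described in the comment concrete, a worked example with an illustrative `workflow_ref` claim (values are made up):

```typescript
// claims.repository   === 'owner/repo'
// claims.workflow_ref === 'owner/repo/.github/workflows/main.yml@refs/heads/main'
const [workflowPath, workflowRef] = 'owner/repo/.github/workflows/main.yml@refs/heads/main'
  .replace('owner/repo/', '')
  .split('@')
// workflowPath === '.github/workflows/main.yml'
// workflowRef  === 'refs/heads/main' (the main-branch code ignores this and uses claims.ref instead)
```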
|
||||
|
|
|
|||
|
|
@ -86,6 +86,7 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
|
|||
witnesses.push(
|
||||
new RekorWitness({
|
||||
rekorBaseURL: opts.rekorURL,
|
||||
entryType: 'dsse',
|
||||
fetchOnConflict: true,
|
||||
timeout,
|
||||
retry
|
||||
|
|
@ -105,5 +106,5 @@ const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
|
|||
|
||||
// Build the bundle with the singleCertificate option which will
|
||||
// trigger the creation of v0.3 DSSE bundles
|
||||
return new DSSEBundleBuilder({signer, witnesses})
|
||||
return new DSSEBundleBuilder({signer, witnesses, singleCertificate: true})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -29,11 +29,7 @@ export const writeAttestation = async (
|
|||
owner: github.context.repo.owner,
|
||||
repo: github.context.repo.repo,
|
||||
headers: options.headers,
|
||||
bundle: attestation as {
|
||||
mediaType?: string
|
||||
verificationMaterial?: {[key: string]: unknown}
|
||||
dsseEnvelope?: {[key: string]: unknown}
|
||||
}
|
||||
data: {bundle: attestation}
|
||||
})
|
||||
|
||||
const data =
|
||||
|
|
|
|||
|
|
@@ -6,20 +6,6 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac

Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.

## ⚠️ Important changes

The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.

The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.

**All previous versions of this package will be deprecated.** We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025**.

If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.

Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.

Read more about the change and access the migration guide here: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).

## Usage

This package is used by the v2+ versions of our first-party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).

@@ -61,3 +47,5 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)

A cache is downloaded in multiple fixed-size segments (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Occasionally a segment download stalls, which leaves the workflow job hung until it eventually fails. Version `v3.0.4` of the cache package introduces a segment download timeout, which aborts the stuck segment and lets the job proceed with a cache miss instead.

The default value of this timeout is 10 minutes (starting with `v3.2.1`; it was 60 minutes in versions `v3.0.4` through `v3.2.0`, both inclusive) and can be customized by setting an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` to the timeout value in minutes.
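
For example, to give up on a stalled segment after 5 minutes (a sketch; run inside an async function, with the variable set before the restore call so the download code can read it):

```typescript
import * as cache from '@actions/cache'

// Abort any single segment download that takes longer than 5 minutes.
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '5'

const cacheKey = await cache.restoreCache(['node_modules'], 'npm-deps-key')
```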
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,67 +1,9 @@
|
|||
# @actions/cache Releases
|
||||
|
||||
### 4.1.0
|
||||
|
||||
- Remove client side 10GiB cache size limit check & update twirp client [#2118](https://github.com/actions/toolkit/pull/2118)
|
||||
|
||||
### 4.0.5
|
||||
|
||||
- Reintroduce @protobuf-ts/runtime-rpc as a runtime dependency [#2113](https://github.com/actions/toolkit/pull/2113)
|
||||
|
||||
### 4.0.4
|
||||
|
||||
⚠️ Faulty patch release. Upgrade to 4.0.5 instead.
|
||||
|
||||
- Optimized cache dependencies by moving `@protobuf-ts/plugin` to dev dependencies [#2106](https://github.com/actions/toolkit/pull/2106)
|
||||
- Improved cache service availability determination for different cache service versions (v1 and v2) [#2100](https://github.com/actions/toolkit/pull/2100)
|
||||
- Enhanced server error handling: 5xx HTTP errors are now logged as errors instead of warnings [#2099](https://github.com/actions/toolkit/pull/2099)
|
||||
- Fixed cache hit logging to properly distinguish between exact key matches and restore key matches [#2101](https://github.com/actions/toolkit/pull/2101)
|
||||
|
||||
### 4.0.3
|
||||
|
||||
- Added masking for Shared Access Signature (SAS) cache entry URLs [#1982](https://github.com/actions/toolkit/pull/1982)
|
||||
- Improved debugging by logging both the cache version alongside the keys requested when a cache restore fails [#1994](https://github.com/actions/toolkit/pull/1994)
|
||||
|
||||
### 4.0.2
|
||||
|
||||
- Wrap create failures in ReserveCacheError [#1966](https://github.com/actions/toolkit/pull/1966)
|
||||
|
||||
### 4.0.1
|
||||
|
||||
- Remove runtime dependency on `twirp-ts` [#1947](https://github.com/actions/toolkit/pull/1947)
|
||||
- Cache miss as debug, not warning annotation [#1954](https://github.com/actions/toolkit/pull/1954)
|
||||
|
||||
### 4.0.0
|
||||
|
||||
#### Important changes
|
||||
|
||||
The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
|
||||
|
||||
The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.
|
||||
|
||||
**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.**
|
||||
|
||||
If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
|
||||
|
||||
Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.
|
||||
|
||||
Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
|
||||
|
||||
#### Minor changes
|
||||
|
||||
- Update `@actions/core` to `1.11.0`
|
||||
- Update `semver` `6.3.1`
|
||||
- Add `twirp-ts` `2.5.0` to dependencies
|
||||
|
||||
### 3.3.0
|
||||
|
||||
- Update `@actions/core` to `1.11.1`
|
||||
- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
|
||||
|
||||
### 3.2.4
|
||||
|
||||
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
|
||||
|
||||
|
||||
### 3.2.3
|
||||
|
||||
- Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)
|
||||
|
|
|
|||
|
|
@ -3,15 +3,12 @@
|
|||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const filePath = process.env[`GITHUB_ENV`]
|
||||
fs.appendFileSync(filePath, `ACTIONS_CACHE_SERVICE_V2=true${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
})
|
||||
fs.appendFileSync(filePath, `ACTIONS_RESULTS_URL=${process.env.ACTIONS_RESULTS_URL}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
})
|
||||
fs.appendFileSync(filePath, `ACTIONS_RUNTIME_TOKEN=${process.env.ACTIONS_RUNTIME_TOKEN}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
})
|
||||
fs.appendFileSync(filePath, `ACTIONS_CACHE_URL=${process.env.ACTIONS_CACHE_URL}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
})
|
||||
fs.appendFileSync(filePath, `GITHUB_RUN_ID=${process.env.GITHUB_RUN_ID}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
})
|
||||
|
|
@ -1,69 +1,14 @@
|
|||
import * as cache from '../src/cache'
|
||||
|
||||
describe('isFeatureAvailable', () => {
|
||||
const originalEnv = process.env
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetModules()
|
||||
process.env = {...originalEnv}
|
||||
// Clean cache-related environment variables
|
||||
test('isFeatureAvailable returns true if server url is set', () => {
|
||||
try {
|
||||
process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(true)
|
||||
} finally {
|
||||
delete process.env['ACTIONS_CACHE_URL']
|
||||
delete process.env['ACTIONS_RESULTS_URL']
|
||||
delete process.env['ACTIONS_CACHE_SERVICE_V2']
|
||||
delete process.env['GITHUB_SERVER_URL']
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
process.env = originalEnv
|
||||
})
|
||||
|
||||
test('returns true for cache service v1 when ACTIONS_CACHE_URL is set', () => {
|
||||
process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(true)
|
||||
})
|
||||
|
||||
test('returns false for cache service v1 when only ACTIONS_RESULTS_URL is set', () => {
|
||||
process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(false)
|
||||
})
|
||||
|
||||
test('returns true for cache service v1 when both URLs are set', () => {
|
||||
process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
|
||||
process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(true)
|
||||
})
|
||||
|
||||
test('returns true for cache service v2 when ACTIONS_RESULTS_URL is set', () => {
|
||||
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
|
||||
process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(true)
|
||||
})
|
||||
|
||||
test('returns false for cache service v2 when only ACTIONS_CACHE_URL is set', () => {
|
||||
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
|
||||
process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(false)
|
||||
})
|
||||
|
||||
test('returns false when no cache URLs are set', () => {
|
||||
expect(cache.isFeatureAvailable()).toBe(false)
|
||||
})
|
||||
|
||||
test('returns false for cache service v2 when no URLs are set', () => {
|
||||
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
|
||||
expect(cache.isFeatureAvailable()).toBe(false)
|
||||
})
|
||||
|
||||
test('returns true for GHES with v1 even when v2 flag is set', () => {
|
||||
process.env['GITHUB_SERVER_URL'] = 'https://my-enterprise.github.com'
|
||||
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
|
||||
process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(true)
|
||||
})
|
||||
|
||||
test('returns false for GHES with only ACTIONS_RESULTS_URL', () => {
|
||||
process.env['GITHUB_SERVER_URL'] = 'https://my-enterprise.github.com'
|
||||
process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
|
||||
expect(cache.isFeatureAvailable()).toBe(false)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
test('isFeatureAvailable returns false if server url is not set', () => {
|
||||
expect(cache.isFeatureAvailable()).toBe(false)
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import {downloadCache} from '../src/internal/cacheHttpClient'
|
||||
import {getCacheVersion} from '../src/internal/cacheUtils'
|
||||
import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient'
|
||||
import {CompressionMethod} from '../src/internal/constants'
|
||||
import * as downloadUtils from '../src/internal/downloadUtils'
|
||||
import {DownloadOptions, getDownloadOptions} from '../src/options'
|
||||
|
|
|
|||
|
|
@ -42,3 +42,23 @@ test('resolvePaths works on github workspace directory', async () => {
|
|||
const paths = await cacheUtils.resolvePaths([workspace])
|
||||
expect(paths.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('isGhes returns false for github.com', async () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://github.com'
|
||||
expect(cacheUtils.isGhes()).toBe(false)
|
||||
})
|
||||
|
||||
test('isGhes returns false for ghe.com', async () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
|
||||
expect(cacheUtils.isGhes()).toBe(false)
|
||||
})
|
||||
|
||||
test('isGhes returns true for enterprise URL', async () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
|
||||
expect(cacheUtils.isGhes()).toBe(true)
|
||||
})
|
||||
|
||||
test('isGhes returns false for ghe.localhost', () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
|
||||
expect(cacheUtils.isGhes()).toBe(false)
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,25 +0,0 @@
|
|||
import * as config from '../src/internal/config'
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetModules()
|
||||
})
|
||||
|
||||
test('isGhes returns false for github.com', async () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://github.com'
|
||||
expect(config.isGhes()).toBe(false)
|
||||
})
|
||||
|
||||
test('isGhes returns false for ghe.com', async () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
|
||||
expect(config.isGhes()).toBe(false)
|
||||
})
|
||||
|
||||
test('isGhes returns true for enterprise URL', async () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
|
||||
expect(config.isGhes()).toBe(true)
|
||||
})
|
||||
|
||||
test('isGhes returns false for ghe.localhost', () => {
|
||||
process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
|
||||
expect(config.isGhes()).toBe(false)
|
||||
})
|
||||
|
|
@ -11,6 +11,8 @@ const downloadConcurrency = 8
|
|||
const timeoutInMs = 30000
|
||||
const segmentTimeoutInMs = 600000
|
||||
const lookupOnly = false
|
||||
const uploadConcurrency = 4
|
||||
const uploadChunkSize = 32 * 1024 * 1024
|
||||
|
||||
test('getDownloadOptions sets defaults', async () => {
|
||||
const actualOptions = getDownloadOptions()
|
||||
|
|
@ -41,21 +43,18 @@ test('getDownloadOptions overrides all settings', async () => {
|
|||
})
|
||||
|
||||
test('getUploadOptions sets defaults', async () => {
|
||||
const expectedOptions: UploadOptions = {
|
||||
uploadConcurrency: 4,
|
||||
uploadChunkSize: 32 * 1024 * 1024,
|
||||
useAzureSdk: false
|
||||
}
|
||||
const actualOptions = getUploadOptions()
|
||||
|
||||
expect(actualOptions).toEqual(expectedOptions)
|
||||
expect(actualOptions).toEqual({
|
||||
uploadConcurrency,
|
||||
uploadChunkSize
|
||||
})
|
||||
})
|
||||
|
||||
test('getUploadOptions overrides all settings', async () => {
|
||||
const expectedOptions: UploadOptions = {
|
||||
uploadConcurrency: 2,
|
||||
uploadChunkSize: 16 * 1024 * 1024,
|
||||
useAzureSdk: true
|
||||
uploadChunkSize: 16 * 1024 * 1024
|
||||
}
|
||||
|
||||
const actualOptions = getUploadOptions(expectedOptions)
|
||||
|
|
@ -63,34 +62,6 @@ test('getUploadOptions overrides all settings', async () => {
|
|||
expect(actualOptions).toEqual(expectedOptions)
|
||||
})
|
||||
|
||||
test('env variables override all getUploadOptions settings', async () => {
|
||||
const expectedOptions: UploadOptions = {
|
||||
uploadConcurrency: 16,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
useAzureSdk: true
|
||||
}
|
||||
|
||||
process.env.CACHE_UPLOAD_CONCURRENCY = '16'
|
||||
process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
|
||||
|
||||
const actualOptions = getUploadOptions(expectedOptions)
|
||||
expect(actualOptions).toEqual(expectedOptions)
|
||||
})
|
||||
|
||||
test('env variables override all getUploadOptions settings but do not exceed caps', async () => {
|
||||
const expectedOptions: UploadOptions = {
|
||||
uploadConcurrency: 32,
|
||||
uploadChunkSize: 128 * 1024 * 1024,
|
||||
useAzureSdk: true
|
||||
}
|
||||
|
||||
process.env.CACHE_UPLOAD_CONCURRENCY = '64'
|
||||
process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
|
||||
|
||||
const actualOptions = getUploadOptions(expectedOptions)
|
||||
expect(actualOptions).toEqual(expectedOptions)
|
||||
})
|
||||
|
||||
test('getDownloadOptions overrides download timeout minutes', async () => {
|
||||
const expectedOptions: DownloadOptions = {
|
||||
useAzureSdk: false,
|
||||
|
|
|
|||
|
|
@ -6,8 +6,6 @@ import * as cacheUtils from '../src/internal/cacheUtils'
|
|||
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
|
||||
import {ArtifactCacheEntry} from '../src/internal/contracts'
|
||||
import * as tar from '../src/internal/tar'
|
||||
import {HttpClientError} from '@actions/http-client'
|
||||
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
|
||||
|
||||
jest.mock('../src/internal/cacheHttpClient')
|
||||
jest.mock('../src/internal/cacheUtils')
|
||||
|
|
@ -75,28 +73,18 @@ test('restore with no cache found', async () => {
|
|||
test('restore with server error should fail', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const logErrorMock = jest.spyOn(core, 'error')
|
||||
const logWarningMock = jest.spyOn(core, 'warning')
|
||||
|
||||
// Set cache service to V2 to test error logging for server errors
|
||||
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
|
||||
process.env['ACTIONS_RESULTS_URL'] = 'https://results.local/'
|
||||
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
|
||||
.mockImplementation(() => {
|
||||
throw new HttpClientError('HTTP Error Occurred', 500)
|
||||
})
|
||||
jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => {
|
||||
throw new Error('HTTP Error Occurred')
|
||||
})
|
||||
|
||||
const cacheKey = await restoreCache(paths, key)
|
||||
expect(cacheKey).toBe(undefined)
|
||||
expect(logErrorMock).toHaveBeenCalledTimes(1)
|
||||
expect(logErrorMock).toHaveBeenCalledWith(
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1)
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
'Failed to restore: HTTP Error Occurred'
|
||||
)
|
||||
|
||||
// Clean up environment
|
||||
delete process.env['ACTIONS_CACHE_SERVICE_V2']
|
||||
delete process.env['ACTIONS_RESULTS_URL']
|
||||
})
|
||||
|
||||
test('restore with restore keys and no cache found', async () => {
|
||||
|
|
|
|||
|
|
@ -1,426 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import * as path from 'path'
|
||||
import * as tar from '../src/internal/tar'
|
||||
import * as config from '../src/internal/config'
|
||||
import * as cacheUtils from '../src/internal/cacheUtils'
|
||||
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
|
||||
import {restoreCache} from '../src/cache'
|
||||
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
|
||||
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
|
||||
import {DownloadOptions} from '../src/options'
|
||||
import {HttpClientError} from '@actions/http-client'
|
||||
|
||||
jest.mock('../src/internal/cacheHttpClient')
|
||||
jest.mock('../src/internal/cacheUtils')
|
||||
jest.mock('../src/internal/config')
|
||||
jest.mock('../src/internal/tar')
|
||||
|
||||
let logDebugMock: jest.SpyInstance
|
||||
let logInfoMock: jest.SpyInstance
|
||||
|
||||
beforeAll(() => {
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'error').mockImplementation(() => {})
|
||||
|
||||
jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
|
||||
const actualUtils = jest.requireActual('../src/internal/cacheUtils')
|
||||
return actualUtils.getCacheFileName(cm)
|
||||
})
|
||||
|
||||
// Ensure that we're using v2 for these tests
|
||||
jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
|
||||
|
||||
logDebugMock = jest.spyOn(core, 'debug')
|
||||
logInfoMock = jest.spyOn(core, 'info')
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
|
||||
})
|
||||
|
||||
test('restore with no path should fail', async () => {
|
||||
const paths: string[] = []
|
||||
const key = 'node-test'
|
||||
await expect(restoreCache(paths, key)).rejects.toThrowError(
|
||||
`Path Validation Error: At least one directory or file path is required`
|
||||
)
|
||||
})
|
||||
|
||||
test('restore with too many keys should fail', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const restoreKeys = [...Array(20).keys()].map(x => x.toString())
|
||||
await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
|
||||
`Key Validation Error: Keys are limited to a maximum of 10.`
|
||||
)
|
||||
})
|
||||
|
||||
test('restore with large key should fail', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'foo'.repeat(512) // Over the 512 character limit
|
||||
await expect(restoreCache(paths, key)).rejects.toThrowError(
|
||||
`Key Validation Error: ${key} cannot be larger than 512 characters.`
|
||||
)
|
||||
})
|
||||
|
||||
test('restore with invalid key should fail', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'comma,comma'
|
||||
await expect(restoreCache(paths, key)).rejects.toThrowError(
|
||||
`Key Validation Error: ${key} cannot contain commas.`
|
||||
)
|
||||
})
|
||||
|
||||
test('restore with no cache found', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: false,
|
||||
signedDownloadUrl: '',
|
||||
matchedKey: ''
|
||||
})
|
||||
)
|
||||
|
||||
const cacheKey = await restoreCache(paths, key)
|
||||
|
||||
expect(cacheKey).toBe(undefined)
|
||||
})
|
||||
|
||||
test('restore with server error should fail', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const logErrorMock = jest.spyOn(core, 'error')
|
||||
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
|
||||
.mockImplementation(() => {
|
||||
throw new HttpClientError('HTTP Error Occurred', 500)
|
||||
})
|
||||
|
||||
const cacheKey = await restoreCache(paths, key)
|
||||
expect(cacheKey).toBe(undefined)
|
||||
expect(logErrorMock).toHaveBeenCalledTimes(1)
|
||||
expect(logErrorMock).toHaveBeenCalledWith(
|
||||
'Failed to restore: HTTP Error Occurred'
|
||||
)
|
||||
})
|
||||
|
||||
test('restore with restore keys and no cache found', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const restoreKeys = ['node-']
|
||||
const cacheVersion =
|
||||
'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
|
||||
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
|
||||
getCacheVersionMock.mockReturnValue(cacheVersion)
|
||||
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: false,
|
||||
signedDownloadUrl: '',
|
||||
matchedKey: ''
|
||||
})
|
||||
)
|
||||
|
||||
const cacheKey = await restoreCache(paths, key, restoreKeys)
|
||||
|
||||
expect(cacheKey).toBe(undefined)
|
||||
expect(logDebugMock).toHaveBeenCalledWith(
|
||||
`Cache not found for version ${cacheVersion} of keys: ${[
|
||||
key,
|
||||
...restoreKeys
|
||||
].join(', ')}`
|
||||
)
|
||||
})
|
||||
|
||||
test('restore with gzip compressed cache found', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const compressionMethod = CompressionMethod.Gzip
|
||||
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
|
||||
const cacheVersion =
|
||||
'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
|
||||
const options = {useAzureSdk: true} as DownloadOptions
|
||||
|
||||
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
|
||||
getCacheVersionMock.mockReturnValue(cacheVersion)
|
||||
|
||||
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
|
||||
|
||||
const getCacheDownloadURLMock = jest.spyOn(
|
||||
CacheServiceClientJSON.prototype,
|
||||
'GetCacheEntryDownloadURL'
|
||||
)
|
||||
getCacheDownloadURLMock.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: true,
|
||||
signedDownloadUrl,
|
||||
matchedKey: key
|
||||
})
|
||||
)
|
||||
|
||||
const tempPath = '/foo/bar'
|
||||
|
||||
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
|
||||
createTempDirectoryMock.mockImplementation(async () => {
|
||||
return Promise.resolve(tempPath)
|
||||
})
|
||||
|
||||
const archivePath = path.join(tempPath, CacheFilename.Gzip)
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
|
||||
|
||||
const fileSize = 142
|
||||
const getArchiveFileSizeInBytesMock = jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValue(fileSize)
|
||||
|
||||
const extractTarMock = jest.spyOn(tar, 'extractTar')
|
||||
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
|
||||
|
||||
const cacheKey = await restoreCache(paths, key, [], options)
|
||||
|
||||
expect(cacheKey).toBe(key)
|
||||
expect(getCacheVersionMock).toHaveBeenCalledWith(
|
||||
paths,
|
||||
compressionMethod,
|
||||
false
|
||||
)
|
||||
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
restoreKeys: [],
|
||||
version: cacheVersion
|
||||
})
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
signedDownloadUrl,
|
||||
archivePath,
|
||||
options
|
||||
)
|
||||
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
|
||||
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1)
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
|
||||
|
||||
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
|
||||
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
|
||||
|
||||
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('restore with zstd compressed cache found', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const compressionMethod = CompressionMethod.Zstd
|
||||
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
|
||||
const cacheVersion =
|
||||
'8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
|
||||
const options = {useAzureSdk: true} as DownloadOptions
|
||||
|
||||
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
|
||||
getCacheVersionMock.mockReturnValue(cacheVersion)
|
||||
|
||||
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
|
||||
|
||||
const getCacheDownloadURLMock = jest.spyOn(
|
||||
CacheServiceClientJSON.prototype,
|
||||
'GetCacheEntryDownloadURL'
|
||||
)
|
||||
getCacheDownloadURLMock.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: true,
|
||||
signedDownloadUrl,
|
||||
matchedKey: key
|
||||
})
|
||||
)
|
||||
|
||||
const tempPath = '/foo/bar'
|
||||
|
||||
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
|
||||
createTempDirectoryMock.mockImplementation(async () => {
|
||||
return Promise.resolve(tempPath)
|
||||
})
|
||||
|
||||
const archivePath = path.join(tempPath, CacheFilename.Zstd)
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
|
||||
|
||||
const fileSize = 62915000
|
||||
const getArchiveFileSizeInBytesMock = jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValue(fileSize)
|
||||
|
||||
const extractTarMock = jest.spyOn(tar, 'extractTar')
|
||||
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
|
||||
|
||||
const cacheKey = await restoreCache(paths, key, [], options)
|
||||
|
||||
expect(cacheKey).toBe(key)
|
||||
expect(logInfoMock).toHaveBeenCalledWith(`Cache hit for: ${key}`)
|
||||
expect(getCacheVersionMock).toHaveBeenCalledWith(
|
||||
paths,
|
||||
compressionMethod,
|
||||
false
|
||||
)
|
||||
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
restoreKeys: [],
|
||||
version: cacheVersion
|
||||
})
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
signedDownloadUrl,
|
||||
archivePath,
|
||||
options
|
||||
)
|
||||
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
|
||||
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1)
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
|
||||
|
||||
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
|
||||
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
|
||||
|
||||
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('restore with cache found for restore key', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'node-test'
|
||||
const restoreKeys = ['node-']
|
||||
const compressionMethod = CompressionMethod.Gzip
|
||||
const signedDownloadUrl = 'https://blob-storage.local?signed=true'
|
||||
const cacheVersion =
|
||||
'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
|
||||
const options = {useAzureSdk: true} as DownloadOptions
|
||||
|
||||
const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
|
||||
getCacheVersionMock.mockReturnValue(cacheVersion)
|
||||
|
||||
const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
|
||||
|
||||
const getCacheDownloadURLMock = jest.spyOn(
|
||||
CacheServiceClientJSON.prototype,
|
||||
'GetCacheEntryDownloadURL'
|
||||
)
|
||||
getCacheDownloadURLMock.mockReturnValue(
|
||||
Promise.resolve({
|
||||
ok: true,
|
||||
signedDownloadUrl,
|
||||
matchedKey: restoreKeys[0]
|
||||
})
|
||||
)
|
||||
|
||||
const tempPath = '/foo/bar'
|
||||
|
||||
const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
|
||||
createTempDirectoryMock.mockImplementation(async () => {
|
||||
return Promise.resolve(tempPath)
|
||||
})
|
||||
|
||||
const archivePath = path.join(tempPath, CacheFilename.Gzip)
|
||||
const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')
|
||||
|
||||
const fileSize = 142
|
||||
const getArchiveFileSizeInBytesMock = jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValue(fileSize)
|
||||
|
||||
const extractTarMock = jest.spyOn(tar, 'extractTar')
|
||||
const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
|
||||
|
||||
const cacheKey = await restoreCache(paths, key, restoreKeys, options)
|
||||
|
||||
expect(cacheKey).toBe(restoreKeys[0])
|
||||
expect(logInfoMock).toHaveBeenCalledWith(
|
||||
`Cache hit for restore-key: ${restoreKeys[0]}`
|
||||
)
|
||||
expect(getCacheVersionMock).toHaveBeenCalledWith(
|
||||
paths,
|
||||
compressionMethod,
|
||||
false
|
||||
)
|
||||
expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
restoreKeys,
|
||||
version: cacheVersion
|
||||
})
|
||||
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
|
||||
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||
signedDownloadUrl,
|
||||
archivePath,
|
||||
options
|
||||
)
|
||||
expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
|
||||
expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
|
||||
|
||||
expect(extractTarMock).toHaveBeenCalledTimes(1)
|
||||
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
|
||||
|
||||
expect(unlinkFileMock).toHaveBeenCalledTimes(1)
|
||||
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
|
||||
|
||||
expect(compressionMethodMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('restore with lookup only enabled', async () => {
  const paths = ['node_modules']
  const key = 'node-test'
  const compressionMethod = CompressionMethod.Gzip
  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
  const cacheVersion =
    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
  const options = {lookupOnly: true, useAzureSdk: true} as DownloadOptions

  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
  getCacheVersionMock.mockReturnValue(cacheVersion)

  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))

  const getCacheDownloadURLMock = jest.spyOn(
    CacheServiceClientJSON.prototype,
    'GetCacheEntryDownloadURL'
  )
  getCacheDownloadURLMock.mockReturnValue(
    Promise.resolve({
      ok: true,
      signedDownloadUrl,
      matchedKey: key
    })
  )

  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
  const downloadCacheMock = jest.spyOn(cacheHttpClient, 'downloadCache')

  const cacheKey = await restoreCache(paths, key, undefined, options)

  expect(cacheKey).toBe(key)
  expect(getCacheVersionMock).toHaveBeenCalledWith(
    paths,
    compressionMethod,
    false
  )
  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
    key,
    restoreKeys: [],
    version: cacheVersion
  })
  expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')

  // creating a tempDir and downloading the cache are skipped
  expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
  expect(downloadCacheMock).toHaveBeenCalledTimes(0)
})

@@ -3,20 +3,17 @@ import * as path from 'path'
import {saveCache} from '../src/cache'
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
import * as cacheUtils from '../src/internal/cacheUtils'
import * as config from '../src/internal/config'
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
import * as tar from '../src/internal/tar'
import {TypedResponse} from '@actions/http-client/lib/interfaces'
import {HttpClientError} from '@actions/http-client'
import {
  ReserveCacheResponse,
  ITypedResponseWithError
} from '../src/internal/contracts'
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
import {HttpClientError} from '@actions/http-client'

jest.mock('../src/internal/cacheHttpClient')
jest.mock('../src/internal/cacheUtils')
jest.mock('../src/internal/config')
jest.mock('../src/internal/tar')

beforeAll(() => {
@@ -97,7 +94,7 @@ test('save with large cache outputs should fail in GHES with error message', asy
    .spyOn(cacheUtils, 'getCompressionMethod')
    .mockReturnValueOnce(Promise.resolve(compression))

  jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
  jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true)

  const reserveCacheMock = jest
    .spyOn(cacheHttpClient, 'reserveCache')
@@ -149,7 +146,7 @@ test('save with large cache outputs should fail in GHES without error message',
    .spyOn(cacheUtils, 'getCompressionMethod')
    .mockReturnValueOnce(Promise.resolve(compression))

  jest.spyOn(config, 'isGhes').mockReturnValueOnce(true)
  jest.spyOn(cacheUtils, 'isGhes').mockReturnValueOnce(true)

  const reserveCacheMock = jest
    .spyOn(cacheHttpClient, 'reserveCache')
@@ -224,55 +221,46 @@ test('save with reserve cache failure should fail', async () => {
test('save with server error should fail', async () => {
  const filePath = 'node_modules'
  const primaryKey = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
  const logErrorMock = jest.spyOn(core, 'error')

  // Mock cache service version to V2
  const getCacheServiceVersionMock = jest
    .spyOn(config, 'getCacheServiceVersion')
    .mockReturnValue('v2')

  // Mock V2 CreateCacheEntry to succeed
  const createCacheEntryMock = jest
    .spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
    .mockReturnValue(
      Promise.resolve({
        ok: true,
        signedUploadUrl: 'https://blob-storage.local?signed=true',
        message: ''
      })
    )

  // Mock the FinalizeCacheEntryUpload to succeed (since the error should happen in saveCache)
  jest
    .spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
    .mockReturnValue(
      Promise.resolve({ok: true, entryId: '4', message: 'Success'})
    )
  const cachePaths = [path.resolve(filePath)]
  const logWarningMock = jest.spyOn(core, 'warning')
  const cacheId = 4
  const reserveCacheMock = jest
    .spyOn(cacheHttpClient, 'reserveCache')
    .mockImplementation(async () => {
      const response: TypedResponse<ReserveCacheResponse> = {
        statusCode: 500,
        result: {cacheId},
        headers: {}
      }
      return response
    })

  const createTarMock = jest.spyOn(tar, 'createTar')

  // Mock the saveCache call to throw a server error
  const saveCacheMock = jest
    .spyOn(cacheHttpClient, 'saveCache')
    .mockImplementationOnce(() => {
      throw new HttpClientError('HTTP Error Occurred', 500)
      throw new Error('HTTP Error Occurred')
    })

  const compression = CompressionMethod.Zstd
  const getCompressionMock = jest
    .spyOn(cacheUtils, 'getCompressionMethod')
    .mockReturnValueOnce(Promise.resolve(compression))

  await saveCache([filePath], primaryKey)

  expect(logErrorMock).toHaveBeenCalledTimes(1)
  expect(logErrorMock).toHaveBeenCalledWith(
  expect(logWarningMock).toHaveBeenCalledTimes(1)
  expect(logWarningMock).toHaveBeenCalledWith(
    'Failed to save: HTTP Error Occurred'
  )

  expect(createCacheEntryMock).toHaveBeenCalledTimes(1)
  expect(reserveCacheMock).toHaveBeenCalledTimes(1)
  expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, [filePath], {
    cacheSize: undefined,
    compressionMethod: compression,
    enableCrossOsArchive: false
  })
  const archiveFolder = '/foo/bar'
  const cachePaths = [path.resolve(filePath)]
  const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
  expect(createTarMock).toHaveBeenCalledTimes(1)
  expect(createTarMock).toHaveBeenCalledWith(
    archiveFolder,
@@ -280,10 +268,8 @@ test('save with server error should fail', async () => {
    compression
  )
  expect(saveCacheMock).toHaveBeenCalledTimes(1)
  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
  expect(getCompressionMock).toHaveBeenCalledTimes(1)

  // Restore the getCacheServiceVersion mock to its original state
  getCacheServiceVersionMock.mockRestore()
})

test('save with valid inputs uploads a cache', async () => {
@@ -327,12 +313,7 @@ test('save with valid inputs uploads a cache', async () => {
    compression
  )
  expect(saveCacheMock).toHaveBeenCalledTimes(1)
  expect(saveCacheMock).toHaveBeenCalledWith(
    cacheId,
    archiveFile,
    '',
    undefined
  )
  expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile, undefined)
  expect(getCompressionMock).toHaveBeenCalledTimes(1)
})

@ -1,587 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import * as path from 'path'
|
||||
import {saveCache} from '../src/cache'
|
||||
import * as cacheUtils from '../src/internal/cacheUtils'
|
||||
import {CacheFilename, CompressionMethod} from '../src/internal/constants'
|
||||
import * as config from '../src/internal/config'
|
||||
import * as tar from '../src/internal/tar'
|
||||
import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
|
||||
import * as cacheHttpClient from '../src/internal/cacheHttpClient'
|
||||
import {UploadOptions} from '../src/options'
|
||||
|
||||
let logDebugMock: jest.SpyInstance
|
||||
|
||||
jest.mock('../src/internal/tar')
|
||||
|
||||
const uploadFileMock = jest.fn()
|
||||
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
|
||||
uploadFile: uploadFileMock
|
||||
}))
|
||||
jest.mock('@azure/storage-blob', () => ({
|
||||
BlobClient: jest.fn().mockImplementation(() => {
|
||||
return {
|
||||
getBlockBlobClient: blockBlobClientMock
|
||||
}
|
||||
})
|
||||
}))
|
||||
|
||||
beforeAll(() => {
|
||||
process.env['ACTIONS_RUNTIME_TOKEN'] = 'token'
|
||||
jest.spyOn(console, 'log').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'debug').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'info').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'warning').mockImplementation(() => {})
|
||||
jest.spyOn(core, 'error').mockImplementation(() => {})
|
||||
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async filePaths => {
|
||||
return filePaths.map(x => path.resolve(x))
|
||||
})
|
||||
jest.spyOn(cacheUtils, 'createTempDirectory').mockImplementation(async () => {
|
||||
return Promise.resolve('/foo/bar')
|
||||
})
|
||||
|
||||
// Ensure that we're using v2 for these tests
|
||||
jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
|
||||
|
||||
logDebugMock = jest.spyOn(core, 'debug')
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
test('save with missing input should fail', async () => {
|
||||
const paths: string[] = []
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
|
||||
await expect(saveCache(paths, key)).rejects.toThrowError(
|
||||
`Path Validation Error: At least one directory or file path is required`
|
||||
)
|
||||
})
|
||||
|
||||
test('create cache entry failure on non-ok response', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const infoLogMock = jest.spyOn(core, 'info')
|
||||
|
||||
const createCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockResolvedValue({ok: false, signedUploadUrl: '', message: ''})
|
||||
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const finalizeCacheEntryMock = jest.spyOn(
|
||||
CacheServiceClientJSON.prototype,
|
||||
'FinalizeCacheEntryUpload'
|
||||
)
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockResolvedValueOnce(compression)
|
||||
const archiveFileSize = 1024
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
|
||||
|
||||
const cacheId = await saveCache(paths, key)
|
||||
expect(cacheId).toBe(-1)
|
||||
expect(infoLogMock).toHaveBeenCalledWith(
|
||||
`Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.`
|
||||
)
|
||||
|
||||
expect(createCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion
|
||||
})
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1)
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
expect(finalizeCacheEntryMock).toHaveBeenCalledTimes(0)
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(0)
|
||||
})
|
||||
|
||||
test('create cache entry fails on rejected promise', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const infoLogMock = jest.spyOn(core, 'info')
|
||||
|
||||
const createCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockRejectedValue(new Error('Failed to create cache entry'))
|
||||
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockResolvedValueOnce(compression)
|
||||
const archiveFileSize = 1024
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const cacheId = await saveCache(paths, key)
|
||||
expect(cacheId).toBe(-1)
|
||||
expect(infoLogMock).toHaveBeenCalledWith(
|
||||
`Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.`
|
||||
)
|
||||
|
||||
expect(createCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheUtils.getCacheVersion(paths, compression)
|
||||
})
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1)
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('save cache fails if a signedUploadURL was not passed', async () => {
|
||||
const paths = 'node_modules'
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const cachePaths = [path.resolve(paths)]
|
||||
const signedUploadURL = ''
|
||||
const archiveFileSize = 1024
|
||||
const options: UploadOptions = {
|
||||
archiveSizeBytes: archiveFileSize, // These should always match
|
||||
useAzureSdk: true,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
uploadConcurrency: 8
|
||||
}
|
||||
|
||||
const createCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL, message: ''})
|
||||
)
|
||||
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
|
||||
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockReturnValueOnce(Promise.resolve(compression))
|
||||
|
||||
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const cacheId = await saveCache([paths], key, options)
|
||||
|
||||
expect(cacheId).toBe(-1)
|
||||
expect(createCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion
|
||||
})
|
||||
|
||||
const archiveFolder = '/foo/bar'
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
)
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||
-1,
|
||||
archiveFile,
|
||||
signedUploadURL,
|
||||
options
|
||||
)
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('finalize save cache failure', async () => {
|
||||
const paths = 'node_modules'
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const cachePaths = [path.resolve(paths)]
|
||||
const logWarningMock = jest.spyOn(core, 'warning')
|
||||
const signedUploadURL = 'https://blob-storage.local?signed=true'
|
||||
const archiveFileSize = 1024
|
||||
const options: UploadOptions = {
|
||||
archiveSizeBytes: archiveFileSize, // These should always match
|
||||
useAzureSdk: true,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
uploadConcurrency: 8
|
||||
}
|
||||
|
||||
const createCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL, message: ''})
|
||||
)
|
||||
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, 'saveCache')
|
||||
.mockResolvedValue(Promise.resolve())
|
||||
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockReturnValueOnce(Promise.resolve(compression))
|
||||
|
||||
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const finalizeCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
|
||||
.mockReturnValue(Promise.resolve({ok: false, entryId: '', message: ''}))
|
||||
|
||||
const cacheId = await saveCache([paths], key, options)
|
||||
|
||||
expect(createCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion
|
||||
})
|
||||
|
||||
const archiveFolder = '/foo/bar'
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
)
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||
-1,
|
||||
archiveFile,
|
||||
signedUploadURL,
|
||||
options
|
||||
)
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion,
|
||||
sizeBytes: archiveFileSize.toString()
|
||||
})
|
||||
|
||||
expect(cacheId).toBe(-1)
|
||||
expect(logWarningMock).toHaveBeenCalledWith(
|
||||
`Failed to save: Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
|
||||
)
|
||||
})
|
||||
|
||||
test('save with valid inputs uploads a cache', async () => {
|
||||
const paths = 'node_modules'
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const cachePaths = [path.resolve(paths)]
|
||||
const signedUploadURL = 'https://blob-storage.local?signed=true'
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const archiveFileSize = 1024
|
||||
const options: UploadOptions = {
|
||||
archiveSizeBytes: archiveFileSize, // These should always match
|
||||
useAzureSdk: true,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
uploadConcurrency: 8
|
||||
}
|
||||
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const cacheId = 4
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL, message: ''})
|
||||
)
|
||||
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
|
||||
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockReturnValue(Promise.resolve(compression))
|
||||
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
|
||||
|
||||
const finalizeCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, entryId: cacheId.toString(), message: ''})
|
||||
)
|
||||
|
||||
const expectedCacheId = await saveCache([paths], key)
|
||||
|
||||
const archiveFolder = '/foo/bar'
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||
-1,
|
||||
archiveFile,
|
||||
signedUploadURL,
|
||||
options
|
||||
)
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
)
|
||||
|
||||
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion,
|
||||
sizeBytes: archiveFileSize.toString()
|
||||
})
|
||||
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
expect(expectedCacheId).toBe(cacheId)
|
||||
})
|
||||
|
||||
test('save with extremely large cache should succeed in v2 (no size limit)', async () => {
|
||||
const paths = 'node_modules'
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const cachePaths = [path.resolve(paths)]
|
||||
const signedUploadURL = 'https://blob-storage.local?signed=true'
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
// Simulate a very large cache (20GB)
|
||||
const archiveFileSize = 20 * 1024 * 1024 * 1024 // 20GB
|
||||
const options: UploadOptions = {
|
||||
archiveSizeBytes: archiveFileSize,
|
||||
useAzureSdk: true,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
uploadConcurrency: 8
|
||||
}
|
||||
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const cacheId = 4
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL, message: ''})
|
||||
)
|
||||
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
|
||||
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockReturnValue(Promise.resolve(compression))
|
||||
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
|
||||
|
||||
const finalizeCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, entryId: cacheId.toString(), message: ''})
|
||||
)
|
||||
|
||||
const expectedCacheId = await saveCache([paths], key)
|
||||
|
||||
const archiveFolder = '/foo/bar'
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||
-1,
|
||||
archiveFile,
|
||||
signedUploadURL,
|
||||
options
|
||||
)
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
)
|
||||
|
||||
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion,
|
||||
sizeBytes: archiveFileSize.toString()
|
||||
})
|
||||
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
expect(expectedCacheId).toBe(cacheId)
|
||||
})
|
||||
|
||||
test('save with create cache entry failure and specific error message', async () => {
|
||||
const paths = ['node_modules']
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const infoLogMock = jest.spyOn(core, 'info')
|
||||
const warningLogMock = jest.spyOn(core, 'warning')
|
||||
const errorMessage = 'Cache storage quota exceeded for repository'
|
||||
|
||||
const createCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockResolvedValue({ok: false, signedUploadUrl: '', message: errorMessage})
|
||||
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockResolvedValueOnce(compression)
|
||||
const archiveFileSize = 1024
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const cacheId = await saveCache(paths, key)
|
||||
expect(cacheId).toBe(-1)
|
||||
expect(warningLogMock).toHaveBeenCalledWith(
|
||||
`Cache reservation failed: ${errorMessage}`
|
||||
)
|
||||
expect(infoLogMock).toHaveBeenCalledWith(
|
||||
`Failed to save: Unable to reserve cache with key ${key}, another job may be creating this cache.`
|
||||
)
|
||||
|
||||
expect(createCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheUtils.getCacheVersion(paths, compression)
|
||||
})
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1)
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
test('save with finalize cache entry failure and specific error message', async () => {
|
||||
const paths = 'node_modules'
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const cachePaths = [path.resolve(paths)]
|
||||
const logWarningMock = jest.spyOn(core, 'warning')
|
||||
const signedUploadURL = 'https://blob-storage.local?signed=true'
|
||||
const archiveFileSize = 1024
|
||||
const errorMessage =
|
||||
'Cache entry finalization failed due to concurrent access'
|
||||
const options: UploadOptions = {
|
||||
archiveSizeBytes: archiveFileSize,
|
||||
useAzureSdk: true,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
uploadConcurrency: 8
|
||||
}
|
||||
|
||||
const createCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL, message: ''})
|
||||
)
|
||||
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, 'saveCache')
|
||||
.mockResolvedValue()
|
||||
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockReturnValueOnce(Promise.resolve(compression))
|
||||
|
||||
const cacheVersion = cacheUtils.getCacheVersion([paths], compression)
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const finalizeCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: false, entryId: '', message: errorMessage})
|
||||
)
|
||||
|
||||
const cacheId = await saveCache([paths], key, options)
|
||||
|
||||
expect(createCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion
|
||||
})
|
||||
|
||||
const archiveFolder = '/foo/bar'
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
)
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||
-1,
|
||||
archiveFile,
|
||||
signedUploadURL,
|
||||
options
|
||||
)
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion,
|
||||
sizeBytes: archiveFileSize.toString()
|
||||
})
|
||||
|
||||
expect(cacheId).toBe(-1)
|
||||
expect(logWarningMock).toHaveBeenCalledWith(errorMessage)
|
||||
})
|
||||
|
||||
test('save with multiple large caches should succeed in v2 (testing 50GB)', async () => {
|
||||
const paths = ['large-dataset', 'node_modules', 'build-artifacts']
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
const cachePaths = paths.map(p => path.resolve(p))
|
||||
const signedUploadURL = 'https://blob-storage.local?signed=true'
|
||||
const createTarMock = jest.spyOn(tar, 'createTar')
|
||||
// Simulate an extremely large cache (50GB)
|
||||
const archiveFileSize = 50 * 1024 * 1024 * 1024 // 50GB
|
||||
const options: UploadOptions = {
|
||||
archiveSizeBytes: archiveFileSize,
|
||||
useAzureSdk: true,
|
||||
uploadChunkSize: 64 * 1024 * 1024,
|
||||
uploadConcurrency: 8
|
||||
}
|
||||
|
||||
jest
|
||||
.spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
|
||||
.mockReturnValueOnce(archiveFileSize)
|
||||
|
||||
const cacheId = 7
|
||||
jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'CreateCacheEntry')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, signedUploadUrl: signedUploadURL, message: ''})
|
||||
)
|
||||
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, 'saveCache')
|
||||
|
||||
const compression = CompressionMethod.Zstd
|
||||
const getCompressionMock = jest
|
||||
.spyOn(cacheUtils, 'getCompressionMethod')
|
||||
.mockReturnValue(Promise.resolve(compression))
|
||||
const cacheVersion = cacheUtils.getCacheVersion(paths, compression)
|
||||
|
||||
const finalizeCacheEntryMock = jest
|
||||
.spyOn(CacheServiceClientJSON.prototype, 'FinalizeCacheEntryUpload')
|
||||
.mockReturnValue(
|
||||
Promise.resolve({ok: true, entryId: cacheId.toString(), message: ''})
|
||||
)
|
||||
|
||||
const expectedCacheId = await saveCache(paths, key)
|
||||
|
||||
const archiveFolder = '/foo/bar'
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd)
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(
|
||||
-1,
|
||||
archiveFile,
|
||||
signedUploadURL,
|
||||
options
|
||||
)
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
)
|
||||
|
||||
expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
|
||||
key,
|
||||
version: cacheVersion,
|
||||
sizeBytes: archiveFileSize.toString()
|
||||
})
|
||||
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1)
|
||||
expect(expectedCacheId).toBe(cacheId)
|
||||
})
|
||||
|
||||
test('save with non existing path should not save cache using v2 saveCache', async () => {
|
||||
const path = 'node_modules'
|
||||
const key = 'Linux-node-bb828da54c148048dd17899ba9fda624811cfb43'
|
||||
jest.spyOn(cacheUtils, 'resolvePaths').mockImplementation(async () => {
|
||||
return []
|
||||
})
|
||||
await expect(saveCache([path], key)).rejects.toThrowError(
|
||||
`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
|
||||
)
|
||||
})
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
import * as uploadUtils from '../src/internal/uploadUtils'
|
||||
import {TransferProgressEvent} from '@azure/ms-rest-js'
|
||||
|
||||
test('upload progress tracked correctly', () => {
|
||||
const progress = new uploadUtils.UploadProgress(1000)
|
||||
|
||||
expect(progress.contentLength).toBe(1000)
|
||||
expect(progress.sentBytes).toBe(0)
|
||||
expect(progress.displayedComplete).toBe(false)
|
||||
expect(progress.timeoutHandle).toBeUndefined()
|
||||
expect(progress.getTransferredBytes()).toBe(0)
|
||||
expect(progress.isDone()).toBe(false)
|
||||
|
||||
progress.onProgress()({loadedBytes: 0} as TransferProgressEvent)
|
||||
|
||||
expect(progress.contentLength).toBe(1000)
|
||||
expect(progress.sentBytes).toBe(0)
|
||||
expect(progress.displayedComplete).toBe(false)
|
||||
expect(progress.timeoutHandle).toBeUndefined()
|
||||
expect(progress.getTransferredBytes()).toBe(0)
|
||||
expect(progress.isDone()).toBe(false)
|
||||
|
||||
progress.onProgress()({loadedBytes: 250} as TransferProgressEvent)
|
||||
|
||||
expect(progress.contentLength).toBe(1000)
|
||||
expect(progress.sentBytes).toBe(250)
|
||||
expect(progress.displayedComplete).toBe(false)
|
||||
expect(progress.timeoutHandle).toBeUndefined()
|
||||
expect(progress.getTransferredBytes()).toBe(250)
|
||||
expect(progress.isDone()).toBe(false)
|
||||
|
||||
progress.onProgress()({loadedBytes: 500} as TransferProgressEvent)
|
||||
|
||||
expect(progress.contentLength).toBe(1000)
|
||||
expect(progress.sentBytes).toBe(500)
|
||||
expect(progress.displayedComplete).toBe(false)
|
||||
expect(progress.timeoutHandle).toBeUndefined()
|
||||
expect(progress.getTransferredBytes()).toBe(500)
|
||||
expect(progress.isDone()).toBe(false)
|
||||
|
||||
progress.onProgress()({loadedBytes: 750} as TransferProgressEvent)
|
||||
|
||||
expect(progress.contentLength).toBe(1000)
|
||||
expect(progress.sentBytes).toBe(750)
|
||||
expect(progress.displayedComplete).toBe(false)
|
||||
expect(progress.timeoutHandle).toBeUndefined()
|
||||
expect(progress.getTransferredBytes()).toBe(750)
|
||||
expect(progress.isDone()).toBe(false)
|
||||
|
||||
progress.onProgress()({loadedBytes: 1000} as TransferProgressEvent)
|
||||
|
||||
expect(progress.contentLength).toBe(1000)
|
||||
expect(progress.sentBytes).toBe(1000)
|
||||
expect(progress.displayedComplete).toBe(false)
|
||||
expect(progress.timeoutHandle).toBeUndefined()
|
||||
expect(progress.getTransferredBytes()).toBe(1000)
|
||||
expect(progress.isDone()).toBe(true)
|
||||
})
|
||||
|
|
@ -1,158 +0,0 @@
|
|||
import {maskSigUrl, maskSecretUrls} from '../src/internal/shared/util'
|
||||
import {setSecret, debug} from '@actions/core'
|
||||
|
||||
jest.mock('@actions/core')
|
||||
|
||||
describe('maskSigUrl', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('does nothing if no sig parameter is present', () => {
|
||||
const url = 'https://example.com'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('masks the sig parameter in the middle of the URL and sets it as a secret', () => {
|
||||
const url = 'https://example.com/?param1=value1&sig=12345¶m2=value2'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('12345')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
|
||||
})
|
||||
|
||||
it('does nothing if the URL is empty', () => {
|
||||
const url = ''
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('handles URLs with fragments', () => {
|
||||
const url = 'https://example.com?sig=12345#fragment'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('12345')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
|
||||
})
|
||||
})
|
||||
|
||||
describe('maskSigUrl handles special characters in signatures', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('handles signatures with slashes', () => {
|
||||
const url = 'https://example.com/?sig=abc/123'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc/123')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%2F123')
|
||||
})
|
||||
|
||||
it('handles signatures with plus signs', () => {
|
||||
const url = 'https://example.com/?sig=abc+123'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc 123')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%20123')
|
||||
})
|
||||
|
||||
it('handles signatures with equals signs', () => {
|
||||
const url = 'https://example.com/?sig=abc=123'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc=123')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%3D123')
|
||||
})
|
||||
|
||||
it('handles already percent-encoded signatures', () => {
|
||||
const url = 'https://example.com/?sig=abc%2F123%3D'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('abc/123=')
|
||||
expect(setSecret).toHaveBeenCalledWith('abc%2F123%3D')
|
||||
})
|
||||
|
||||
it('handles complex Azure SAS signatures', () => {
|
||||
const url =
|
||||
'https://example.com/container/file.txt?sig=nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D&se=2023-12-31'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith(
|
||||
'nXyQIUj//06Cxt80pBRYiiJlYqtPYg5sz/vEh5iHAhw='
|
||||
)
|
||||
expect(setSecret).toHaveBeenCalledWith(
|
||||
'nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D'
|
||||
)
|
||||
})
|
||||
|
||||
it('handles signatures with multiple special characters', () => {
|
||||
const url = 'https://example.com/?sig=a/b+c=d&e=f'
|
||||
maskSigUrl(url)
|
||||
expect(setSecret).toHaveBeenCalledWith('a/b c=d')
|
||||
expect(setSecret).toHaveBeenCalledWith('a%2Fb%20c%3Dd')
|
||||
})
|
||||
})
|
||||
|
||||
describe('maskSecretUrls', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it('masks sig parameters in signed_upload_url and signed_download_url', () => {
|
||||
const body = {
|
||||
signed_upload_url: 'https://upload.com?sig=upload123',
|
||||
signed_download_url: 'https://download.com?sig=download123'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).toHaveBeenCalledWith('upload123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
|
||||
expect(setSecret).toHaveBeenCalledWith('download123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
|
||||
})
|
||||
|
||||
it('handles case where only upload_url is present', () => {
|
||||
const body = {
|
||||
signed_upload_url: 'https://upload.com?sig=upload123'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).toHaveBeenCalledWith('upload123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
|
||||
})
|
||||
|
||||
it('handles case where only download_url is present', () => {
|
||||
const body = {
|
||||
signed_download_url: 'https://download.com?sig=download123'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).toHaveBeenCalledWith('download123')
|
||||
expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
|
||||
})
|
||||
|
||||
it('handles case where URLs do not contain sig parameters', () => {
|
||||
const body = {
|
||||
signed_upload_url: 'https://upload.com?token=abc',
|
||||
signed_download_url: 'https://download.com?token=xyz'
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('handles empty string URLs', () => {
|
||||
const body = {
|
||||
signed_upload_url: '',
|
||||
signed_download_url: ''
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does nothing if body is not an object or is null', () => {
|
||||
maskSecretUrls(null)
|
||||
expect(debug).toHaveBeenCalledWith('body is not an object or is null')
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does nothing if signed_upload_url and signed_download_url are not strings', () => {
|
||||
const body = {
|
||||
signed_upload_url: 123,
|
||||
signed_download_url: 456
|
||||
}
|
||||
maskSecretUrls(body)
|
||||
expect(setSecret).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "@actions/cache",
  "version": "5.0.0",
  "version": "3.2.4",
  "preview": true,
  "description": "Actions cache lib",
  "keywords": [
@@ -37,10 +37,9 @@
    "url": "https://github.com/actions/toolkit/issues"
  },
  "dependencies": {
    "@actions/core": "^1.11.1",
    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.0.1",
    "@actions/glob": "^0.1.0",
    "@protobuf-ts/runtime-rpc": "^2.11.1",
    "@actions/http-client": "^2.1.1",
    "@actions/io": "^1.0.1",
    "@azure/abort-controller": "^1.1.0",
@@ -49,13 +48,7 @@
    "semver": "^6.3.1"
  },
  "devDependencies": {
    "@types/node": "^24.1.0",
    "@types/semver": "^6.0.0",
    "@protobuf-ts/plugin": "^2.9.4",
    "typescript": "^5.2.2"
  },
  "overrides": {
    "uri-js": "npm:uri-js-replace@^1.0.1",
    "node-fetch": "^3.3.2"
  }
  }
}

@ -2,17 +2,9 @@ import * as core from '@actions/core'
|
|||
import * as path from 'path'
|
||||
import * as utils from './internal/cacheUtils'
|
||||
import * as cacheHttpClient from './internal/cacheHttpClient'
|
||||
import * as cacheTwirpClient from './internal/shared/cacheTwirpClient'
|
||||
import {getCacheServiceVersion, isGhes} from './internal/config'
|
||||
import {DownloadOptions, UploadOptions} from './options'
|
||||
import {createTar, extractTar, listTar} from './internal/tar'
|
||||
import {
|
||||
CreateCacheEntryRequest,
|
||||
FinalizeCacheEntryUploadRequest,
|
||||
FinalizeCacheEntryUploadResponse,
|
||||
GetCacheEntryDownloadURLRequest
|
||||
} from './generated/results/api/v1/cache'
|
||||
import {HttpClientError} from '@actions/http-client'
|
||||
import {DownloadOptions, UploadOptions} from './options'
|
||||
|
||||
export class ValidationError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message)
|
||||
|
|
@ -29,14 +21,6 @@ export class ReserveCacheError extends Error {
|
|||
}
|
||||
}
|
||||
|
||||
export class FinalizeCacheError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message)
|
||||
this.name = 'FinalizeCacheError'
|
||||
Object.setPrototypeOf(this, FinalizeCacheError.prototype)
|
||||
}
|
||||
}
|
||||
|
||||
function checkPaths(paths: string[]): void {
|
||||
if (!paths || paths.length === 0) {
|
||||
throw new ValidationError(
|
||||
|
|
@ -64,27 +48,17 @@ function checkKey(key: string): void {
|
|||
*
|
||||
* @returns boolean return true if Actions cache service feature is available, otherwise false
|
||||
*/
|
||||
export function isFeatureAvailable(): boolean {
|
||||
const cacheServiceVersion = getCacheServiceVersion()
|
||||
|
||||
// Check availability based on cache service version
|
||||
switch (cacheServiceVersion) {
|
||||
case 'v2':
|
||||
// For v2, we need ACTIONS_RESULTS_URL
|
||||
return !!process.env['ACTIONS_RESULTS_URL']
|
||||
case 'v1':
|
||||
default:
|
||||
// For v1, we only need ACTIONS_CACHE_URL
|
||||
return !!process.env['ACTIONS_CACHE_URL']
|
||||
}
|
||||
export function isFeatureAvailable(): boolean {
|
||||
return !!process.env['ACTIONS_CACHE_URL']
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores cache from keys
|
||||
*
|
||||
* @param paths a list of file paths to restore from the cache
|
||||
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
|
||||
* @param primaryKey an explicit key for restoring the cache
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
|
||||
* @param downloadOptions cache download options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
|
||||
* @returns string returns the key for the cache hit, otherwise returns undefined
|
||||
|
|
@ -96,49 +70,8 @@ export async function restoreCache(
|
|||
options?: DownloadOptions,
|
||||
enableCrossOsArchive = false
|
||||
): Promise<string | undefined> {
|
||||
const cacheServiceVersion: string = getCacheServiceVersion()
|
||||
core.debug(`Cache service version: ${cacheServiceVersion}`)
|
||||
|
||||
checkPaths(paths)
|
||||
|
||||
switch (cacheServiceVersion) {
|
||||
case 'v2':
|
||||
return await restoreCacheV2(
|
||||
paths,
|
||||
primaryKey,
|
||||
restoreKeys,
|
||||
options,
|
||||
enableCrossOsArchive
|
||||
)
|
||||
case 'v1':
|
||||
default:
|
||||
return await restoreCacheV1(
|
||||
paths,
|
||||
primaryKey,
|
||||
restoreKeys,
|
||||
options,
|
||||
enableCrossOsArchive
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores cache using the legacy Cache Service
|
||||
*
|
||||
* @param paths a list of file paths to restore from the cache
|
||||
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
|
||||
* @param options cache download options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform
|
||||
* @returns string returns the key for the cache hit, otherwise returns undefined
|
||||
*/
|
||||
async function restoreCacheV1(
|
||||
paths: string[],
|
||||
primaryKey: string,
|
||||
restoreKeys?: string[],
|
||||
options?: DownloadOptions,
|
||||
enableCrossOsArchive = false
|
||||
): Promise<string | undefined> {
|
||||
restoreKeys = restoreKeys || []
|
||||
const keys = [primaryKey, ...restoreKeys]
|
||||
|
||||
|
|
@ -205,17 +138,8 @@ async function restoreCacheV1(
|
|||
if (typedError.name === ValidationError.name) {
|
||||
throw error
|
||||
} else {
|
||||
// warn on cache restore failure and continue build
|
||||
// Log server errors (5xx) as errors, all other errors as warnings
|
||||
if (
|
||||
typedError instanceof HttpClientError &&
|
||||
typeof typedError.statusCode === 'number' &&
|
||||
typedError.statusCode >= 500
|
||||
) {
|
||||
core.error(`Failed to restore: ${(error as Error).message}`)
|
||||
} else {
|
||||
core.warning(`Failed to restore: ${(error as Error).message}`)
|
||||
}
|
||||
// Supress all non-validation cache related errors because caching should be optional
|
||||
core.warning(`Failed to restore: ${(error as Error).message}`)
|
||||
}
|
||||
} finally {
|
||||
// Try to delete the archive to save space
|
||||
|
|
@ -229,139 +153,6 @@ async function restoreCacheV1(
|
|||
return undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores cache using Cache Service v2
|
||||
*
|
||||
* @param paths a list of file paths to restore from the cache
|
||||
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
|
||||
* @param downloadOptions cache download options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
|
||||
* @returns string returns the key for the cache hit, otherwise returns undefined
|
||||
*/
|
||||
async function restoreCacheV2(
|
||||
paths: string[],
|
||||
primaryKey: string,
|
||||
restoreKeys?: string[],
|
||||
options?: DownloadOptions,
|
||||
enableCrossOsArchive = false
|
||||
): Promise<string | undefined> {
|
||||
// Override UploadOptions to force the use of Azure
|
||||
options = {
|
||||
...options,
|
||||
useAzureSdk: true
|
||||
}
|
||||
restoreKeys = restoreKeys || []
|
||||
const keys = [primaryKey, ...restoreKeys]
|
||||
|
||||
core.debug('Resolved Keys:')
|
||||
core.debug(JSON.stringify(keys))
|
||||
|
||||
if (keys.length > 10) {
|
||||
throw new ValidationError(
|
||||
`Key Validation Error: Keys are limited to a maximum of 10.`
|
||||
)
|
||||
}
|
||||
for (const key of keys) {
|
||||
checkKey(key)
|
||||
}
|
||||
|
||||
let archivePath = ''
|
||||
try {
|
||||
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
|
||||
const compressionMethod = await utils.getCompressionMethod()
|
||||
|
||||
const request: GetCacheEntryDownloadURLRequest = {
|
||||
key: primaryKey,
|
||||
restoreKeys,
|
||||
version: utils.getCacheVersion(
|
||||
paths,
|
||||
compressionMethod,
|
||||
enableCrossOsArchive
|
||||
)
|
||||
}
|
||||
|
||||
const response = await twirpClient.GetCacheEntryDownloadURL(request)
|
||||
|
||||
if (!response.ok) {
|
||||
core.debug(
|
||||
`Cache not found for version ${request.version} of keys: ${keys.join(
|
||||
', '
|
||||
)}`
|
||||
)
|
||||
return undefined
|
||||
}
|
||||
|
||||
const isRestoreKeyMatch = request.key !== response.matchedKey
|
||||
if (isRestoreKeyMatch) {
|
||||
core.info(`Cache hit for restore-key: ${response.matchedKey}`)
|
||||
} else {
|
||||
core.info(`Cache hit for: ${response.matchedKey}`)
|
||||
}
|
||||
|
||||
if (options?.lookupOnly) {
|
||||
core.info('Lookup only - skipping download')
|
||||
return response.matchedKey
|
||||
}
|
||||
|
||||
archivePath = path.join(
|
||||
await utils.createTempDirectory(),
|
||||
utils.getCacheFileName(compressionMethod)
|
||||
)
|
||||
core.debug(`Archive path: ${archivePath}`)
|
||||
core.debug(`Starting download of archive to: ${archivePath}`)
|
||||
|
||||
await cacheHttpClient.downloadCache(
|
||||
response.signedDownloadUrl,
|
||||
archivePath,
|
||||
options
|
||||
)
|
||||
|
||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
|
||||
core.info(
|
||||
`Cache Size: ~${Math.round(
|
||||
archiveFileSize / (1024 * 1024)
|
||||
)} MB (${archiveFileSize} B)`
|
||||
)
|
||||
|
||||
if (core.isDebug()) {
|
||||
await listTar(archivePath, compressionMethod)
|
||||
}
|
||||
|
||||
await extractTar(archivePath, compressionMethod)
|
||||
core.info('Cache restored successfully')
|
||||
|
||||
return response.matchedKey
|
||||
} catch (error) {
|
||||
const typedError = error as Error
|
||||
if (typedError.name === ValidationError.name) {
|
||||
throw error
|
||||
} else {
|
||||
// Supress all non-validation cache related errors because caching should be optional
|
||||
// Log server errors (5xx) as errors, all other errors as warnings
|
||||
if (
|
||||
typedError instanceof HttpClientError &&
|
||||
typeof typedError.statusCode === 'number' &&
|
||||
typedError.statusCode >= 500
|
||||
) {
|
||||
core.error(`Failed to restore: ${(error as Error).message}`)
|
||||
} else {
|
||||
core.warning(`Failed to restore: ${(error as Error).message}`)
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
if (archivePath) {
|
||||
await utils.unlinkFile(archivePath)
|
||||
}
|
||||
} catch (error) {
|
||||
core.debug(`Failed to delete archive: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves a list of files with the specified key
|
||||
*
|
||||
|
|
@ -377,34 +168,9 @@ export async function saveCache(
|
|||
options?: UploadOptions,
|
||||
enableCrossOsArchive = false
|
||||
): Promise<number> {
|
||||
const cacheServiceVersion: string = getCacheServiceVersion()
|
||||
core.debug(`Cache service version: ${cacheServiceVersion}`)
|
||||
checkPaths(paths)
|
||||
checkKey(key)
|
||||
switch (cacheServiceVersion) {
|
||||
case 'v2':
|
||||
return await saveCacheV2(paths, key, options, enableCrossOsArchive)
|
||||
case 'v1':
|
||||
default:
|
||||
return await saveCacheV1(paths, key, options, enableCrossOsArchive)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save cache using the legacy Cache Service
|
||||
*
|
||||
* @param paths
|
||||
* @param key
|
||||
* @param options
|
||||
* @param enableCrossOsArchive
|
||||
* @returns
|
||||
*/
|
||||
async function saveCacheV1(
|
||||
paths: string[],
|
||||
key: string,
|
||||
options?: UploadOptions,
|
||||
enableCrossOsArchive = false
|
||||
): Promise<number> {
|
||||
const compressionMethod = await utils.getCompressionMethod()
|
||||
let cacheId = -1
|
||||
|
||||
|
|
@ -436,7 +202,7 @@ async function saveCacheV1(
|
|||
core.debug(`File Size: ${archiveFileSize}`)
|
||||
|
||||
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
|
||||
if (archiveFileSize > fileSizeLimit && !isGhes()) {
|
||||
if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
|
||||
throw new Error(
|
||||
`Cache size of ~${Math.round(
|
||||
archiveFileSize / (1024 * 1024)
|
||||
|
|
@ -471,7 +237,7 @@ async function saveCacheV1(
|
|||
}
|
||||
|
||||
core.debug(`Saving Cache (ID: ${cacheId})`)
|
||||
await cacheHttpClient.saveCache(cacheId, archivePath, '', options)
|
||||
await cacheHttpClient.saveCache(cacheId, archivePath, options)
|
||||
} catch (error) {
|
||||
const typedError = error as Error
|
||||
if (typedError.name === ValidationError.name) {
|
||||
|
|
@ -479,163 +245,7 @@ async function saveCacheV1(
|
|||
} else if (typedError.name === ReserveCacheError.name) {
|
||||
core.info(`Failed to save: ${typedError.message}`)
|
||||
} else {
|
||||
// Log server errors (5xx) as errors, all other errors as warnings
|
||||
if (
|
||||
typedError instanceof HttpClientError &&
|
||||
typeof typedError.statusCode === 'number' &&
|
||||
typedError.statusCode >= 500
|
||||
) {
|
||||
core.error(`Failed to save: ${typedError.message}`)
|
||||
} else {
|
||||
core.warning(`Failed to save: ${typedError.message}`)
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
// Try to delete the archive to save space
|
||||
try {
|
||||
await utils.unlinkFile(archivePath)
|
||||
} catch (error) {
|
||||
core.debug(`Failed to delete archive: ${error}`)
|
||||
}
|
||||
}
|
||||
|
||||
return cacheId
|
||||
}
|
||||
|
||||
/**
|
||||
* Save cache using Cache Service v2
|
||||
*
|
||||
* @param paths a list of file paths to be cached
|
||||
* @param key an explicit key for saving the cache
|
||||
* @param options cache upload options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
|
||||
* @returns the cacheId of the saved cache entry
|
||||
*/
|
||||
async function saveCacheV2(
|
||||
paths: string[],
|
||||
key: string,
|
||||
options?: UploadOptions,
|
||||
enableCrossOsArchive = false
|
||||
): Promise<number> {
|
||||
// Override UploadOptions to force the use of Azure
|
||||
// ...options goes first because we want to override the default values
|
||||
// set in UploadOptions with these specific figures
|
||||
options = {
|
||||
...options,
|
||||
uploadChunkSize: 64 * 1024 * 1024, // 64 MiB
|
||||
uploadConcurrency: 8, // 8 workers for parallel upload
|
||||
useAzureSdk: true
|
||||
}
|
||||
const compressionMethod = await utils.getCompressionMethod()
|
||||
const twirpClient = cacheTwirpClient.internalCacheTwirpClient()
|
||||
let cacheId = -1
|
||||
|
||||
const cachePaths = await utils.resolvePaths(paths)
|
||||
core.debug('Cache Paths:')
|
||||
core.debug(`${JSON.stringify(cachePaths)}`)
|
||||
|
||||
if (cachePaths.length === 0) {
|
||||
throw new Error(
|
||||
`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`
|
||||
)
|
||||
}
|
||||
|
||||
const archiveFolder = await utils.createTempDirectory()
|
||||
const archivePath = path.join(
|
||||
archiveFolder,
|
||||
utils.getCacheFileName(compressionMethod)
|
||||
)
|
||||
|
||||
core.debug(`Archive Path: ${archivePath}`)
|
||||
|
||||
try {
|
||||
await createTar(archiveFolder, cachePaths, compressionMethod)
|
||||
if (core.isDebug()) {
|
||||
await listTar(archivePath, compressionMethod)
|
||||
}
|
||||
|
||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
|
||||
core.debug(`File Size: ${archiveFileSize}`)
|
||||
|
||||
// Set the archive size in the options, will be used to display the upload progress
|
||||
options.archiveSizeBytes = archiveFileSize
|
||||
|
||||
core.debug('Reserving Cache')
|
||||
const version = utils.getCacheVersion(
|
||||
paths,
|
||||
compressionMethod,
|
||||
enableCrossOsArchive
|
||||
)
|
||||
const request: CreateCacheEntryRequest = {
|
||||
key,
|
||||
version
|
||||
}
|
||||
|
||||
let signedUploadUrl
|
||||
|
||||
try {
|
||||
const response = await twirpClient.CreateCacheEntry(request)
|
||||
if (!response.ok) {
|
||||
if (response.message) {
|
||||
core.warning(`Cache reservation failed: ${response.message}`)
|
||||
}
|
||||
throw new Error(response.message || 'Response was not ok')
|
||||
}
|
||||
signedUploadUrl = response.signedUploadUrl
|
||||
} catch (error) {
|
||||
core.debug(`Failed to reserve cache: ${error}`)
|
||||
throw new ReserveCacheError(
|
||||
`Unable to reserve cache with key ${key}, another job may be creating this cache.`
|
||||
)
|
||||
}
|
||||
|
||||
core.debug(`Attempting to upload cache located at: ${archivePath}`)
|
||||
await cacheHttpClient.saveCache(
|
||||
cacheId,
|
||||
archivePath,
|
||||
signedUploadUrl,
|
||||
options
|
||||
)
|
||||
|
||||
const finalizeRequest: FinalizeCacheEntryUploadRequest = {
|
||||
key,
|
||||
version,
|
||||
sizeBytes: `${archiveFileSize}`
|
||||
}
|
||||
|
||||
const finalizeResponse: FinalizeCacheEntryUploadResponse =
|
||||
await twirpClient.FinalizeCacheEntryUpload(finalizeRequest)
|
||||
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`)
|
||||
|
||||
if (!finalizeResponse.ok) {
|
||||
if (finalizeResponse.message) {
|
||||
throw new FinalizeCacheError(finalizeResponse.message)
|
||||
}
|
||||
throw new Error(
|
||||
`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`
|
||||
)
|
||||
}
|
||||
|
||||
cacheId = parseInt(finalizeResponse.entryId)
|
||||
} catch (error) {
|
||||
const typedError = error as Error
|
||||
if (typedError.name === ValidationError.name) {
|
||||
throw error
|
||||
} else if (typedError.name === ReserveCacheError.name) {
|
||||
core.info(`Failed to save: ${typedError.message}`)
|
||||
} else if (typedError.name === FinalizeCacheError.name) {
|
||||
core.warning(typedError.message)
|
||||
} else {
|
||||
// Log server errors (5xx) as errors, all other errors as warnings
|
||||
if (
|
||||
typedError instanceof HttpClientError &&
|
||||
typeof typedError.statusCode === 'number' &&
|
||||
typedError.statusCode >= 500
|
||||
) {
|
||||
core.error(`Failed to save: ${typedError.message}`)
|
||||
} else {
|
||||
core.warning(`Failed to save: ${typedError.message}`)
|
||||
}
|
||||
core.warning(`Failed to save: ${typedError.message}`)
|
||||
}
|
||||
} finally {
|
||||
// Try to delete the archive to save space
|
||||
|
|
|
|||
|
|
@@ -1,290 +0,0 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "google/protobuf/timestamp.proto" (package "google.protobuf", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { typeofJsonValue } from "@protobuf-ts/runtime";
|
||||
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||
import { PbLong } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* A Timestamp represents a point in time independent of any time zone or local
|
||||
* calendar, encoded as a count of seconds and fractions of seconds at
|
||||
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||
* January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||
* Gregorian calendar backwards to year one.
|
||||
*
|
||||
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||
* second table is needed for interpretation, using a [24-hour linear
|
||||
* smear](https://developers.google.com/time/smear).
|
||||
*
|
||||
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||
* restricting to that range, we ensure that we can convert to and from [RFC
|
||||
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||
*
|
||||
* # Examples
|
||||
*
|
||||
* Example 1: Compute Timestamp from POSIX `time()`.
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(time(NULL));
|
||||
* timestamp.set_nanos(0);
|
||||
*
|
||||
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||
*
|
||||
* struct timeval tv;
|
||||
* gettimeofday(&tv, NULL);
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(tv.tv_sec);
|
||||
* timestamp.set_nanos(tv.tv_usec * 1000);
|
||||
*
|
||||
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||
*
|
||||
* FILETIME ft;
|
||||
* GetSystemTimeAsFileTime(&ft);
|
||||
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||
*
|
||||
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||
*
|
||||
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||
*
|
||||
* long millis = System.currentTimeMillis();
|
||||
*
|
||||
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||
* .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||
*
|
||||
*
|
||||
* Example 5: Compute Timestamp from Java `Instant.now()`.
|
||||
*
|
||||
* Instant now = Instant.now();
|
||||
*
|
||||
* Timestamp timestamp =
|
||||
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
|
||||
* .setNanos(now.getNano()).build();
|
||||
*
|
||||
*
|
||||
* Example 6: Compute Timestamp from current time in Python.
|
||||
*
|
||||
* timestamp = Timestamp()
|
||||
* timestamp.GetCurrentTime()
|
||||
*
|
||||
* # JSON Mapping
|
||||
*
|
||||
* In JSON format, the Timestamp type is encoded as a string in the
|
||||
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||
* where {year} is always expressed using four digits while {month}, {day},
|
||||
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||
* is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||
* able to accept both UTC and other timezones (as indicated by an offset).
|
||||
*
|
||||
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||
* 01:30 UTC on January 15, 2017.
|
||||
*
|
||||
* In JavaScript, one can convert a Date object to this format using the
|
||||
* standard
|
||||
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||
* method. In Python, a standard `datetime.datetime` object can be converted
|
||||
* to this format using
|
||||
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
|
||||
* ) to obtain a formatter capable of generating timestamps in this format.
|
||||
*
|
||||
*
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
export interface Timestamp {
|
||||
/**
|
||||
* Represents seconds of UTC time since Unix epoch
|
||||
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||
* 9999-12-31T23:59:59Z inclusive.
|
||||
*
|
||||
* @generated from protobuf field: int64 seconds = 1;
|
||||
*/
|
||||
seconds: string;
|
||||
/**
|
||||
* Non-negative fractions of a second at nanosecond resolution. Negative
|
||||
* second values with fractions must still have non-negative nanos values
|
||||
* that count forward in time. Must be from 0 to 999,999,999
|
||||
* inclusive.
|
||||
*
|
||||
* @generated from protobuf field: int32 nanos = 2;
|
||||
*/
|
||||
nanos: number;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Timestamp$Type extends MessageType<Timestamp> {
|
||||
constructor() {
|
||||
super("google.protobuf.Timestamp", [
|
||||
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Creates a new `Timestamp` for the current time.
|
||||
*/
|
||||
now(): Timestamp {
|
||||
const msg = this.create();
|
||||
const ms = Date.now();
|
||||
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
|
||||
msg.nanos = (ms % 1000) * 1000000;
|
||||
return msg;
|
||||
}
|
||||
/**
|
||||
* Converts a `Timestamp` to a JavaScript Date.
|
||||
*/
|
||||
toDate(message: Timestamp): Date {
|
||||
return new Date(PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
|
||||
}
|
||||
/**
|
||||
* Converts a JavaScript Date to a `Timestamp`.
|
||||
*/
|
||||
fromDate(date: Date): Timestamp {
|
||||
const msg = this.create();
|
||||
const ms = date.getTime();
|
||||
msg.seconds = PbLong.from(Math.floor(ms / 1000)).toString();
|
||||
msg.nanos = (ms % 1000) * 1000000;
|
||||
return msg;
|
||||
}
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue {
|
||||
let ms = PbLong.from(message.seconds).toNumber() * 1000;
|
||||
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||
if (message.nanos < 0)
|
||||
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
|
||||
let z = "Z";
|
||||
if (message.nanos > 0) {
|
||||
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
|
||||
if (nanosStr.substring(3) === "000000")
|
||||
z = "." + nanosStr.substring(0, 3) + "Z";
|
||||
else if (nanosStr.substring(6) === "000")
|
||||
z = "." + nanosStr.substring(0, 6) + "Z";
|
||||
else
|
||||
z = "." + nanosStr + "Z";
|
||||
}
|
||||
return new Date(ms).toISOString().replace(".000Z", z);
|
||||
}
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp {
|
||||
if (typeof json !== "string")
|
||||
throw new Error("Unable to parse Timestamp from JSON " + typeofJsonValue(json) + ".");
|
||||
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
|
||||
if (!matches)
|
||||
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
|
||||
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
|
||||
if (Number.isNaN(ms))
|
||||
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
|
||||
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.seconds = PbLong.from(ms / 1000).toString();
|
||||
target.nanos = 0;
|
||||
if (matches[7])
|
||||
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<Timestamp>): Timestamp {
|
||||
const message = { seconds: "0", nanos: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<Timestamp>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 seconds */ 1:
|
||||
message.seconds = reader.int64().toString();
|
||||
break;
|
||||
case /* int32 nanos */ 2:
|
||||
message.nanos = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int64 seconds = 1; */
|
||||
if (message.seconds !== "0")
|
||||
writer.tag(1, WireType.Varint).int64(message.seconds);
|
||||
/* int32 nanos = 2; */
|
||||
if (message.nanos !== 0)
|
||||
writer.tag(2, WireType.Varint).int32(message.nanos);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
export const Timestamp = new Timestamp$Type();
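As a quick, hedged illustration of the generated helpers defined above (now, fromDate, toDate), and of the long_type_string option that maps the int64 seconds field to a string, a small sketch follows; it is not part of this diff and the relative import path is an assumption.

// Illustrative sketch, not part of this diff; the import path is hypothetical.
import { Timestamp } from './timestamp'

const ts = Timestamp.now()               // { seconds: "<epoch seconds>", nanos: <0..999999999> }
console.log(typeof ts.seconds)           // "string" — int64 is generated as string (long_type_string)

const asDate = Timestamp.toDate(ts)      // convert back to a JavaScript Date
const roundTrip = Timestamp.fromDate(asDate)
console.log(asDate.toISOString(), roundTrip.seconds)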
|
||||
|
|
@@ -1,753 +0,0 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
//
|
||||
// Wrappers for primitive (non-message) types. These types are useful
|
||||
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||
// where we need to distinguish between the absence of a primitive
|
||||
// typed field and its default value.
|
||||
//
|
||||
// These wrappers have no meaningful use within repeated fields as they lack
|
||||
// the ability to detect presence on individual elements.
|
||||
// These wrappers have no meaningful use within a map or a oneof since
|
||||
// individual entries of a map or fields of a oneof can already detect presence.
|
||||
//
|
||||
import { ScalarType } from "@protobuf-ts/runtime";
|
||||
import { LongType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* Wrapper message for `double`.
|
||||
*
|
||||
* The JSON representation for `DoubleValue` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
export interface DoubleValue {
|
||||
/**
|
||||
* The double value.
|
||||
*
|
||||
* @generated from protobuf field: double value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `float`.
|
||||
*
|
||||
* The JSON representation for `FloatValue` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
export interface FloatValue {
|
||||
/**
|
||||
* The float value.
|
||||
*
|
||||
* @generated from protobuf field: float value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `int64`.
|
||||
*
|
||||
* The JSON representation for `Int64Value` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
export interface Int64Value {
|
||||
/**
|
||||
* The int64 value.
|
||||
*
|
||||
* @generated from protobuf field: int64 value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `uint64`.
|
||||
*
|
||||
* The JSON representation for `UInt64Value` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
export interface UInt64Value {
|
||||
/**
|
||||
* The uint64 value.
|
||||
*
|
||||
* @generated from protobuf field: uint64 value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `int32`.
|
||||
*
|
||||
* The JSON representation for `Int32Value` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
export interface Int32Value {
|
||||
/**
|
||||
* The int32 value.
|
||||
*
|
||||
* @generated from protobuf field: int32 value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `uint32`.
|
||||
*
|
||||
* The JSON representation for `UInt32Value` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
export interface UInt32Value {
|
||||
/**
|
||||
* The uint32 value.
|
||||
*
|
||||
* @generated from protobuf field: uint32 value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `bool`.
|
||||
*
|
||||
* The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
export interface BoolValue {
|
||||
/**
|
||||
* The bool value.
|
||||
*
|
||||
* @generated from protobuf field: bool value = 1;
|
||||
*/
|
||||
value: boolean;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `string`.
|
||||
*
|
||||
* The JSON representation for `StringValue` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
export interface StringValue {
|
||||
/**
|
||||
* The string value.
|
||||
*
|
||||
* @generated from protobuf field: string value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `bytes`.
|
||||
*
|
||||
* The JSON representation for `BytesValue` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
export interface BytesValue {
|
||||
/**
|
||||
* The bytes value.
|
||||
*
|
||||
* @generated from protobuf field: bytes value = 1;
|
||||
*/
|
||||
value: Uint8Array;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DoubleValue$Type extends MessageType<DoubleValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.DoubleValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `DoubleValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `DoubleValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<DoubleValue>): DoubleValue {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<DoubleValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* double value */ 1:
|
||||
message.value = reader.double();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* double value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Bit64).double(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
export const DoubleValue = new DoubleValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FloatValue$Type extends MessageType<FloatValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.FloatValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `FloatValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `FloatValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<FloatValue>): FloatValue {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FloatValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* float value */ 1:
|
||||
message.value = reader.float();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* float value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Bit32).float(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
export const FloatValue = new FloatValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int64Value$Type extends MessageType<Int64Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.Int64Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(ScalarType.INT64, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, ScalarType.INT64, LongType.STRING, "value") as any;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<Int64Value>): Int64Value {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<Int64Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 value */ 1:
|
||||
message.value = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, WireType.Varint).int64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
export const Int64Value = new Int64Value$Type();
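A short, hedged sketch of the wrapper types in use; it is not part of this diff, and the import path is an assumption. The point of interest is that 64-bit fields are decimal strings in the generated TypeScript because of the long_type_string parameter.

// Illustrative sketch, not part of this diff; the import path is hypothetical.
import { Int64Value } from './wrappers'

// 2^53 + 1 would lose precision as a JavaScript number; as a string it stays exact.
const wrapped = Int64Value.create({ value: '9007199254740993' })
console.log(wrapped.value)        // "9007199254740993"
console.log(typeof wrapped.value) // "string"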
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt64Value$Type extends MessageType<UInt64Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt64Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(ScalarType.UINT64, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, ScalarType.UINT64, LongType.STRING, "value") as any;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<UInt64Value>): UInt64Value {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<UInt64Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint64 value */ 1:
|
||||
message.value = reader.uint64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* uint64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, WireType.Varint).uint64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
export const UInt64Value = new UInt64Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int32Value$Type extends MessageType<Int32Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.Int32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int32Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int32Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 5, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<Int32Value>): Int32Value {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<Int32Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int32 value */ 1:
|
||||
message.value = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Varint).int32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
export const Int32Value = new Int32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt32Value$Type extends MessageType<UInt32Value> {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt32Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt32Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 13, undefined, "value") as number;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<UInt32Value>): UInt32Value {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<UInt32Value>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint32 value */ 1:
|
||||
message.value = reader.uint32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* uint32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, WireType.Varint).uint32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
export const UInt32Value = new UInt32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BoolValue$Type extends MessageType<BoolValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.BoolValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BoolValue` to JSON bool.
|
||||
*/
|
||||
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `BoolValue` from JSON bool.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 8, undefined, "value") as boolean;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<BoolValue>): BoolValue {
|
||||
const message = { value: false };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<BoolValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool value */ 1:
|
||||
message.value = reader.bool();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool value = 1; */
|
||||
if (message.value !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
export const BoolValue = new BoolValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class StringValue$Type extends MessageType<StringValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.StringValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `StringValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `StringValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 9, undefined, "value") as string;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<StringValue>): StringValue {
|
||||
const message = { value: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<StringValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string value */ 1:
|
||||
message.value = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string value = 1; */
|
||||
if (message.value !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
export const StringValue = new StringValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BytesValue$Type extends MessageType<BytesValue> {
|
||||
constructor() {
|
||||
super("google.protobuf.BytesValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BytesValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue {
|
||||
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `BytesValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 12, undefined, "value") as Uint8Array;
|
||||
return target;
|
||||
}
|
||||
create(value?: PartialMessage<BytesValue>): BytesValue {
|
||||
const message = { value: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<BytesValue>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes value */ 1:
|
||||
message.value = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bytes value = 1; */
|
||||
if (message.value.length)
|
||||
writer.tag(1, WireType.LengthDelimited).bytes(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
export const BytesValue = new BytesValue$Type();
|
||||
|
|
@@ -1,547 +0,0 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { CacheMetadata } from "../../entities/v1/cachemetadata";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
|
||||
*/
|
||||
export interface CreateCacheEntryRequest {
|
||||
/**
|
||||
* Scope and other metadata for the cache entry
|
||||
*
|
||||
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
|
||||
*/
|
||||
metadata?: CacheMetadata;
|
||||
/**
|
||||
* An explicit key for a cache entry
|
||||
*
|
||||
* @generated from protobuf field: string key = 2;
|
||||
*/
|
||||
key: string;
|
||||
/**
|
||||
* Hash of the compression tool, runner OS and paths cached
|
||||
*
|
||||
* @generated from protobuf field: string version = 3;
|
||||
*/
|
||||
version: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
|
||||
*/
|
||||
export interface CreateCacheEntryResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* SAS URL to upload the cache archive
|
||||
*
|
||||
* @generated from protobuf field: string signed_upload_url = 2;
|
||||
*/
|
||||
signedUploadUrl: string;
|
||||
/**
|
||||
* When !ok, this field may contain a human-readable error message used to create an annotation
|
||||
*
|
||||
* @generated from protobuf field: string message = 3;
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
|
||||
*/
|
||||
export interface FinalizeCacheEntryUploadRequest {
|
||||
/**
|
||||
* Scope and other metadata for the cache entry
|
||||
*
|
||||
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
|
||||
*/
|
||||
metadata?: CacheMetadata;
|
||||
/**
|
||||
* An explicit key for a cache entry
|
||||
*
|
||||
* @generated from protobuf field: string key = 2;
|
||||
*/
|
||||
key: string;
|
||||
/**
|
||||
* Size of the cache archive in Bytes
|
||||
*
|
||||
* @generated from protobuf field: int64 size_bytes = 3;
|
||||
*/
|
||||
sizeBytes: string;
|
||||
/**
|
||||
* Hash of the compression tool, runner OS and paths cached
|
||||
*
|
||||
* @generated from protobuf field: string version = 4;
|
||||
*/
|
||||
version: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
|
||||
*/
|
||||
export interface FinalizeCacheEntryUploadResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* Cache entry database ID
|
||||
*
|
||||
* @generated from protobuf field: int64 entry_id = 2;
|
||||
*/
|
||||
entryId: string;
|
||||
/**
|
||||
* When !ok, this field may contain a human-readable error message used to create an annotation
|
||||
*
|
||||
* @generated from protobuf field: string message = 3;
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
|
||||
*/
|
||||
export interface GetCacheEntryDownloadURLRequest {
|
||||
/**
|
||||
* Scope and other metadata for the cache entry
|
||||
*
|
||||
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
|
||||
*/
|
||||
metadata?: CacheMetadata;
|
||||
/**
|
||||
* An explicit key for a cache entry
|
||||
*
|
||||
* @generated from protobuf field: string key = 2;
|
||||
*/
|
||||
key: string;
|
||||
/**
|
||||
* Restore keys used for prefix searching
|
||||
*
|
||||
* @generated from protobuf field: repeated string restore_keys = 3;
|
||||
*/
|
||||
restoreKeys: string[];
|
||||
/**
|
||||
* Hash of the compression tool, runner OS and paths cached
|
||||
*
|
||||
* @generated from protobuf field: string version = 4;
|
||||
*/
|
||||
version: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
|
||||
*/
|
||||
export interface GetCacheEntryDownloadURLResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* SAS URL to download the cache archive
|
||||
*
|
||||
* @generated from protobuf field: string signed_download_url = 2;
|
||||
*/
|
||||
signedDownloadUrl: string;
|
||||
/**
|
||||
* Key or restore key that matches the lookup
|
||||
*
|
||||
* @generated from protobuf field: string matched_key = 3;
|
||||
*/
|
||||
matchedKey: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
|
||||
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
|
||||
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest {
|
||||
const message = { key: "", version: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<CreateCacheEntryRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
|
||||
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
|
||||
break;
|
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* string version */ 3:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.key);
|
||||
/* string version = 3; */
|
||||
if (message.version !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
|
||||
*/
|
||||
export const CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse {
|
||||
const message = { ok: false, signedUploadUrl: "", message: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<CreateCacheEntryResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_upload_url */ 2:
|
||||
message.signedUploadUrl = reader.string();
|
||||
break;
|
||||
case /* string message */ 3:
|
||||
message.message = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* string signed_upload_url = 2; */
|
||||
if (message.signedUploadUrl !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||
/* string message = 3; */
|
||||
if (message.message !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.message);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
|
||||
*/
|
||||
export const CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
|
||||
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
|
||||
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest {
|
||||
const message = { key: "", sizeBytes: "0", version: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeCacheEntryUploadRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
|
||||
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
|
||||
break;
|
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* int64 size_bytes */ 3:
|
||||
message.sizeBytes = reader.int64().toString();
|
||||
break;
|
||||
case /* string version */ 4:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.key);
|
||||
/* int64 size_bytes = 3; */
|
||||
if (message.sizeBytes !== "0")
|
||||
writer.tag(3, WireType.Varint).int64(message.sizeBytes);
|
||||
/* string version = 4; */
|
||||
if (message.version !== "")
|
||||
writer.tag(4, WireType.LengthDelimited).string(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
|
||||
*/
|
||||
export const FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse {
|
||||
const message = { ok: false, entryId: "0", message: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeCacheEntryUploadResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 entry_id */ 2:
|
||||
message.entryId = reader.int64().toString();
|
||||
break;
|
||||
case /* string message */ 3:
|
||||
message.message = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* int64 entry_id = 2; */
|
||||
if (message.entryId !== "0")
|
||||
writer.tag(2, WireType.Varint).int64(message.entryId);
|
||||
/* string message = 3; */
|
||||
if (message.message !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.message);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
|
||||
*/
|
||||
export const FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
|
||||
{ no: 1, name: "metadata", kind: "message", T: () => CacheMetadata },
|
||||
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest {
|
||||
const message = { key: "", restoreKeys: [], version: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<GetCacheEntryDownloadURLRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
|
||||
message.metadata = CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
|
||||
break;
|
||||
case /* string key */ 2:
|
||||
message.key = reader.string();
|
||||
break;
|
||||
case /* repeated string restore_keys */ 3:
|
||||
message.restoreKeys.push(reader.string());
|
||||
break;
|
||||
case /* string version */ 4:
|
||||
message.version = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
|
||||
if (message.metadata)
|
||||
CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join();
|
||||
/* string key = 2; */
|
||||
if (message.key !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.key);
|
||||
/* repeated string restore_keys = 3; */
|
||||
for (let i = 0; i < message.restoreKeys.length; i++)
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.restoreKeys[i]);
|
||||
/* string version = 4; */
|
||||
if (message.version !== "")
|
||||
writer.tag(4, WireType.LengthDelimited).string(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
|
||||
*/
|
||||
export const GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse {
|
||||
const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<GetCacheEntryDownloadURLResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_download_url */ 2:
|
||||
message.signedDownloadUrl = reader.string();
|
||||
break;
|
||||
case /* string matched_key */ 3:
|
||||
message.matchedKey = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* string signed_download_url = 2; */
|
||||
if (message.signedDownloadUrl !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.signedDownloadUrl);
|
||||
/* string matched_key = 3; */
|
||||
if (message.matchedKey !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.matchedKey);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
|
||||
*/
|
||||
export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
|
||||
*/
|
||||
export const CacheService = new ServiceType("github.actions.results.api.v1.CacheService", [
|
||||
{ name: "CreateCacheEntry", options: {}, I: CreateCacheEntryRequest, O: CreateCacheEntryResponse },
|
||||
{ name: "FinalizeCacheEntryUpload", options: {}, I: FinalizeCacheEntryUploadRequest, O: FinalizeCacheEntryUploadResponse },
|
||||
{ name: "GetCacheEntryDownloadURL", options: {}, I: GetCacheEntryDownloadURLRequest, O: GetCacheEntryDownloadURLResponse }
|
||||
]);
|
||||
|
|
@ -1,157 +0,0 @@
|
|||
import {
|
||||
CreateCacheEntryRequest,
|
||||
CreateCacheEntryResponse,
|
||||
FinalizeCacheEntryUploadRequest,
|
||||
FinalizeCacheEntryUploadResponse,
|
||||
GetCacheEntryDownloadURLRequest,
|
||||
GetCacheEntryDownloadURLResponse,
|
||||
} from "./cache";
|
||||
|
||||
//==================================//
|
||||
// Client Code //
|
||||
//==================================//
|
||||
|
||||
interface Rpc {
|
||||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: "application/json" | "application/protobuf",
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>;
|
||||
}
|
||||
|
||||
export interface CacheServiceClient {
|
||||
CreateCacheEntry(
|
||||
request: CreateCacheEntryRequest
|
||||
): Promise<CreateCacheEntryResponse>;
|
||||
FinalizeCacheEntryUpload(
|
||||
request: FinalizeCacheEntryUploadRequest
|
||||
): Promise<FinalizeCacheEntryUploadResponse>;
|
||||
GetCacheEntryDownloadURL(
|
||||
request: GetCacheEntryDownloadURLRequest
|
||||
): Promise<GetCacheEntryDownloadURLResponse>;
|
||||
}
|
||||
|
||||
export class CacheServiceClientJSON implements CacheServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateCacheEntry.bind(this);
|
||||
this.FinalizeCacheEntryUpload.bind(this);
|
||||
this.GetCacheEntryDownloadURL.bind(this);
|
||||
}
|
||||
CreateCacheEntry(
|
||||
request: CreateCacheEntryRequest
|
||||
): Promise<CreateCacheEntryResponse> {
|
||||
const data = CreateCacheEntryRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.CacheService",
|
||||
"CreateCacheEntry",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateCacheEntryResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeCacheEntryUpload(
|
||||
request: FinalizeCacheEntryUploadRequest
|
||||
): Promise<FinalizeCacheEntryUploadResponse> {
|
||||
const data = FinalizeCacheEntryUploadRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.CacheService",
|
||||
"FinalizeCacheEntryUpload",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeCacheEntryUploadResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
GetCacheEntryDownloadURL(
|
||||
request: GetCacheEntryDownloadURLRequest
|
||||
): Promise<GetCacheEntryDownloadURLResponse> {
|
||||
const data = GetCacheEntryDownloadURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.CacheService",
|
||||
"GetCacheEntryDownloadURL",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetCacheEntryDownloadURLResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class CacheServiceClientProtobuf implements CacheServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateCacheEntry.bind(this);
|
||||
this.FinalizeCacheEntryUpload.bind(this);
|
||||
this.GetCacheEntryDownloadURL.bind(this);
|
||||
}
|
||||
CreateCacheEntry(
|
||||
request: CreateCacheEntryRequest
|
||||
): Promise<CreateCacheEntryResponse> {
|
||||
const data = CreateCacheEntryRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.CacheService",
|
||||
"CreateCacheEntry",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateCacheEntryResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeCacheEntryUpload(
|
||||
request: FinalizeCacheEntryUploadRequest
|
||||
): Promise<FinalizeCacheEntryUploadResponse> {
|
||||
const data = FinalizeCacheEntryUploadRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.CacheService",
|
||||
"FinalizeCacheEntryUpload",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeCacheEntryUploadResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
GetCacheEntryDownloadURL(
|
||||
request: GetCacheEntryDownloadURLRequest
|
||||
): Promise<GetCacheEntryDownloadURLResponse> {
|
||||
const data = GetCacheEntryDownloadURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.CacheService",
|
||||
"GetCacheEntryDownloadURL",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetCacheEntryDownloadURLResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/entities/v1/cachemetadata.proto" (package "github.actions.results.entities.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { CacheScope } from "./cachescope";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
|
||||
*/
|
||||
export interface CacheMetadata {
|
||||
/**
|
||||
* Backend repository id
|
||||
*
|
||||
* @generated from protobuf field: int64 repository_id = 1;
|
||||
*/
|
||||
repositoryId: string;
|
||||
/**
|
||||
* Scopes for the cache entry
|
||||
*
|
||||
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
|
||||
*/
|
||||
scope: CacheScope[];
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CacheMetadata$Type extends MessageType<CacheMetadata> {
|
||||
constructor() {
|
||||
super("github.actions.results.entities.v1.CacheMetadata", [
|
||||
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheScope }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<CacheMetadata>): CacheMetadata {
|
||||
const message = { repositoryId: "0", scope: [] };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<CacheMetadata>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 repository_id */ 1:
|
||||
message.repositoryId = reader.int64().toString();
|
||||
break;
|
||||
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
|
||||
message.scope.push(CacheScope.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* int64 repository_id = 1; */
|
||||
if (message.repositoryId !== "0")
|
||||
writer.tag(1, WireType.Varint).int64(message.repositoryId);
|
||||
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
|
||||
for (let i = 0; i < message.scope.length; i++)
|
||||
CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
|
||||
*/
|
||||
export const CacheMetadata = new CacheMetadata$Type();
|
||||
|
|
@ -1,84 +0,0 @@
|
|||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/entities/v1/cachescope.proto" (package "github.actions.results.entities.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.entities.v1.CacheScope
|
||||
*/
|
||||
export interface CacheScope {
|
||||
/**
|
||||
* Determines the scope of the cache entry
|
||||
*
|
||||
* @generated from protobuf field: string scope = 1;
|
||||
*/
|
||||
scope: string;
|
||||
/**
|
||||
* None: 0 | Read: 1 | Write: 2 | All: (1|2)
|
||||
*
|
||||
* @generated from protobuf field: int64 permission = 2;
|
||||
*/
|
||||
permission: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CacheScope$Type extends MessageType<CacheScope> {
|
||||
constructor() {
|
||||
super("github.actions.results.entities.v1.CacheScope", [
|
||||
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<CacheScope>): CacheScope {
|
||||
const message = { scope: "", permission: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<CacheScope>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string scope */ 1:
|
||||
message.scope = reader.string();
|
||||
break;
|
||||
case /* int64 permission */ 2:
|
||||
message.permission = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string scope = 1; */
|
||||
if (message.scope !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.scope);
|
||||
/* int64 permission = 2; */
|
||||
if (message.permission !== "0")
|
||||
writer.tag(2, WireType.Varint).int64(message.permission);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
|
||||
*/
|
||||
export const CacheScope = new CacheScope$Type();
|
||||
|
|
@@ -5,10 +5,12 @@ import {
  RequestOptions,
  TypedResponse
} from '@actions/http-client/lib/interfaces'
import * as crypto from 'crypto'
import * as fs from 'fs'
import {URL} from 'url'

import * as utils from './cacheUtils'
import {uploadCacheArchiveSDK} from './uploadUtils'
import {CompressionMethod} from './constants'
import {
  ArtifactCacheEntry,
  InternalCacheOptions,
@@ -34,11 +36,11 @@ import {
  retryHttpClientResponse,
  retryTypedResponse
} from './requestUtils'
import {getCacheServiceURL} from './config'
import {getUserAgentString} from './shared/user-agent'

const versionSalt = '1.0'

function getCacheApiUrl(resource: string): string {
  const baseUrl: string = getCacheServiceURL()
  const baseUrl: string = process.env['ACTIONS_CACHE_URL'] || ''
  if (!baseUrl) {
    throw new Error('Cache Service Url not found, unable to restore cache.')
  }
@@ -67,24 +69,48 @@ function createHttpClient(): HttpClient {
  const bearerCredentialHandler = new BearerCredentialHandler(token)

  return new HttpClient(
    getUserAgentString(),
    'actions/cache',
    [bearerCredentialHandler],
    getRequestOptions()
  )
}

export function getCacheVersion(
  paths: string[],
  compressionMethod?: CompressionMethod,
  enableCrossOsArchive = false
): string {
  // don't pass changes upstream
  const components = paths.slice()

  // Add compression method to cache version to restore
  // compressed cache as per compression method
  if (compressionMethod) {
    components.push(compressionMethod)
  }

  // Only check for windows platforms if enableCrossOsArchive is false
  if (process.platform === 'win32' && !enableCrossOsArchive) {
    components.push('windows-only')
  }

  // Add salt to cache version to support breaking changes in cache entry
  components.push(versionSalt)

  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}

export async function getCacheEntry(
  keys: string[],
  paths: string[],
  options?: InternalCacheOptions
): Promise<ArtifactCacheEntry | null> {
  const httpClient = createHttpClient()
  const version = utils.getCacheVersion(
  const version = getCacheVersion(
    paths,
    options?.compressionMethod,
    options?.enableCrossOsArchive
  )

  const resource = `cache?keys=${encodeURIComponent(
    keys.join(',')
  )}&version=${version}`
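For reference, the cache version computed above is just a SHA-256 digest of the joined components. A minimal sketch of the same derivation, with illustrative inputs (the path and compression method below are placeholders, not values taken from this diff):

```typescript
import * as crypto from 'crypto'

// Mirrors getCacheVersion above: cached paths, optional compression method and
// the version salt are joined with '|' and hashed. 'windows-only' would only be
// appended on win32 when cross-OS archives are disabled.
const components = ['node_modules', 'zstd-without-long', '1.0'] // illustrative
const version = crypto
  .createHash('sha256')
  .update(components.join('|'))
  .digest('hex')
console.log(version) // 64-character hex string used as the cache version
```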
@@ -181,7 +207,7 @@ export async function reserveCache(
  options?: InternalCacheOptions
): Promise<ITypedResponseWithError<ReserveCacheResponse>> {
  const httpClient = createHttpClient()
  const version = utils.getCacheVersion(
  const version = getCacheVersion(
    paths,
    options?.compressionMethod,
    options?.enableCrossOsArchive
@@ -327,45 +353,26 @@ async function commitCache(
export async function saveCache(
  cacheId: number,
  archivePath: string,
  signedUploadURL?: string,
  options?: UploadOptions
): Promise<void> {
  const uploadOptions = getUploadOptions(options)
  const httpClient = createHttpClient()

  if (uploadOptions.useAzureSdk) {
    // Use Azure storage SDK to upload caches directly to Azure
    if (!signedUploadURL) {
      throw new Error(
        'Azure Storage SDK can only be used when a signed URL is provided.'
      )
    }
    await uploadCacheArchiveSDK(signedUploadURL, archivePath, options)
  } else {
    const httpClient = createHttpClient()
    core.debug('Upload cache')
    await uploadFile(httpClient, cacheId, archivePath, options)

  core.debug('Upload cache')
  await uploadFile(httpClient, cacheId, archivePath, options)
    // Commit Cache
    core.debug('Commiting cache')
    const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
    core.info(
      `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`
    )

  // Commit Cache
  core.debug('Commiting cache')
  const cacheSize = utils.getArchiveFileSizeInBytes(archivePath)
  core.info(
    `Cache Size: ~${Math.round(
      cacheSize / (1024 * 1024)
    )} MB (${cacheSize} B)`
    const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize)
    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
      throw new Error(
        `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
      )

  const commitCacheResponse = await commitCache(
    httpClient,
    cacheId,
    cacheSize
  )
  if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
    throw new Error(
      `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
    )
  }

    core.info('Cache saved successfully')
  }

  core.info('Cache saved successfully')
}
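For reference, a minimal sketch of how the two upload paths in saveCache above could be exercised; the cache id, archive path and signed URL are placeholders, and it assumes saveCache plus an UploadOptions type with the useAzureSdk flag (as used in the hunk above) are in scope:

```typescript
// Hypothetical caller; all literal values below are placeholders.
async function uploadBothWays(): Promise<void> {
  // Azure SDK path: only valid when the cache service returned a signed upload URL.
  await saveCache(
    42,
    '/tmp/cache.tzst',
    'https://<account>.blob.core.windows.net/cache?sig=<sas>',
    {useAzureSdk: true}
  )

  // Classic path: chunked upload via the cache HTTP API, then the commit call shown above.
  await saveCache(42, '/tmp/cache.tzst')
}
```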
@@ -13,8 +6,6 @@ import {
  GnuTarPathOnWindows
} from './constants'

const versionSalt = '1.0'

// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
export async function createTempDirectory(): Promise<string> {
  const IS_WINDOWS = process.platform === 'win32'
@@ -133,35 +131,15 @@ export function assertDefined<T>(name: string, value?: T): T {
  return value
}

export function getCacheVersion(
  paths: string[],
  compressionMethod?: CompressionMethod,
  enableCrossOsArchive = false
): string {
  // don't pass changes upstream
  const components = paths.slice()
export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )

  // Add compression method to cache version to restore
  // compressed cache as per compression method
  if (compressionMethod) {
    components.push(compressionMethod)
  }
  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost =
    hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST')

  // Only check for windows platforms if enableCrossOsArchive is false
  if (process.platform === 'win32' && !enableCrossOsArchive) {
    components.push('windows-only')
  }

  // Add salt to cache version to support breaking changes in cache entry
  components.push(versionSalt)

  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}

export function getRuntimeToken(): string {
  const token = process.env['ACTIONS_RUNTIME_TOKEN']
  if (!token) {
    throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable')
  }
  return token
  return !isGitHubHost && !isGheHost
}
@@ -1,39 +0,0 @@
export function isGhes(): boolean {
  const ghUrl = new URL(
    process.env['GITHUB_SERVER_URL'] || 'https://github.com'
  )

  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')

  return !isGitHubHost && !isGheHost && !isLocalHost
}

export function getCacheServiceVersion(): string {
  // Cache service v2 is not supported on GHES. We will default to
  // cache service v1 even if the feature flag was enabled by user.
  if (isGhes()) return 'v1'

  return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'
}

export function getCacheServiceURL(): string {
  const version = getCacheServiceVersion()

  // Based on the version of the cache service, we will determine which
  // URL to use.
  switch (version) {
    case 'v1':
      return (
        process.env['ACTIONS_CACHE_URL'] ||
        process.env['ACTIONS_RESULTS_URL'] ||
        ''
      )
    case 'v2':
      return process.env['ACTIONS_RESULTS_URL'] || ''
    default:
      throw new Error(`Unsupported cache service version: ${version}`)
  }
}
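For reference, a small sketch of how the removed config helpers above resolve the cache backend, assuming those helpers are in scope and using illustrative environment values:

```typescript
// Illustrative runner environment; these values are placeholders.
process.env['GITHUB_SERVER_URL'] = 'https://github.com'       // not GHES, so v2 is allowed
process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'               // feature flag for cache service v2
process.env['ACTIONS_RESULTS_URL'] = 'https://results.example' // placeholder URL

getCacheServiceVersion() // 'v2'
getCacheServiceURL()     // 'https://results.example' (v2 always uses ACTIONS_RESULTS_URL)
```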
@@ -36,5 +36,3 @@ export const SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\S
export const TarFilename = 'cache.tar'

export const ManifestFilename = 'manifest.txt'

export const CacheFileSizeLimit = 10 * Math.pow(1024, 3) // 10GiB per repository
@ -1,206 +0,0 @@
|
|||
import {info, debug} from '@actions/core'
|
||||
import {getUserAgentString} from './user-agent'
|
||||
import {NetworkError, UsageError} from './errors'
|
||||
import {getCacheServiceURL} from '../config'
|
||||
import {getRuntimeToken} from '../cacheUtils'
|
||||
import {BearerCredentialHandler} from '@actions/http-client/lib/auth'
|
||||
import {HttpClient, HttpClientResponse, HttpCodes} from '@actions/http-client'
|
||||
import {CacheServiceClientJSON} from '../../generated/results/api/v1/cache.twirp-client'
|
||||
import {maskSecretUrls} from './util'
|
||||
|
||||
// The twirp http client must implement this interface
|
||||
interface Rpc {
|
||||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: 'application/json' | 'application/protobuf',
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>
|
||||
}
|
||||
|
||||
/**
|
||||
* This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
|
||||
*
|
||||
* It adds retry logic to the request method, which is not present in the generated client.
|
||||
*
|
||||
* This class is used to interact with cache service v2.
|
||||
*/
|
||||
class CacheServiceClient implements Rpc {
|
||||
private httpClient: HttpClient
|
||||
private baseUrl: string
|
||||
private maxAttempts = 5
|
||||
private baseRetryIntervalMilliseconds = 3000
|
||||
private retryMultiplier = 1.5
|
||||
|
||||
constructor(
|
||||
userAgent: string,
|
||||
maxAttempts?: number,
|
||||
baseRetryIntervalMilliseconds?: number,
|
||||
retryMultiplier?: number
|
||||
) {
|
||||
const token = getRuntimeToken()
|
||||
this.baseUrl = getCacheServiceURL()
|
||||
if (maxAttempts) {
|
||||
this.maxAttempts = maxAttempts
|
||||
}
|
||||
if (baseRetryIntervalMilliseconds) {
|
||||
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds
|
||||
}
|
||||
if (retryMultiplier) {
|
||||
this.retryMultiplier = retryMultiplier
|
||||
}
|
||||
|
||||
this.httpClient = new HttpClient(userAgent, [
|
||||
new BearerCredentialHandler(token)
|
||||
])
|
||||
}
|
||||
|
||||
// This function satisfies the Rpc interface. It is compatible with the JSON
|
||||
// JSON generated client.
|
||||
async request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: 'application/json' | 'application/protobuf',
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array> {
|
||||
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href
|
||||
debug(`[Request] ${method} ${url}`)
|
||||
const headers = {
|
||||
'Content-Type': contentType
|
||||
}
|
||||
try {
|
||||
const {body} = await this.retryableRequest(async () =>
|
||||
this.httpClient.post(url, JSON.stringify(data), headers)
|
||||
)
|
||||
|
||||
return body
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to ${method}: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
async retryableRequest(
|
||||
operation: () => Promise<HttpClientResponse>
|
||||
): Promise<{response: HttpClientResponse; body: object}> {
|
||||
let attempt = 0
|
||||
let errorMessage = ''
|
||||
let rawBody = ''
|
||||
while (attempt < this.maxAttempts) {
|
||||
let isRetryable = false
|
||||
|
||||
try {
|
||||
const response = await operation()
|
||||
const statusCode = response.message.statusCode
|
||||
rawBody = await response.readBody()
|
||||
debug(`[Response] - ${response.message.statusCode}`)
|
||||
debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
|
||||
const body = JSON.parse(rawBody)
|
||||
maskSecretUrls(body)
|
||||
debug(`Body: ${JSON.stringify(body, null, 2)}`)
|
||||
if (this.isSuccessStatusCode(statusCode)) {
|
||||
return {response, body}
|
||||
}
|
||||
isRetryable = this.isRetryableHttpStatusCode(statusCode)
|
||||
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`
|
||||
if (body.msg) {
|
||||
if (UsageError.isUsageErrorMessage(body.msg)) {
|
||||
throw new UsageError()
|
||||
}
|
||||
|
||||
errorMessage = `${errorMessage}: ${body.msg}`
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
debug(`Raw Body: ${rawBody}`)
|
||||
}
|
||||
|
||||
if (error instanceof UsageError) {
|
||||
throw error
|
||||
}
|
||||
|
||||
if (NetworkError.isNetworkErrorCode(error?.code)) {
|
||||
throw new NetworkError(error?.code)
|
||||
}
|
||||
|
||||
isRetryable = true
|
||||
errorMessage = error.message
|
||||
}
|
||||
|
||||
if (!isRetryable) {
|
||||
throw new Error(`Received non-retryable error: ${errorMessage}`)
|
||||
}
|
||||
|
||||
if (attempt + 1 === this.maxAttempts) {
|
||||
throw new Error(
|
||||
`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`
|
||||
)
|
||||
}
|
||||
|
||||
const retryTimeMilliseconds =
|
||||
this.getExponentialRetryTimeMilliseconds(attempt)
|
||||
info(
|
||||
`Attempt ${attempt + 1} of ${
|
||||
this.maxAttempts
|
||||
} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`
|
||||
)
|
||||
await this.sleep(retryTimeMilliseconds)
|
||||
attempt++
|
||||
}
|
||||
|
||||
throw new Error(`Request failed`)
|
||||
}
|
||||
|
||||
isSuccessStatusCode(statusCode?: number): boolean {
|
||||
if (!statusCode) return false
|
||||
return statusCode >= 200 && statusCode < 300
|
||||
}
|
||||
|
||||
isRetryableHttpStatusCode(statusCode?: number): boolean {
|
||||
if (!statusCode) return false
|
||||
|
||||
const retryableStatusCodes = [
|
||||
HttpCodes.BadGateway,
|
||||
HttpCodes.GatewayTimeout,
|
||||
HttpCodes.InternalServerError,
|
||||
HttpCodes.ServiceUnavailable,
|
||||
HttpCodes.TooManyRequests
|
||||
]
|
||||
|
||||
return retryableStatusCodes.includes(statusCode)
|
||||
}
|
||||
|
||||
async sleep(milliseconds: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, milliseconds))
|
||||
}
|
||||
|
||||
getExponentialRetryTimeMilliseconds(attempt: number): number {
|
||||
if (attempt < 0) {
|
||||
throw new Error('attempt should be a positive integer')
|
||||
}
|
||||
|
||||
if (attempt === 0) {
|
||||
return this.baseRetryIntervalMilliseconds
|
||||
}
|
||||
|
||||
const minTime =
|
||||
this.baseRetryIntervalMilliseconds * this.retryMultiplier ** attempt
|
||||
const maxTime = minTime * this.retryMultiplier
|
||||
|
||||
// returns a random number between minTime and maxTime (exclusive)
|
||||
return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
|
||||
}
|
||||
}
|
||||
|
||||
export function internalCacheTwirpClient(options?: {
|
||||
maxAttempts?: number
|
||||
retryIntervalMs?: number
|
||||
retryMultiplier?: number
|
||||
}): CacheServiceClientJSON {
|
||||
const client = new CacheServiceClient(
|
||||
getUserAgentString(),
|
||||
options?.maxAttempts,
|
||||
options?.retryIntervalMs,
|
||||
options?.retryMultiplier
|
||||
)
|
||||
return new CacheServiceClientJSON(client)
|
||||
}
|
||||
|
|
@@ -1,72 +0,0 @@
export class FilesNotFoundError extends Error {
  files: string[]

  constructor(files: string[] = []) {
    let message = 'No files were found to upload'
    if (files.length > 0) {
      message += `: ${files.join(', ')}`
    }

    super(message)
    this.files = files
    this.name = 'FilesNotFoundError'
  }
}

export class InvalidResponseError extends Error {
  constructor(message: string) {
    super(message)
    this.name = 'InvalidResponseError'
  }
}

export class CacheNotFoundError extends Error {
  constructor(message = 'Cache not found') {
    super(message)
    this.name = 'CacheNotFoundError'
  }
}

export class GHESNotSupportedError extends Error {
  constructor(
    message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.'
  ) {
    super(message)
    this.name = 'GHESNotSupportedError'
  }
}

export class NetworkError extends Error {
  code: string

  constructor(code: string) {
    const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`
    super(message)
    this.code = code
    this.name = 'NetworkError'
  }

  static isNetworkErrorCode = (code?: string): boolean => {
    if (!code) return false
    return [
      'ECONNRESET',
      'ENOTFOUND',
      'ETIMEDOUT',
      'ECONNREFUSED',
      'EHOSTUNREACH'
    ].includes(code)
  }
}

export class UsageError extends Error {
  constructor() {
    const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`
    super(message)
    this.name = 'UsageError'
  }

  static isUsageErrorMessage = (msg?: string): boolean => {
    if (!msg) return false
    return msg.includes('insufficient usage')
  }
}
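For reference, a minimal sketch of how a caller might branch on the error classes above; the restore callback is a placeholder and not part of this diff:

```typescript
// Hypothetical consumer of the error classes above; `restore` stands in for
// any cache restore call that may throw them.
async function restoreWithFallback(restore: () => Promise<void>): Promise<void> {
  try {
    await restore()
  } catch (error) {
    if (error instanceof CacheNotFoundError) {
      return // a miss is not fatal; continue with a cold build
    }
    if (error instanceof NetworkError) {
      // error.code carries the low-level code, e.g. 'ECONNRESET'
      throw error
    }
    throw error
  }
}
```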
@@ -1,9 +0,0 @@
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json')

/**
 * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
 */
export function getUserAgentString(): string {
  return `@actions/cache-${packageJson.version}`
}
@@ -1,76 +0,0 @@
import {debug, setSecret} from '@actions/core'

/**
 * Masks the `sig` parameter in a URL and sets it as a secret.
 *
 * @param url - The URL containing the signature parameter to mask
 * @remarks
 * This function attempts to parse the provided URL and identify the 'sig' query parameter.
 * If found, it registers both the raw and URL-encoded signature values as secrets using
 * the Actions `setSecret` API, which prevents them from being displayed in logs.
 *
 * The function handles errors gracefully if URL parsing fails, logging them as debug messages.
 *
 * @example
 * ```typescript
 * // Mask a signature in an Azure SAS token URL
 * maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
 * ```
 */
export function maskSigUrl(url: string): void {
  if (!url) return
  try {
    const parsedUrl = new URL(url)
    const signature = parsedUrl.searchParams.get('sig')
    if (signature) {
      setSecret(signature)
      setSecret(encodeURIComponent(signature))
    }
  } catch (error) {
    debug(
      `Failed to parse URL: ${url} ${
        error instanceof Error ? error.message : String(error)
      }`
    )
  }
}

/**
 * Masks sensitive information in URLs containing signature parameters.
 * Currently supports masking 'sig' parameters in the 'signed_upload_url'
 * and 'signed_download_url' properties of the provided object.
 *
 * @param body - The object should contain a signature
 * @remarks
 * This function extracts URLs from the object properties and calls maskSigUrl
 * on each one to redact sensitive signature information. The function doesn't
 * modify the original object; it only marks the signatures as secrets for
 * logging purposes.
 *
 * @example
 * ```typescript
 * const responseBody = {
 *   signed_upload_url: 'https://blob.core.windows.net/?sig=abc123',
 *   signed_download_url: 'https://blob.core/windows.net/?sig=def456'
 * };
 * maskSecretUrls(responseBody);
 * ```
 */
export function maskSecretUrls(body: Record<string, unknown> | null): void {
  if (typeof body !== 'object' || body === null) {
    debug('body is not an object or is null')
    return
  }
  if (
    'signed_upload_url' in body &&
    typeof body.signed_upload_url === 'string'
  ) {
    maskSigUrl(body.signed_upload_url)
  }
  if (
    'signed_download_url' in body &&
    typeof body.signed_download_url === 'string'
  ) {
    maskSigUrl(body.signed_download_url)
  }
}
@ -1,177 +0,0 @@
|
|||
import * as core from '@actions/core'
|
||||
import {
|
||||
BlobClient,
|
||||
BlobUploadCommonResponse,
|
||||
BlockBlobClient,
|
||||
BlockBlobParallelUploadOptions
|
||||
} from '@azure/storage-blob'
|
||||
import {TransferProgressEvent} from '@azure/ms-rest-js'
|
||||
import {InvalidResponseError} from './shared/errors'
|
||||
import {UploadOptions} from '../options'
|
||||
|
||||
/**
|
||||
* Class for tracking the upload state and displaying stats.
|
||||
*/
|
||||
export class UploadProgress {
|
||||
contentLength: number
|
||||
sentBytes: number
|
||||
startTime: number
|
||||
displayedComplete: boolean
|
||||
timeoutHandle?: ReturnType<typeof setTimeout>
|
||||
|
||||
constructor(contentLength: number) {
|
||||
this.contentLength = contentLength
|
||||
this.sentBytes = 0
|
||||
this.displayedComplete = false
|
||||
this.startTime = Date.now()
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the number of bytes sent
|
||||
*
|
||||
* @param sentBytes the number of bytes sent
|
||||
*/
|
||||
setSentBytes(sentBytes: number): void {
|
||||
this.sentBytes = sentBytes
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the total number of bytes transferred.
|
||||
*/
|
||||
getTransferredBytes(): number {
|
||||
return this.sentBytes
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the upload is complete.
|
||||
*/
|
||||
isDone(): boolean {
|
||||
return this.getTransferredBytes() === this.contentLength
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints the current upload stats. Once the upload completes, this will print one
|
||||
* last line and then stop.
|
||||
*/
|
||||
display(): void {
|
||||
if (this.displayedComplete) {
|
||||
return
|
||||
}
|
||||
|
||||
const transferredBytes = this.sentBytes
|
||||
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(
|
||||
1
|
||||
)
|
||||
const elapsedTime = Date.now() - this.startTime
|
||||
const uploadSpeed = (
|
||||
transferredBytes /
|
||||
(1024 * 1024) /
|
||||
(elapsedTime / 1000)
|
||||
).toFixed(1)
|
||||
|
||||
core.info(
|
||||
`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`
|
||||
)
|
||||
|
||||
if (this.isDone()) {
|
||||
this.displayedComplete = true
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a function used to handle TransferProgressEvents.
|
||||
*/
|
||||
onProgress(): (progress: TransferProgressEvent) => void {
|
||||
return (progress: TransferProgressEvent) => {
|
||||
this.setSentBytes(progress.loadedBytes)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts the timer that displays the stats.
|
||||
*
|
||||
* @param delayInMs the delay between each write
|
||||
*/
|
||||
startDisplayTimer(delayInMs = 1000): void {
|
||||
const displayCallback = (): void => {
|
||||
this.display()
|
||||
|
||||
if (!this.isDone()) {
|
||||
this.timeoutHandle = setTimeout(displayCallback, delayInMs)
|
||||
}
|
||||
}
|
||||
|
||||
this.timeoutHandle = setTimeout(displayCallback, delayInMs)
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops the timer that displays the stats. As this typically indicates the upload
|
||||
* is complete, this will display one last line, unless the last line has already
|
||||
* been written.
|
||||
*/
|
||||
stopDisplayTimer(): void {
|
||||
if (this.timeoutHandle) {
|
||||
clearTimeout(this.timeoutHandle)
|
||||
this.timeoutHandle = undefined
|
||||
}
|
||||
|
||||
this.display()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
|
||||
* This function will display progress information to the console. Concurrency of the
|
||||
* upload is determined by the calling functions.
|
||||
*
|
||||
* @param signedUploadURL
|
||||
* @param archivePath
|
||||
* @param options
|
||||
* @returns
|
||||
*/
|
||||
export async function uploadCacheArchiveSDK(
|
||||
signedUploadURL: string,
|
||||
archivePath: string,
|
||||
options?: UploadOptions
|
||||
): Promise<BlobUploadCommonResponse> {
|
||||
const blobClient: BlobClient = new BlobClient(signedUploadURL)
|
||||
const blockBlobClient: BlockBlobClient = blobClient.getBlockBlobClient()
|
||||
const uploadProgress = new UploadProgress(options?.archiveSizeBytes ?? 0)
|
||||
|
||||
// Specify data transfer options
|
||||
const uploadOptions: BlockBlobParallelUploadOptions = {
|
||||
blockSize: options?.uploadChunkSize,
|
||||
concurrency: options?.uploadConcurrency, // maximum number of parallel transfer workers
|
||||
maxSingleShotSize: 128 * 1024 * 1024, // 128 MiB initial transfer size
|
||||
onProgress: uploadProgress.onProgress()
|
||||
}
|
||||
|
||||
try {
|
||||
uploadProgress.startDisplayTimer()
|
||||
|
||||
core.debug(
|
||||
`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`
|
||||
)
|
||||
|
||||
const response = await blockBlobClient.uploadFile(
|
||||
archivePath,
|
||||
uploadOptions
|
||||
)
|
||||
|
||||
// TODO: better management of non-retryable errors
|
||||
if (response._response.status >= 400) {
|
||||
throw new InvalidResponseError(
|
||||
`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`
|
||||
)
|
||||
}
|
||||
|
||||
return response
|
||||
} catch (error) {
|
||||
core.warning(
|
||||
`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`
|
||||
)
|
||||
throw error
|
||||
} finally {
|
||||
uploadProgress.stopDisplayTimer()
|
||||
}
|
||||
}
|
||||