Compare commits: eggyhead/h...main (544 commits)
@@ -43,7 +43,7 @@ Note that before a PR will be accepted, you must ensure:

 1. In a new branch, create a new Lerna package:

    ```console
-   $ npm run create-package new-package
+   $ npm run new-package [name]
    ```

    This will ask you some questions about the new package. Start with `0.0.0` as the first version (look generally at some of the other packages for how the package.json is structured).
@@ -0,0 +1,27 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "/packages/artifact"
+    schedule:
+      interval: "daily"
+    groups:
+      # Group minor and patch updates together but keep major separate
+      artifact-minor-patch:
+        update-types:
+          - "minor"
+          - "patch"
+  - package-ecosystem: "npm"
+    directory: "/packages/cache"
+    schedule:
+      interval: "daily"
+    groups:
+      # Group minor and patch updates together but keep major separate
+      cache-minor-patch:
+        update-types:
+          - "minor"
+          - "patch"
@@ -22,12 +22,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5

-      - name: Set Node.js 20.x
-        uses: actions/setup-node@v4
+      - name: Set Node.js 24.x
+        uses: actions/setup-node@v5
         with:
-          node-version: 20.x
+          node-version: 24.x

       # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
       # without these to just compile the artifacts package

@@ -47,7 +47,7 @@ jobs:
           echo -n 'hello from file 2' > artifact-path/second.txt

       - name: Upload Artifacts
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const {default: artifact} = require('./packages/artifact/lib/artifact')

@@ -72,17 +72,17 @@ jobs:
             console.log('Successfully blocked second artifact upload')
           }
   verify:
-    name: Verify
+    name: Verify and Delete
     runs-on: ubuntu-latest
     needs: [upload]
     steps:
      - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5

-      - name: Set Node.js 20.x
-        uses: actions/setup-node@v4
+      - name: Set Node.js 24.x
+        uses: actions/setup-node@v5
         with:
-          node-version: 20.x
+          node-version: 24.x

       # Need root node_modules because certain npm packages like jest are configured for the entire repository and it won't be possible
       # without these to just compile the artifacts package

@@ -96,7 +96,7 @@ jobs:
         working-directory: packages/artifact

       - name: List and Download Artifacts
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const {default: artifactClient} = require('./packages/artifact/lib/artifact')

@@ -164,3 +164,31 @@ jobs:
             }
           }
         }
+      - name: Delete Artifacts
+        uses: actions/github-script@v8
+        with:
+          script: |
+            const {default: artifactClient} = require('./packages/artifact/lib/artifact')
+
+            const artifactsToDelete = [
+              'my-artifact-ubuntu-latest',
+              'my-artifact-windows-latest',
+              'my-artifact-macos-latest'
+            ]
+
+            for (const artifactName of artifactsToDelete) {
+              const {id} = await artifactClient.deleteArtifact(artifactName)
+            }
+
+            const {artifacts} = await artifactClient.listArtifacts({latest: true})
+            const foundArtifacts = artifacts.filter(artifact =>
+              artifactsToDelete.includes(artifact.name)
+            )
+
+            if (foundArtifacts.length !== 0) {
+              console.log('Unexpected length of found artifacts:', foundArtifacts)
+              throw new Error(
+                `Expected 0 artifacts but found ${foundArtifacts.length} artifacts.`
+              )
+            }
@@ -18,12 +18,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

-      - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+      - name: Set Node.js 24.x
+        uses: actions/setup-node@v5
         with:
-          node-version: 20.x
+          node-version: 24.x

       - name: npm install
         run: npm install

@@ -32,7 +32,7 @@ jobs:
         run: npm run bootstrap

       - name: audit tools (without allow-list)
-        run: npm audit --audit-level=moderate
+        run: npm audit --audit-level=moderate --omit dev

       - name: audit packages
         run: npm run audit-all
@@ -22,12 +22,12 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

-      - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+      - name: Set Node.js 24.x
+        uses: actions/setup-node@v5
         with:
-          node-version: 20.x
+          node-version: 24.x

       # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
       # node context. This runs a local action that gets and sets the necessary env variables that are needed

@@ -39,9 +39,11 @@ jobs:
       - name: Install root npm packages
         run: npm ci

+      # We need to install only runtime dependencies (omit dev dependencies) to verify that what we're shipping is all
+      # that is needed
       - name: Compile cache package
         run: |
-          npm ci
+          npm ci --omit=dev
           npm run tsc
         working-directory: packages/cache
@@ -17,16 +17,16 @@ jobs:

     steps:
      - name: Checkout
-       uses: actions/checkout@v2
+       uses: actions/checkout@v5

     - shell: bash
       run: |
         rm "C:\Program Files\Git\usr\bin\tar.exe"

-    - name: Set Node.js 20.x
-      uses: actions/setup-node@v1
+    - name: Set Node.js 24.x
+      uses: actions/setup-node@v5
       with:
-        node-version: 20.x
+        node-version: 24.x

       # In order to save & restore cache from a shell script, certain env variables need to be set that are only available in the
       # node context. This runs a local action that gets and sets the necessary env variables that are needed
@@ -20,7 +20,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
@@ -1,27 +1,42 @@
 name: Publish NPM

+run-name: Publish NPM - ${{ github.event.inputs.package }}
+
 on:
   workflow_dispatch:
     inputs:
       package:
+        type: choice
         required: true
-        description: 'core, artifact, cache, exec, github, glob, http-client, io, tool-cache'
+        description: 'Which package to release'
+        options:
+          - artifact
+          - attest
+          - cache
+          - core
+          - exec
+          - github
+          - glob
+          - http-client
+          - io
+          - tool-cache

 jobs:
   test:
-    runs-on: macos-latest
+    runs-on: macos-latest-large

     steps:
       - name: setup repo
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

+      - name: verify package exists
+        run: ls packages/${{ github.event.inputs.package }}
+
-      - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+      - name: Set Node.js 24.x
+        uses: actions/setup-node@v5
         with:
-          node-version: 20.x
+          node-version: 24.x

       - name: npm install
         run: npm install

@@ -40,19 +55,22 @@ jobs:
         working-directory: packages/${{ github.event.inputs.package }}

       - name: upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}
           path: packages/${{ github.event.inputs.package }}/*.tgz

   publish:
-    runs-on: macos-latest
+    runs-on: macos-latest-large
     needs: test
     environment: npm-publish
+    permissions:
+      contents: read
+      id-token: write
     steps:

       - name: download artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: ${{ github.event.inputs.package }}

@@ -62,7 +80,7 @@ jobs:
           NPM_TOKEN: ${{ secrets.TOKEN }}

       - name: publish
-        run: npm publish *.tgz
+        run: npm publish --provenance *.tgz

       - name: notify slack on failure
         if: failure()
@@ -16,19 +16,23 @@ jobs:

     strategy:
       matrix:
-        runs-on: [ubuntu-latest, macos-latest, windows-latest]
+        runs-on: [ubuntu-latest, macos-latest-large, windows-latest]
+
+        # Node 20 is the currently supported stable Node version for actions - https://docs.github.com/actions/sharing-automations/creating-actions/metadata-syntax-for-github-actions#runsusing-for-javascript-actions
+        # Node 24 is the new version being added with support in actions runners
+        node-version: [20.x, 24.x]
       fail-fast: false

     runs-on: ${{ matrix.runs-on }}

     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

-      - name: Set Node.js 20.x
-        uses: actions/setup-node@v3
+      - name: Set up Node ${{ matrix.node-version }}
+        uses: actions/setup-node@v5
         with:
-          node-version: 20.x
+          node-version: ${{ matrix.node-version }}

       - name: npm install
         run: npm install
@@ -9,7 +9,7 @@ jobs:
     if: ${{ github.repository_owner == 'actions' }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5
       - name: Update Octokit
         working-directory: packages/github
         run: |
@@ -2,3 +2,4 @@

 /packages/artifact/ @actions/artifacts-actions
 /packages/cache/ @actions/actions-cache
+/packages/attest/ @actions/package-security
README.md
@@ -24,7 +24,7 @@ The GitHub Actions ToolKit provides a set of packages to make creating actions easier.
 Provides functions for inputs, outputs, results, logging, secrets and variables. Read more [here](packages/core)

 ```bash
-$ npm install @actions/core
+npm install @actions/core
 ```
 <br/>
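For orientation, typical `@actions/core` usage in an action entry point looks roughly like this (a minimal sketch based on the package's public API, not part of the diff):

```js
const core = require('@actions/core')

// Read an input declared in action.yml, log, and publish an output.
const name = core.getInput('name', {required: true})
core.info(`Hello, ${name}`)
core.setOutput('greeting', `Hello, ${name}`)
```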
@@ -33,7 +33,7 @@ $ npm install @actions/core
 Provides functions to exec cli tools and process output. Read more [here](packages/exec)

 ```bash
-$ npm install @actions/exec
+npm install @actions/exec
 ```
 <br/>
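A quick sketch of the API (assuming the standard `exec` entry point; not part of the diff):

```js
const exec = require('@actions/exec')

// exec() resolves with the exit code and rejects on a non-zero exit by default.
const exitCode = await exec.exec('node', ['--version'])
```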
@@ -42,7 +42,7 @@ $ npm install @actions/exec
 Provides functions to search for files matching glob patterns. Read more [here](packages/glob)

 ```bash
-$ npm install @actions/glob
+npm install @actions/glob
 ```
 <br/>
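For context, a minimal sketch of the glob API (the pattern here is illustrative):

```js
const glob = require('@actions/glob')

// Create a globber once, then resolve all matching paths.
const globber = await glob.create('packages/**/*.ts')
const files = await globber.glob()
```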
@@ -51,7 +51,7 @@ $ npm install @actions/glob
 A lightweight HTTP client optimized for building actions. Read more [here](packages/http-client)

 ```bash
-$ npm install @actions/http-client
+npm install @actions/http-client
 ```
 <br/>
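A minimal sketch of the client (the URL is a placeholder; not part of the diff):

```js
const {HttpClient} = require('@actions/http-client')

// The constructor takes a user-agent string.
const http = new HttpClient('my-user-agent')
const res = await http.get('https://api.github.com')
const body = await res.readBody()
```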
@@ -60,7 +60,7 @@ $ npm install @actions/http-client
 Provides disk i/o functions like cp, mv, rmRF, which etc. Read more [here](packages/io)

 ```bash
-$ npm install @actions/io
+npm install @actions/io
 ```
 <br/>
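A sketch of typical calls (paths are placeholders):

```js
const io = require('@actions/io')

// Cross-platform equivalents of mkdir -p, cp, and which.
await io.mkdirP('dist/output')
await io.cp('README.md', 'dist/output/README.md')
const nodePath = await io.which('node', true) // true = throw if not found
```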
@@ -71,7 +71,7 @@ Provides functions for downloading and caching tools. e.g. setup-* actions. Read more [here](packages/tool-cache)
 See @actions/cache for caching workflow dependencies.

 ```bash
-$ npm install @actions/tool-cache
+npm install @actions/tool-cache
 ```
 <br/>
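A rough sketch of the download/extract/cache flow (the URL, tool name, and version are placeholders):

```js
const tc = require('@actions/tool-cache')

// Download a tool once, extract it, then cache it by name and version for later runs.
const zipPath = await tc.downloadTool('https://example.com/some-tool.zip')
const extractedDir = await tc.extractZip(zipPath)
const cachedDir = await tc.cacheDir(extractedDir, 'some-tool', '1.0.0')
```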
@@ -80,7 +80,7 @@ $ npm install @actions/tool-cache
 Provides an Octokit client hydrated with the context that the current action is being run in. Read more [here](packages/github)

 ```bash
-$ npm install @actions/github
+npm install @actions/github
 ```
 <br/>
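For orientation, a minimal sketch (assumes a token is available in the environment):

```js
const github = require('@actions/github')

// getOctokit returns an Octokit REST client; context describes the current run.
const octokit = github.getOctokit(process.env.GITHUB_TOKEN)
const {owner, repo} = github.context.repo
const {data: pulls} = await octokit.rest.pulls.list({owner, repo})
```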
@@ -89,7 +89,7 @@ $ npm install @actions/github
 Provides functions to interact with actions artifacts. Read more [here](packages/artifact)

 ```bash
-$ npm install @actions/artifact
+npm install @actions/artifact
 ```
 <br/>
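A minimal sketch of the v2 client's round trip (file names are placeholders):

```js
const {default: artifact} = require('@actions/artifact')

// Upload two files rooted at the current directory, then download by ID.
const {id} = await artifact.uploadArtifact(
  'my-artifact',
  ['file1.txt', 'file2.txt'],
  '.'
)
const {downloadPath} = await artifact.downloadArtifact(id)
```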
@@ -98,7 +98,16 @@ $ npm install @actions/artifact
 Provides functions to cache dependencies and build outputs to improve workflow execution time. Read more [here](packages/cache)

 ```bash
-$ npm install @actions/cache
+npm install @actions/cache
 ```
 <br/>
+
+:lock_with_ink_pen: [@actions/attest](packages/attest)
+
+Provides functions to write attestations for workflow artifacts. Read more [here](packages/attest)
+
+```bash
+npm install @actions/attest
+```
+<br/>
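A sketch of the restore-or-save pattern for `@actions/cache` (the paths and key are placeholders):

```js
const cache = require('@actions/cache')

// Restore node_modules for this key if a cache exists; otherwise save one.
const paths = ['node_modules']
const key = 'npm-cache-v1'
const restoredKey = await cache.restoreCache(paths, key)
if (!restoredKey) {
  await cache.saveCache(paths, key)
}
```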
@@ -218,9 +227,23 @@ console.log(`We can even get context data, like the repo: ${context.repo.repo}`)
 ```
 <br/>

-## Contributing
+## Note

-We welcome contributions. See [how to contribute](.github/CONTRIBUTING.md).
+Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.
+
+We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
+
+We are taking the following steps to better direct requests related to GitHub Actions, including:
+
+1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
+
+2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
+
+3. Security Issues should be handled as per our [security.md](SECURITY.md).
+
+We will still provide security updates for this project and fix major breaking changes during this time.
+
+You are welcome to still raise bugs in this repo.

 ## Code of Conduct
@@ -32,7 +32,7 @@ jobs:
       os: [ubuntu-16.04, windows-2019]
     runs-on: ${{matrix.os}}
     actions:
-    - uses: actions/setup-node@v3
+    - uses: actions/setup-node@v5
      with:
        version: ${{matrix.node}}
     - run: |
@@ -18,7 +18,7 @@ e.g. To use https://github.com/actions/setup-node, users will author:

 ```yaml
 steps:
-  using: actions/setup-node@v3
+  using: actions/setup-node@v5
 ```

 # Define Metadata
(File diff suppressed because it is too large.)

package.json
@@ -1,6 +1,6 @@
 {
   "name": "root",
-  "private": true,
+  "private": true,
   "scripts": {
     "audit-all": "lerna run audit-moderate",
     "bootstrap": "lerna exec -- npm install",

@@ -13,11 +13,11 @@
     "lint": "eslint packages/**/*.ts",
     "lint-fix": "eslint packages/**/*.ts --fix",
     "new-package": "scripts/create-package",
-    "test": "jest --testTimeout 60000"
+    "test": "jest --testTimeout 70000"
   },
   "devDependencies": {
     "@types/jest": "^29.5.4",
-    "@types/node": "^20.5.7",
+    "@types/node": "^24.1.0",
     "@types/signale": "^1.4.1",
     "concurrently": "^6.1.0",
     "eslint": "^8.0.1",

@@ -27,10 +27,24 @@
     "eslint-plugin-prettier": "^5.0.0",
     "flow-bin": "^0.115.0",
     "jest": "^29.6.4",
-    "lerna": "^7.1.4",
+    "lerna": "^6.4.1",
     "nx": "16.6.0",
     "prettier": "^3.0.0",
     "ts-jest": "^29.1.1",
     "typescript": "^5.2.2"
   },
+  "overrides": {
+    "semver": "^7.6.0",
+    "tar": "^6.2.1",
+    "@octokit/plugin-paginate-rest": "^9.2.2",
+    "@octokit/request": "^8.4.1",
+    "@octokit/request-error": "^5.1.1",
+    "@octokit/core": "^5.0.3",
+    "tmp": "^0.2.4",
+    "@types/node": "^24.1.0",
+    "brace-expansion": "^2.0.2",
+    "form-data": "^4.0.4",
+    "uri-js": "npm:uri-js-replace@^1.0.1",
+    "node-fetch": "^3.3.2"
+  }
 }
@@ -41,3 +41,4 @@ Any easy way to test changes for the official upload/download actions is to fork
 1. In the locally cloned fork, link to your local toolkit changes: `npm link @actions/artifact`
 2. Then, compile your changes with: `npm run release`. The local `dist/index.js` should be updated with your changes.
 3. Commit and push to your fork, you can then test with a `uses:` in your workflow pointed at your fork.
+4. The format for the above is `<username>/<repository-name>/@<ref>`, i.e. `me/myrepo/@HEAD`
@@ -12,6 +12,7 @@ This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact)
 - [Quick Start](#quick-start)
 - [Examples](#examples)
   - [Upload and Download](#upload-and-download)
+  - [Delete an Artifact](#delete-an-artifact)
   - [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
   - [Speeding up large uploads](#speeding-up-large-uploads)
 - [Additional Resources](#additional-resources)
@@ -106,6 +107,41 @@ const {downloadPath} = await artifact.downloadArtifact(id, {
 console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
 ```

+### Delete an Artifact
+
+To delete an artifact, all you need is the name.
+
+```js
+const {id} = await artifact.deleteArtifact(
+  // name of the artifact
+  'my-artifact'
+)
+
+console.log(`Deleted Artifact ID '${id}'`)
+```
+
+It also supports options to delete from other repos/runs, provided a GitHub token with `actions:write` permission on the target repository is supplied.
+
+```js
+const findBy = {
+  // must have actions:write permission on target repository
+  token: process.env['GITHUB_TOKEN'],
+  workflowRunId: 123,
+  repositoryOwner: 'actions',
+  repositoryName: 'toolkit'
+}
+
+const {id} = await artifact.deleteArtifact(
+  // name of the artifact
+  'my-artifact',
+  // options to find by other repo/owner
+  {findBy}
+)
+
+console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/${findBy.repositoryName}`)
+```
+
 ### Downloading from other workflow runs or repos

 It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must specify `options.findBy` to `downloadArtifact`.
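A sketch of what that call might look like, mirroring the `findBy` shape used in the delete example above (`artifactId` and the run/repo values are placeholders):

```js
const findBy = {
  // token with permission to read artifacts on the target repository
  token: process.env['GITHUB_TOKEN'],
  workflowRunId: 123,
  repositoryOwner: 'actions',
  repositoryName: 'toolkit'
}

const {downloadPath} = await artifact.downloadArtifact(artifactId, {findBy})
```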
@@ -1,15 +1,178 @@
 # @actions/artifact Releases

-### 0.1.0
+### 4.0.0

-- Initial release
+- Add support for Node 24 [#2110](https://github.com/actions/toolkit/pull/2110)
+- Fix: artifact pagination bugs and configurable artifact count limits [#2165](https://github.com/actions/toolkit/pull/2165)
+- Fix: reject the promise on timeout [#2124](https://github.com/actions/toolkit/pull/2124)
+- Update dependency versions

-### 0.2.0
+### 2.3.3

-- Fixes to TCP connections not closing
-- GZip file compression to speed up downloads
-- Improved logging and output
-- Extra documentation
+- Dependency updates [#2049](https://github.com/actions/toolkit/pull/2049)
+
+### 2.3.2
+
+- Added masking for Shared Access Signature (SAS) artifact URLs [#1982](https://github.com/actions/toolkit/pull/1982)
+- Change hash to digest for consistent terminology across runner logs [#1991](https://github.com/actions/toolkit/pull/1991)
+
+### 2.3.1
+
+- Fix comment typo on expectedHash. [#1986](https://github.com/actions/toolkit/pull/1986)
+
+### 2.3.0
+
+- Allow ArtifactClient to perform digest comparisons, if supplied. [#1975](https://github.com/actions/toolkit/pull/1975)
+
+### 2.2.2
+
+- Default concurrency to 5 for uploading artifacts [#1962](https://github.com/actions/toolkit/pull/1962)
+
+### 2.2.1
+
+- Add `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY` and `ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS` environment variables [#1928](https://github.com/actions/toolkit/pull/1928)
+
+### 2.2.0
+
+- Return artifact digest on upload [#1896](https://github.com/actions/toolkit/pull/1896)
+
+### 2.1.11
+
+- Fixed a bug with relative symlinks resolution [#1844](https://github.com/actions/toolkit/pull/1844)
+- Use native `crypto` [#1815](https://github.com/actions/toolkit/pull/1815)
+
+### 2.1.10
+
+- Fixed a regression with symlinks not being automatically resolved [#1830](https://github.com/actions/toolkit/pull/1830)
+- Fixed a regression with chunk timeout [#1786](https://github.com/actions/toolkit/pull/1786)
+
+### 2.1.9
+
+- Fixed artifact upload chunk timeout logic [#1774](https://github.com/actions/toolkit/pull/1774)
+- Use lazy stream to prevent issues with open file limits [#1771](https://github.com/actions/toolkit/pull/1771)
+
+### 2.1.8
+
+- Allows `*.localhost` domains for hostname checks for local development.
+
+### 2.1.7
+
+- Update unzip-stream dependency and reverted to using `unzip.Extract()`
+
+### 2.1.6
+
+- Will retry on invalid request responses.
+
+### 2.1.5
+
+- Bumped `archiver` dependency to 7.0.1
+
+### 2.1.4
+
+- Adds info-level logging for zip extraction
+
+### 2.1.3
+
+- Fixes a bug in the extract logic updated in 2.1.2
+
+### 2.1.2
+
+- Updated the stream extract functionality to use `unzip.Parse()` instead of `unzip.Extract()` for greater control of unzipping artifacts
+
+### 2.1.1
+
+- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
+
+### 2.1.0
+
+- Added `ArtifactClient#deleteArtifact` to delete artifacts by name [#1626](https://github.com/actions/toolkit/pull/1626)
+- Update error messaging to be more useful [#1628](https://github.com/actions/toolkit/pull/1628)
+
+### 2.0.1
+
+- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
+
+### 2.0.0
+
+- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
+- Numerous API changes, [some breaking](./README.md#breaking-changes).
+- [Blog post with more info](https://github.blog/2024-02-12-get-started-with-v4-of-github-actions-artifacts/)
+
+### 1.1.1
+
+- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
+
+### 1.1.0
+
+- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
+
+### 1.0.2
+
+- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
+
+### 1.0.1
+
+- Update to v2.0.0 of `@actions/http-client`
+
+### 1.0.0
+
+- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
+
+### 0.6.1
+
+- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
+
+### 0.6.0
+
+- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
+- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
+- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
+- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
+- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
+- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
+
+### 0.5.2
+
+- Add HTTP 500 as a retryable status code for artifact upload and download.
+
+### 0.5.1
+
+- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
+
+### 0.5.0
+
+- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
+
+### 0.4.2
+
+- Improved retry-ability when a partial artifact download is encountered
+
+### 0.4.1
+
+- Update to latest @actions/core version
+
+### 0.4.0
+
+- Add option to specify custom retentions on artifacts
+
+### 0.3.5
+
+- Retry in the event of a 413 response
+
+### 0.3.3
+
+- Increase chunk size during upload from 4MB to 8MB
+- Improve user-agent strings during API calls to help internally diagnose issues
+
+### 0.3.2
+
+- Fix to ensure readstreams get correctly reset in the event of a retry
+
+### 0.3.1
+
+- Fix to ensure temporary gzip files get correctly deleted during artifact upload
+- Remove spaces as a forbidden character during upload
+
 ### 0.3.0

@@ -20,88 +183,13 @@
 - Clearer error message if storage quota has been reached
 - Improved logging and output during artifact download

-### 0.3.1
+### 0.2.0

-- Fix to ensure temporary gzip files get correctly deleted during artifact upload
-- Remove spaces as a forbidden character during upload
+- Fixes to TCP connections not closing
+- GZip file compression to speed up downloads
+- Improved logging and output
+- Extra documentation

-### 0.3.2
+### 0.1.0

-- Fix to ensure readstreams get correctly reset in the event of a retry
-
-### 0.3.3
-
-- Increase chunk size during upload from 4MB to 8MB
-- Improve user-agent strings during API calls to help internally diagnose issues
-
-### 0.3.5
-
-- Retry in the event of a 413 response
-
-### 0.4.0
-
-- Add option to specify custom retentions on artifacts
-
-### 0.4.1
-
-- Update to latest @actions/core version
-
-### 0.4.2
-
-- Improved retry-ability when a partial artifact download is encountered
-
-### 0.5.0
-
-- Improved retry-ability for all http calls during artifact upload and download if an error is encountered
-
-### 0.5.1
-
-- Bump @actions/http-client to version 1.0.11 to fix proxy related issues during artifact upload and download
-
-### 0.5.2
-
-- Add HTTP 500 as a retryable status code for artifact upload and download.
-
-### 0.6.0
-
-- Support upload from named pipes [#748](https://github.com/actions/toolkit/pull/748)
-- Fixes to percentage values being greater than 100% when downloading all artifacts [#889](https://github.com/actions/toolkit/pull/889)
-- Improved logging and output during artifact upload [#949](https://github.com/actions/toolkit/pull/949)
-- Improvements to client-side validation for certain invalid characters not allowed during upload: [#951](https://github.com/actions/toolkit/pull/951)
-- Faster upload speeds for certain types of large files by exempting gzip compression [#956](https://github.com/actions/toolkit/pull/956)
-- More detailed logging when dealing with chunked uploads [#957](https://github.com/actions/toolkit/pull/957)
-
-### 0.6.1
-
-- Fix for failing 0 byte file uploads on Windows [#962](https://github.com/actions/toolkit/pull/962)
-
-### 1.0.0
-
-- Update `lockfileVersion` to `v2` in `package-lock.json` [#1009](https://github.com/actions/toolkit/pull/1009)
-
-### 1.0.1
-
-- Update to v2.0.0 of `@actions/http-client`
-
-### 1.0.2
-
-- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
-
-### 1.1.0
-
-- Add `x-actions-results-crc64` and `x-actions-results-md5` checksum headers on upload [#1063](https://github.com/actions/toolkit/pull/1063)
-
-### 1.1.1
-
-- Fixed a bug in Node16 where if an HTTP download finished too quickly (<1ms, e.g. when it's mocked) we attempt to delete a temp file that has not been created yet [#1278](https://github.com/actions/toolkit/pull/1278/commits/b9de68a590daf37c6747e38d3cb4f1dd2cfb791c)
-
-### 2.0.0
-
-- Major release. Supports new Artifact backend for improved speed, reliability and behavior.
-- Numerous API changes, [some breaking](./README.md#breaking-changes).
-
-- Blog post with more info: TBD
-
-### 2.0.1
-
-- Patch to fix transient request timeouts https://github.com/actions/download-artifact/issues/249
+- Initial release
@@ -116,6 +116,54 @@ describe('artifact-http-client', () => {
     expect(mockPost).toHaveBeenCalledTimes(2)
   })

+  it('should retry if invalid body response', async () => {
+    const mockPost = jest
+      .fn(() => {
+        const msgSucceeded = new http.IncomingMessage(new net.Socket())
+        msgSucceeded.statusCode = 200
+        return {
+          message: msgSucceeded,
+          readBody: async () => {
+            return Promise.resolve(
+              `{"ok": true, "signedUploadUrl": "http://localhost:8080/upload"}`
+            )
+          }
+        }
+      })
+      .mockImplementationOnce(() => {
+        const msgFailed = new http.IncomingMessage(new net.Socket())
+        msgFailed.statusCode = 502
+        msgFailed.statusMessage = 'Bad Gateway'
+        return {
+          message: msgFailed,
+          readBody: async () => {
+            return Promise.resolve('💥')
+          }
+        }
+      })
+    const mockHttpClient = (
+      HttpClient as unknown as jest.Mock
+    ).mockImplementation(() => {
+      return {
+        post: mockPost
+      }
+    })
+
+    const client = internalArtifactTwirpClient(clientOptions)
+    const artifact = await client.CreateArtifact({
+      workflowRunBackendId: '1234',
+      workflowJobRunBackendId: '5678',
+      name: 'artifact',
+      version: 4
+    })
+
+    expect(mockHttpClient).toHaveBeenCalledTimes(1)
+    expect(artifact).toBeDefined()
+    expect(artifact.ok).toBe(true)
+    expect(artifact.signedUploadUrl).toBe('http://localhost:8080/upload')
+    expect(mockPost).toHaveBeenCalledTimes(2)
+  })
+
   it('should fail if the request fails 5 times', async () => {
     const mockPost = jest.fn(() => {
       const msgFailed = new http.IncomingMessage(new net.Socket())
@@ -0,0 +1,149 @@
+import * as config from '../src/internal/shared/config'
+import os from 'os'
+
+// Mock the `cpus()` function in the `os` module
+jest.mock('os', () => {
+  const osActual = jest.requireActual('os')
+  return {
+    ...osActual,
+    cpus: jest.fn()
+  }
+})
+
+beforeEach(() => {
+  jest.resetModules()
+})
+
+describe('isGhes', () => {
+  it('should return false when the request domain is github.com', () => {
+    process.env.GITHUB_SERVER_URL = 'https://github.com'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with ghe.com', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.com'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with ghe.localhost', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return false when the request domain ends with .localhost', () => {
+    process.env.GITHUB_SERVER_URL = 'https://github.localhost'
+    expect(config.isGhes()).toBe(false)
+  })
+
+  it('should return true when the request domain is specific to an enterprise', () => {
+    process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
+    expect(config.isGhes()).toBe(true)
+  })
+})
+
+describe('uploadChunkTimeoutEnv', () => {
+  it('should return default 300000 when no env set', () => {
+    expect(config.getUploadChunkTimeout()).toBe(300000)
+  })
+
+  it('should return value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS', () => {
+    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = '150000'
+    expect(config.getUploadChunkTimeout()).toBe(150000)
+  })
+
+  it('should throw if value set in ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS is invalid', () => {
+    process.env.ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS = 'abc'
+    expect(() => {
+      config.getUploadChunkTimeout()
+    }).toThrow()
+  })
+})
+
+describe('uploadConcurrencyEnv', () => {
+  it('Concurrency default to 5', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    expect(config.getConcurrency()).toBe(5)
+  })
+
+  it('Concurrency max out at 300 on systems with many CPUs', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(32))
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '301'
+    expect(config.getConcurrency()).toBe(300)
+  })
+
+  it('Concurrency can be set to 32 when cpu num is <= 4', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '32'
+    expect(config.getConcurrency()).toBe(32)
+  })
+
+  it('Concurrency can be set 16 * num of cpu when cpu num is > 4', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(6))
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '96'
+    expect(config.getConcurrency()).toBe(96)
+  })
+
+  it('Concurrency can be overridden by env var ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '10'
+    expect(config.getConcurrency()).toBe(10)
+  })
+
+  it('should throw with invalid value of ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = 'abc'
+    expect(() => {
+      config.getConcurrency()
+    }).toThrow()
+  })
+
+  it('should throw if ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is < 1', () => {
+    ;(os.cpus as jest.Mock).mockReturnValue(new Array(4))
+    process.env.ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY = '0'
+    expect(() => {
+      config.getConcurrency()
+    }).toThrow()
+  })
+})
+
+describe('getMaxArtifactListCount', () => {
+  beforeEach(() => {
+    delete process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
+  })
+
+  it('should return default 1000 when no env set', () => {
+    expect(config.getMaxArtifactListCount()).toBe(1000)
+  })
+
+  it('should return value set in ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT', () => {
+    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '2000'
+    expect(config.getMaxArtifactListCount()).toBe(2000)
+  })
+
+  it('should throw if value set in ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is invalid', () => {
+    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = 'abc'
+    expect(() => {
+      config.getMaxArtifactListCount()
+    }).toThrow(
+      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
+    )
+  })
+
+  it('should throw if ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is < 1', () => {
+    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '0'
+    expect(() => {
+      config.getMaxArtifactListCount()
+    }).toThrow(
+      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
+    )
+  })
+
+  it('should throw if ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT is negative', () => {
+    process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '-100'
+    expect(() => {
+      config.getMaxArtifactListCount()
+    }).toThrow(
+      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
+    )
+  })
+})
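Taken together, the concurrency cases above pin down a selection rule: default of 5 when the env var is unset, a per-machine cap of 32 (up to 4 CPUs) or 16 per CPU (above 4), and a hard ceiling of 300. A rough sketch of the behavior they imply (inferred from the expectations, not the actual source):

```js
// Hypothetical reconstruction of getConcurrency(), inferred from the tests above.
function getConcurrency(cpuCount, envValue) {
  if (envValue === undefined) return 5 // default

  const requested = Number(envValue)
  if (Number.isNaN(requested) || requested < 1) {
    throw new Error('Invalid ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY')
  }

  // Small machines may still request up to 32; larger ones up to 16 per CPU,
  // and everything is capped at 300.
  const cap = cpuCount <= 4 ? 32 : 16 * cpuCount
  return Math.min(requested, cap, 300)
}
```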
@@ -0,0 +1,192 @@
+import * as github from '@actions/github'
+import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
+import type {RequestInterface} from '@octokit/types'
+import {
+  deleteArtifactInternal,
+  deleteArtifactPublic
+} from '../src/internal/delete/delete-artifact'
+import * as config from '../src/internal/shared/config'
+import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
+import * as util from '../src/internal/shared/util'
+import {noopLogs} from './common'
+
+type MockedRequest = jest.MockedFunction<RequestInterface<object>>
+
+type MockedDeleteArtifact = jest.MockedFunction<
+  RestEndpointMethods['actions']['deleteArtifact']
+>
+
+jest.mock('@actions/github', () => ({
+  getOctokit: jest.fn().mockReturnValue({
+    request: jest.fn(),
+    rest: {
+      actions: {
+        deleteArtifact: jest.fn()
+      }
+    }
+  })
+}))
+
+const fixtures = {
+  repo: 'toolkit',
+  owner: 'actions',
+  token: 'ghp_1234567890',
+  runId: 123,
+  backendIds: {
+    workflowRunBackendId: 'c4d7c21f-ba3f-4ddc-a8c8-6f2f626f8422',
+    workflowJobRunBackendId: '760803a1-f890-4d25-9a6e-a3fc01a0c7cf'
+  },
+  artifacts: [
+    {
+      id: 1,
+      name: 'my-artifact',
+      size: 456,
+      createdAt: new Date('2023-12-01')
+    },
+    {
+      id: 2,
+      name: 'my-artifact',
+      size: 456,
+      createdAt: new Date('2023-12-02')
+    }
+  ]
+}
+
+describe('delete-artifact', () => {
+  beforeAll(() => {
+    noopLogs()
+  })
+
+  describe('public', () => {
+    it('should delete an artifact', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: [
+            {
+              name: fixtures.artifacts[0].name,
+              id: fixtures.artifacts[0].id,
+              size_in_bytes: fixtures.artifacts[0].size,
+              created_at: fixtures.artifacts[0].createdAt.toISOString()
+            }
+          ]
+        }
+      })
+
+      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
+        .deleteArtifact as MockedDeleteArtifact
+      mockDeleteArtifact.mockResolvedValueOnce({
+        status: 204,
+        headers: {},
+        url: '',
+        data: null as never
+      })
+
+      const response = await deleteArtifactPublic(
+        fixtures.artifacts[0].name,
+        fixtures.runId,
+        fixtures.owner,
+        fixtures.repo,
+        fixtures.token
+      )
+
+      expect(response).toEqual({
+        id: fixtures.artifacts[0].id
+      })
+    })
+
+    it('should fail if non-200 response', async () => {
+      const mockRequest = github.getOctokit(fixtures.token)
+        .request as MockedRequest
+      mockRequest.mockResolvedValueOnce({
+        status: 200,
+        headers: {},
+        url: '',
+        data: {
+          artifacts: [
+            {
+              name: fixtures.artifacts[0].name,
+              id: fixtures.artifacts[0].id,
+              size_in_bytes: fixtures.artifacts[0].size,
+              created_at: fixtures.artifacts[0].createdAt.toISOString()
+            }
+          ]
+        }
+      })
+
+      const mockDeleteArtifact = github.getOctokit(fixtures.token).rest.actions
+        .deleteArtifact as MockedDeleteArtifact
+      mockDeleteArtifact.mockRejectedValue(new Error('boom'))
+
+      await expect(
+        deleteArtifactPublic(
+          fixtures.artifacts[0].name,
+          fixtures.runId,
+          fixtures.owner,
+          fixtures.repo,
+          fixtures.token
+        )
+      ).rejects.toThrow('boom')
+    })
+  })
+
+  describe('internal', () => {
+    beforeEach(() => {
+      jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
+      jest
+        .spyOn(util, 'getBackendIdsFromToken')
+        .mockReturnValue(fixtures.backendIds)
+      jest
+        .spyOn(config, 'getResultsServiceUrl')
+        .mockReturnValue('https://results.local')
+    })
+
+    it('should delete an artifact', async () => {
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
+        .mockResolvedValue({
+          artifacts: fixtures.artifacts.map(artifact => ({
+            ...fixtures.backendIds,
+            databaseId: artifact.id.toString(),
+            name: artifact.name,
+            size: artifact.size.toString(),
+            createdAt: Timestamp.fromDate(artifact.createdAt)
+          }))
+        })
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
+        .mockResolvedValue({
+          ok: true,
+          artifactId: fixtures.artifacts[0].id.toString()
+        })
+      const response = await deleteArtifactInternal(fixtures.artifacts[0].name)
+      expect(response).toEqual({
+        id: fixtures.artifacts[0].id
+      })
+    })
+
+    it('should fail if non-200 response', async () => {
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'ListArtifacts')
+        .mockResolvedValue({
+          artifacts: fixtures.artifacts.map(artifact => ({
+            ...fixtures.backendIds,
+            databaseId: artifact.id.toString(),
+            name: artifact.name,
+            size: artifact.size.toString(),
+            createdAt: Timestamp.fromDate(artifact.createdAt)
+          }))
+        })
+      jest
+        .spyOn(ArtifactServiceClientJSON.prototype, 'DeleteArtifact')
+        .mockRejectedValue(new Error('boom'))
+      await expect(
+        deleteArtifactInternal(fixtures.artifacts[0].id)
+      ).rejects.toThrow('boom')
+    })
+  })
+})
@ -111,6 +111,16 @@ const mockGetArtifactSuccess = jest.fn(() => {
|
|||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactHung = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 200
|
||||
// Don't push any data or call push(null) to end the stream
|
||||
// This creates a stream that hangs and never completes
|
||||
return {
|
||||
message
|
||||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactFailure = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 500
|
||||
|
|
@ -121,6 +131,16 @@ const mockGetArtifactFailure = jest.fn(() => {
|
|||
}
|
||||
})
|
||||
|
||||
const mockGetArtifactMalicious = jest.fn(() => {
|
||||
const message = new http.IncomingMessage(new net.Socket())
|
||||
message.statusCode = 200
|
||||
message.push(fs.readFileSync(path.join(__dirname, 'fixtures', 'evil.zip'))) // evil.zip contains files that are formatted x/../../etc/hosts
|
||||
message.push(null)
|
||||
return {
|
||||
message
|
||||
}
|
||||
})
|
||||
|
||||
describe('download-artifact', () => {
|
||||
describe('public', () => {
|
||||
beforeEach(setup)
|
||||
|
|
@ -170,6 +190,59 @@ describe('download-artifact', () => {
|
|||
expect(response.downloadPath).toBe(fixtures.workspaceDir)
|
||||
})
|
||||
|
||||
it('should not allow path traversal from malicious artifacts', async () => {
|
||||
const downloadArtifactMock = github.getOctokit(fixtures.token).rest
|
||||
.actions.downloadArtifact as MockedDownloadArtifact
|
||||
downloadArtifactMock.mockResolvedValueOnce({
|
||||
headers: {
|
||||
location: fixtures.blobStorageUrl
|
||||
},
|
||||
status: 302,
|
||||
url: '',
|
||||
data: Buffer.from('')
|
||||
})
|
||||
|
||||
const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
|
||||
() => {
|
||||
return {
|
||||
get: mockGetArtifactMalicious
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
const response = await downloadArtifactPublic(
|
||||
fixtures.artifactID,
|
||||
fixtures.repositoryOwner,
|
||||
fixtures.repositoryName,
|
||||
fixtures.token
|
||||
)
|
||||
|
||||
expect(downloadArtifactMock).toHaveBeenCalledWith({
|
||||
owner: fixtures.repositoryOwner,
|
||||
repo: fixtures.repositoryName,
|
||||
artifact_id: fixtures.artifactID,
|
||||
archive_format: 'zip',
|
||||
request: {
|
||||
redirect: 'manual'
|
||||
}
|
||||
})
|
||||
|
||||
expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
|
||||
expect(mockGetArtifactMalicious).toHaveBeenCalledWith(
|
||||
fixtures.blobStorageUrl
|
||||
)
|
||||
|
||||
// ensure path traversal was not possible
|
||||
expect(
|
||||
fs.existsSync(path.join(fixtures.workspaceDir, 'x/etc/hosts'))
|
||||
).toBe(true)
|
||||
expect(
|
||||
fs.existsSync(path.join(fixtures.workspaceDir, 'y/etc/hosts'))
|
||||
).toBe(true)
|
||||
|
||||
expect(response.downloadPath).toBe(fixtures.workspaceDir)
|
||||
})
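
The assertions above pin down the expected defense: entries named `x/../../etc/hosts` must land at `x/etc/hosts` under the workspace, i.e. `..` segments are neutralized rather than the whole archive being rejected. A sketch of that kind of sanitization (behavior inferred from the test; the extractor's real code is not shown here):

```typescript
import * as path from 'path'

// Drop "." and ".." segments so every zip entry resolves under the root.
// sanitizeEntryPath('/tmp/ws', 'x/../../etc/hosts') === '/tmp/ws/x/etc/hosts'
function sanitizeEntryPath(root: string, entryName: string): string {
  const safeSegments = entryName
    .split(/[\\/]+/)
    .filter(segment => segment !== '' && segment !== '.' && segment !== '..')
  return path.join(root, ...safeSegments)
}
```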

    it('should successfully download an artifact to user defined path', async () => {
      const customPath = path.join(testDir, 'custom')

@@ -256,14 +329,6 @@ describe('download-artifact', () => {

    const mockGet = jest.fn(async () => {
      return new Promise((resolve, reject) => {
        // Resolve with a 200 status code immediately
        resolve({
          message: msg,
          readBody: async () => {
            return Promise.resolve(`{"ok": true}`)
          }
        })

        // Reject with an error after 31 seconds
        setTimeout(() => {
          reject(new Error('Request timeout'))

@@ -556,4 +621,32 @@ describe('download-artifact', () => {
      })
    })
  })

  describe('streamExtractExternal', () => {
    it('should fail if the timeout is exceeded', async () => {
      const mockSlowGetArtifact = jest.fn(mockGetArtifactHung)

      const mockHttpClient = (HttpClient as jest.Mock).mockImplementation(
        () => {
          return {
            get: mockSlowGetArtifact
          }
        }
      )

      try {
        await streamExtractExternal(
          fixtures.blobStorageUrl,
          fixtures.workspaceDir,
          {timeout: 2}
        )
        expect(true).toBe(false) // should not be called
      } catch (e) {
        expect(e).toBeInstanceOf(Error)
        expect(e.message).toContain('did not respond in 2ms')
        expect(mockHttpClient).toHaveBeenCalledWith(getUserAgentString())
        expect(mockSlowGetArtifact).toHaveBeenCalledTimes(1)
      }
    })
  })
})

Binary file not shown.

@@ -1,5 +1,4 @@
import * as github from '@actions/github'
import type {RestEndpointMethods} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/method-types'
import type {RestEndpointMethodTypes} from '@octokit/plugin-rest-endpoint-methods/dist-types/generated/parameters-and-response-types'
import {
  listArtifactsInternal,

@@ -10,13 +9,13 @@ import {ArtifactServiceClientJSON, Timestamp} from '../src/generated'
import * as util from '../src/internal/shared/util'
import {noopLogs} from './common'
import {Artifact} from '../src/internal/shared/interfaces'
import {RequestInterface} from '@octokit/types'

type MockedListWorkflowRunArtifacts = jest.MockedFunction<
  RestEndpointMethods['actions']['listWorkflowRunArtifacts']
>
type MockedRequest = jest.MockedFunction<RequestInterface<object>>

jest.mock('@actions/github', () => ({
  getOctokit: jest.fn().mockReturnValue({
    request: jest.fn(),
    rest: {
      actions: {
        listWorkflowRunArtifacts: jest.fn()

@@ -81,10 +80,10 @@ describe('list-artifact', () => {

  describe('public', () => {
    it('should return a list of artifacts', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockListArtifacts.mockResolvedValueOnce({
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',

@@ -105,10 +104,10 @@ describe('list-artifact', () => {
    })

    it('should return the latest artifact when latest is specified', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockListArtifacts.mockResolvedValueOnce({
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',

@@ -129,10 +128,10 @@ describe('list-artifact', () => {
    })

    it('can return empty artifacts', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockListArtifacts.mockResolvedValueOnce({
      mockRequest.mockResolvedValueOnce({
        status: 200,
        headers: {},
        url: '',

@@ -156,10 +155,10 @@ describe('list-artifact', () => {
    })

    it('should fail if non-200 response', async () => {
      const mockListArtifacts = github.getOctokit(fixtures.token).rest.actions
        .listWorkflowRunArtifacts as MockedListWorkflowRunArtifacts
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      mockListArtifacts.mockRejectedValue(new Error('boom'))
      mockRequest.mockRejectedValueOnce(new Error('boom'))

      await expect(
        listArtifactsPublic(

@@ -171,6 +170,126 @@ describe('list-artifact', () => {
        )
      ).rejects.toThrow('boom')
    })

    it('should handle pagination correctly when fetching multiple pages', async () => {
      const mockRequest = github.getOctokit(fixtures.token)
        .request as MockedRequest

      const manyArtifacts = Array.from({length: 150}, (_, i) => ({
        id: i + 1,
        name: `artifact-${i + 1}`,
        size: 100,
        createdAt: new Date('2023-12-01')
      }))

      mockRequest
        .mockResolvedValueOnce({
          status: 200,
          headers: {},
          url: '',
          data: {
            ...artifactsToListResponse(manyArtifacts.slice(0, 100)),
            total_count: 150
          }
        })
        .mockResolvedValueOnce({
          status: 200,
          headers: {},
          url: '',
          data: {
            ...artifactsToListResponse(manyArtifacts.slice(100, 150)),
            total_count: 150
          }
        })

      const response = await listArtifactsPublic(
        fixtures.runId,
        fixtures.owner,
        fixtures.repo,
        fixtures.token,
        false
      )

      // Verify that both API calls were made
      expect(mockRequest).toHaveBeenCalledTimes(2)

      // Should return all 150 artifacts across both pages
      expect(response.artifacts).toHaveLength(150)

      // Verify we got artifacts from both pages
      expect(response.artifacts[0].name).toBe('artifact-1')
      expect(response.artifacts[99].name).toBe('artifact-100')
      expect(response.artifacts[100].name).toBe('artifact-101')
      expect(response.artifacts[149].name).toBe('artifact-150')
    })
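
The two mocked responses above (100 items, then 50, against a `total_count` of 150) suggest a page-size-100 loop roughly like the following sketch; the route matches the public GitHub REST API, but `listArtifactsPublic`'s exact internals are assumed:

```typescript
// Sketch of the paging loop the test drives; the octokit `request` signature
// is simplified and the per-page size of 100 is inferred from the mocks.
type ListPage = {data: {total_count: number; artifacts: unknown[]}}

async function listAllArtifacts(
  request: (route: string, params: object) => Promise<ListPage>,
  owner: string,
  repo: string,
  runId: number
): Promise<unknown[]> {
  const perPage = 100
  const all: unknown[] = []
  for (let page = 1; ; page++) {
    const {data} = await request(
      'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
      {owner, repo, run_id: runId, per_page: perPage, page}
    )
    all.push(...data.artifacts)
    // Stop once everything the server reports has been collected,
    // or when a page comes back empty
    if (all.length >= data.total_count || data.artifacts.length === 0) {
      break
    }
  }
  return all
}
```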

    it('should respect ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT environment variable', async () => {
      const originalEnv = process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
      process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = '150'

      jest.resetModules()

      try {
        const {listArtifactsPublic: listArtifactsPublicReloaded} = await import(
          '../src/internal/find/list-artifacts'
        )
        const githubReloaded = await import('@actions/github')

        const mockRequest = (githubReloaded.getOctokit as jest.Mock)(
          fixtures.token
        ).request as MockedRequest

        const manyArtifacts = Array.from({length: 200}, (_, i) => ({
          id: i + 1,
          name: `artifact-${i + 1}`,
          size: 100,
          createdAt: new Date('2023-12-01')
        }))

        mockRequest
          .mockResolvedValueOnce({
            status: 200,
            headers: {},
            url: '',
            data: {
              ...artifactsToListResponse(manyArtifacts.slice(0, 100)),
              total_count: 200
            }
          })
          .mockResolvedValueOnce({
            status: 200,
            headers: {},
            url: '',
            data: {
              ...artifactsToListResponse(manyArtifacts.slice(100, 150)),
              total_count: 200
            }
          })

        const response = await listArtifactsPublicReloaded(
          fixtures.runId,
          fixtures.owner,
          fixtures.repo,
          fixtures.token,
          false
        )

        // Should only return 150 artifacts due to the limit
        expect(response.artifacts).toHaveLength(150)
        expect(response.artifacts[0].name).toBe('artifact-1')
        expect(response.artifacts[149].name).toBe('artifact-150')
      } finally {
        // Restore original environment variable
        if (originalEnv !== undefined) {
          process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT = originalEnv
        } else {
          delete process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT
        }

        // Reset modules again to restore original state
        jest.resetModules()
      }
    })
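
The `jest.resetModules()` and dynamic `import` dance in this test only matters if the module captures the environment variable once, at load time, along these lines (a sketch; the real variable handling and default value are assumptions):

```typescript
// list-artifacts (sketch): the cap is read when the module is first loaded,
// so later changes to process.env are invisible until the module registry is
// reset and the module re-imported. The default of 1000 is illustrative.
const maxArtifactCount = Number(
  process.env.ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT ?? 1000
)

export function capArtifacts<T>(artifacts: T[]): T[] {
  return artifacts.slice(0, maxArtifactCount)
}
```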
  })

  describe('internal', () => {

@@ -1,257 +1,173 @@
import * as uploadZipSpecification from '../src/internal/upload/upload-zip-specification'
import * as zip from '../src/internal/upload/zip'
import * as util from '../src/internal/shared/util'
import * as retention from '../src/internal/upload/retention'
import * as config from '../src/internal/shared/config'
import {Timestamp, ArtifactServiceClientJSON} from '../src/generated'
import {ArtifactServiceClientJSON} from '../src/generated'
import * as blobUpload from '../src/internal/upload/blob-upload'
import {uploadArtifact} from '../src/internal/upload/upload-artifact'
import {noopLogs} from './common'
import {FilesNotFoundError} from '../src/internal/shared/errors'
import {BlockBlobUploadStreamOptions} from '@azure/storage-blob'
import * as fs from 'fs'
import * as path from 'path'
import unzip from 'unzip-stream'

const uploadStreamMock = jest.fn()
const blockBlobClientMock = jest.fn().mockImplementation(() => ({
  uploadStream: uploadStreamMock
}))

jest.mock('@azure/storage-blob', () => ({
  BlobClient: jest.fn().mockImplementation(() => {
    return {
      getBlockBlobClient: blockBlobClientMock
    }
  })
}))

const fixtures = {
  uploadDirectory: path.join(__dirname, '_temp', 'plz-upload'),
  files: [
    {name: 'file1.txt', content: 'test 1 file content'},
    {name: 'file2.txt', content: 'test 2 file content'},
    {name: 'file3.txt', content: 'test 3 file content'},
    {
      name: 'real.txt',
      content: 'from a symlink'
    },
    {
      name: 'relative.txt',
      content: 'from a symlink',
      symlink: 'real.txt',
      relative: true
    },
    {
      name: 'absolute.txt',
      content: 'from a symlink',
      symlink: 'real.txt',
      relative: false
    }
  ],
  backendIDs: {
    workflowRunBackendId: '67dbcc20-e851-4452-a7c3-2cc0d2e0ec67',
    workflowJobRunBackendId: '5f49179d-3386-4c38-85f7-00f8138facd0'
  },
  runtimeToken: 'test-token',
  resultsServiceURL: 'http://results.local',
  inputs: {
    artifactName: 'test-artifact',
    files: [
      '/home/user/files/plz-upload/file1.txt',
      '/home/user/files/plz-upload/file2.txt',
      '/home/user/files/plz-upload/dir/file3.txt'
    ],
    rootDirectory: '/home/user/files/plz-upload'
  }
}

describe('upload-artifact', () => {
  beforeAll(() => {
    fs.mkdirSync(fixtures.uploadDirectory, {
      recursive: true
    })

    for (const file of fixtures.files) {
      if (file.symlink) {
        let symlinkPath = file.symlink
        if (!file.relative) {
          symlinkPath = path.join(fixtures.uploadDirectory, file.symlink)
        }

        if (!fs.existsSync(path.join(fixtures.uploadDirectory, file.name))) {
          fs.symlinkSync(
            symlinkPath,
            path.join(fixtures.uploadDirectory, file.name),
            'file'
          )
        }
      } else {
        fs.writeFileSync(
          path.join(fixtures.uploadDirectory, file.name),
          file.content
        )
      }
    }
  })

  beforeEach(() => {
    noopLogs()
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(util, 'getBackendIdsFromToken')
      .mockReturnValue(fixtures.backendIDs)
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue(
        fixtures.files.map(file => ({
          sourcePath: path.join(fixtures.uploadDirectory, file.name),
          destinationPath: file.name,
          stats: fs.statSync(path.join(fixtures.uploadDirectory, file.name))
        }))
      )
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue(fixtures.runtimeToken)
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue(fixtures.resultsServiceURL)
  })

  afterEach(() => {
    jest.restoreAllMocks()
  })

  it('should successfully upload an artifact', () => {
    const mockDate = new Date('2020-01-01')
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue([
        {
          sourcePath: '/home/user/files/plz-upload/file1.txt',
          destinationPath: 'file1.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/file2.txt',
          destinationPath: 'file2.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
          destinationPath: 'dir/file3.txt'
        }
      ])

    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678'
    })
    jest
      .spyOn(retention, 'getExpiration')
      .mockReturnValue(Timestamp.fromDate(mockDate))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.com'
        })
      )
    jest.spyOn(blobUpload, 'uploadZipToBlobStorage').mockReturnValue(
      Promise.resolve({
        uploadSize: 1234,
        sha256Hash: 'test-sha256-hash'
      })
    )
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(Promise.resolve({ok: true, artifactId: '1'}))

    // ArtifactHttpClient mocks
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://test-url.com')

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).resolves.toEqual({size: 1234, id: 1})
  })

  it('should throw an error if the root directory is invalid', () => {
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockImplementation(() => {
        throw new Error('Invalid root directory')
      })

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
    )

    expect(uploadResp).rejects.toThrow('Invalid root directory')
  })

  it('should reject if there are no files to upload', () => {
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
  it('should reject if there are no files to upload', async () => {
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockClear()
      .mockReturnValue([])

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )
    expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
    await expect(uploadResp).rejects.toThrowError(FilesNotFoundError)
  })

  it('should reject if no backend IDs are found', () => {
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue([
        {
          sourcePath: '/home/user/files/plz-upload/file1.txt',
          destinationPath: 'file1.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/file2.txt',
          destinationPath: 'file2.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
          destinationPath: 'dir/file3.txt'
        }
      ])

    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
  it('should reject if no backend IDs are found', async () => {
    jest.spyOn(util, 'getBackendIdsFromToken').mockRestore()

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    expect(uploadResp).rejects.toThrow()
    await expect(uploadResp).rejects.toThrow()
  })

  it('should return false if the creation request fails', () => {
    const mockDate = new Date('2020-01-01')
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue([
        {
          sourcePath: '/home/user/files/plz-upload/file1.txt',
          destinationPath: 'file1.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/file2.txt',
          destinationPath: 'file2.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
          destinationPath: 'dir/file3.txt'
        }
      ])

  it('should return false if the creation request fails', async () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678'
    })
    jest
      .spyOn(retention, 'getExpiration')
      .mockReturnValue(Timestamp.fromDate(mockDate))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(Promise.resolve({ok: false, signedUploadUrl: ''}))

    // ArtifactHttpClient mocks
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://test-url.com')

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    expect(uploadResp).rejects.toThrow()
    await expect(uploadResp).rejects.toThrow()
  })

  it('should return false if blob storage upload is unsuccessful', () => {
    const mockDate = new Date('2020-01-01')
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue([
        {
          sourcePath: '/home/user/files/plz-upload/file1.txt',
          destinationPath: 'file1.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/file2.txt',
          destinationPath: 'file2.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
          destinationPath: 'dir/file3.txt'
        }
      ])

  it('should return false if blob storage upload is unsuccessful', async () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678'
    })
    jest
      .spyOn(retention, 'getExpiration')
      .mockReturnValue(Timestamp.fromDate(mockDate))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(

@@ -264,57 +180,19 @@ describe('upload-artifact', () => {
      .spyOn(blobUpload, 'uploadZipToBlobStorage')
      .mockReturnValue(Promise.reject(new Error('boom')))

    // ArtifactHttpClient mocks
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://test-url.com')

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    expect(uploadResp).rejects.toThrow()
    await expect(uploadResp).rejects.toThrow()
  })

  it('should reject if finalize artifact fails', () => {
    const mockDate = new Date('2020-01-01')
    jest
      .spyOn(uploadZipSpecification, 'validateRootDirectory')
      .mockReturnValue()
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockReturnValue([
        {
          sourcePath: '/home/user/files/plz-upload/file1.txt',
          destinationPath: 'file1.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/file2.txt',
          destinationPath: 'file2.txt'
        },
        {
          sourcePath: '/home/user/files/plz-upload/dir/file3.txt',
          destinationPath: 'dir/file3.txt'
        }
      ])

  it('should reject if finalize artifact fails', async () => {
    jest
      .spyOn(zip, 'createZipUploadStream')
      .mockReturnValue(Promise.resolve(new zip.ZipUploadStream(1)))
    jest.spyOn(util, 'getBackendIdsFromToken').mockReturnValue({
      workflowRunBackendId: '1234',
      workflowJobRunBackendId: '5678'
    })
    jest
      .spyOn(retention, 'getExpiration')
      .mockReturnValue(Timestamp.fromDate(mockDate))
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(

@@ -333,22 +211,163 @@ describe('upload-artifact', () => {
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(Promise.resolve({ok: false, artifactId: ''}))

    // ArtifactHttpClient mocks
    jest.spyOn(config, 'getRuntimeToken').mockReturnValue('test-token')
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://test-url.com')

    const uploadResp = uploadArtifact(
      'test-artifact',
      [
        '/home/user/files/plz-upload/file1.txt',
        '/home/user/files/plz-upload/file2.txt',
        '/home/user/files/plz-upload/dir/file3.txt'
      ],
      '/home/user/files/plz-upload'
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    expect(uploadResp).rejects.toThrow()
    await expect(uploadResp).rejects.toThrow()
  })

  it('should successfully upload an artifact', async () => {
    jest
      .spyOn(uploadZipSpecification, 'getUploadZipSpecification')
      .mockRestore()

    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.local'
        })
      )
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          artifactId: '1'
        })
      )

    let loadedBytes = 0
    const uploadedZip = path.join(
      fixtures.uploadDirectory,
      '..',
      'uploaded.zip'
    )
    uploadStreamMock.mockImplementation(
      async (
        stream: NodeJS.ReadableStream,
        bufferSize?: number,
        maxConcurrency?: number,
        options?: BlockBlobUploadStreamOptions
      ) => {
        const {onProgress} = options || {}

        if (fs.existsSync(uploadedZip)) {
          fs.unlinkSync(uploadedZip)
        }
        const uploadedZipStream = fs.createWriteStream(uploadedZip)

        onProgress?.({loadedBytes: 0})
        return new Promise((resolve, reject) => {
          stream.on('data', chunk => {
            loadedBytes += chunk.length
            uploadedZipStream.write(chunk)
            onProgress?.({loadedBytes})
          })
          stream.on('end', () => {
            onProgress?.({loadedBytes})
            uploadedZipStream.end()
            resolve({})
          })
          stream.on('error', err => {
            reject(err)
          })
        })
      }
    )

    const {id, size, digest} = await uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.files.map(file =>
        path.join(fixtures.uploadDirectory, file.name)
      ),
      fixtures.uploadDirectory
    )

    expect(id).toBe(1)
    expect(size).toBe(loadedBytes)
    expect(digest).toBeDefined()
    expect(digest).toHaveLength(64)

    const extractedDirectory = path.join(
      fixtures.uploadDirectory,
      '..',
      'extracted'
    )
    if (fs.existsSync(extractedDirectory)) {
      fs.rmdirSync(extractedDirectory, {recursive: true})
    }

    const extract = new Promise((resolve, reject) => {
      fs.createReadStream(uploadedZip)
        .pipe(unzip.Extract({path: extractedDirectory}))
        .on('close', () => {
          resolve(true)
        })
        .on('error', err => {
          reject(err)
        })
    })

    await expect(extract).resolves.toBe(true)
    for (const file of fixtures.files) {
      const filePath = path.join(extractedDirectory, file.name)
      expect(fs.existsSync(filePath)).toBe(true)
      expect(fs.readFileSync(filePath, 'utf8')).toBe(file.content)
    }
  })

  it('should throw an error when uploading blob chunks gets delayed', async () => {
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'CreateArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          signedUploadUrl: 'https://signed-upload-url.local'
        })
      )
    jest
      .spyOn(ArtifactServiceClientJSON.prototype, 'FinalizeArtifact')
      .mockReturnValue(
        Promise.resolve({
          ok: true,
          artifactId: '1'
        })
      )
    jest
      .spyOn(config, 'getResultsServiceUrl')
      .mockReturnValue('https://results.local')

    jest.spyOn(config, 'getUploadChunkTimeout').mockReturnValue(2_000)

    uploadStreamMock.mockImplementation(
      async (
        stream: NodeJS.ReadableStream,
        bufferSize?: number,
        maxConcurrency?: number,
        options?: BlockBlobUploadStreamOptions
      ) => {
        const {onProgress, abortSignal} = options || {}
        onProgress?.({loadedBytes: 0})
        return new Promise(resolve => {
          abortSignal?.addEventListener('abort', () => {
            resolve({})
          })
        })
      }
    )

    const uploadResp = uploadArtifact(
      fixtures.inputs.artifactName,
      fixtures.inputs.files,
      fixtures.inputs.rootDirectory
    )

    await expect(uploadResp).rejects.toThrow('Upload progress stalled.')
  })
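
This last test implies a stall watchdog around the blob upload: progress events reset a timer sized by `getUploadChunkTimeout()`, and if no chunk arrives in time the upload is aborted and the whole call rejects with 'Upload progress stalled.'. A sketch of that wiring, assuming an `AbortController`-based implementation (the package's exact code may differ):

```typescript
// Sketch: reject the upload when no progress callback fires within
// chunkTimeoutMs. The upload function and its callbacks are simplified
// stand-ins for the Azure uploadStream call mocked above.
async function uploadWithStallGuard(
  upload: (signal: AbortSignal, onProgress: () => void) => Promise<void>,
  chunkTimeoutMs: number
): Promise<void> {
  const controller = new AbortController()
  let timer: NodeJS.Timeout | undefined
  const resetTimer = (): void => {
    if (timer) {
      clearTimeout(timer)
    }
    timer = setTimeout(() => controller.abort(), chunkTimeoutMs)
  }
  resetTimer() // arm the watchdog before the first chunk
  try {
    await upload(controller.signal, resetTimer)
    if (controller.signal.aborted) {
      throw new Error('Upload progress stalled.')
    }
  } finally {
    if (timer) {
      clearTimeout(timer)
    }
  }
}
```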
})

@@ -305,4 +305,22 @@ describe('Search', () => {
      }
    }
  })

  it('Upload Specification - Includes symlinks', async () => {
    const targetPath = path.join(root, 'link-dir', 'symlink-me.txt')
    await fs.mkdir(path.dirname(targetPath), {recursive: true})
    await fs.writeFile(targetPath, 'symlink file content')

    const uploadPath = path.join(root, 'upload-dir', 'symlink.txt')
    await fs.mkdir(path.dirname(uploadPath), {recursive: true})
    await fs.symlink(targetPath, uploadPath, 'file')

    const specifications = getUploadZipSpecification([uploadPath], root)
    expect(specifications.length).toEqual(1)
    expect(specifications[0].sourcePath).toEqual(uploadPath)
    expect(specifications[0].destinationPath).toEqual(
      path.join('/upload-dir', 'symlink.txt')
    )
    expect(specifications[0].stats.isSymbolicLink()).toBe(true)
  })
})

@@ -1,5 +1,7 @@
import * as config from '../src/internal/shared/config'
import * as util from '../src/internal/shared/util'
import {maskSigUrl, maskSecretUrls} from '../src/internal/shared/util'
import {setSecret, debug} from '@actions/core'

export const testRuntimeToken =
  'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwic2NwIjoiQWN0aW9ucy5FeGFtcGxlIEFjdGlvbnMuQW5vdGhlckV4YW1wbGU6dGVzdCBBY3Rpb25zLlJlc3VsdHM6Y2U3ZjU0YzctNjFjNy00YWFlLTg4N2YtMzBkYTQ3NWY1ZjFhOmNhMzk1MDg1LTA0MGEtNTI2Yi0yY2U4LWJkYzg1ZjY5Mjc3NCIsImlhdCI6MTUxNjIzOTAyMn0.XYnI_wHPBlUi1mqYveJnnkJhp4dlFjqxzRmISPsqfw8'

@@ -59,3 +61,159 @@ describe('get-backend-ids-from-token', () => {
    )
  })
})

jest.mock('@actions/core')

describe('maskSigUrl', () => {
  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('does nothing if no sig parameter is present', () => {
    const url = 'https://example.com'
    maskSigUrl(url)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('masks the sig parameter in the middle of the URL and sets it as a secret', () => {
    const url = 'https://example.com/?param1=value1&sig=12345&param2=value2'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('12345')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
  })

  it('does nothing if the URL is empty', () => {
    const url = ''
    maskSigUrl(url)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('handles URLs with fragments', () => {
    const url = 'https://example.com?sig=12345#fragment'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('12345')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('12345'))
  })
})

describe('maskSigUrl handles special characters in signatures', () => {
  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('handles signatures with slashes', () => {
    const url = 'https://example.com/?sig=abc/123'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc/123')
    expect(setSecret).toHaveBeenCalledWith('abc%2F123')
  })

  it('handles signatures with plus signs', () => {
    const url = 'https://example.com/?sig=abc+123'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc 123')
    expect(setSecret).toHaveBeenCalledWith('abc%20123')
  })

  it('handles signatures with equals signs', () => {
    const url = 'https://example.com/?sig=abc=123'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc=123')
    expect(setSecret).toHaveBeenCalledWith('abc%3D123')
  })

  it('handles already percent-encoded signatures', () => {
    const url = 'https://example.com/?sig=abc%2F123%3D'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('abc/123=')
    expect(setSecret).toHaveBeenCalledWith('abc%2F123%3D')
  })

  it('handles complex Azure SAS signatures', () => {
    const url =
      'https://example.com/container/file.txt?sig=nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D&se=2023-12-31'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith(
      'nXyQIUj//06Cxt80pBRYiiJlYqtPYg5sz/vEh5iHAhw='
    )
    expect(setSecret).toHaveBeenCalledWith(
      'nXyQIUj%2F%2F06Cxt80pBRYiiJlYqtPYg5sz%2FvEh5iHAhw%3D'
    )
  })

  it('handles signatures with multiple special characters', () => {
    const url = 'https://example.com/?sig=a/b+c=d&e=f'
    maskSigUrl(url)
    expect(setSecret).toHaveBeenCalledWith('a/b c=d')
    expect(setSecret).toHaveBeenCalledWith('a%2Fb%20c%3Dd')
  })
})

describe('maskSecretUrls', () => {
  beforeEach(() => {
    jest.clearAllMocks()
  })

  it('masks sig parameters in signed_upload_url and signed_url', () => {
    const body = {
      signed_upload_url: 'https://upload.com?sig=upload123',
      signed_url: 'https://download.com?sig=download123'
    }
    maskSecretUrls(body)
    expect(setSecret).toHaveBeenCalledWith('upload123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
    expect(setSecret).toHaveBeenCalledWith('download123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
  })

  it('handles case where only upload_url is present', () => {
    const body = {
      signed_upload_url: 'https://upload.com?sig=upload123'
    }
    maskSecretUrls(body)
    expect(setSecret).toHaveBeenCalledWith('upload123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('upload123'))
  })

  it('handles case where only download_url is present', () => {
    const body = {
      signed_url: 'https://download.com?sig=download123'
    }
    maskSecretUrls(body)
    expect(setSecret).toHaveBeenCalledWith('download123')
    expect(setSecret).toHaveBeenCalledWith(encodeURIComponent('download123'))
  })

  it('handles case where URLs do not contain sig parameters', () => {
    const body = {
      signed_upload_url: 'https://upload.com?token=abc',
      signed_url: 'https://download.com?token=xyz'
    }
    maskSecretUrls(body)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('handles empty string URLs', () => {
    const body = {
      signed_upload_url: '',
      signed_url: ''
    }
    maskSecretUrls(body)
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('does nothing if body is not an object or is null', () => {
    maskSecretUrls(null)
    expect(debug).toHaveBeenCalledWith('body is not an object or is null')
    expect(setSecret).not.toHaveBeenCalled()
  })

  it('does nothing if signed_upload_url and signed_url are not strings', () => {
    const body = {
      signed_upload_url: 123,
      signed_url: 456
    }
    maskSecretUrls(body)
    expect(setSecret).not.toHaveBeenCalled()
  })
})
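
A masking implementation consistent with all of the assertions above registers the `sig` query value as a secret in both its decoded and re-encoded forms, so either spelling is redacted from logs. A sketch using the WHATWG URL API (not the package's actual code):

```typescript
import {setSecret} from '@actions/core'

// Sketch: URLSearchParams decodes the value ('+' -> ' ', '%2F' -> '/'),
// which matches the decoded/encoded pairs the tests expect.
function maskSigUrlSketch(rawUrl: string): void {
  if (!rawUrl) return
  let parsed: URL
  try {
    parsed = new URL(rawUrl)
  } catch {
    return // not a parseable URL; nothing to mask
  }
  const sig = parsed.searchParams.get('sig')
  if (sig) {
    setSecret(sig)
    setSecret(encodeURIComponent(sig))
  }
}
```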

@@ -11,11 +11,14 @@
- [FilesNotFoundError](classes/FilesNotFoundError.md)
- [GHESNotSupportedError](classes/GHESNotSupportedError.md)
- [InvalidResponseError](classes/InvalidResponseError.md)
- [NetworkError](classes/NetworkError.md)
- [UsageError](classes/UsageError.md)

### Interfaces

- [Artifact](interfaces/Artifact.md)
- [ArtifactClient](interfaces/ArtifactClient.md)
- [DeleteArtifactResponse](interfaces/DeleteArtifactResponse.md)
- [DownloadArtifactOptions](interfaces/DownloadArtifactOptions.md)
- [DownloadArtifactResponse](interfaces/DownloadArtifactResponse.md)
- [FindOptions](interfaces/FindOptions.md)

@@ -37,4 +40,4 @@

#### Defined in

[src/artifact.ts:7](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/artifact.ts#L7)
[src/artifact.ts:7](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/artifact.ts#L7)

@@ -48,7 +48,7 @@ Error.constructor

#### Defined in

[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/errors.ts#L24)
[src/internal/shared/errors.ts:24](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L24)

## Properties

@@ -16,6 +16,7 @@ The default artifact client that is used by the artifact action(s).

### Methods

- [deleteArtifact](DefaultArtifactClient.md#deleteartifact)
- [downloadArtifact](DefaultArtifactClient.md#downloadartifact)
- [getArtifact](DefaultArtifactClient.md#getartifact)
- [listArtifacts](DefaultArtifactClient.md#listartifacts)

@@ -33,6 +34,37 @@ The default artifact client that is used by the artifact action(s).

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>

Delete an Artifact

If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](../interfaces/DeleteArtifactResponse.md)\>

single DeleteArtifactResponse object

#### Implementation of

[ArtifactClient](../interfaces/ArtifactClient.md).[deleteArtifact](../interfaces/ArtifactClient.md#deleteartifact)

#### Defined in

[src/internal/client.ts:248](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L248)
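
For orientation, a usage sketch (not part of the generated docs; the artifact name is illustrative):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

async function removeArtifact(): Promise<void> {
  // Deletes by name within the current run; pass {findBy: ...} to target
  // another run via the public REST API instead.
  const {id} = await artifact.deleteArtifact('my-artifact')
  console.log(`Deleted artifact ${id}`)
}
```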

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](../interfaces/DownloadArtifactResponse.md)\>

@@ -45,7 +77,7 @@ If `options.findBy` is specified, this will use the public Download Artifact API

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The name of the artifact to download |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](../interfaces/DownloadArtifactOptions.md) & [`FindOptions`](../interfaces/FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

@@ -60,7 +92,7 @@ single DownloadArtifactResponse object

#### Defined in

[src/internal/client.ts:119](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L119)
[src/internal/client.ts:138](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L138)
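
A corresponding usage sketch (the id and download path are illustrative, not from the generated docs):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

const artifact = new DefaultArtifactClient()

async function fetchArtifact(): Promise<void> {
  // Note: the numeric artifact id, not the name, identifies what to download
  const {downloadPath} = await artifact.downloadArtifact(1234, {
    path: './my-downloads'
  })
  console.log(`Artifact extracted to ${downloadPath}`)
}
```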

___

@@ -95,7 +127,7 @@ If there are multiple artifacts with the same name in the same workflow run this

#### Defined in

[src/internal/client.ts:193](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L193)
[src/internal/client.ts:212](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L212)

___

@@ -127,7 +159,7 @@ ListArtifactResponse object

#### Defined in

[src/internal/client.ts:157](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L157)
[src/internal/client.ts:176](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L176)

___

@@ -158,4 +190,4 @@ single UploadArtifactResponse object

#### Defined in

[src/internal/client.ts:94](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L94)
[src/internal/client.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L113)

@@ -49,7 +49,7 @@ Error.constructor

#### Defined in

[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/errors.ts#L4)
[src/internal/shared/errors.ts:4](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L4)

## Properties

@@ -59,7 +59,7 @@ Error.constructor

#### Defined in

[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/errors.ts#L2)
[src/internal/shared/errors.ts:2](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L2)

___

@@ -48,7 +48,7 @@ Error.constructor

#### Defined in

[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/errors.ts#L31)
[src/internal/shared/errors.ts:31](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L31)

## Properties

@@ -48,7 +48,7 @@ Error.constructor

#### Defined in

[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/errors.ts#L17)
[src/internal/shared/errors.ts:17](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L17)

## Properties

@@ -0,0 +1,201 @@
[@actions/artifact](../README.md) / NetworkError

# Class: NetworkError

## Hierarchy

- `Error`

  ↳ **`NetworkError`**

## Table of contents

### Constructors

- [constructor](NetworkError.md#constructor)

### Properties

- [code](NetworkError.md#code)
- [message](NetworkError.md#message)
- [name](NetworkError.md#name)
- [stack](NetworkError.md#stack)
- [prepareStackTrace](NetworkError.md#preparestacktrace)
- [stackTraceLimit](NetworkError.md#stacktracelimit)

### Methods

- [captureStackTrace](NetworkError.md#capturestacktrace)
- [isNetworkErrorCode](NetworkError.md#isnetworkerrorcode)

## Constructors

### constructor

• **new NetworkError**(`code`): [`NetworkError`](NetworkError.md)

#### Parameters

| Name | Type |
| :------ | :------ |
| `code` | `string` |

#### Returns

[`NetworkError`](NetworkError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:42](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L42)

## Properties

### code

• **code**: `string`

#### Defined in

[src/internal/shared/errors.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L40)

___

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isNetworkErrorCode

▸ **isNetworkErrorCode**(`code?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `code?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:49](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L49)
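
A small usage sketch, assuming `isNetworkErrorCode` is the static classifier for Node socket error codes that this page documents:

```typescript
import {NetworkError} from '@actions/artifact'

// Map a low-level socket failure (e.g. ECONNRESET) to the typed error
function toArtifactError(err: NodeJS.ErrnoException): Error {
  if (err.code && NetworkError.isNetworkErrorCode(err.code)) {
    return new NetworkError(err.code)
  }
  return err
}
```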

@@ -0,0 +1,184 @@
[@actions/artifact](../README.md) / UsageError

# Class: UsageError

## Hierarchy

- `Error`

  ↳ **`UsageError`**

## Table of contents

### Constructors

- [constructor](UsageError.md#constructor)

### Properties

- [message](UsageError.md#message)
- [name](UsageError.md#name)
- [stack](UsageError.md#stack)
- [prepareStackTrace](UsageError.md#preparestacktrace)
- [stackTraceLimit](UsageError.md#stacktracelimit)

### Methods

- [captureStackTrace](UsageError.md#capturestacktrace)
- [isUsageErrorMessage](UsageError.md#isusageerrormessage)

## Constructors

### constructor

• **new UsageError**(): [`UsageError`](UsageError.md)

#### Returns

[`UsageError`](UsageError.md)

#### Overrides

Error.constructor

#### Defined in

[src/internal/shared/errors.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L62)

## Properties

### message

• **message**: `string`

#### Inherited from

Error.message

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1068

___

### name

• **name**: `string`

#### Inherited from

Error.name

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1067

___

### stack

• `Optional` **stack**: `string`

#### Inherited from

Error.stack

#### Defined in

node_modules/typescript/lib/lib.es5.d.ts:1069

___

### prepareStackTrace

▪ `Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`

#### Type declaration

▸ (`err`, `stackTraces`): `any`

Optional override for formatting stack traces

##### Parameters

| Name | Type |
| :------ | :------ |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |

##### Returns

`any`

**`See`**

https://v8.dev/docs/stack-trace-api#customizing-stack-traces

#### Inherited from

Error.prepareStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:11

___

### stackTraceLimit

▪ `Static` **stackTraceLimit**: `number`

#### Inherited from

Error.stackTraceLimit

#### Defined in

node_modules/@types/node/globals.d.ts:13

## Methods

### captureStackTrace

▸ **captureStackTrace**(`targetObject`, `constructorOpt?`): `void`

Create .stack property on a target object

#### Parameters

| Name | Type |
| :------ | :------ |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |

#### Returns

`void`

#### Inherited from

Error.captureStackTrace

#### Defined in

node_modules/@types/node/globals.d.ts:4

___

### isUsageErrorMessage

▸ **isUsageErrorMessage**(`msg?`): `boolean`

#### Parameters

| Name | Type |
| :------ | :------ |
| `msg?` | `string` |

#### Returns

`boolean`

#### Defined in

[src/internal/shared/errors.ts:68](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/errors.ts#L68)
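
And a matching sketch for `UsageError`, assuming `isUsageErrorMessage` recognizes the backend's storage-quota message:

```typescript
import {UsageError} from '@actions/artifact'

// Surface a typed error when the server response looks like a quota message
function classifyServerMessage(msg?: string): Error | undefined {
  return UsageError.isUsageErrorMessage(msg) ? new UsageError() : undefined
}
```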

@@ -23,7 +23,7 @@ The time when the artifact was created

#### Defined in

[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L123)
[src/internal/shared/interfaces.ts:128](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L128)

___

@@ -35,7 +35,7 @@ The ID of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L113)
[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L118)

___

@@ -47,7 +47,7 @@ The name of the artifact

#### Defined in

[src/internal/shared/interfaces.ts:108](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L108)
[src/internal/shared/interfaces.ts:113](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L113)

___

@@ -59,4 +59,4 @@ The size of the artifact in bytes

#### Defined in

[src/internal/shared/interfaces.ts:118](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L118)
[src/internal/shared/interfaces.ts:123](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L123)

@@ -12,6 +12,7 @@ Generic interface for the artifact client.

### Methods

- [deleteArtifact](ArtifactClient.md#deleteartifact)
- [downloadArtifact](ArtifactClient.md#downloadartifact)
- [getArtifact](ArtifactClient.md#getartifact)
- [listArtifacts](ArtifactClient.md#listartifacts)

@@ -19,6 +20,33 @@ Generic interface for the artifact client.

## Methods

### deleteArtifact

▸ **deleteArtifact**(`artifactName`, `options?`): `Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

Delete an Artifact

If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact

#### Parameters

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactName` | `string` | The name of the artifact to delete |
| `options?` | [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the delete behavior |

#### Returns

`Promise`\<[`DeleteArtifactResponse`](DeleteArtifactResponse.md)\>

single DeleteArtifactResponse object

#### Defined in

[src/internal/client.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L103)

___

### downloadArtifact

▸ **downloadArtifact**(`artifactId`, `options?`): `Promise`\<[`DownloadArtifactResponse`](DownloadArtifactResponse.md)\>

@@ -31,7 +59,7 @@ If `options.findBy` is specified, this will use the public Download Artifact API

| Name | Type | Description |
| :------ | :------ | :------ |
| `artifactId` | `number` | The name of the artifact to download |
| `artifactId` | `number` | The id of the artifact to download |
| `options?` | [`DownloadArtifactOptions`](DownloadArtifactOptions.md) & [`FindOptions`](FindOptions.md) | Extra options that allow for the customization of the download behavior |

#### Returns

@@ -42,7 +70,7 @@ single DownloadArtifactResponse object

#### Defined in

[src/internal/client.ts:84](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L84)
[src/internal/client.ts:89](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L89)

___

@@ -73,7 +101,7 @@ If there are multiple artifacts with the same name in the same workflow run this

#### Defined in

[src/internal/client.ts:70](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L70)
[src/internal/client.ts:75](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L75)

___

@@ -101,7 +129,7 @@ ListArtifactResponse object

#### Defined in

[src/internal/client.ts:52](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L52)
[src/internal/client.ts:57](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L57)

___

@@ -128,4 +156,4 @@ single UploadArtifactResponse object

#### Defined in

[src/internal/client.ts:35](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/client.ts#L35)
[src/internal/client.ts:40](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/client.ts#L40)

@@ -0,0 +1,23 @@
[@actions/artifact](../README.md) / DeleteArtifactResponse

# Interface: DeleteArtifactResponse

Response from the server when deleting an artifact

## Table of contents

### Properties

- [id](DeleteArtifactResponse.md#id)

## Properties

### id

• **id**: `number`

The id of the artifact that was deleted

#### Defined in

[src/internal/shared/interfaces.ts:163](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L163)

@@ -20,4 +20,4 @@ Denotes where the artifact will be downloaded to. If not specified then the arti

#### Defined in

[src/internal/shared/interfaces.ts:98](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L98)
[src/internal/shared/interfaces.ts:103](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L103)

@@ -20,4 +20,4 @@ The path where the artifact was downloaded to

#### Defined in

[src/internal/shared/interfaces.ts:88](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L88)
[src/internal/shared/interfaces.ts:93](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L93)

@@ -27,4 +27,4 @@ The criteria for finding Artifact(s) out of the scope of the current run.

#### Defined in

[src/internal/shared/interfaces.ts:131](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L131)
[src/internal/shared/interfaces.ts:136](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L136)

@@ -20,4 +20,4 @@ Metadata about the artifact that was found

#### Defined in

[src/internal/shared/interfaces.ts:57](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L57)
[src/internal/shared/interfaces.ts:62](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L62)

@@ -21,4 +21,4 @@ In the case of reruns, this can be useful to avoid duplicates

#### Defined in

[src/internal/shared/interfaces.ts:68](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L68)
|
||||
[src/internal/shared/interfaces.ts:73](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L73)
|
||||
|
|
|
|||
|
|
@ -20,4 +20,4 @@ A list of artifacts that were found
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:78](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L78)
|
||||
[src/internal/shared/interfaces.ts:83](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L83)
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ For large files that are not easily compressed, a value of 0 is recommended for
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:47](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L47)
|
||||
[src/internal/shared/interfaces.ts:52](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L52)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -52,4 +52,4 @@ input of 0 assumes default retention setting.
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:36](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L36)
|
||||
[src/internal/shared/interfaces.ts:41](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L41)
|
||||
|
|
|
|||
|
|
@ -8,11 +8,24 @@ Response from the server when an artifact is uploaded
|
|||
|
||||
### Properties
|
||||
|
||||
- [digest](UploadArtifactResponse.md#digest)
|
||||
- [id](UploadArtifactResponse.md#id)
|
||||
- [size](UploadArtifactResponse.md#size)
|
||||
|
||||
## Properties
|
||||
|
||||
### digest
|
||||
|
||||
• `Optional` **digest**: `string`
|
||||
|
||||
The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
|
||||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:19](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L19)
|
||||
|
||||
___
|
||||
|
||||
### id
|
||||
|
||||
• `Optional` **id**: `number`
|
||||
|
|
@ -22,7 +35,7 @@ This ID can be used as input to other APIs to download, delete or get more infor
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L14)
|
||||
[src/internal/shared/interfaces.ts:14](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L14)
|
||||
|
||||
___
|
||||
|
||||
|
|
@ -34,4 +47,4 @@ Total size of the artifact in bytes. Not provided if no artifact was uploaded
|
|||
|
||||
#### Defined in
|
||||
|
||||
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/e3764a5/packages/artifact/src/internal/shared/interfaces.ts#L8)
|
||||
[src/internal/shared/interfaces.ts:8](https://github.com/actions/toolkit/blob/f522fdf/packages/artifact/src/internal/shared/interfaces.ts#L8)
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@actions/artifact",
|
||||
"version": "2.0.1",
|
||||
"version": "4.0.0",
|
||||
"preview": true,
|
||||
"description": "Actions artifact lib",
|
||||
"keywords": [
|
||||
|
|
@ -41,25 +41,29 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/github": "^5.1.1",
|
||||
"@actions/github": "^6.0.1",
|
||||
"@actions/http-client": "^2.1.0",
|
||||
"@azure/core-http": "^3.0.5",
|
||||
"@azure/storage-blob": "^12.15.0",
|
||||
"@octokit/core": "^3.5.1",
|
||||
"@octokit/core": "^5.2.1",
|
||||
"@octokit/plugin-request-log": "^1.0.4",
|
||||
"@octokit/plugin-retry": "^3.0.9",
|
||||
"@octokit/request-error": "^5.0.0",
|
||||
"@octokit/request": "^8.4.1",
|
||||
"@octokit/request-error": "^5.1.1",
|
||||
"@protobuf-ts/plugin": "^2.2.3-alpha.1",
|
||||
"archiver": "^5.3.1",
|
||||
"crypto": "^1.0.1",
|
||||
"archiver": "^7.0.1",
|
||||
"jwt-decode": "^3.1.2",
|
||||
"twirp-ts": "^2.5.0",
|
||||
"unzip-stream": "^0.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/archiver": "^5.3.2",
|
||||
"@types/unzip-stream": "^0.3.4",
|
||||
"typedoc": "^0.25.4",
|
||||
"typedoc": "^0.28.13",
|
||||
"typedoc-plugin-markdown": "^3.17.1",
|
||||
"typescript": "^5.2.2"
|
||||
},
|
||||
"overrides": {
|
||||
"uri-js": "npm:uri-js-replace@^1.0.1",
|
||||
"node-fetch": "^3.3.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
export * from './google/protobuf/timestamp'
|
||||
export * from './google/protobuf/wrappers'
|
||||
export * from './results/api/v1/artifact'
|
||||
export * from './results/api/v1/artifact.twirp'
|
||||
export * from './results/api/v1/artifact.twirp-client'
|
||||
|
|
|
|||
|
|
@ -15,6 +15,66 @@ import { MessageType } from "@protobuf-ts/runtime";
|
|||
import { Int64Value } from "../../../google/protobuf/wrappers";
|
||||
import { StringValue } from "../../../google/protobuf/wrappers";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.MigrateArtifactRequest
|
||||
*/
|
||||
export interface MigrateArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 2;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 3;
|
||||
*/
|
||||
expiresAt?: Timestamp;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.MigrateArtifactResponse
|
||||
*/
|
||||
export interface MigrateArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: string signed_upload_url = 2;
|
||||
*/
|
||||
signedUploadUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
|
||||
*/
|
||||
export interface FinalizeMigratedArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 2;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: int64 size = 3;
|
||||
*/
|
||||
size: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
|
||||
*/
|
||||
export interface FinalizeMigratedArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
|
|
@ -169,6 +229,12 @@ export interface ListArtifactsResponse_MonolithArtifact {
|
|||
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
|
||||
*/
|
||||
createdAt?: Timestamp;
|
||||
/**
|
||||
* The SHA-256 digest of the artifact, calculated on upload for upload-artifact v4 & newer
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.StringValue digest = 7;
|
||||
*/
|
||||
digest?: StringValue;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
|
|
@ -196,6 +262,266 @@ export interface GetSignedArtifactURLResponse {
|
|||
*/
|
||||
signedUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
export interface DeleteArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
export interface DeleteArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class MigrateArtifactRequest$Type extends MessageType<MigrateArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.MigrateArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "expires_at", kind: "message", T: () => Timestamp }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<MigrateArtifactRequest>): MigrateArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<MigrateArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactRequest): MigrateArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 2:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp expires_at */ 3:
|
||||
message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: MigrateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string name = 2; */
|
||||
if (message.name !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.name);
|
||||
/* google.protobuf.Timestamp expires_at = 3; */
|
||||
if (message.expiresAt)
|
||||
Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
|
||||
*/
|
||||
export const MigrateArtifactRequest = new MigrateArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class MigrateArtifactResponse$Type extends MessageType<MigrateArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.MigrateArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<MigrateArtifactResponse>): MigrateArtifactResponse {
|
||||
const message = { ok: false, signedUploadUrl: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<MigrateArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MigrateArtifactResponse): MigrateArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_upload_url */ 2:
|
||||
message.signedUploadUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: MigrateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* string signed_upload_url = 2; */
|
||||
if (message.signedUploadUrl !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
|
||||
*/
|
||||
export const MigrateArtifactResponse = new MigrateArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeMigratedArtifactRequest$Type extends MessageType<FinalizeMigratedArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeMigratedArtifactRequest>): FinalizeMigratedArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeMigratedArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactRequest): FinalizeMigratedArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 2:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 3:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeMigratedArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string name = 2; */
|
||||
if (message.name !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 3; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(3, WireType.Varint).int64(message.size);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
|
||||
*/
|
||||
export const FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeMigratedArtifactResponse$Type extends MessageType<FinalizeMigratedArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<FinalizeMigratedArtifactResponse>): FinalizeMigratedArtifactResponse {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<FinalizeMigratedArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeMigratedArtifactResponse): FinalizeMigratedArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: FinalizeMigratedArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
|
||||
*/
|
||||
export const FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
|
||||
constructor() {
|
||||
|
|
@ -578,7 +904,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
|
|||
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 6, name: "created_at", kind: "message", T: () => Timestamp }
|
||||
{ no: 6, name: "created_at", kind: "message", T: () => Timestamp },
|
||||
{ no: 7, name: "digest", kind: "message", T: () => StringValue }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact {
|
||||
|
|
@ -611,6 +938,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
|
|||
case /* google.protobuf.Timestamp created_at */ 6:
|
||||
message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
|
||||
break;
|
||||
case /* google.protobuf.StringValue digest */ 7:
|
||||
message.digest = StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
|
|
@ -641,6 +971,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifa
|
|||
/* google.protobuf.Timestamp created_at = 6; */
|
||||
if (message.createdAt)
|
||||
Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.StringValue digest = 7; */
|
||||
if (message.digest)
|
||||
StringValue.internalBinaryWrite(message.digest, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
|
|
@ -759,6 +1092,121 @@ class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURL
|
|||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
export const GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.DeleteArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<DeleteArtifactRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.name);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
export const DeleteArtifactRequest = new DeleteArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.DeleteArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<DeleteArtifactResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
export const DeleteArtifactResponse = new DeleteArtifactResponse$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
|
||||
*/
|
||||
|
|
@ -766,5 +1214,8 @@ export const ArtifactService = new ServiceType("github.actions.results.api.v1.Ar
|
|||
{ name: "CreateArtifact", options: {}, I: CreateArtifactRequest, O: CreateArtifactResponse },
|
||||
{ name: "FinalizeArtifact", options: {}, I: FinalizeArtifactRequest, O: FinalizeArtifactResponse },
|
||||
{ name: "ListArtifacts", options: {}, I: ListArtifactsRequest, O: ListArtifactsResponse },
|
||||
{ name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse }
|
||||
]);
|
||||
{ name: "GetSignedArtifactURL", options: {}, I: GetSignedArtifactURLRequest, O: GetSignedArtifactURLResponse },
|
||||
{ name: "DeleteArtifact", options: {}, I: DeleteArtifactRequest, O: DeleteArtifactResponse },
|
||||
{ name: "MigrateArtifact", options: {}, I: MigrateArtifactRequest, O: MigrateArtifactResponse },
|
||||
{ name: "FinalizeMigratedArtifact", options: {}, I: FinalizeMigratedArtifactRequest, O: FinalizeMigratedArtifactResponse }
|
||||
]);
|
||||
|
|
@ -0,0 +1,232 @@
|
|||
import {
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse,
|
||||
DeleteArtifactRequest,
|
||||
DeleteArtifactResponse,
|
||||
} from "./artifact";
|
||||
|
||||
//==================================//
|
||||
// Client Code //
|
||||
//==================================//
|
||||
|
||||
interface Rpc {
|
||||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: "application/json" | "application/protobuf",
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>;
|
||||
}
|
||||
|
||||
export interface ArtifactServiceClient {
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse>;
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse> {
|
||||
const data = DeleteArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"DeleteArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
DeleteArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
DeleteArtifact(
|
||||
request: DeleteArtifactRequest
|
||||
): Promise<DeleteArtifactResponse> {
|
||||
const data = DeleteArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"DeleteArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
DeleteArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -1,799 +0,0 @@
|
|||
import {
|
||||
TwirpContext,
|
||||
TwirpServer,
|
||||
RouterEvents,
|
||||
TwirpError,
|
||||
TwirpErrorCode,
|
||||
Interceptor,
|
||||
TwirpContentType,
|
||||
chainInterceptors,
|
||||
} from "twirp-ts";
|
||||
import {
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse,
|
||||
} from "./artifact";
|
||||
|
||||
//==================================//
|
||||
// Client Code //
|
||||
//==================================//
|
||||
|
||||
interface Rpc {
|
||||
request(
|
||||
service: string,
|
||||
method: string,
|
||||
contentType: "application/json" | "application/protobuf",
|
||||
data: object | Uint8Array
|
||||
): Promise<object | Uint8Array>;
|
||||
}
|
||||
|
||||
export interface ArtifactServiceClient {
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientJSON implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromJson(data as any, { ignoreUnknownFields: true })
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/json",
|
||||
data as object
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromJson(data as any, {
|
||||
ignoreUnknownFields: true,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
|
||||
private readonly rpc: Rpc;
|
||||
constructor(rpc: Rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
}
|
||||
CreateArtifact(
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse> {
|
||||
const data = CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"CreateArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
CreateArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
FinalizeArtifact(
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse> {
|
||||
const data = FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"FinalizeArtifact",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
FinalizeArtifactResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse> {
|
||||
const data = ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"ListArtifacts",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
ListArtifactsResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
|
||||
GetSignedArtifactURL(
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse> {
|
||||
const data = GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request(
|
||||
"github.actions.results.api.v1.ArtifactService",
|
||||
"GetSignedArtifactURL",
|
||||
"application/protobuf",
|
||||
data
|
||||
);
|
||||
return promise.then((data) =>
|
||||
GetSignedArtifactURLResponse.fromBinary(data as Uint8Array)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
//==================================//
|
||||
// Server Code //
|
||||
//==================================//
|
||||
|
||||
export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
|
||||
CreateArtifact(
|
||||
ctx: T,
|
||||
request: CreateArtifactRequest
|
||||
): Promise<CreateArtifactResponse>;
|
||||
FinalizeArtifact(
|
||||
ctx: T,
|
||||
request: FinalizeArtifactRequest
|
||||
): Promise<FinalizeArtifactResponse>;
|
||||
ListArtifacts(
|
||||
ctx: T,
|
||||
request: ListArtifactsRequest
|
||||
): Promise<ListArtifactsResponse>;
|
||||
GetSignedArtifactURL(
|
||||
ctx: T,
|
||||
request: GetSignedArtifactURLRequest
|
||||
): Promise<GetSignedArtifactURLResponse>;
|
||||
}
|
||||
|
||||
export enum ArtifactServiceMethod {
|
||||
CreateArtifact = "CreateArtifact",
|
||||
FinalizeArtifact = "FinalizeArtifact",
|
||||
ListArtifacts = "ListArtifacts",
|
||||
GetSignedArtifactURL = "GetSignedArtifactURL",
|
||||
}
|
||||
|
||||
export const ArtifactServiceMethodList = [
|
||||
ArtifactServiceMethod.CreateArtifact,
|
||||
ArtifactServiceMethod.FinalizeArtifact,
|
||||
ArtifactServiceMethod.ListArtifacts,
|
||||
ArtifactServiceMethod.GetSignedArtifactURL,
|
||||
];
|
||||
|
||||
export function createArtifactServiceServer<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(service: ArtifactServiceTwirp<T>) {
|
||||
return new TwirpServer<ArtifactServiceTwirp, T>({
|
||||
service,
|
||||
packageName: "github.actions.results.api.v1",
|
||||
serviceName: "ArtifactService",
|
||||
methodList: ArtifactServiceMethodList,
|
||||
matchRoute: matchArtifactServiceRoute,
|
||||
});
|
||||
}
|
||||
|
||||
function matchArtifactServiceRoute<T extends TwirpContext = TwirpContext>(
|
||||
method: string,
|
||||
events: RouterEvents<T>
|
||||
) {
|
||||
switch (method) {
|
||||
case "CreateArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
CreateArtifactRequest,
|
||||
CreateArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "CreateArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceCreateArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "FinalizeArtifact":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "FinalizeArtifact" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceFinalizeArtifactRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "ListArtifacts":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
ListArtifactsRequest,
|
||||
ListArtifactsResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "ListArtifacts" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceListArtifactsRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
case "GetSignedArtifactURL":
|
||||
return async (
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
GetSignedArtifactURLRequest,
|
||||
GetSignedArtifactURLResponse
|
||||
>[]
|
||||
) => {
|
||||
ctx = { ...ctx, methodName: "GetSignedArtifactURL" };
|
||||
await events.onMatch(ctx);
|
||||
return handleArtifactServiceGetSignedArtifactURLRequest(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
};
|
||||
default:
|
||||
events.onNotFound();
|
||||
const msg = `no handler found`;
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceCreateArtifactRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceCreateArtifactJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceCreateArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceFinalizeArtifactRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<
|
||||
T,
|
||||
FinalizeArtifactRequest,
|
||||
FinalizeArtifactResponse
|
||||
>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceFinalizeArtifactJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceFinalizeArtifactProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceListArtifactsRequest<
|
||||
T extends TwirpContext = TwirpContext
|
||||
>(
|
||||
ctx: T,
|
||||
service: ArtifactServiceTwirp,
|
||||
data: Buffer,
|
||||
interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
|
||||
): Promise<string | Uint8Array> {
|
||||
switch (ctx.contentType) {
|
||||
case TwirpContentType.JSON:
|
||||
return handleArtifactServiceListArtifactsJSON<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
case TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceListArtifactsProtobuf<T>(
|
||||
ctx,
|
||||
service,
|
||||
data,
|
||||
interceptors
|
||||
);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new TwirpError(TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
|
||||
function handleArtifactServiceGetSignedArtifactURLRequest<
|
||||
T extends TwirpContext = TwirpContext
|
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    GetSignedArtifactURLRequest,
    GetSignedArtifactURLResponse
  >[]
): Promise<string | Uint8Array> {
  switch (ctx.contentType) {
    case TwirpContentType.JSON:
      return handleArtifactServiceGetSignedArtifactURLJSON<T>(
        ctx,
        service,
        data,
        interceptors
      );
    case TwirpContentType.Protobuf:
      return handleArtifactServiceGetSignedArtifactURLProtobuf<T>(
        ctx,
        service,
        data,
        interceptors
      );
    default:
      const msg = "unexpected Content-Type";
      throw new TwirpError(TwirpErrorCode.BadRoute, msg);
  }
}

async function handleArtifactServiceCreateArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
  let request: CreateArtifactRequest;
  let response: CreateArtifactResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = CreateArtifactRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      CreateArtifactRequest,
      CreateArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.CreateArtifact(ctx, inputReq);
    });
  } else {
    response = await service.CreateArtifact(ctx, request!);
  }

  return JSON.stringify(
    CreateArtifactResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceFinalizeArtifactJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
) {
  let request: FinalizeArtifactRequest;
  let response: FinalizeArtifactResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = FinalizeArtifactRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      FinalizeArtifactRequest,
      FinalizeArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.FinalizeArtifact(ctx, inputReq);
    });
  } else {
    response = await service.FinalizeArtifact(ctx, request!);
  }

  return JSON.stringify(
    FinalizeArtifactResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceListArtifactsJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
) {
  let request: ListArtifactsRequest;
  let response: ListArtifactsResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = ListArtifactsRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      ListArtifactsRequest,
      ListArtifactsResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.ListArtifacts(ctx, inputReq);
    });
  } else {
    response = await service.ListArtifacts(ctx, request!);
  }

  return JSON.stringify(
    ListArtifactsResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceGetSignedArtifactURLJSON<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    GetSignedArtifactURLRequest,
    GetSignedArtifactURLResponse
  >[]
) {
  let request: GetSignedArtifactURLRequest;
  let response: GetSignedArtifactURLResponse;

  try {
    const body = JSON.parse(data.toString() || "{}");
    request = GetSignedArtifactURLRequest.fromJson(body, {
      ignoreUnknownFields: true,
    });
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the json request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      GetSignedArtifactURLRequest,
      GetSignedArtifactURLResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.GetSignedArtifactURL(ctx, inputReq);
    });
  } else {
    response = await service.GetSignedArtifactURL(ctx, request!);
  }

  return JSON.stringify(
    GetSignedArtifactURLResponse.toJson(response, {
      useProtoFieldName: true,
      emitDefaultValues: false,
    }) as string
  );
}

async function handleArtifactServiceCreateArtifactProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, CreateArtifactRequest, CreateArtifactResponse>[]
) {
  let request: CreateArtifactRequest;
  let response: CreateArtifactResponse;

  try {
    request = CreateArtifactRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      CreateArtifactRequest,
      CreateArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.CreateArtifact(ctx, inputReq);
    });
  } else {
    response = await service.CreateArtifact(ctx, request!);
  }

  return Buffer.from(CreateArtifactResponse.toBinary(response));
}

async function handleArtifactServiceFinalizeArtifactProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    FinalizeArtifactRequest,
    FinalizeArtifactResponse
  >[]
) {
  let request: FinalizeArtifactRequest;
  let response: FinalizeArtifactResponse;

  try {
    request = FinalizeArtifactRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      FinalizeArtifactRequest,
      FinalizeArtifactResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.FinalizeArtifact(ctx, inputReq);
    });
  } else {
    response = await service.FinalizeArtifact(ctx, request!);
  }

  return Buffer.from(FinalizeArtifactResponse.toBinary(response));
}

async function handleArtifactServiceListArtifactsProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<T, ListArtifactsRequest, ListArtifactsResponse>[]
) {
  let request: ListArtifactsRequest;
  let response: ListArtifactsResponse;

  try {
    request = ListArtifactsRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      ListArtifactsRequest,
      ListArtifactsResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.ListArtifacts(ctx, inputReq);
    });
  } else {
    response = await service.ListArtifacts(ctx, request!);
  }

  return Buffer.from(ListArtifactsResponse.toBinary(response));
}

async function handleArtifactServiceGetSignedArtifactURLProtobuf<
  T extends TwirpContext = TwirpContext
>(
  ctx: T,
  service: ArtifactServiceTwirp,
  data: Buffer,
  interceptors?: Interceptor<
    T,
    GetSignedArtifactURLRequest,
    GetSignedArtifactURLResponse
  >[]
) {
  let request: GetSignedArtifactURLRequest;
  let response: GetSignedArtifactURLResponse;

  try {
    request = GetSignedArtifactURLRequest.fromBinary(data);
  } catch (e) {
    if (e instanceof Error) {
      const msg = "the protobuf request could not be decoded";
      throw new TwirpError(TwirpErrorCode.Malformed, msg).withCause(e, true);
    }
  }

  if (interceptors && interceptors.length > 0) {
    const interceptor = chainInterceptors(...interceptors) as Interceptor<
      T,
      GetSignedArtifactURLRequest,
      GetSignedArtifactURLResponse
    >;
    response = await interceptor(ctx, request!, (ctx, inputReq) => {
      return service.GetSignedArtifactURL(ctx, inputReq);
    });
  } else {
    response = await service.GetSignedArtifactURL(ctx, request!);
  }

  return Buffer.from(GetSignedArtifactURLResponse.toBinary(response));
}
@@ -8,13 +8,18 @@ import {
   ListArtifactsOptions,
   ListArtifactsResponse,
   DownloadArtifactResponse,
-  FindOptions
+  FindOptions,
+  DeleteArtifactResponse
 } from './shared/interfaces'
 import {uploadArtifact} from './upload/upload-artifact'
 import {
   downloadArtifactPublic,
   downloadArtifactInternal
 } from './download/download-artifact'
+import {
+  deleteArtifactPublic,
+  deleteArtifactInternal
+} from './delete/delete-artifact'
 import {getArtifactPublic, getArtifactInternal} from './find/get-artifact'
 import {listArtifactsPublic, listArtifactsInternal} from './find/list-artifacts'
 import {GHESNotSupportedError} from './shared/errors'
@@ -77,7 +82,7 @@ export interface ArtifactClient {
    *
    * If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
    *
-   * @param artifactId The name of the artifact to download
+   * @param artifactId The id of the artifact to download
    * @param options Extra options that allow for the customization of the download behavior
    * @returns single DownloadArtifactResponse object
    */
@@ -85,6 +90,20 @@ export interface ArtifactClient {
     artifactId: number,
     options?: DownloadArtifactOptions & FindOptions
   ): Promise<DownloadArtifactResponse>
+
+  /**
+   * Delete an Artifact
+   *
+   * If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
+   *
+   * @param artifactName The name of the artifact to delete
+   * @param options Extra options that allow for the customization of the delete behavior
+   * @returns single DeleteArtifactResponse object
+   */
+  deleteArtifact(
+    artifactName: string,
+    options?: FindOptions
+  ): Promise<DeleteArtifactResponse>
 }

 /**
@@ -225,4 +244,41 @@ If the error persists, please check whether Actions and API requests are operating
       throw error
     }
   }
+
+  async deleteArtifact(
+    artifactName: string,
+    options?: FindOptions
+  ): Promise<DeleteArtifactResponse> {
+    try {
+      if (isGhes()) {
+        throw new GHESNotSupportedError()
+      }
+
+      if (options?.findBy) {
+        const {
+          findBy: {repositoryOwner, repositoryName, workflowRunId, token}
+        } = options
+
+        return deleteArtifactPublic(
+          artifactName,
+          workflowRunId,
+          repositoryOwner,
+          repositoryName,
+          token
+        )
+      }
+
+      return deleteArtifactInternal(artifactName)
+    } catch (error) {
+      warning(
+        `Delete Artifact failed with error: ${error}.
+
+Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
+
+If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`
+      )
+
+      throw error
+    }
+  }
 }
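To make the new client surface concrete, a minimal usage sketch (the `DefaultArtifactClient` entry point is an assumption based on the package's existing exports; the artifact name is illustrative):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function cleanup(): Promise<void> {
  const artifact = new DefaultArtifactClient()

  // Deletes an artifact uploaded earlier in the same workflow run
  const {id} = await artifact.deleteArtifact('my-artifact')
  console.log(`Deleted artifact with id ${id}`)
}
```

Passing `options.findBy` (token, workflow run id, and repository owner/name) instead routes the call through the public Delete Artifact REST API, mirroring the existing download and get methods.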
@@ -0,0 +1,109 @@
import {info, debug} from '@actions/core'
import {getOctokit} from '@actions/github'
import {DeleteArtifactResponse} from '../shared/interfaces'
import {getUserAgentString} from '../shared/user-agent'
import {getRetryOptions} from '../find/retry-options'
import {defaults as defaultGitHubOptions} from '@actions/github/lib/utils'
import {requestLog} from '@octokit/plugin-request-log'
import {retry} from '@octokit/plugin-retry'
import {OctokitOptions} from '@octokit/core/dist-types/types'
import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
import {getBackendIdsFromToken} from '../shared/util'
import {
  DeleteArtifactRequest,
  ListArtifactsRequest,
  StringValue
} from '../../generated'
import {getArtifactPublic} from '../find/get-artifact'
import {ArtifactNotFoundError, InvalidResponseError} from '../shared/errors'

export async function deleteArtifactPublic(
  artifactName: string,
  workflowRunId: number,
  repositoryOwner: string,
  repositoryName: string,
  token: string
): Promise<DeleteArtifactResponse> {
  const [retryOpts, requestOpts] = getRetryOptions(defaultGitHubOptions)

  const opts: OctokitOptions = {
    log: undefined,
    userAgent: getUserAgentString(),
    previews: undefined,
    retry: retryOpts,
    request: requestOpts
  }

  const github = getOctokit(token, opts, retry, requestLog)

  const getArtifactResp = await getArtifactPublic(
    artifactName,
    workflowRunId,
    repositoryOwner,
    repositoryName,
    token
  )

  const deleteArtifactResp = await github.rest.actions.deleteArtifact({
    owner: repositoryOwner,
    repo: repositoryName,
    artifact_id: getArtifactResp.artifact.id
  })

  if (deleteArtifactResp.status !== 204) {
    throw new InvalidResponseError(
      `Invalid response from GitHub API: ${deleteArtifactResp.status} (${deleteArtifactResp?.headers?.['x-github-request-id']})`
    )
  }

  return {
    id: getArtifactResp.artifact.id
  }
}

export async function deleteArtifactInternal(
  artifactName: string
): Promise<DeleteArtifactResponse> {
  const artifactClient = internalArtifactTwirpClient()

  const {workflowRunBackendId, workflowJobRunBackendId} =
    getBackendIdsFromToken()

  const listReq: ListArtifactsRequest = {
    workflowRunBackendId,
    workflowJobRunBackendId,
    nameFilter: StringValue.create({value: artifactName})
  }

  const listRes = await artifactClient.ListArtifacts(listReq)

  if (listRes.artifacts.length === 0) {
    throw new ArtifactNotFoundError(
      `Artifact not found for name: ${artifactName}`
    )
  }

  let artifact = listRes.artifacts[0]
  if (listRes.artifacts.length > 1) {
    artifact = listRes.artifacts.sort(
      (a, b) => Number(b.databaseId) - Number(a.databaseId)
    )[0]

    debug(
      `More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`
    )
  }

  const req: DeleteArtifactRequest = {
    workflowRunBackendId: artifact.workflowRunBackendId,
    workflowJobRunBackendId: artifact.workflowJobRunBackendId,
    name: artifact.name
  }

  const res = await artifactClient.DeleteArtifact(req)
  info(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`)

  return {
    id: Number(res.artifactId)
  }
}
@@ -1,11 +1,15 @@
 import fs from 'fs/promises'
+import * as crypto from 'crypto'
+import * as stream from 'stream'
+
 import * as github from '@actions/github'
 import * as core from '@actions/core'
 import * as httpClient from '@actions/http-client'
 import unzip from 'unzip-stream'
 import {
   DownloadArtifactOptions,
-  DownloadArtifactResponse
+  DownloadArtifactResponse,
+  StreamExtractResponse
 } from '../shared/interfaces'
 import {getUserAgentString} from '../shared/user-agent'
 import {getGitHubWorkspaceDir} from '../shared/config'
@@ -37,12 +41,14 @@ async function exists(path: string): Promise<boolean> {
   }
 }

-async function streamExtract(url: string, directory: string): Promise<void> {
+async function streamExtract(
+  url: string,
+  directory: string
+): Promise<StreamExtractResponse> {
   let retryCount = 0
   while (retryCount < 5) {
     try {
-      await streamExtractExternal(url, directory)
-      return
+      return await streamExtractExternal(url, directory)
     } catch (error) {
       retryCount++
       core.debug(
@@ -58,8 +64,9 @@ async function streamExtract(url: string, directory: string): Promise<void> {

 export async function streamExtractExternal(
   url: string,
-  directory: string
-): Promise<void> {
+  directory: string,
+  opts: {timeout: number} = {timeout: 30 * 1000}
+): Promise<StreamExtractResponse> {
   const client = new httpClient.HttpClient(getUserAgentString())
   const response = await client.get(url)
   if (response.message.statusCode !== 200) {
@@ -68,17 +75,26 @@ export async function streamExtractExternal(
     )
   }

-  const timeout = 30 * 1000 // 30 seconds
+  let sha256Digest: string | undefined = undefined

   return new Promise((resolve, reject) => {
     const timerFn = (): void => {
-      response.message.destroy(
-        new Error(`Blob storage chunk did not respond in ${timeout}ms`)
+      const timeoutError = new Error(
+        `Blob storage chunk did not respond in ${opts.timeout}ms`
       )
+      response.message.destroy(timeoutError)
+      reject(timeoutError)
     }
-    const timer = setTimeout(timerFn, timeout)
+    const timer = setTimeout(timerFn, opts.timeout)

-    response.message
+    const hashStream = crypto.createHash('sha256').setEncoding('hex')
+    const passThrough = new stream.PassThrough()
+
+    response.message.pipe(passThrough)
+    passThrough.pipe(hashStream)
+    const extractStream = passThrough
+
+    extractStream
       .on('data', () => {
         timer.refresh()
       })
@@ -92,7 +108,12 @@ export async function streamExtractExternal(
       .pipe(unzip.Extract({path: directory}))
       .on('close', () => {
         clearTimeout(timer)
-        resolve()
+        if (hashStream) {
+          hashStream.end()
+          sha256Digest = hashStream.read() as string
+          core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`)
+        }
+        resolve({sha256Digest: `sha256:${sha256Digest}`})
       })
       .on('error', (error: Error) => {
         reject(error)
@@ -111,6 +132,8 @@ export async function downloadArtifactPublic(

   const api = github.getOctokit(token)

+  let digestMismatch = false
+
   core.info(
     `Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`
   )
@@ -140,13 +163,20 @@ export async function downloadArtifactPublic(

   try {
     core.info(`Starting download of artifact to: ${downloadPath}`)
-    await streamExtract(location, downloadPath)
+    const extractResponse = await streamExtract(location, downloadPath)
     core.info(`Artifact download completed successfully.`)
+    if (options?.expectedHash) {
+      if (options?.expectedHash !== extractResponse.sha256Digest) {
+        digestMismatch = true
+        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
+        core.debug(`Expected digest: ${options.expectedHash}`)
+      }
+    }
   } catch (error) {
     throw new Error(`Unable to download and extract artifact: ${error.message}`)
   }

-  return {downloadPath}
+  return {downloadPath, digestMismatch}
 }

 export async function downloadArtifactInternal(
@@ -157,6 +187,8 @@ export async function downloadArtifactInternal(

   const artifactClient = internalArtifactTwirpClient()

+  let digestMismatch = false
+
   const {workflowRunBackendId, workflowJobRunBackendId} =
     getBackendIdsFromToken()

@@ -192,13 +224,20 @@ export async function downloadArtifactInternal(

   try {
     core.info(`Starting download of artifact to: ${downloadPath}`)
-    await streamExtract(signedUrl, downloadPath)
+    const extractResponse = await streamExtract(signedUrl, downloadPath)
     core.info(`Artifact download completed successfully.`)
+    if (options?.expectedHash) {
+      if (options?.expectedHash !== extractResponse.sha256Digest) {
+        digestMismatch = true
+        core.debug(`Computed digest: ${extractResponse.sha256Digest}`)
+        core.debug(`Expected digest: ${options.expectedHash}`)
+      }
+    }
   } catch (error) {
     throw new Error(`Unable to download and extract artifact: ${error.message}`)
   }

-  return {downloadPath}
+  return {downloadPath, digestMismatch}
 }

 async function resolveOrCreateDirectory(
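A hedged sketch of how a caller might exercise the new digest check (the `DefaultArtifactClient` entry point is an assumption; `downloadArtifact`, `expectedHash`, and `digestMismatch` come from the interfaces in this diff):

```typescript
import {DefaultArtifactClient} from '@actions/artifact'

async function fetchVerified(artifactId: number, sha256: string): Promise<void> {
  const client = new DefaultArtifactClient()

  // expectedHash should be the digest recorded at upload time, in the same
  // `sha256:<hex>` form that streamExtract resolves with
  const {downloadPath, digestMismatch} = await client.downloadArtifact(artifactId, {
    expectedHash: sha256
  })

  if (digestMismatch) {
    throw new Error(`Artifact extracted to ${downloadPath} failed digest verification`)
  }
}
```

Note that a mismatch surfaces as a flag rather than a thrown error, leaving the caller to decide whether to fail.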
@@ -49,7 +49,9 @@ export async function getArtifactPublic(

   if (getArtifactResp.data.artifacts.length === 0) {
     throw new ArtifactNotFoundError(
-      `Artifact not found for name: ${artifactName}`
+      `Artifact not found for name: ${artifactName}
+        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
+        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
     )
   }

@@ -66,7 +68,10 @@ export async function getArtifactPublic(
       name: artifact.name,
       id: artifact.id,
       size: artifact.size_in_bytes,
-      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+      createdAt: artifact.created_at
+        ? new Date(artifact.created_at)
+        : undefined,
+      digest: artifact.digest
     }
   }
 }
@@ -89,7 +94,9 @@ export async function getArtifactInternal(

   if (res.artifacts.length === 0) {
     throw new ArtifactNotFoundError(
-      `Artifact not found for name: ${artifactName}`
+      `Artifact not found for name: ${artifactName}
+        Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
+        For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`
     )
   }

@@ -111,7 +118,8 @@ export async function getArtifactInternal(
       size: Number(artifact.size),
       createdAt: artifact.createdAt
         ? Timestamp.toDate(artifact.createdAt)
-        : undefined
+        : undefined,
+      digest: artifact.digest?.value
     }
   }
 }
@@ -9,12 +9,12 @@ import {retry} from '@octokit/plugin-retry'
 import {OctokitOptions} from '@octokit/core/dist-types/types'
 import {internalArtifactTwirpClient} from '../shared/artifact-twirp-client'
 import {getBackendIdsFromToken} from '../shared/util'
+import {getMaxArtifactListCount} from '../shared/config'
 import {ListArtifactsRequest, Timestamp} from '../../generated'

-// Limiting to 1000 for perf reasons
-const maximumArtifactCount = 1000
+const maximumArtifactCount = getMaxArtifactListCount()
 const paginationCount = 100
-const maxNumberOfPages = maximumArtifactCount / paginationCount
+const maxNumberOfPages = Math.ceil(maximumArtifactCount / paginationCount)

 export async function listArtifactsPublic(
   workflowRunId: number,
@@ -41,14 +41,17 @@ export async function listArtifactsPublic(
   const github = getOctokit(token, opts, retry, requestLog)

   let currentPageNumber = 1
-  const {data: listArtifactResponse} =
-    await github.rest.actions.listWorkflowRunArtifacts({
+
+  const {data: listArtifactResponse} = await github.request(
+    'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
+    {
       owner: repositoryOwner,
       repo: repositoryName,
       run_id: workflowRunId,
       per_page: paginationCount,
       page: currentPageNumber
-    })
+    }
+  )

   let numberOfPages = Math.ceil(
     listArtifactResponse.total_count / paginationCount
@@ -56,7 +59,7 @@ export async function listArtifactsPublic(
   const totalArtifactCount = listArtifactResponse.total_count
   if (totalArtifactCount > maximumArtifactCount) {
     warning(
-      `Workflow run ${workflowRunId} has more than 1000 artifacts. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
+      `Workflow run ${workflowRunId} has ${totalArtifactCount} artifacts, exceeding the limit of ${maximumArtifactCount}. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`
     )
     numberOfPages = maxNumberOfPages
   }
@@ -67,27 +70,32 @@ export async function listArtifactsPublic(
       name: artifact.name,
       id: artifact.id,
       size: artifact.size_in_bytes,
-      createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
+      createdAt: artifact.created_at
+        ? new Date(artifact.created_at)
+        : undefined,
+      digest: (artifact as ArtifactResponse).digest
     })
   }

-  // Move to the next page
-  currentPageNumber++
+  // Iterate over any remaining pages
   for (
     currentPageNumber;
-    currentPageNumber < numberOfPages;
+    currentPageNumber <= numberOfPages;
     currentPageNumber++
   ) {
+    currentPageNumber++
     debug(`Fetching page ${currentPageNumber} of artifact list`)

-    const {data: listArtifactResponse} =
-      await github.rest.actions.listWorkflowRunArtifacts({
+    const {data: listArtifactResponse} = await github.request(
+      'GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts',
+      {
         owner: repositoryOwner,
         repo: repositoryName,
         run_id: workflowRunId,
         per_page: paginationCount,
         page: currentPageNumber
-      })
+      }
+    )

     for (const artifact of listArtifactResponse.artifacts) {
       artifacts.push({
@@ -96,7 +104,8 @@ export async function listArtifactsPublic(
         size: artifact.size_in_bytes,
         createdAt: artifact.created_at
           ? new Date(artifact.created_at)
-          : undefined
+          : undefined,
+        digest: (artifact as ArtifactResponse).digest
       })
     }
   }
@@ -132,7 +141,8 @@ export async function listArtifactsInternal(
     size: Number(artifact.size),
     createdAt: artifact.createdAt
       ? Timestamp.toDate(artifact.createdAt)
-      : undefined
+      : undefined,
+    digest: artifact.digest?.value
   }))

   if (latest) {
@@ -146,6 +156,18 @@ export async function listArtifactsInternal(
   }
 }

+/**
+ * This exists so that we don't have to use 'any' when receiving the artifact list from the GitHub API.
+ * The digest field is not present in OpenAPI/types at time of writing, which necessitates this change.
+ */
+interface ArtifactResponse {
+  name: string
+  id: number
+  size_in_bytes: number
+  created_at?: string
+  digest?: string
+}
+
 /**
  * Filters a list of artifacts to only include the latest artifact for each name
  * @param artifacts The artifacts to filter
@@ -5,6 +5,7 @@ import {ArtifactServiceClientJSON} from '../../generated'
 import {getResultsServiceUrl, getRuntimeToken} from './config'
 import {getUserAgentString} from './user-agent'
 import {NetworkError, UsageError} from './errors'
+import {maskSecretUrls} from './util'

 // The twirp http client must implement this interface
 interface Rpc {
@@ -86,6 +87,7 @@ class ArtifactHttpClient implements Rpc {
         debug(`[Response] - ${response.message.statusCode}`)
         debug(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`)
         const body = JSON.parse(rawBody)
+        maskSecretUrls(body)
         debug(`Body: ${JSON.stringify(body, null, 2)}`)
         if (this.isSuccessStatusCode(statusCode)) {
           return {response, body}
@@ -102,7 +104,6 @@ class ArtifactHttpClient implements Rpc {
       } catch (error) {
         if (error instanceof SyntaxError) {
           debug(`Raw Body: ${rawBody}`)
-          throw error
         }

         if (error instanceof UsageError) {
@@ -1,4 +1,5 @@
 import os from 'os'
+import {info} from '@actions/core'

 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is used during upload to blob storage
@@ -27,7 +28,13 @@ export function isGhes(): boolean {
   const ghUrl = new URL(
     process.env['GITHUB_SERVER_URL'] || 'https://github.com'
   )
-  return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'
+
+  const hostname = ghUrl.hostname.trimEnd().toUpperCase()
+  const isGitHubHost = hostname === 'GITHUB.COM'
+  const isGheHost = hostname.endsWith('.GHE.COM')
+  const isLocalHost = hostname.endsWith('.LOCALHOST')
+
+  return !isGitHubHost && !isGheHost && !isLocalHost
 }

 export function getGitHubWorkspaceDir(): string {
@@ -38,16 +45,71 @@ export function getGitHubWorkspaceDir(): string {
   return ghWorkspaceDir
 }

 // Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
 // If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
-// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
+// The maximum value of concurrency is 300.
+// This value can be changed with ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY variable.
 export function getConcurrency(): number {
   const numCPUs = os.cpus().length
+  let concurrencyCap = 32

-  if (numCPUs <= 4) {
-    return 32
+  if (numCPUs > 4) {
+    const concurrency = 16 * numCPUs
+    concurrencyCap = concurrency > 300 ? 300 : concurrency
   }

-  const concurrency = 16 * numCPUs
-  return concurrency > 300 ? 300 : concurrency
+  const concurrencyOverride = process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
+  if (concurrencyOverride) {
+    const concurrency = parseInt(concurrencyOverride)
+    if (isNaN(concurrency) || concurrency < 1) {
+      throw new Error(
+        'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY env variable'
+      )
+    }
+
+    if (concurrency < concurrencyCap) {
+      info(
+        `Set concurrency based on the value set in ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY.`
+      )
+      return concurrency
+    }
+
+    info(
+      `ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY is higher than the cap of ${concurrencyCap} based on the number of cpus. Set it to the maximum value allowed.`
+    )
+    return concurrencyCap
+  }
+
+  // default concurrency to 5
+  return 5
 }

+export function getUploadChunkTimeout(): number {
+  const timeoutVar = process.env['ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS']
+  if (!timeoutVar) {
+    return 300000 // 5 minutes
+  }
+
+  const timeout = parseInt(timeoutVar)
+  if (isNaN(timeout)) {
+    throw new Error(
+      'Invalid value set for ACTIONS_ARTIFACT_UPLOAD_TIMEOUT_MS env variable'
+    )
+  }
+
+  return timeout
+}
+
+// This value can be changed with ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT variable.
+// Defaults to 1000 as a safeguard for rate limiting.
+export function getMaxArtifactListCount(): number {
+  const maxCountVar =
+    process.env['ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT'] || '1000'
+
+  const maxCount = parseInt(maxCountVar)
+  if (isNaN(maxCount) || maxCount < 1) {
+    throw new Error(
+      'Invalid value set for ACTIONS_ARTIFACT_MAX_ARTIFACT_COUNT env variable'
+    )
+  }
+
+  return maxCount
+}
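To make the override rules concrete, a hedged sketch of how `getConcurrency` behaves under the new variable (the 8-CPU runner, import path, and values are illustrative):

```typescript
import {getConcurrency} from './internal/shared/config' // path illustrative

// On an 8-CPU runner the cap is 16 * 8 = 128 (never more than 300)
process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY'] = '64'
console.log(getConcurrency()) // 64: a valid override below the cap is used as-is

process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY'] = '1000'
console.log(getConcurrency()) // 128: overrides at or above the cap are clamped to it

delete process.env['ACTIONS_ARTIFACT_UPLOAD_CONCURRENCY']
console.log(getConcurrency()) // 5: with no override, the new conservative default
```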
@@ -12,6 +12,11 @@ export interface UploadArtifactResponse {
    * This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
    */
   id?: number
+
+  /**
+   * The SHA256 digest of the artifact that was created. Not provided if no artifact was uploaded
+   */
+  digest?: string
 }

 /**
@@ -86,6 +91,11 @@ export interface DownloadArtifactResponse {
    * The path where the artifact was downloaded to
    */
   downloadPath?: string
+
+  /**
+   * Returns true if the digest of the downloaded artifact does not match the expected hash
+   */
+  digestMismatch?: boolean
 }

 /**
@@ -96,6 +106,20 @@ export interface DownloadArtifactOptions {
    * Denotes where the artifact will be downloaded to. If not specified then the artifact is downloaded to GITHUB_WORKSPACE
    */
   path?: string
+
+  /**
+   * The hash that was computed for the artifact during upload. If provided, the outcome of the download
+   * will provide a digestMismatch property indicating whether the hash of the downloaded artifact
+   * matches the expected hash.
+   */
+  expectedHash?: string
+}
+
+export interface StreamExtractResponse {
+  /**
+   * The SHA256 hash of the downloaded file
+   */
+  sha256Digest?: string
 }

 /**
@@ -121,6 +145,11 @@ export interface Artifact {
    * The time when the artifact was created
    */
   createdAt?: Date
+
+  /**
+   * The digest of the artifact, computed at time of upload.
+   */
+  digest?: string
 }

 // FindOptions are for fetching Artifact(s) out of the scope of the current run.
@@ -147,3 +176,13 @@ export interface FindOptions {
     repositoryName: string
   }
 }
+
+/**
+ * Response from the server when deleting an artifact
+ */
+export interface DeleteArtifactResponse {
+  /**
+   * The id of the artifact that was deleted
+   */
+  id: number
+}
@@ -1,6 +1,7 @@
 import * as core from '@actions/core'
 import {getRuntimeToken} from './config'
 import jwt_decode from 'jwt-decode'
+import {debug, setSecret} from '@actions/core'

 export interface BackendIds {
   workflowRunBackendId: string
@@ -69,3 +70,76 @@ export function getBackendIdsFromToken(): BackendIds {

   throw InvalidJwtError
 }
+
+/**
+ * Masks the `sig` parameter in a URL and sets it as a secret.
+ *
+ * @param url - The URL containing the signature parameter to mask
+ * @remarks
+ * This function attempts to parse the provided URL and identify the 'sig' query parameter.
+ * If found, it registers both the raw and URL-encoded signature values as secrets using
+ * the Actions `setSecret` API, which prevents them from being displayed in logs.
+ *
+ * The function handles errors gracefully if URL parsing fails, logging them as debug messages.
+ *
+ * @example
+ * ```typescript
+ * // Mask a signature in an Azure SAS token URL
+ * maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
+ * ```
+ */
+export function maskSigUrl(url: string): void {
+  if (!url) return
+  try {
+    const parsedUrl = new URL(url)
+    const signature = parsedUrl.searchParams.get('sig')
+    if (signature) {
+      setSecret(signature)
+      setSecret(encodeURIComponent(signature))
+    }
+  } catch (error) {
+    debug(
+      `Failed to parse URL: ${url} ${
+        error instanceof Error ? error.message : String(error)
+      }`
+    )
+  }
+}
+
+/**
+ * Masks sensitive information in URLs containing signature parameters.
+ * Currently supports masking 'sig' parameters in the 'signed_upload_url'
+ * and 'signed_url' properties of the provided object.
+ *
+ * @param body - The object that may contain signed URLs
+ * @remarks
+ * This function extracts URLs from the object properties and calls maskSigUrl
+ * on each one to redact sensitive signature information. The function doesn't
+ * modify the original object; it only marks the signatures as secrets for
+ * logging purposes.
+ *
+ * @example
+ * ```typescript
+ * const responseBody = {
+ *   signed_upload_url: 'https://example.com?sig=abc123',
+ *   signed_download_url: 'https://example.com?sig=def456'
+ * };
+ * maskSecretUrls(responseBody);
+ * ```
+ */
+export function maskSecretUrls(body: Record<string, unknown> | null): void {
+  if (typeof body !== 'object' || body === null) {
+    debug('body is not an object or is null')
+    return
+  }
+
+  if (
+    'signed_upload_url' in body &&
+    typeof body.signed_upload_url === 'string'
+  ) {
+    maskSigUrl(body.signed_upload_url)
+  }
+  if ('signed_url' in body && typeof body.signed_url === 'string') {
+    maskSigUrl(body.signed_url)
+  }
+}
@@ -1,7 +1,11 @@
 import {BlobClient, BlockBlobUploadStreamOptions} from '@azure/storage-blob'
-import {TransferProgressEvent} from '@azure/core-http'
+import {TransferProgressEvent} from '@azure/core-http-compat'
 import {ZipUploadStream} from './zip'
-import {getUploadChunkSize, getConcurrency} from '../shared/config'
+import {
+  getUploadChunkSize,
+  getConcurrency,
+  getUploadChunkTimeout
+} from '../shared/config'
 import * as core from '@actions/core'
 import * as crypto from 'crypto'
 import * as stream from 'stream'
@@ -24,6 +28,22 @@ export async function uploadZipToBlobStorage(
   zipUploadStream: ZipUploadStream
 ): Promise<BlobUploadResponse> {
   let uploadByteCount = 0
+  let lastProgressTime = Date.now()
+  const abortController = new AbortController()
+
+  const chunkTimer = async (interval: number): Promise<void> =>
+    new Promise((resolve, reject) => {
+      const timer = setInterval(() => {
+        if (Date.now() - lastProgressTime > interval) {
+          reject(new Error('Upload progress stalled.'))
+        }
+      }, interval)
+
+      abortController.signal.addEventListener('abort', () => {
+        clearInterval(timer)
+        resolve()
+      })
+    })

   const maxConcurrency = getConcurrency()
   const bufferSize = getUploadChunkSize()
@@ -37,11 +57,13 @@ export async function uploadZipToBlobStorage(
   const uploadCallback = (progress: TransferProgressEvent): void => {
     core.info(`Uploaded bytes ${progress.loadedBytes}`)
     uploadByteCount = progress.loadedBytes
+    lastProgressTime = Date.now()
   }

   const options: BlockBlobUploadStreamOptions = {
     blobHTTPHeaders: {blobContentType: 'zip'},
-    onProgress: uploadCallback
+    onProgress: uploadCallback,
+    abortSignal: abortController.signal
   }

   let sha256Hash: string | undefined = undefined
@@ -54,32 +76,35 @@ export async function uploadZipToBlobStorage(
   core.info('Beginning upload of artifact content to blob storage')

   try {
-    await blockBlobClient.uploadStream(
-      uploadStream,
-      bufferSize,
-      maxConcurrency,
-      options
-    )
+    await Promise.race([
+      blockBlobClient.uploadStream(
+        uploadStream,
+        bufferSize,
+        maxConcurrency,
+        options
+      ),
+      chunkTimer(getUploadChunkTimeout())
+    ])
   } catch (error) {
     if (NetworkError.isNetworkErrorCode(error?.code)) {
       throw new NetworkError(error?.code)
     }

     throw error
+  } finally {
+    abortController.abort()
   }

   core.info('Finished uploading artifact content to blob storage!')

   hashStream.end()
   sha256Hash = hashStream.read() as string
-  core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`)
+  core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`)

   if (uploadByteCount === 0) {
     core.warning(
       `No data was uploaded to blob storage. Reported upload byte count is 0.`
     )
   }

   return {
     uploadSize: uploadByteCount,
     sha256Hash
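The upload path above races the blob upload against a stall watchdog. A hedged, self-contained sketch of the same pattern (the names are illustrative, not part of the package API):

```typescript
async function withStallGuard<T>(
  work: Promise<T>,
  intervalMs: number,
  lastProgress: () => number
): Promise<T> {
  const abort = new AbortController()

  // Never resolves; rejects if no progress was reported within the window
  const watchdog = new Promise<never>((_, reject) => {
    const timer = setInterval(() => {
      if (Date.now() - lastProgress() > intervalMs) {
        reject(new Error('Upload progress stalled.'))
      }
    }, intervalMs)
    abort.signal.addEventListener('abort', () => clearInterval(timer))
  })

  try {
    // Whichever settles first wins
    return await Promise.race([work, watchdog])
  } finally {
    abort.abort() // always tear down the timer so the process can exit cleanly
  }
}
```

The `finally { abortController.abort() }` in the diff serves the same dual purpose: it cancels any in-flight upload chunks via `abortSignal` and stops the interval timer.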
@@ -110,6 +110,7 @@ export async function uploadArtifact(

   return {
     size: uploadResult.uploadSize,
+    digest: uploadResult.sha256Hash,
     id: Number(artifactId)
   }
 }
@@ -13,6 +13,12 @@ export interface UploadZipSpecification {
    * The destination path in a zip for a file
    */
   destinationPath: string
+
+  /**
+   * Information about the file
+   * https://nodejs.org/api/fs.html#class-fsstats
+   */
+  stats: fs.Stats
 }

 /**
@@ -75,10 +81,11 @@ export function getUploadZipSpecification(
       - file3.txt
   */
   for (let file of filesToZip) {
-    if (!fs.existsSync(file)) {
+    const stats = fs.lstatSync(file, {throwIfNoEntry: false})
+    if (!stats) {
       throw new Error(`File ${file} does not exist`)
     }
-    if (!fs.statSync(file).isDirectory()) {
+    if (!stats.isDirectory()) {
       // Normalize and resolve, this allows for either absolute or relative paths to be used
       file = normalize(file)
       file = resolve(file)
@@ -94,7 +101,8 @@ export function getUploadZipSpecification(

       specification.push({
         sourcePath: file,
-        destinationPath: uploadPath
+        destinationPath: uploadPath,
+        stats
       })
     } else {
       // Empty directory
@@ -103,7 +111,8 @@ export function getUploadZipSpecification(

       specification.push({
         sourcePath: null,
-        destinationPath: directoryPath
+        destinationPath: directoryPath,
+        stats
       })
     }
   }
@@ -1,7 +1,7 @@
 import * as stream from 'stream'
+import {realpath} from 'fs/promises'
 import * as archiver from 'archiver'
 import * as core from '@actions/core'
-import {createReadStream} from 'fs'
 import {UploadZipSpecification} from './upload-zip-specification'
 import {getUploadChunkSize} from '../shared/config'

@@ -43,8 +43,14 @@ export async function createZipUploadStream(

   for (const file of uploadSpecification) {
     if (file.sourcePath !== null) {
-      // Add a normal file to the zip
-      zip.append(createReadStream(file.sourcePath), {
+      // Check if symlink and resolve the source path
+      let sourcePath = file.sourcePath
+      if (file.stats.isSymbolicLink()) {
+        sourcePath = await realpath(file.sourcePath)
+      }
+
+      // Add the file to the zip
+      zip.file(sourcePath, {
        name: file.destinationPath
       })
     } else {
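One design note on the hunk above: the `realpath` call is what motivates the change, so the zip stores the symlink target's content rather than a dangling link, and switching from `zip.append(createReadStream(...))` to `zip.file(...)` also lets archiver open each file lazily when the entry is actually written instead of eagerly opening a read stream per queued file. A hedged sketch of the two styles (paths are illustrative):

```typescript
import * as archiver from 'archiver'
import {createReadStream} from 'fs'

const zip = archiver.create('zip', {zlib: {level: 0}})

// Eager: the read stream is opened immediately, even while the entry waits in the queue
zip.append(createReadStream('/tmp/a.txt'), {name: 'a.txt'})

// Lazy: archiver stats and opens the file only when it reaches this entry
zip.file('/tmp/b.txt', {name: 'b.txt'})

void zip.finalize()
```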
@@ -0,0 +1,9 @@
The MIT License (MIT)

Copyright 2024 GitHub

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,191 @@
# `@actions/attest`

Functions for generating signed attestations for workflow artifacts.

Attestations bind some subject (a named artifact along with its digest) to a
predicate (some assertion about that subject) using the [in-toto
statement](https://github.com/in-toto/attestation/tree/main/spec/v1) format. A
signature is generated for the attestation using a
[Sigstore](https://www.sigstore.dev/)-issued signing certificate.

Once the attestation has been created and signed, it will be uploaded to the GH
attestations API and associated with the repository from which the workflow was
initiated.

See [Using artifact attestations to establish provenance for builds](https://docs.github.com/en/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)
for more information on artifact attestations.

## Usage

### `attest`

The `attest` function takes the supplied subject/predicate pair and generates a
signed attestation.

```js
const { attest } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  // In order to persist attestations to the repo, this should be a token with
  // repository write permissions.
  const ghToken = core.getInput('gh-token');

  const attestation = await attest({
    subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}],
    predicateType: 'https://in-toto.io/attestation/release',
    predicate: { . . . },
    token: ghToken
  });

  console.log(attestation);
}

run();
```

The `attest` function supports the following options:

```typescript
export type AttestOptions = {
  // Deprecated. Use 'subjects' instead.
  subjectName?: string
  // Deprecated. Use 'subjects' instead.
  subjectDigest?: Record<string, string>
  // Collection of subjects to be attested
  subjects?: Subject[]
  // URI identifying the content type of the predicate being attested.
  predicateType: string
  // Predicate to be attested.
  predicate: object
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: 'public-good' | 'github'
  // HTTP headers to include in request to attestations API.
  headers?: {[header: string]: string | number | undefined}
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
}

export type Subject = {
  // Name of the subject.
  name: string
  // Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
  digest: Record<string, string>
}
```

### `attestProvenance`

The `attestProvenance` function accepts the name and digest of some artifact and
generates a build provenance attestation over those values.

The attestation is formed by first generating a [SLSA provenance
predicate](https://slsa.dev/spec/v1.0/provenance) populated with
[metadata](https://github.com/slsa-framework/github-actions-buildtypes/tree/main/workflow/v1)
pulled from the GitHub Actions run.

```js
const { attestProvenance } = require('@actions/attest');
const core = require('@actions/core');

async function run() {
  // In order to persist attestations to the repo, this should be a token with
  // repository write permissions.
  const ghToken = core.getInput('gh-token');

  const attestation = await attestProvenance({
    subjectName: 'my-artifact-name',
    subjectDigest: { 'sha256': '36ab4667...'},
    token: ghToken
  });

  console.log(attestation);
}

run();
```

The `attestProvenance` function supports the following options:

```typescript
export type AttestProvenanceOptions = {
  // Deprecated. Use 'subjects' instead.
  subjectName?: string
  // Deprecated. Use 'subjects' instead.
  subjectDigest?: Record<string, string>
  // Collection of subjects to be attested
  subjects?: Subject[]
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: 'public-good' | 'github'
  // HTTP headers to include in request to attestations API.
  headers?: {[header: string]: string | number | undefined}
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
  // Issuer URL responsible for minting the OIDC token from which the
  // provenance data is read. Defaults to
  // 'https://token.actions.githubusercontent.com'.
  issuer?: string
}
```

### `Attestation`

The `Attestation` returned by `attest`/`attestProvenance` has the following
fields:

```typescript
export type Attestation = {
  /*
   * JSON-serialized Sigstore bundle containing the provenance attestation,
   * signature, signing certificate and witnessed timestamp.
   */
  bundle: SerializedBundle
  /*
   * PEM-encoded signing certificate used to sign the attestation.
   */
  certificate: string
  /*
   * ID of Rekor transparency log entry created for the attestation (if
   * applicable).
   */
  tlogID?: string
  /*
   * ID of the persisted attestation (accessible via the GH API).
   */
  attestationID?: string
}
```

For details about the Sigstore bundle format, see the [Bundle protobuf
specification](https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto).

## Sigstore Instance

When generating the signed attestation there are two different Sigstore
instances which can be used to issue the signing certificate. By default,
workflows initiated from public repositories will use the Sigstore public-good
instance and persist the attestation signature to the public [Rekor transparency
log](https://docs.sigstore.dev/logging/overview/). Workflows initiated from
private/internal repositories will use the GitHub-internal Sigstore instance
which uses a signed timestamp issued by GitHub's timestamp authority in place of
the public transparency log.

The default Sigstore instance selection can be overridden by passing an explicit
value of either "public-good" or "github" for the `sigstore` option when calling
either `attest` or `attestProvenance`.
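For example, a hedged sketch of pinning the public-good instance explicitly (the subject and predicate values are placeholders in the style of the examples above):

```js
const attestation = await attest({
  subjects: [{name: 'my-artifact-name', digest: { 'sha256': '36ab4667...'}}],
  predicateType: 'https://in-toto.io/attestation/release',
  predicate: { purl: 'pkg:npm/my-package@1.0.0' },
  token: ghToken,
  sigstore: 'public-good'
});
```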
## Storage

Attestations created by `attest`/`attestProvenance` will be uploaded to the GH
attestations API and associated with the appropriate repository. Attestation
storage is only supported for public repositories or repositories which belong
to a GitHub Enterprise Cloud account.

In order to generate attestations for private, non-Enterprise repositories, the
`skipWrite` option should be set to `true`.
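For instance, a hedged sketch for a private, non-Enterprise repository (all values illustrative):

```js
const attestation = await attestProvenance({
  subjectName: 'my-artifact-name',
  subjectDigest: { 'sha256': '36ab4667...'},
  token: ghToken,
  skipWrite: true // sign the attestation but skip persisting it
});
```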
@@ -0,0 +1,63 @@
# @actions/attest Releases

### 2.0.0

- Add support for Node 24 [#2110](https://github.com/actions/toolkit/pull/2110)
- Bump @sigstore/bundle from 3.0.0 to 3.1.0
- Bump @sigstore/sign from 3.0.0 to 3.1.0
- Bump jose from 5.2.3 to 5.10.0

### 1.6.0

- Update `buildSLSAProvenancePredicate` to populate `workflow.ref` field from the `ref` claim in the OIDC token [#1969](https://github.com/actions/toolkit/pull/1969)

### 1.5.0

- Bump @actions/core from 1.10.1 to 1.11.1 [#1847](https://github.com/actions/toolkit/pull/1847)
- Bump @sigstore/bundle from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
- Bump @sigstore/sign from 2.3.2 to 3.0.0 [#1846](https://github.com/actions/toolkit/pull/1846)
- Support for generating multi-subject attestations [#1864](https://github.com/actions/toolkit/pull/1865)
- Fix bug in `buildSLSAProvenancePredicate` related to `workflow_ref` OIDC token claims containing the "@" symbol in the tag name [#1863](https://github.com/actions/toolkit/pull/1863)

### 1.4.2

- Fix bug in `buildSLSAProvenancePredicate`/`attestProvenance` when generating provenance statement for enterprise account using customized OIDC issuer value [#1823](https://github.com/actions/toolkit/pull/1823)

### 1.4.1

- Bump @actions/http-client from 2.2.1 to 2.2.3 [#1805](https://github.com/actions/toolkit/pull/1805)

### 1.4.0

- Add new `headers` parameter to the `attest` and `attestProvenance` functions [#1790](https://github.com/actions/toolkit/pull/1790)
- Update `buildSLSAProvenancePredicate`/`attestProvenance` to automatically derive default OIDC issuer URL from current execution context [#1796](https://github.com/actions/toolkit/pull/1796)

### 1.3.1

- Fix bug with proxy support when retrieving JWKS for OIDC issuer [#1776](https://github.com/actions/toolkit/pull/1776)

### 1.3.0

- Dynamic construction of Sigstore API URLs [#1735](https://github.com/actions/toolkit/pull/1735)
- Switch to new GH provenance build type [#1745](https://github.com/actions/toolkit/pull/1745)
- Fetch existing Rekor entry on 409 conflict error [#1759](https://github.com/actions/toolkit/pull/1759)
- Bump @sigstore/bundle from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)
- Bump @sigstore/sign from 2.3.0 to 2.3.2 [#1738](https://github.com/actions/toolkit/pull/1738)

### 1.2.1

- Retry request on attestation persistence failure [#1725](https://github.com/actions/toolkit/pull/1725)

### 1.2.0

- Generate attestations using the v0.3 Sigstore bundle format [#1701](https://github.com/actions/toolkit/pull/1701)
- Bump @sigstore/bundle from 2.2.0 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
- Bump @sigstore/sign from 2.2.3 to 2.3.0 [#1701](https://github.com/actions/toolkit/pull/1701)
- Remove dependency on make-fetch-happen [#1714](https://github.com/actions/toolkit/pull/1714)

### 1.1.0

- Updates the `attestProvenance` function to retrieve a token from the GitHub OIDC provider and use the token claims to populate the provenance statement [#1693](https://github.com/actions/toolkit/pull/1693)

### 1.0.0

- Initial release
@@ -0,0 +1,19 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`buildIntotoStatement returns an intoto statement 1`] = `
{
  "_type": "https://in-toto.io/Statement/v1",
  "predicate": {
    "key": "value",
  },
  "predicateType": "predicatey",
  "subject": [
    {
      "digest": {
        "sha256": "7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32",
      },
      "name": "subjecty",
    },
  ],
}
`;
@@ -0,0 +1,43 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`provenance functions buildSLSAProvenancePredicate returns a provenance hydrated from an OIDC token 1`] = `
{
  "params": {
    "buildDefinition": {
      "buildType": "https://actions.github.io/buildtypes/workflow/v1",
      "externalParameters": {
        "workflow": {
          "path": ".github/workflows/main.yml",
          "ref": "refs/heads/main",
          "repository": "https://foo.ghe.com/owner/repo",
        },
      },
      "internalParameters": {
        "github": {
          "event_name": "push",
          "repository_id": "repo-id",
          "repository_owner_id": "owner-id",
          "runner_environment": "github-hosted",
        },
      },
      "resolvedDependencies": [
        {
          "digest": {
            "gitCommit": "babca52ab0c93ae16539e5923cb0d7403b9a093b",
          },
          "uri": "git+https://foo.ghe.com/owner/repo@refs/heads/main",
        },
      ],
    },
    "runDetails": {
      "builder": {
        "id": "https://foo.ghe.com/owner/workflows/.github/workflows/publish.yml@main",
      },
      "metadata": {
        "invocationId": "https://foo.ghe.com/owner/repo/actions/runs/run-id/attempts/run-attempt",
      },
    },
  },
  "type": "https://slsa.dev/provenance/v1",
}
`;
@@ -0,0 +1,16 @@

import {attest} from '../src/attest'

describe('attest', () => {
  describe('when no subject information is provided', () => {
    it('throws an error', async () => {
      const options = {
        predicateType: 'foo',
        predicate: {bar: 'baz'},
        token: 'token'
      }
      await expect(attest(options)).rejects.toThrowError(
        'Must provide either subjectName and subjectDigest or subjects'
      )
    })
  })
})
@@ -0,0 +1,41 @@

import {signingEndpoints} from '../src/endpoints'

describe('signingEndpoints', () => {
  const originalEnv = process.env

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when using github.com', () => {
    beforeEach(async () => {
      process.env = {
        ...originalEnv,
        GITHUB_SERVER_URL: 'https://github.com'
      }
    })

    it('returns expected endpoints', async () => {
      const endpoints = signingEndpoints('github')

      expect(endpoints.fulcioURL).toEqual('https://fulcio.githubapp.com')
      expect(endpoints.tsaServerURL).toEqual('https://timestamp.githubapp.com')
    })
  })

  describe('when using custom domain', () => {
    beforeEach(async () => {
      process.env = {
        ...originalEnv,
        GITHUB_SERVER_URL: 'https://foo.bar.com'
      }
    })

    it('returns expected endpoints', async () => {
      const endpoints = signingEndpoints('github')

      expect(endpoints.fulcioURL).toEqual('https://fulcio.foo.bar.com')
      expect(endpoints.tsaServerURL).toEqual('https://timestamp.foo.bar.com')
    })
  })
})
@@ -0,0 +1,6 @@

import {attest, attestProvenance} from '../src'

it('exports functions', () => {
  expect(attestProvenance).toBeInstanceOf(Function)
  expect(attest).toBeInstanceOf(Function)
})
@@ -0,0 +1,23 @@

import {buildIntotoStatement} from '../src/intoto'
import type {Predicate, Subject} from '../src/shared.types'

describe('buildIntotoStatement', () => {
  const subject: Subject = {
    name: 'subjecty',
    digest: {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    }
  }

  const predicate: Predicate = {
    type: 'predicatey',
    params: {
      key: 'value'
    }
  }

  it('returns an intoto statement', () => {
    const statement = buildIntotoStatement([subject], predicate)
    expect(statement).toMatchSnapshot()
  })
})
@@ -0,0 +1,199 @@

import * as jose from 'jose'
import nock from 'nock'
import {getIDTokenClaims} from '../src/oidc'

describe('getIDTokenClaims', () => {
  const originalEnv = process.env
  const issuer = 'https://example.com'
  const audience = 'nobody'
  const requestToken = 'token'
  const openidConfigPath = '/.well-known/openid-configuration'
  const jwksPath = '/.well-known/jwks.json'
  const tokenPath = '/token'
  const openIDConfig = {jwks_uri: `${issuer}${jwksPath}`}

  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  let key: any

  beforeEach(async () => {
    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: requestToken
    }

    // Generate JWT signing key
    key = await jose.generateKeyPair('PS256')

    // Create JWK and JWKS
    const jwk = await jose.exportJWK(key.publicKey)
    const jwks = {keys: [jwk]}

    nock(issuer).get(openidConfigPath).reply(200, openIDConfig)
    nock(issuer).get(jwksPath).reply(200, jwks)
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when ID token is valid', () => {
    const claims = {
      iss: issuer,
      aud: audience,
      ref: 'ref',
      sha: 'sha',
      repository: 'repo',
      event_name: 'push',
      job_workflow_ref: 'job_workflow_ref',
      workflow_ref: 'workflow',
      repository_id: '1',
      repository_owner_id: '1',
      runner_environment: 'github-hosted',
      run_id: '1',
      run_attempt: '1'
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('returns the ID token claims', async () => {
      const result = await getIDTokenClaims(issuer)
      expect(result).toEqual(claims)
    })
  })

  describe('when ID token is valid (w/ enterprise slug)', () => {
    const claims = {
      iss: `${issuer}/foo-bar`,
      aud: audience,
      ref: 'ref',
      sha: 'sha',
      repository: 'repo',
      event_name: 'push',
      job_workflow_ref: 'job_workflow_ref',
      workflow_ref: 'workflow',
      repository_id: '1',
      repository_owner_id: '1',
      runner_environment: 'github-hosted',
      run_id: '1',
      run_attempt: '1'
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('returns the ID token claims', async () => {
      const result = await getIDTokenClaims(issuer)
      expect(result).toEqual(claims)
    })
  })

  describe('when ID token is missing the "iss" claim', () => {
    const claims = {
      aud: audience
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing "iss"/i)
    })
  })

  describe('when ID token is missing required claims', () => {
    const claims = {
      iss: issuer,
      aud: audience
    }

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/missing claims/i)
    })
  })

  describe('when ID token has the wrong issuer', () => {
    const claims = {foo: 'bar', iss: 'foo', aud: 'nobody'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(
        /unexpected "iss"/i
      )
    })
  })

  describe('when ID token has the wrong audience', () => {
    const claims = {foo: 'bar', iss: issuer, aud: 'bar'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(/unexpected "aud"/)
    })
  })

  describe('when openid config cannot be retrieved', () => {
    const claims = {foo: 'bar', iss: issuer, aud: 'nobody'}

    beforeEach(async () => {
      const jwt = await new jose.SignJWT(claims)
        .setProtectedHeader({alg: 'PS256'})
        .sign(key.privateKey)

      nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})

      // Disable the openid config endpoint
      nock.removeInterceptor({
        proto: 'https',
        hostname: 'example.com',
        port: '443',
        method: 'GET',
        path: openidConfigPath
      })
    })

    it('throws an error', async () => {
      await expect(getIDTokenClaims(issuer)).rejects.toThrow(
        /failed to get id/i
      )
    })
  })
})
@@ -0,0 +1,248 @@

import * as github from '@actions/github'
import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import * as jose from 'jose'
import nock from 'nock'
import {MockAgent, setGlobalDispatcher} from 'undici'
import {SIGSTORE_PUBLIC_GOOD, signingEndpoints} from '../src/endpoints'
import {attestProvenance, buildSLSAProvenancePredicate} from '../src/provenance'

describe('provenance functions', () => {
  const originalEnv = process.env
  const issuer = 'https://token.actions.foo.ghe.com'
  const audience = 'nobody'
  const jwksPath = '/.well-known/jwks.json'
  const tokenPath = '/token'

  // MockAgent for mocking @actions/github
  const mockAgent = new MockAgent()
  setGlobalDispatcher(mockAgent)

  const claims = {
    iss: issuer,
    aud: 'nobody',
    repository: 'owner/repo',
    ref: 'refs/heads/main',
    sha: 'babca52ab0c93ae16539e5923cb0d7403b9a093b',
    job_workflow_ref: 'owner/workflows/.github/workflows/publish.yml@main',
    workflow_ref: 'owner/repo/.github/workflows/main.yml@main',
    event_name: 'push',
    repository_id: 'repo-id',
    repository_owner_id: 'owner-id',
    run_id: 'run-id',
    run_attempt: 'run-attempt',
    runner_environment: 'github-hosted'
  }

  const mockIssuer = async (claims: jose.JWTPayload): Promise<void> => {
    // Generate JWT signing key
    const key = await jose.generateKeyPair('PS256')

    // Create JWK, JWKS, and JWT
    const jwk = await jose.exportJWK(key.publicKey)
    const jwks = {keys: [jwk]}
    const jwt = await new jose.SignJWT(claims)
      .setProtectedHeader({alg: 'PS256'})
      .sign(key.privateKey)

    // Mock OpenID configuration and JWKS endpoints
    nock(issuer)
      .get('/.well-known/openid-configuration')
      .reply(200, {jwks_uri: `${issuer}${jwksPath}`})
    nock(issuer).get(jwksPath).reply(200, jwks)

    // Mock OIDC token endpoint for populating the provenance
    nock(issuer).get(tokenPath).query({audience}).reply(200, {value: jwt})
  }

  beforeEach(async () => {
    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: `${issuer}${tokenPath}?`,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token',
      GITHUB_SERVER_URL: 'https://foo.ghe.com',
      GITHUB_REPOSITORY: claims.repository
    }

    await mockIssuer(claims)
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('buildSLSAProvenancePredicate', () => {
    it('returns a provenance hydrated from an OIDC token', async () => {
      const predicate = await buildSLSAProvenancePredicate()
      expect(predicate).toMatchSnapshot()
    })
  })

  describe('attestProvenance', () => {
    // Subject to attest
    const subjectName = 'subjective'
    const subjectDigest = {
      sha256: '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
    }

    // Fake an OIDC token
    const oidcPayload = {sub: 'foo@bar.com', iss: ''}
    const oidcToken = `.${Buffer.from(JSON.stringify(oidcPayload)).toString(
      'base64'
    )}.}`

    const attestationID = '1234567890'

    beforeEach(async () => {
      nock(issuer)
        .get(tokenPath)
        .query({audience: 'sigstore'})
        .reply(200, {value: oidcToken})
    })

    describe('when using the github Sigstore instance', () => {
      beforeEach(async () => {
        const {fulcioURL, tsaServerURL} = signingEndpoints('github')

        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockTSA({baseURL: tsaServerURL})

        mockAgent
          .get('https://api.github.com')
          .intercept({
            path: /^\/repos\/.*\/.*\/attestations$/,
            method: 'post'
          })
          .reply(201, {id: attestationID})
      })

      describe('when the sigstore instance is explicitly set', () => {
        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjects: [{name: subjectName, digest: subjectDigest}],
            token: 'token',
            sigstore: 'github'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeUndefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })

      describe('when the sigstore instance is inferred from the repo visibility', () => {
        const savedRepository = github.context.payload.repository

        beforeEach(() => {
          /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
          github.context.payload.repository = {visibility: 'private'} as any
        })

        afterEach(() => {
          github.context.payload.repository = savedRepository
        })

        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjects: [{name: subjectName, digest: subjectDigest}],
            token: 'token'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeUndefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })
    })

    describe('when using the public-good Sigstore instance', () => {
      const {fulcioURL, rekorURL} = SIGSTORE_PUBLIC_GOOD

      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockRekor({baseURL: rekorURL})

        // Mock GH attestations API
        mockAgent
          .get('https://api.github.com')
          .intercept({
            path: /^\/repos\/.*\/.*\/attestations$/,
            method: 'post'
          })
          .reply(201, {id: attestationID})
      })

      describe('when the sigstore instance is explicitly set', () => {
        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjects: [{name: subjectName, digest: subjectDigest}],
            token: 'token',
            sigstore: 'public-good'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeDefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })

      describe('when the sigstore instance is inferred from the repo visibility', () => {
        const savedRepository = github.context.payload.repository

        beforeEach(() => {
          /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
          github.context.payload.repository = {visibility: 'public'} as any
        })

        afterEach(() => {
          github.context.payload.repository = savedRepository
        })

        it('attests provenance', async () => {
          const attestation = await attestProvenance({
            subjects: [{name: subjectName, digest: subjectDigest}],
            token: 'token'
          })

          expect(attestation).toBeDefined()
          expect(attestation.bundle).toBeDefined()
          expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
          expect(attestation.tlogID).toBeDefined()
          expect(attestation.attestationID).toBe(attestationID)
        })
      })
    })

    describe('when skipWrite is set to true', () => {
      const {fulcioURL, rekorURL} = SIGSTORE_PUBLIC_GOOD

      beforeEach(async () => {
        // Mock Sigstore
        await mockFulcio({baseURL: fulcioURL, strict: false})
        await mockRekor({baseURL: rekorURL})
      })

      it('attests provenance', async () => {
        const attestation = await attestProvenance({
          subjectName,
          subjectDigest,
          token: 'token',
          sigstore: 'public-good',
          skipWrite: true
        })

        expect(attestation).toBeDefined()
        expect(attestation.bundle).toBeDefined()
        expect(attestation.certificate).toMatch(/-----BEGIN CERTIFICATE-----/)
        expect(attestation.tlogID).toBeDefined()
        expect(attestation.attestationID).toBeUndefined()
      })
    })
  })
})
@@ -0,0 +1,101 @@

import {mockFulcio, mockRekor, mockTSA} from '@sigstore/mock'
import nock from 'nock'
import {Payload, signPayload} from '../src/sign'

describe('signProvenance', () => {
  const originalEnv = process.env

  // Fake an OIDC token
  const subject = 'foo@bar.com'
  const oidcPayload = {sub: subject, iss: ''}
  const oidcToken = `.${Buffer.from(JSON.stringify(oidcPayload)).toString(
    'base64'
  )}.}`

  // Dummy provenance to be signed
  const provenance = {
    _type: 'https://in-toto.io/Statement/v1',
    subject: {
      name: 'subjective',
      digest: {
        sha256:
          '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
      }
    }
  }

  const payload: Payload = {
    body: Buffer.from(JSON.stringify(provenance)),
    type: 'application/vnd.in-toto+json'
  }

  const fulcioURL = 'https://fulcio.url'
  const rekorURL = 'https://rekor.url'
  const tsaServerURL = 'https://tsa.url'

  beforeEach(() => {
    // Mock OIDC token endpoint
    const tokenURL = 'https://token.url'

    process.env = {
      ...originalEnv,
      ACTIONS_ID_TOKEN_REQUEST_URL: tokenURL,
      ACTIONS_ID_TOKEN_REQUEST_TOKEN: 'token'
    }

    nock(tokenURL)
      .get('/')
      .query({audience: 'sigstore'})
      .reply(200, {value: oidcToken})
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when visibility is public', () => {
    beforeEach(async () => {
      await mockFulcio({baseURL: fulcioURL, strict: false})
      await mockRekor({baseURL: rekorURL})
    })

    it('returns a bundle', async () => {
      const att = await signPayload(payload, {fulcioURL, rekorURL})

      expect(att).toBeDefined()
      expect(att.mediaType).toEqual(
        'application/vnd.dev.sigstore.bundle.v0.3+json'
      )

      expect(att.content.$case).toEqual('dsseEnvelope')
      expect(att.verificationMaterial.content.$case).toEqual('certificate')
      expect(att.verificationMaterial.tlogEntries).toHaveLength(1)
      expect(
        att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
      ).toHaveLength(0)
    })
  })

  describe('when visibility is private', () => {
    beforeEach(async () => {
      await mockFulcio({baseURL: fulcioURL, strict: false})
      await mockTSA({baseURL: tsaServerURL})
    })

    it('returns a bundle', async () => {
      const att = await signPayload(payload, {fulcioURL, tsaServerURL})

      expect(att).toBeDefined()
      expect(att.mediaType).toEqual(
        'application/vnd.dev.sigstore.bundle.v0.3+json'
      )

      expect(att.content.$case).toEqual('dsseEnvelope')
      expect(att.verificationMaterial.content.$case).toEqual('certificate')
      expect(att.verificationMaterial.tlogEntries).toHaveLength(0)
      expect(
        att.verificationMaterial.timestampVerificationData?.rfc3161Timestamps
      ).toHaveLength(1)
    })
  })
})
@@ -0,0 +1,93 @@

import {MockAgent, setGlobalDispatcher} from 'undici'
import {writeAttestation} from '../src/store'

describe('writeAttestation', () => {
  const originalEnv = process.env
  const attestation = {foo: 'bar '}
  const token = 'token'
  const headers = {'X-GitHub-Foo': 'true'}

  const mockAgent = new MockAgent()
  setGlobalDispatcher(mockAgent)

  beforeEach(() => {
    process.env = {
      ...originalEnv,
      GITHUB_REPOSITORY: 'foo/bar'
    }
  })

  afterEach(() => {
    process.env = originalEnv
  })

  describe('when the api call is successful', () => {
    beforeEach(() => {
      mockAgent
        .get('https://api.github.com')
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`, ...headers},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(201, {id: '123'})
    })

    it('persists the attestation', async () => {
      await expect(
        writeAttestation(attestation, token, {headers})
      ).resolves.toEqual('123')
    })
  })

  describe('when the api call fails', () => {
    beforeEach(() => {
      mockAgent
        .get('https://api.github.com')
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(500, 'oops')
    })

    it('throws an error', async () => {
      await expect(
        writeAttestation(attestation, token, {retry: 0})
      ).rejects.toThrow(/oops/)
    })
  })

  describe('when the api call fails but succeeds on retry', () => {
    beforeEach(() => {
      const pool = mockAgent.get('https://api.github.com')

      pool
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(500, 'oops')
        .times(1)

      pool
        .intercept({
          path: '/repos/foo/bar/attestations',
          method: 'POST',
          headers: {authorization: `token ${token}`},
          body: JSON.stringify({bundle: attestation})
        })
        .reply(201, {id: '123'})
        .times(1)
    })

    it('persists the attestation', async () => {
      await expect(writeAttestation(attestation, token)).resolves.toEqual('123')
    })
  })
})

File diff suppressed because it is too large
@@ -0,0 +1,58 @@

{
  "name": "@actions/attest",
  "version": "2.0.0",
  "description": "Actions attestation lib",
  "keywords": [
    "github",
    "actions",
    "attestation"
  ],
  "homepage": "https://github.com/actions/toolkit/tree/main/packages/attest",
  "license": "MIT",
  "main": "lib/index.js",
  "types": "lib/index.d.ts",
  "directories": {
    "lib": "lib",
    "test": "__tests__"
  },
  "files": [
    "lib"
  ],
  "publishConfig": {
    "access": "public",
    "provenance": true
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/toolkit.git",
    "directory": "packages/attest"
  },
  "scripts": {
    "test": "echo \"Error: run tests from root\" && exit 1",
    "tsc": "tsc"
  },
  "bugs": {
    "url": "https://github.com/actions/toolkit/issues"
  },
  "devDependencies": {
    "@sigstore/mock": "^0.10.0",
    "@sigstore/rekor-types": "^3.0.0",
    "@types/jsonwebtoken": "^9.0.6",
    "nock": "^13.5.1",
    "undici": "^6.20.0"
  },
  "dependencies": {
    "@actions/core": "^1.11.1",
    "@actions/github": "^6.0.0",
    "@actions/http-client": "^2.2.3",
    "@octokit/plugin-retry": "^6.0.1",
    "@sigstore/bundle": "^3.1.0",
    "@sigstore/sign": "^3.1.0",
    "jose": "^5.10.0"
  },
  "overrides": {
    "@octokit/plugin-retry": {
      "@octokit/core": "^5.2.0"
    }
  }
}
@@ -0,0 +1,117 @@

import {bundleToJSON} from '@sigstore/bundle'
import {X509Certificate} from 'crypto'
import {SigstoreInstance, signingEndpoints} from './endpoints'
import {buildIntotoStatement} from './intoto'
import {Payload, signPayload} from './sign'
import {writeAttestation} from './store'

import type {Bundle} from '@sigstore/sign'
import type {Attestation, Predicate, Subject} from './shared.types'

const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'

/**
 * Options for attesting a subject / predicate.
 */
export type AttestOptions = {
  /**
   * @deprecated Use `subjects` instead.
   **/
  subjectName?: string
  /**
   * @deprecated Use `subjects` instead.
   **/
  subjectDigest?: Record<string, string>
  // Subjects to be attested.
  subjects?: Subject[]
  // Content type of the predicate being attested.
  predicateType: string
  // Predicate to be attested.
  predicate: object
  // GitHub token for writing attestations.
  token: string
  // Sigstore instance to use for signing. Must be one of "public-good" or
  // "github".
  sigstore?: SigstoreInstance
  // HTTP headers to include in request to attestations API.
  headers?: {[header: string]: string | number | undefined}
  // Whether to skip writing the attestation to the GH attestations API.
  skipWrite?: boolean
}

/**
 * Generates an attestation for the given subject and predicate. The subject and
 * predicate are combined into an in-toto statement, which is then signed using
 * the identified Sigstore instance and stored as an attestation.
 * @param options - The options for attestation.
 * @returns A promise that resolves to the attestation.
 */
export async function attest(options: AttestOptions): Promise<Attestation> {
  let subjects: Subject[]

  if (options.subjects) {
    subjects = options.subjects
  } else if (options.subjectName && options.subjectDigest) {
    subjects = [{name: options.subjectName, digest: options.subjectDigest}]
  } else {
    throw new Error(
      'Must provide either subjectName and subjectDigest or subjects'
    )
  }

  const predicate: Predicate = {
    type: options.predicateType,
    params: options.predicate
  }

  const statement = buildIntotoStatement(subjects, predicate)

  // Sign the provenance statement
  const payload: Payload = {
    body: Buffer.from(JSON.stringify(statement)),
    type: INTOTO_PAYLOAD_TYPE
  }
  const endpoints = signingEndpoints(options.sigstore)
  const bundle = await signPayload(payload, endpoints)

  // Store the attestation
  let attestationID: string | undefined
  if (options.skipWrite !== true) {
    attestationID = await writeAttestation(
      bundleToJSON(bundle),
      options.token,
      {headers: options.headers}
    )
  }

  return toAttestation(bundle, attestationID)
}

function toAttestation(bundle: Bundle, attestationID?: string): Attestation {
  let certBytes: Buffer
  switch (bundle.verificationMaterial.content.$case) {
    case 'x509CertificateChain':
      certBytes =
        bundle.verificationMaterial.content.x509CertificateChain.certificates[0]
          .rawBytes
      break
    case 'certificate':
      certBytes = bundle.verificationMaterial.content.certificate.rawBytes
      break
    default:
      throw new Error('Bundle must contain an x509 certificate')
  }

  const signingCert = new X509Certificate(certBytes)

  // Collect transparency log ID if available
  const tlogEntries = bundle.verificationMaterial.tlogEntries
  const tlogID = tlogEntries.length > 0 ? tlogEntries[0].logIndex : undefined

  return {
    bundle: bundleToJSON(bundle),
    certificate: signingCert.toString(),
    tlogID,
    attestationID
  }
}
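The JSDoc above describes the full `attest` flow (statement → signature → storage). A minimal usage sketch, assuming it runs inside a workflow step with `id-token: write` and `attestations: write` permissions; the artifact name and predicate type below are placeholder values, and the digest reuses the placeholder from the tests:

import {attest} from '@actions/attest'

async function run(): Promise<void> {
  // Sketch: attest an arbitrary predicate for a single subject.
  const attestation = await attest({
    subjects: [
      {
        name: 'my-artifact.tgz', // hypothetical artifact name
        digest: {
          sha256:
            '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
        }
      }
    ],
    predicateType: 'https://example.com/predicate/v1', // assumed predicate type
    predicate: {key: 'value'},
    token: process.env.GITHUB_TOKEN as string
  })

  console.log(attestation.attestationID)
}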
@@ -0,0 +1,55 @@

import * as github from '@actions/github'

const PUBLIC_GOOD_ID = 'public-good'
const GITHUB_ID = 'github'

const FULCIO_PUBLIC_GOOD_URL = 'https://fulcio.sigstore.dev'
const REKOR_PUBLIC_GOOD_URL = 'https://rekor.sigstore.dev'

export type SigstoreInstance = typeof PUBLIC_GOOD_ID | typeof GITHUB_ID

export type Endpoints = {
  fulcioURL: string
  rekorURL?: string
  tsaServerURL?: string
}

export const SIGSTORE_PUBLIC_GOOD: Endpoints = {
  fulcioURL: FULCIO_PUBLIC_GOOD_URL,
  rekorURL: REKOR_PUBLIC_GOOD_URL
}

export const signingEndpoints = (sigstore?: SigstoreInstance): Endpoints => {
  let instance: SigstoreInstance

  // An explicitly set instance type takes precedence, but if not set, use the
  // repository's visibility to determine the instance type.
  if (sigstore && [PUBLIC_GOOD_ID, GITHUB_ID].includes(sigstore)) {
    instance = sigstore
  } else {
    instance =
      github.context.payload.repository?.visibility === 'public'
        ? PUBLIC_GOOD_ID
        : GITHUB_ID
  }

  switch (instance) {
    case PUBLIC_GOOD_ID:
      return SIGSTORE_PUBLIC_GOOD
    case GITHUB_ID:
      return buildGitHubEndpoints()
  }
}

function buildGitHubEndpoints(): Endpoints {
  const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'
  let host = new URL(serverURL).hostname

  if (host === 'github.com') {
    host = 'githubapp.com'
  }
  return {
    fulcioURL: `https://fulcio.${host}`,
    tsaServerURL: `https://timestamp.${host}`
  }
}
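The hostname substitution in `buildGitHubEndpoints` is easiest to see with concrete inputs; a small sketch mirroring the tests above (the GHES hostname is illustrative):

import {signingEndpoints} from '../src/endpoints'

// github.com maps onto the githubapp.com service domain...
process.env.GITHUB_SERVER_URL = 'https://github.com'
console.log(signingEndpoints('github'))
// => {fulcioURL: 'https://fulcio.githubapp.com', tsaServerURL: 'https://timestamp.githubapp.com'}

// ...while any other host (e.g. a hypothetical GHES instance) is used as-is
process.env.GITHUB_SERVER_URL = 'https://my-company.ghe.com'
console.log(signingEndpoints('github'))
// => {fulcioURL: 'https://fulcio.my-company.ghe.com', tsaServerURL: 'https://timestamp.my-company.ghe.com'}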
@ -0,0 +1,9 @@
|
|||
export {AttestOptions, attest} from './attest'
|
||||
export {
|
||||
AttestProvenanceOptions,
|
||||
attestProvenance,
|
||||
buildSLSAProvenancePredicate
|
||||
} from './provenance'
|
||||
|
||||
export type {SerializedBundle} from '@sigstore/bundle'
|
||||
export type {Attestation, Predicate, Subject} from './shared.types'
|
||||
|
|
@@ -0,0 +1,32 @@

import {Predicate, Subject} from './shared.types'

const INTOTO_STATEMENT_V1_TYPE = 'https://in-toto.io/Statement/v1'

/**
 * An in-toto statement.
 * https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md
 */
export type InTotoStatement = {
  _type: string
  subject: Subject[]
  predicateType: string
  predicate: object
}

/**
 * Assembles the given subjects and predicate into an in-toto statement.
 * @param subjects - The subjects of the statement.
 * @param predicate - The predicate of the statement.
 * @returns The constructed in-toto statement.
 */
export const buildIntotoStatement = (
  subjects: Subject[],
  predicate: Predicate
): InTotoStatement => {
  return {
    _type: INTOTO_STATEMENT_V1_TYPE,
    subject: subjects,
    predicateType: predicate.type,
    predicate: predicate.params
  }
}
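To make the assembly concrete, a minimal sketch (the subject name and digest reuse the placeholder values from the tests; the predicate type is assumed):

import {buildIntotoStatement} from './intoto'

// Produces an InTotoStatement with _type 'https://in-toto.io/Statement/v1',
// the subjects array, and the predicate type/params spread into the envelope.
const statement = buildIntotoStatement(
  [
    {
      name: 'subjecty',
      digest: {
        sha256:
          '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
      }
    }
  ],
  {type: 'https://example.com/predicate/v1', params: {key: 'value'}}
)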
@@ -0,0 +1,117 @@

import {getIDToken} from '@actions/core'
import {HttpClient} from '@actions/http-client'
import * as jose from 'jose'

const OIDC_AUDIENCE = 'nobody'

const VALID_SERVER_URLS = [
  'https://github.com',
  new RegExp('^https://[a-z0-9-]+\\.ghe\\.com$')
] as const

const REQUIRED_CLAIMS = [
  'iss',
  'ref',
  'sha',
  'repository',
  'event_name',
  'job_workflow_ref',
  'workflow_ref',
  'repository_id',
  'repository_owner_id',
  'runner_environment',
  'run_id',
  'run_attempt'
] as const

export type ClaimSet = {[K in (typeof REQUIRED_CLAIMS)[number]]: string}

type OIDCConfig = {
  jwks_uri: string
}

export const getIDTokenClaims = async (issuer?: string): Promise<ClaimSet> => {
  issuer = issuer || getIssuer()
  try {
    const token = await getIDToken(OIDC_AUDIENCE)
    const claims = await decodeOIDCToken(token, issuer)
    assertClaimSet(claims)
    return claims
  } catch (error) {
    throw new Error(`Failed to get ID token: ${error.message}`)
  }
}

const decodeOIDCToken = async (
  token: string,
  issuer: string
): Promise<jose.JWTPayload> => {
  // Verify and decode token
  const jwks = jose.createLocalJWKSet(await getJWKS(issuer))
  const {payload} = await jose.jwtVerify(token, jwks, {
    audience: OIDC_AUDIENCE
  })

  if (!payload.iss) {
    throw new Error('Missing "iss" claim')
  }

  // Check that the issuer STARTS WITH the expected issuer URL to account for
  // the fact that the value may include an enterprise-specific slug
  if (!payload.iss.startsWith(issuer)) {
    throw new Error(`Unexpected "iss" claim: ${payload.iss}`)
  }

  return payload
}

const getJWKS = async (issuer: string): Promise<jose.JSONWebKeySet> => {
  const client = new HttpClient('@actions/attest')
  const config = await client.getJson<OIDCConfig>(
    `${issuer}/.well-known/openid-configuration`
  )

  if (!config.result) {
    throw new Error('No OpenID configuration found')
  }

  const jwks = await client.getJson<jose.JSONWebKeySet>(config.result.jwks_uri)

  if (!jwks.result) {
    throw new Error('No JWKS found for issuer')
  }

  return jwks.result
}

function assertClaimSet(claims: jose.JWTPayload): asserts claims is ClaimSet {
  const missingClaims: string[] = []

  for (const claim of REQUIRED_CLAIMS) {
    if (!(claim in claims)) {
      missingClaims.push(claim)
    }
  }

  if (missingClaims.length > 0) {
    throw new Error(`Missing claims: ${missingClaims.join(', ')}`)
  }
}

// Derive the current OIDC issuer based on the server URL
function getIssuer(): string {
  const serverURL = process.env.GITHUB_SERVER_URL || 'https://github.com'

  // Ensure the server URL is a valid GitHub server URL
  if (!VALID_SERVER_URLS.some(valid_url => serverURL.match(valid_url))) {
    throw new Error(`Invalid server URL: ${serverURL}`)
  }

  let host = new URL(serverURL).hostname

  if (host === 'github.com') {
    host = 'githubusercontent.com'
  }

  return `https://token.actions.${host}`
}
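The issuer derivation in `getIssuer` follows the same host-mapping pattern as the signing endpoints; a short sketch of the behavior (the GHES hostname is illustrative):

import {getIDTokenClaims} from './oidc'

// getIssuer() maps the server URL onto the OIDC issuer:
//   https://github.com          => https://token.actions.githubusercontent.com
//   https://my-company.ghe.com  => https://token.actions.my-company.ghe.com (hypothetical host)
// Any other server URL fails the VALID_SERVER_URLS check and throws.
async function example(): Promise<void> {
  const claims = await getIDTokenClaims() // issuer derived from GITHUB_SERVER_URL
  console.log(claims.repository, claims.ref, claims.sha)
}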
@@ -0,0 +1,95 @@

import {attest, AttestOptions} from './attest'
import {getIDTokenClaims} from './oidc'
import type {Attestation, Predicate} from './shared.types'

const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1'
const GITHUB_BUILD_TYPE = 'https://actions.github.io/buildtypes/workflow/v1'

export type AttestProvenanceOptions = Omit<
  AttestOptions,
  'predicate' | 'predicateType'
> & {
  issuer?: string
}

/**
 * Builds an SLSA (Supply Chain Levels for Software Artifacts) provenance
 * predicate using the GitHub Actions Workflow build type.
 * https://slsa.dev/spec/v1.0/provenance
 * https://github.com/slsa-framework/github-actions-buildtypes/tree/main/workflow/v1
 * @param issuer - URL for the OIDC issuer. Defaults to the GitHub Actions token
 * issuer.
 * @returns The SLSA provenance predicate.
 */
export const buildSLSAProvenancePredicate = async (
  issuer?: string
): Promise<Predicate> => {
  const serverURL = process.env.GITHUB_SERVER_URL
  const claims = await getIDTokenClaims(issuer)

  // Split just the path and ref from the workflow string.
  // owner/repo/.github/workflows/main.yml@main =>
  // .github/workflows/main.yml, main
  const [workflowPath] = claims.workflow_ref
    .replace(`${claims.repository}/`, '')
    .split('@')

  return {
    type: SLSA_PREDICATE_V1_TYPE,
    params: {
      buildDefinition: {
        buildType: GITHUB_BUILD_TYPE,
        externalParameters: {
          workflow: {
            ref: claims.ref,
            repository: `${serverURL}/${claims.repository}`,
            path: workflowPath
          }
        },
        internalParameters: {
          github: {
            event_name: claims.event_name,
            repository_id: claims.repository_id,
            repository_owner_id: claims.repository_owner_id,
            runner_environment: claims.runner_environment
          }
        },
        resolvedDependencies: [
          {
            uri: `git+${serverURL}/${claims.repository}@${claims.ref}`,
            digest: {
              gitCommit: claims.sha
            }
          }
        ]
      },
      runDetails: {
        builder: {
          id: `${serverURL}/${claims.job_workflow_ref}`
        },
        metadata: {
          invocationId: `${serverURL}/${claims.repository}/actions/runs/${claims.run_id}/attempts/${claims.run_attempt}`
        }
      }
    }
  }
}

/**
 * Attests the build provenance of the provided subject. Generates the SLSA
 * build provenance predicate, assembles it into an in-toto statement, and
 * attests it.
 *
 * @param options - The options for attesting the provenance.
 * @returns A promise that resolves to the attestation.
 */
export async function attestProvenance(
  options: AttestProvenanceOptions
): Promise<Attestation> {
  const predicate = await buildSLSAProvenancePredicate(options.issuer)
  return attest({
    ...options,
    predicateType: predicate.type,
    predicate: predicate.params
  })
}
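A usage sketch of `attestProvenance`, assuming a workflow step with `id-token: write` and `attestations: write` permissions; the artifact name is hypothetical and the digest reuses the placeholder value from the tests:

import {attestProvenance} from '@actions/attest'

async function run(): Promise<void> {
  // Build the SLSA predicate from the runner's OIDC claims, sign it, and
  // store it via the GitHub attestations API.
  const attestation = await attestProvenance({
    subjects: [
      {
        name: 'my-artifact.tgz', // hypothetical artifact name
        digest: {
          sha256:
            '7d070f6b64d9bcc530fe99cc21eaaa4b3c364e0b2d367d7735671fa202a03b32'
        }
      }
    ],
    token: process.env.GITHUB_TOKEN as string
  })

  console.log(`attestation ID: ${attestation.attestationID}`)
}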
@ -0,0 +1,52 @@
|
|||
import type {SerializedBundle} from '@sigstore/bundle'
|
||||
|
||||
/*
|
||||
* The subject of an attestation.
|
||||
*/
|
||||
export type Subject = {
|
||||
/*
|
||||
* Name of the subject.
|
||||
*/
|
||||
name: string
|
||||
/*
|
||||
* Digests of the subject. Should be a map of digest algorithms to their hex-encoded values.
|
||||
*/
|
||||
digest: Record<string, string>
|
||||
}
|
||||
|
||||
/*
|
||||
* The predicate of an attestation.
|
||||
*/
|
||||
export type Predicate = {
|
||||
/*
|
||||
* URI identifying the content type of the predicate.
|
||||
*/
|
||||
type: string
|
||||
/*
|
||||
* Predicate parameters.
|
||||
*/
|
||||
params: object
|
||||
}
|
||||
|
||||
/*
|
||||
* Artifact attestation.
|
||||
*/
|
||||
export type Attestation = {
|
||||
/*
|
||||
* Serialized Sigstore bundle containing the provenance attestation,
|
||||
* signature, signing certificate and witnessed timestamp.
|
||||
*/
|
||||
bundle: SerializedBundle
|
||||
/*
|
||||
* PEM-encoded signing certificate used to sign the attestation.
|
||||
*/
|
||||
certificate: string
|
||||
/*
|
||||
* ID of Rekor transparency log entry created for the attestation.
|
||||
*/
|
||||
tlogID?: string
|
||||
/*
|
||||
* ID of the persisted attestation (accessible via the GH API).
|
||||
*/
|
||||
attestationID?: string
|
||||
}
|
||||
|
|
@ -0,0 +1,109 @@
|
|||
import {
|
||||
Bundle,
|
||||
BundleBuilder,
|
||||
CIContextProvider,
|
||||
DSSEBundleBuilder,
|
||||
FulcioSigner,
|
||||
RekorWitness,
|
||||
TSAWitness,
|
||||
Witness
|
||||
} from '@sigstore/sign'
|
||||
|
||||
const OIDC_AUDIENCE = 'sigstore'
|
||||
const DEFAULT_TIMEOUT = 10000
|
||||
const DEFAULT_RETRIES = 3
|
||||
|
||||
/**
|
||||
* The payload to be signed (body) and its media type (type).
|
||||
*/
|
||||
export type Payload = {
|
||||
body: Buffer
|
||||
type: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for signing a document.
|
||||
*/
|
||||
export type SignOptions = {
|
||||
/**
|
||||
* The URL of the Fulcio service.
|
||||
*/
|
||||
fulcioURL: string
|
||||
/**
|
||||
* The URL of the Rekor service.
|
||||
*/
|
||||
rekorURL?: string
|
||||
/**
|
||||
* The URL of the TSA (Time Stamping Authority) server.
|
||||
*/
|
||||
tsaServerURL?: string
|
||||
/**
|
||||
* The timeout duration in milliseconds when communicating with Sigstore
|
||||
* services.
|
||||
*/
|
||||
timeout?: number
|
||||
/**
|
||||
* The number of retry attempts.
|
||||
*/
|
||||
retry?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Signs the provided payload with a Sigstore-issued certificate and returns the
|
||||
* signature bundle.
|
||||
* @param payload Payload to be signed.
|
||||
* @param options Signing options.
|
||||
* @returns A promise that resolves to the Sigstore signature bundle.
|
||||
*/
|
||||
export const signPayload = async (
|
||||
payload: Payload,
|
||||
options: SignOptions
|
||||
): Promise<Bundle> => {
|
||||
const artifact = {
|
||||
data: payload.body,
|
||||
type: payload.type
|
||||
}
|
||||
|
||||
// Sign the artifact and build the bundle
|
||||
return initBundleBuilder(options).create(artifact)
|
||||
}
|
||||
|
||||
// Assembles the Sigstore bundle builder with the appropriate options
|
||||
const initBundleBuilder = (opts: SignOptions): BundleBuilder => {
|
||||
const identityProvider = new CIContextProvider(OIDC_AUDIENCE)
|
||||
const timeout = opts.timeout || DEFAULT_TIMEOUT
|
||||
const retry = opts.retry || DEFAULT_RETRIES
|
||||
const witnesses: Witness[] = []
|
||||
|
||||
const signer = new FulcioSigner({
|
||||
identityProvider,
|
||||
fulcioBaseURL: opts.fulcioURL,
|
||||
timeout,
|
||||
retry
|
||||
})
|
||||
|
||||
if (opts.rekorURL) {
|
||||
witnesses.push(
|
||||
new RekorWitness({
|
||||
rekorBaseURL: opts.rekorURL,
|
||||
fetchOnConflict: true,
|
||||
timeout,
|
||||
retry
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
if (opts.tsaServerURL) {
|
||||
witnesses.push(
|
||||
new TSAWitness({
|
||||
tsaBaseURL: opts.tsaServerURL,
|
||||
timeout,
|
||||
retry
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// Build the bundle with the singleCertificate option which will
|
||||
// trigger the creation of v0.3 DSSE bundles
|
||||
return new DSSEBundleBuilder({signer, witnesses})
|
||||
}
|
||||
|
|
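A usage sketch of `signPayload` against the public-good instance, assuming an Actions context where a `sigstore`-audience OIDC token is available; the payload body is a placeholder:

import {SIGSTORE_PUBLIC_GOOD} from './endpoints'
import {signPayload} from './sign'

async function example(): Promise<void> {
  // Sign an arbitrary in-toto payload; a Rekor entry is witnessed because
  // SIGSTORE_PUBLIC_GOOD supplies a rekorURL (no TSA timestamp is requested).
  const bundle = await signPayload(
    {
      body: Buffer.from(JSON.stringify({hello: 'world'})), // placeholder payload
      type: 'application/vnd.in-toto+json'
    },
    SIGSTORE_PUBLIC_GOOD
  )
  console.log(bundle.mediaType)
}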
@ -0,0 +1,48 @@
|
|||
import * as github from '@actions/github'
|
||||
import {retry} from '@octokit/plugin-retry'
|
||||
import {RequestHeaders} from '@octokit/types'
|
||||
|
||||
const CREATE_ATTESTATION_REQUEST = 'POST /repos/{owner}/{repo}/attestations'
|
||||
const DEFAULT_RETRY_COUNT = 5
|
||||
|
||||
export type WriteOptions = {
|
||||
retry?: number
|
||||
headers?: RequestHeaders
|
||||
}
|
||||
/**
|
||||
* Writes an attestation to the repository's attestations endpoint.
|
||||
* @param attestation - The attestation to write.
|
||||
* @param token - The GitHub token for authentication.
|
||||
* @returns The ID of the attestation.
|
||||
* @throws Error if the attestation fails to persist.
|
||||
*/
|
||||
export const writeAttestation = async (
|
||||
attestation: unknown,
|
||||
token: string,
|
||||
options: WriteOptions = {}
|
||||
): Promise<string> => {
|
||||
const retries = options.retry ?? DEFAULT_RETRY_COUNT
|
||||
const octokit = github.getOctokit(token, {retry: {retries}}, retry)
|
||||
|
||||
try {
|
||||
const response = await octokit.request(CREATE_ATTESTATION_REQUEST, {
|
||||
owner: github.context.repo.owner,
|
||||
repo: github.context.repo.repo,
|
||||
headers: options.headers,
|
||||
bundle: attestation as {
|
||||
mediaType?: string
|
||||
verificationMaterial?: {[key: string]: unknown}
|
||||
dsseEnvelope?: {[key: string]: unknown}
|
||||
}
|
||||
})
|
||||
|
||||
const data =
|
||||
typeof response.data == 'string'
|
||||
? JSON.parse(response.data)
|
||||
: response.data
|
||||
return data?.id
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : err
|
||||
throw new Error(`Failed to persist attestation: ${message}`)
|
||||
}
|
||||
}
|
||||
|
|
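A usage sketch, assuming `GITHUB_REPOSITORY` identifies the target repo and `bundle` is the serialized Sigstore bundle produced upstream; the extra header shown is hypothetical:

import {writeAttestation} from './store'

async function example(bundle: unknown): Promise<void> {
  // Persist the bundle, retrying up to 3 times and forwarding an extra header.
  const attestationID = await writeAttestation(
    bundle,
    process.env.GITHUB_TOKEN as string,
    {retry: 3, headers: {'X-GitHub-Foo': 'true'}} // hypothetical header
  )
  console.log(attestationID)
}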
@ -0,0 +1,12 @@
|
|||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"outDir": "./lib",
|
||||
"declaration": true,
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"include": [
|
||||
"./src"
|
||||
]
|
||||
}
|
||||
|
|
@@ -6,6 +6,20 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac

Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB.

## ⚠️ Important changes

The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.

The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.

**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.**

If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.

Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.

Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).

## Usage

This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).
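As a quick sketch of the library API (the paths and keys below are illustrative; `restoreCache` resolves to `undefined` on a cache miss):

```typescript
import * as cache from '@actions/cache'

async function run(): Promise<void> {
  const paths = ['node_modules']
  const key = 'npm-deps-v1' // hypothetical primary key
  const restoreKeys = ['npm-deps-'] // prefix fallbacks

  // Restore, falling back to partial key matches
  const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
  if (!cacheKey) {
    // ...install dependencies on a miss, then save for next time
    await cache.saveCache(paths, key)
  }
}
```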
@@ -47,5 +61,3 @@ const cacheKey = await cache.restoreCache(paths, key, restoreKeys)

A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail fast; previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner were used). Sometimes a segment download gets stuck, which causes the workflow job to hang forever and fail. Version `v3.0.4` of the cache package introduces a segment download timeout that allows a stuck segment download to be aborted, letting the job proceed with a cache miss.

The default value of this timeout is 10 minutes (starting with `v3.2.1`; previously 60 minutes in versions `v3.0.4` through `v3.2.0`, both inclusive) and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with the timeout value in minutes.
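The variable is read at download time, so it can be set in the job's `env` block or programmatically before restoring; a minimal sketch continuing the example above (the key is illustrative):

```typescript
// Shorten the per-segment download timeout to 5 minutes before restoring
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '5'
const cacheKey = await cache.restoreCache(['node_modules'], 'npm-deps-v1')
```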
@ -1,16 +1,212 @@
|
|||
# @actions/cache Releases
|
||||
|
||||
### 0.1.0
|
||||
### 4.1.0
|
||||
|
||||
- Initial release
|
||||
- Remove client side 10GiB cache size limit check & update twirp client [#2118](https://github.com/actions/toolkit/pull/2118)
|
||||
|
||||
### 0.2.0
|
||||
### 4.0.5
|
||||
|
||||
- Fixes issues with the zstd compression algorithm on Windows and Ubuntu 16.04 [#469](https://github.com/actions/toolkit/pull/469)
|
||||
- Reintroduce @protobuf-ts/runtime-rpc as a runtime dependency [#2113](https://github.com/actions/toolkit/pull/2113)
|
||||
|
||||
### 0.2.1
|
||||
### 4.0.4
|
||||
|
||||
- Fix to await async function getCompressionMethod
|
||||
⚠️ Faulty patch release. Upgrade to 4.0.5 instead.
|
||||
|
||||
- Optimized cache dependencies by moving `@protobuf-ts/plugin` to dev dependencies [#2106](https://github.com/actions/toolkit/pull/2106)
|
||||
- Improved cache service availability determination for different cache service versions (v1 and v2) [#2100](https://github.com/actions/toolkit/pull/2100)
|
||||
- Enhanced server error handling: 5xx HTTP errors are now logged as errors instead of warnings [#2099](https://github.com/actions/toolkit/pull/2099)
|
||||
- Fixed cache hit logging to properly distinguish between exact key matches and restore key matches [#2101](https://github.com/actions/toolkit/pull/2101)
|
||||
|
||||
### 4.0.3
|
||||
|
||||
- Added masking for Shared Access Signature (SAS) cache entry URLs [#1982](https://github.com/actions/toolkit/pull/1982)
|
||||
- Improved debugging by logging both the cache version alongside the keys requested when a cache restore fails [#1994](https://github.com/actions/toolkit/pull/1994)
|
||||
|
||||
### 4.0.2
|
||||
|
||||
- Wrap create failures in ReserveCacheError [#1966](https://github.com/actions/toolkit/pull/1966)
|
||||
|
||||
### 4.0.1
|
||||
|
||||
- Remove runtime dependency on `twirp-ts` [#1947](https://github.com/actions/toolkit/pull/1947)
|
||||
- Cache miss as debug, not warning annotation [#1954](https://github.com/actions/toolkit/pull/1954)
|
||||
|
||||
### 4.0.0
|
||||
|
||||
#### Important changes
|
||||
|
||||
The cache backend service has been rewritten from the ground up for improved performance and reliability. The [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) package now integrates with the new cache service (v2) APIs.
|
||||
|
||||
The new service will gradually roll out as of **February 1st, 2025**. The legacy service will also be sunset on the same date. Changes in this release are **fully backward compatible**.
|
||||
|
||||
**All previous versions of this package will be deprecated**. We recommend upgrading to version `4.0.0` as soon as possible before **February 1st, 2025.**
|
||||
|
||||
If you do not upgrade, all workflow runs using any of the deprecated [@actions/cache](https://github.com/actions/toolkit/tree/main/packages/cache) packages will fail.
|
||||
|
||||
Upgrading to the recommended version should not break or require any changes to your workflows beyond updating your `package.json` to version `4.0.0`.
|
||||
|
||||
Read more about the change & access the migration guide: [reference to the announcement](https://github.com/actions/toolkit/discussions/1890).
|
||||
|
||||
#### Minor changes
|
||||
|
||||
- Update `@actions/core` to `1.11.0`
|
||||
- Update `semver` `6.3.1`
|
||||
- Add `twirp-ts` `2.5.0` to dependencies
|
||||
|
||||
### 3.3.0
|
||||
|
||||
- Update `@actions/core` to `1.11.1`
|
||||
- Remove dependency on `uuid` package [#1824](https://github.com/actions/toolkit/pull/1824), [#1842](https://github.com/actions/toolkit/pull/1842)
|
||||
|
||||
### 3.2.4
|
||||
|
||||
- Updated `isGhes` check to include `.ghe.com` and `.ghe.localhost` as accepted hosts
|
||||
|
||||
### 3.2.3
|
||||
|
||||
- Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)
|
||||
|
||||
### 3.2.2
|
||||
|
||||
- Add new default cache download method to improve performance and reduce hangs [#1484](https://github.com/actions/toolkit/pull/1484)
|
||||
|
||||
### 3.2.1
|
||||
|
||||
- Updated @azure/storage-blob to `v12.13.0`
|
||||
|
||||
### 3.2.0
|
||||
|
||||
- Add `lookupOnly` to cache restore `DownloadOptions`.
|
||||
|
||||
### 3.1.4
|
||||
|
||||
- Fix zstd not being used due to `zstd --version` output change in zstd 1.5.4 release. See [#1353](https://github.com/actions/toolkit/pull/1353).
|
||||
|
||||
### 3.1.3
|
||||
|
||||
- Fix to prevent from setting MYSYS environement variable globally [#1329](https://github.com/actions/toolkit/pull/1329).
|
||||
|
||||
### 3.1.2
|
||||
|
||||
- Fix issue with symlink restoration on windows.
|
||||
|
||||
### 3.1.1
|
||||
|
||||
- Reverted changes in 3.1.0 to fix issue with symlink restoration on windows.
|
||||
- Added support for verbose logging about cache version during cache miss.
|
||||
|
||||
### 3.1.0
|
||||
|
||||
- Update actions/cache on windows to use gnu tar and zstd by default
|
||||
- Update actions/cache on windows to fallback to bsdtar and zstd if gnu tar is not available.
|
||||
- Added support for fallback to gzip to restore old caches on windows.
|
||||
|
||||
### 3.1.0-beta.3
|
||||
|
||||
- Bug Fixes for fallback to gzip to restore old caches on windows and bsdtar if gnutar is not available.
|
||||
|
||||
### 3.1.0-beta.2
|
||||
|
||||
- Added support for fallback to gzip to restore old caches on windows.
|
||||
|
||||
### 3.0.6
|
||||
|
||||
- Added `@azure/abort-controller` to dependencies to fix compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208)
|
||||
|
||||
### 3.0.5
|
||||
|
||||
- Update `@actions/cache` to use `@actions/core@^1.10.0`
|
||||
|
||||
### 3.0.4
|
||||
|
||||
- Fix zstd not working for windows on gnu tar in issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
|
||||
- Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes.
|
||||
|
||||
### 3.0.3
|
||||
|
||||
- Bug fixes for download stuck issue [#810](https://github.com/actions/cache/issues/810).
|
||||
|
||||
### 3.0.2
|
||||
|
||||
- Added 1 hour timeout for the download stuck issue [#810](https://github.com/actions/cache/issues/810).
|
||||
|
||||
### 3.0.1
|
||||
|
||||
- Fix [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory.
|
||||
- Fix [#809](https://github.com/actions/cache/issues/809) `zstd -d: no such file or directory` error on AWS self-hosted runners.
|
||||
|
||||
### 3.0.0
|
||||
|
||||
- Updated actions/cache to suppress Actions cache server error and log warning for those error [#1122](https://github.com/actions/toolkit/pull/1122)
|
||||
|
||||
### 2.0.6
|
||||
|
||||
- Fix `Tar failed with error: The process '/usr/bin/tar' failed with exit code 1` issue when temp directory where tar is getting created is actually the subdirectory of the path mentioned by the user for caching. ([issue](https://github.com/actions/cache/issues/689))
|
||||
|
||||
### 2.0.5
|
||||
|
||||
- Fix to avoid saving empty cache when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))
|
||||
|
||||
### 2.0.4
|
||||
|
||||
- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)
|
||||
|
||||
### 2.0.3
|
||||
|
||||
- Update to v2.0.0 of `@actions/http-client`
|
||||
|
||||
### 2.0.0
|
||||
|
||||
- Added support to check if Actions cache service feature is available or not [#1028](https://github.com/actions/toolkit/pull/1028)
|
||||
|
||||
### 1.0.11

- Fix file downloads > 2 GB ([issue](https://github.com/actions/cache/issues/773))

### 1.0.10

- Update `lockfileVersion` to `v2` in `package-lock.json` [#1022](https://github.com/actions/toolkit/pull/1022)

### 1.0.9

- Use `@azure/ms-rest-js` v2.6.0
- Use `@azure/storage-blob` v12.8.0

### 1.0.8

- Increase the allowed artifact cache size from 5 GB to 10 GB ([issue](https://github.com/actions/cache/discussions/497))

### 1.0.7

- Fixes a permissions issue when extracting archives with GNU tar on macOS ([issue](https://github.com/actions/cache/issues/527))

### 1.0.6

- Make caching more verbose [#650](https://github.com/actions/toolkit/pull/650)
- Use GNU tar on macOS if available [#701](https://github.com/actions/toolkit/pull/701)

### 1.0.5

- Fix to ensure Windows cache paths get resolved correctly

### 1.0.4

- Use `@actions/core` v1.2.6
- Fixes `uploadChunk` to throw an error if any unsuccessful response code is received

### 1.0.3

- Use `http-client` v1.0.9
- Fixes error handling so retries are not attempted on non-retryable errors (409 Conflict, for example)
- Adds a 5 second delay between retry attempts

### 1.0.2

- Use the POSIX archive format to add support for some tools

### 1.0.1

- Fix a bug in downloading large files (> 2 GB) with the Azure SDK

### 1.0.0

@@ -19,152 +215,14 @@

- Includes changes that break compatibility with earlier versions, including:
  - `retry`, `retryTypedResponse`, and `retryHttpClientResponse` moved from `cacheHttpClient` to `requestUtils`

### 1.0.1

### 0.2.1

- Fix a bug in downloading large files (> 2 GB) with the Azure SDK
- Fix to await the async function getCompressionMethod

### 0.2.0

### 1.0.2

- Fixes issues with the zstd compression algorithm on Windows and Ubuntu 16.04 [#469](https://github.com/actions/toolkit/pull/469)

### 0.1.0

- Use the POSIX archive format to add support for some tools

### 1.0.3

- Use `http-client` v1.0.9
- Fixes error handling so retries are not attempted on non-retryable errors (409 Conflict, for example)
- Adds a 5 second delay between retry attempts

### 1.0.4

- Use `@actions/core` v1.2.6
- Fixes `uploadChunk` to throw an error if any unsuccessful response code is received

### 1.0.5

- Fix to ensure Windows cache paths get resolved correctly

### 1.0.6

- Make caching more verbose [#650](https://github.com/actions/toolkit/pull/650)
- Use GNU tar on macOS if available [#701](https://github.com/actions/toolkit/pull/701)

### 1.0.7

- Fixes a permissions issue when extracting archives with GNU tar on macOS ([issue](https://github.com/actions/cache/issues/527))

### 1.0.8

- Increase the allowed artifact cache size from 5 GB to 10 GB ([issue](https://github.com/actions/cache/discussions/497))

### 1.0.9

- Use `@azure/ms-rest-js` v2.6.0
- Use `@azure/storage-blob` v12.8.0

### 1.0.10

- Update `lockfileVersion` to `v2` in `package-lock.json` [#1022](https://github.com/actions/toolkit/pull/1022)

### 1.0.11

- Fix file downloads > 2 GB ([issue](https://github.com/actions/cache/issues/773))

### 2.0.0

- Added support for checking whether the Actions cache service feature is available [#1028](https://github.com/actions/toolkit/pull/1028)

### 2.0.3

- Update to v2.0.0 of `@actions/http-client`

### 2.0.4

- Update to v2.0.1 of `@actions/http-client` [#1087](https://github.com/actions/toolkit/pull/1087)

### 2.0.5

- Fix to avoid saving an empty cache when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))

### 2.0.6

- Fix the `Tar failed with error: The process '/usr/bin/tar' failed with exit code 1` issue that occurs when the temp directory where the tar is created is a subdirectory of the path being cached. ([issue](https://github.com/actions/cache/issues/689))

### 3.0.0

- Updated actions/cache to suppress Actions cache server errors and log a warning for those errors [#1122](https://github.com/actions/toolkit/pull/1122)

### 3.0.1

- Fix [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with the GitHub workspace directory.
- Fix [#809](https://github.com/actions/cache/issues/809) - `zstd -d: no such file or directory` error on AWS self-hosted runners.

### 3.0.2

- Added a 1 hour timeout for the stuck-download issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.3

- Bug fixes for the stuck-download issue [#810](https://github.com/actions/cache/issues/810).

### 3.0.4

- Fix zstd not working for Windows with GNU tar. See issues [#888](https://github.com/actions/cache/issues/888) and [#891](https://github.com/actions/cache/issues/891).
- Allow users to provide a custom timeout for aborting the download of a cache segment via the `SEGMENT_DOWNLOAD_TIMEOUT_MINS` environment variable (default: 60 minutes).

### 3.0.5

- Update `@actions/cache` to use `@actions/core@^1.10.0`

### 3.0.6

- Added `@azure/abort-controller` to dependencies to fix a compatibility issue with ESM [#1208](https://github.com/actions/toolkit/issues/1208)

### 3.1.0-beta.1

- Update actions/cache on Windows to use GNU tar and zstd by default, falling back to bsdtar and zstd if GNU tar is not available. ([issue](https://github.com/actions/cache/issues/984))

### 3.1.0-beta.2

- Added support for falling back to gzip to restore old caches on Windows.

### 3.1.0-beta.3

- Bug fixes for the gzip fallback used to restore old caches on Windows, and for the bsdtar fallback when GNU tar is not available.

### 3.1.0

- Update actions/cache on Windows to use GNU tar and zstd by default.
- Update actions/cache on Windows to fall back to bsdtar and zstd if GNU tar is not available.
- Added support for falling back to gzip to restore old caches on Windows.

### 3.1.1

- Reverted the changes in 3.1.0 to fix an issue with symlink restoration on Windows.
- Added support for verbose logging of the cache version during a cache miss.

### 3.1.2

- Fix issue with symlink restoration on Windows.

### 3.1.3

- Fix to prevent setting the MSYS environment variable globally [#1329](https://github.com/actions/toolkit/pull/1329).

### 3.1.4

- Fix zstd not being used due to `zstd --version` output change in zstd 1.5.4 release. See [#1353](https://github.com/actions/toolkit/pull/1353).

### 3.2.0

- Add `lookupOnly` to cache restore `DownloadOptions`; a usage sketch follows this entry.
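A small sketch of what `lookupOnly` enables, checking for a hit without downloading the archive (the helper name is illustrative; `lookupOnly` is the documented `DownloadOptions` flag):

```typescript
// Sketch: probe for a cache hit without downloading the archive.
import {restoreCache} from '@actions/cache'

async function cacheEntryExists(paths: string[], key: string): Promise<boolean> {
  const matchedKey = await restoreCache(paths, key, undefined, {lookupOnly: true})
  return matchedKey !== undefined
}
```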
### 3.2.1

- Updated `@azure/storage-blob` to `v12.13.0`

### 3.2.2

- Add a new default cache download method to improve performance and reduce hangs [#1484](https://github.com/actions/toolkit/pull/1484)

### 3.2.3

- Fixed a bug that mutated path arguments to `getCacheVersion` [#1378](https://github.com/actions/toolkit/pull/1378)
- Initial release

@@ -3,10 +3,13 @@
 const fs = require('fs');
 const os = require('os');
 const filePath = process.env[`GITHUB_ENV`]
-fs.appendFileSync(filePath, `ACTIONS_RUNTIME_TOKEN=${process.env.ACTIONS_RUNTIME_TOKEN}${os.EOL}`, {
+fs.appendFileSync(filePath, `ACTIONS_CACHE_SERVICE_V2=true${os.EOL}`, {
   encoding: 'utf8'
 })
-fs.appendFileSync(filePath, `ACTIONS_CACHE_URL=${process.env.ACTIONS_CACHE_URL}${os.EOL}`, {
+fs.appendFileSync(filePath, `ACTIONS_RESULTS_URL=${process.env.ACTIONS_RESULTS_URL}${os.EOL}`, {
   encoding: 'utf8'
 })
+fs.appendFileSync(filePath, `ACTIONS_RUNTIME_TOKEN=${process.env.ACTIONS_RUNTIME_TOKEN}${os.EOL}`, {
+  encoding: 'utf8'
+})
 fs.appendFileSync(filePath, `GITHUB_RUN_ID=${process.env.GITHUB_RUN_ID}${os.EOL}`, {

@@ -1,14 +1,69 @@
 import * as cache from '../src/cache'
 
-test('isFeatureAvailable returns true if server url is set', () => {
-  try {
+describe('isFeatureAvailable', () => {
+  const originalEnv = process.env
+
+  beforeEach(() => {
+    jest.resetModules()
+    process.env = {...originalEnv}
+    // Clean cache-related environment variables
+    delete process.env['ACTIONS_CACHE_URL']
+    delete process.env['ACTIONS_RESULTS_URL']
+    delete process.env['ACTIONS_CACHE_SERVICE_V2']
+    delete process.env['GITHUB_SERVER_URL']
+  })
+
+  afterAll(() => {
+    process.env = originalEnv
+  })
+
+  test('returns true for cache service v1 when ACTIONS_CACHE_URL is set', () => {
     process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
     expect(cache.isFeatureAvailable()).toBe(true)
-  } finally {
-    delete process.env['ACTIONS_CACHE_URL']
-  }
-})
+  })
 
-test('isFeatureAvailable returns false if server url is not set', () => {
-  expect(cache.isFeatureAvailable()).toBe(false)
+  test('returns false for cache service v1 when only ACTIONS_RESULTS_URL is set', () => {
+    process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
+    expect(cache.isFeatureAvailable()).toBe(false)
+  })
+
+  test('returns true for cache service v1 when both URLs are set', () => {
+    process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
+    process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
+    expect(cache.isFeatureAvailable()).toBe(true)
+  })
+
+  test('returns true for cache service v2 when ACTIONS_RESULTS_URL is set', () => {
+    process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
+    process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
+    expect(cache.isFeatureAvailable()).toBe(true)
+  })
+
+  test('returns false for cache service v2 when only ACTIONS_CACHE_URL is set', () => {
+    process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
+    process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
+    expect(cache.isFeatureAvailable()).toBe(false)
+  })
+
+  test('returns false when no cache URLs are set', () => {
+    expect(cache.isFeatureAvailable()).toBe(false)
+  })
+
+  test('returns false for cache service v2 when no URLs are set', () => {
+    process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
+    expect(cache.isFeatureAvailable()).toBe(false)
+  })
+
+  test('returns true for GHES with v1 even when v2 flag is set', () => {
+    process.env['GITHUB_SERVER_URL'] = 'https://my-enterprise.github.com'
+    process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
+    process.env['ACTIONS_CACHE_URL'] = 'http://cache.com'
+    expect(cache.isFeatureAvailable()).toBe(true)
+  })
+
+  test('returns false for GHES with only ACTIONS_RESULTS_URL', () => {
+    process.env['GITHUB_SERVER_URL'] = 'https://my-enterprise.github.com'
+    process.env['ACTIONS_RESULTS_URL'] = 'http://results.com'
+    expect(cache.isFeatureAvailable()).toBe(false)
+  })
 })

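Taken together, these tests pin down a selection rule for the two cache backends. The sketch below restates that rule as a standalone function; it is an assumption inferred from the tests, not an excerpt of `src/cache`:

```typescript
// Assumed availability rule implied by the tests above: GHES always uses the
// v1 endpoint; otherwise ACTIONS_CACHE_SERVICE_V2 selects which URL matters.
function isFeatureAvailableSketch(
  env: NodeJS.ProcessEnv,
  onGhes: boolean
): boolean {
  const useV2 = !onGhes && env['ACTIONS_CACHE_SERVICE_V2'] === 'true'
  return useV2 ? !!env['ACTIONS_RESULTS_URL'] : !!env['ACTIONS_CACHE_URL']
}
```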
@@ -1,10 +1,17 @@
-import {downloadCache, getCacheVersion} from '../src/internal/cacheHttpClient'
+import {downloadCache} from '../src/internal/cacheHttpClient'
+import {getCacheVersion} from '../src/internal/cacheUtils'
 import {CompressionMethod} from '../src/internal/constants'
 import * as downloadUtils from '../src/internal/downloadUtils'
 import {DownloadOptions, getDownloadOptions} from '../src/options'
 
 jest.mock('../src/internal/downloadUtils')
 
+test('getCacheVersion does not mutate arguments', async () => {
+  const paths = ['node_modules']
+  getCacheVersion(paths, undefined, true)
+  expect(paths).toEqual(['node_modules'])
+})
+
 test('getCacheVersion with one path returns version', async () => {
   const paths = ['node_modules']
   const result = getCacheVersion(paths, undefined, true)

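The no-mutation test corresponds to the fix in [#1378](https://github.com/actions/toolkit/pull/1378). Below is a sketch of the defensive-copy pattern such a fix implies; the hashing details are assumptions about the shape of the real code, not a verbatim excerpt of `getCacheVersion`:

```typescript
// Sketch: copy the caller's array before appending version components, so the
// argument is never mutated (the aliasing bug the test above guards against).
import * as crypto from 'crypto'

function getCacheVersionSketch(
  paths: string[],
  compressionMethod?: string
): string {
  const components = paths.slice() // defensive copy; caller's array untouched
  if (compressionMethod) {
    components.push(compressionMethod)
  }
  return crypto.createHash('sha256').update(components.join('|')).digest('hex')
}
```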
@@ -2,6 +2,10 @@ import {promises as fs} from 'fs'
 import * as path from 'path'
 import * as cacheUtils from '../src/internal/cacheUtils'
 
+beforeEach(() => {
+  jest.resetModules()
+})
+
 test('getArchiveFileSizeInBytes returns file size', () => {
   const filePath = path.join(__dirname, '__fixtures__', 'helloWorld.txt')
 
@@ -0,0 +1,25 @@
+import * as config from '../src/internal/config'
+
+beforeEach(() => {
+  jest.resetModules()
+})
+
+test('isGhes returns false for github.com', async () => {
+  process.env.GITHUB_SERVER_URL = 'https://github.com'
+  expect(config.isGhes()).toBe(false)
+})
+
+test('isGhes returns false for ghe.com', async () => {
+  process.env.GITHUB_SERVER_URL = 'https://somedomain.ghe.com'
+  expect(config.isGhes()).toBe(false)
+})
+
+test('isGhes returns true for enterprise URL', async () => {
+  process.env.GITHUB_SERVER_URL = 'https://my-enterprise.github.com'
+  expect(config.isGhes()).toBe(true)
+})
+
+test('isGhes returns false for ghe.localhost', () => {
+  process.env.GITHUB_SERVER_URL = 'https://my.domain.ghe.localhost'
+  expect(config.isGhes()).toBe(false)
+})

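A sketch of the host-classification logic these tests imply (an assumption about `src/internal/config`, not a verbatim excerpt): github.com, `*.ghe.com`, and `*.ghe.localhost` are treated as non-GHES hosts, and everything else as GHES.

```typescript
// Sketch consistent with the isGhes tests above.
function isGhesSketch(
  serverUrl = process.env['GITHUB_SERVER_URL'] ?? 'https://github.com'
): boolean {
  const host = new URL(serverUrl).hostname.toUpperCase()
  const isGitHubHost = host === 'GITHUB.COM'
  const isGheHost =
    host.endsWith('.GHE.COM') || host.endsWith('.GHE.LOCALHOST')
  return !isGitHubHost && !isGheHost
}
```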
@@ -11,8 +11,6 @@ const downloadConcurrency = 8
 const timeoutInMs = 30000
 const segmentTimeoutInMs = 600000
 const lookupOnly = false
-const uploadConcurrency = 4
-const uploadChunkSize = 32 * 1024 * 1024
 
 test('getDownloadOptions sets defaults', async () => {
   const actualOptions = getDownloadOptions()

@@ -43,18 +41,21 @@ test('getDownloadOptions overrides all settings', async () => {
 })
 
 test('getUploadOptions sets defaults', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 4,
+    uploadChunkSize: 32 * 1024 * 1024,
+    useAzureSdk: false
+  }
   const actualOptions = getUploadOptions()
 
-  expect(actualOptions).toEqual({
-    uploadConcurrency,
-    uploadChunkSize
-  })
+  expect(actualOptions).toEqual(expectedOptions)
 })
 
 test('getUploadOptions overrides all settings', async () => {
   const expectedOptions: UploadOptions = {
     uploadConcurrency: 2,
-    uploadChunkSize: 16 * 1024 * 1024
+    uploadChunkSize: 16 * 1024 * 1024,
+    useAzureSdk: true
   }
 
   const actualOptions = getUploadOptions(expectedOptions)

@@ -62,6 +63,34 @@ test('getUploadOptions overrides all settings', async () => {
   expect(actualOptions).toEqual(expectedOptions)
 })
 
+test('env variables override all getUploadOptions settings', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 16,
+    uploadChunkSize: 64 * 1024 * 1024,
+    useAzureSdk: true
+  }
+
+  process.env.CACHE_UPLOAD_CONCURRENCY = '16'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
+
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
+test('env variables override all getUploadOptions settings but do not exceed caps', async () => {
+  const expectedOptions: UploadOptions = {
+    uploadConcurrency: 32,
+    uploadChunkSize: 128 * 1024 * 1024,
+    useAzureSdk: true
+  }
+
+  process.env.CACHE_UPLOAD_CONCURRENCY = '64'
+  process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
+
+  const actualOptions = getUploadOptions(expectedOptions)
+  expect(actualOptions).toEqual(expectedOptions)
+})
+
 test('getDownloadOptions overrides download timeout minutes', async () => {
   const expectedOptions: DownloadOptions = {
     useAzureSdk: false,

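The two new tests encode both the environment override and its ceilings (32 for concurrency, 128 MB for chunk size). Here is a sketch of that clamping behavior; it is an assumption inferred from the expected values above, not the library source:

```typescript
// Sketch: apply CACHE_UPLOAD_CONCURRENCY / CACHE_UPLOAD_CHUNK_SIZE (in MB)
// on top of caller-supplied options, capped at 32 and 128 MB respectively.
function applyUploadEnvOverridesSketch(opts: {
  uploadConcurrency: number
  uploadChunkSize: number
}): typeof opts {
  const envConcurrency = Number(process.env['CACHE_UPLOAD_CONCURRENCY'])
  const envChunkSizeMb = Number(process.env['CACHE_UPLOAD_CHUNK_SIZE'])
  if (!Number.isNaN(envConcurrency) && envConcurrency > 0) {
    opts.uploadConcurrency = Math.min(32, envConcurrency)
  }
  if (!Number.isNaN(envChunkSizeMb) && envChunkSizeMb > 0) {
    opts.uploadChunkSize = Math.min(128, envChunkSizeMb) * 1024 * 1024
  }
  return opts
}
```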
@@ -6,6 +6,8 @@ import * as cacheUtils from '../src/internal/cacheUtils'
 import {CacheFilename, CompressionMethod} from '../src/internal/constants'
 import {ArtifactCacheEntry} from '../src/internal/contracts'
 import * as tar from '../src/internal/tar'
+import {HttpClientError} from '@actions/http-client'
+import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp-client'
 
 jest.mock('../src/internal/cacheHttpClient')
 jest.mock('../src/internal/cacheUtils')

@@ -73,18 +75,28 @@ test('restore with no cache found', async () => {
 test('restore with server error should fail', async () => {
   const paths = ['node_modules']
   const key = 'node-test'
-  const logWarningMock = jest.spyOn(core, 'warning')
+  const logErrorMock = jest.spyOn(core, 'error')
 
-  jest.spyOn(cacheHttpClient, 'getCacheEntry').mockImplementation(() => {
-    throw new Error('HTTP Error Occurred')
-  })
+  // Set cache service to V2 to test error logging for server errors
+  process.env['ACTIONS_CACHE_SERVICE_V2'] = 'true'
+  process.env['ACTIONS_RESULTS_URL'] = 'https://results.local/'
+
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockImplementation(() => {
+      throw new HttpClientError('HTTP Error Occurred', 500)
+    })
 
   const cacheKey = await restoreCache(paths, key)
   expect(cacheKey).toBe(undefined)
-  expect(logWarningMock).toHaveBeenCalledTimes(1)
-  expect(logWarningMock).toHaveBeenCalledWith(
+  expect(logErrorMock).toHaveBeenCalledTimes(1)
+  expect(logErrorMock).toHaveBeenCalledWith(
     'Failed to restore: HTTP Error Occurred'
   )
+
+  // Clean up environment
+  delete process.env['ACTIONS_CACHE_SERVICE_V2']
+  delete process.env['ACTIONS_RESULTS_URL']
 })
 
 test('restore with restore keys and no cache found', async () => {

Some files were not shown because too many files have changed in this diff.