Simplify rsync-antora-reference httpdocs-path

Closes gh-44864
Phillip Webb 2025-05-05 12:06:51 -07:00
parent 0405d65f10
commit 8a533d7f94
2 changed files with 336 additions and 51 deletions

@@ -18,54 +18,54 @@ jobs:
     if: github.repository_owner == 'spring-projects'
     runs-on: ubuntu-latest
     steps:
-      - name: Check Out
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Fetch Main Branch
-        run: git fetch origin main:main
-      - name: Set Up Node
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20
-      - name: Check Out 'package.json' From Main
-        run: node run.js --only-checkout
-      - name: Cache Files
-        uses: actions/cache@v4
-        with:
-          key: antora-${{ hashFiles('package-lock.json', 'antora-playbook.yml') }}
-          path: |
-            ~/.npm
-            ~/.cache/antora
-      - name: Install and Run Antora
-        env:
-          ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }}
-          ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }}
-          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
-          BUILD_VERSION: ${{ github.event.inputs.build-version }}
-        run: node run.js --no-checkout
-      - name: Sync Documentation
-        uses: spring-io/spring-doc-actions/rsync-antora-reference@v0.0.20
-        with:
-          docs-username: ${{ secrets.DOCS_USERNAME }}
-          docs-host: ${{ secrets.DOCS_HOST }}
-          docs-ssh-key: ${{ secrets.DOCS_SSH_KEY }}
-          docs-ssh-host-key: ${{ secrets.DOCS_SSH_HOST_KEY }}
-          httpdocs-path: /spring-boot/antora/reference
-        env:
-          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
-          BUILD_VERSION: ${{ github.event.inputs.build-version }}
-      - name: Bust Cloudflare Cache
-        uses: spring-io/spring-doc-actions/bust-cloudflare-antora-cache@v0.0.20
-        with:
-          context-root: spring-boot
-          context-path: /
-          cloudflare-zone-id: ${{ secrets.CLOUDFLARE_ZONE_ID }}
-          cloudflare-cache-token: ${{ secrets.CLOUDFLARE_CACHE_TOKEN }}
-      - name: Send Notification
-        if: failure()
-        uses: ./.github/actions/send-notification
-        with:
-          run-name: ${{ format('{0} | Build and Deploy Docs', github.ref_name) }}
-          status: ${{ job.status }}
-          webhook-url: ${{ secrets.GOOGLE_CHAT_WEBHOOK_URL }}
+      - name: Check Out
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Fetch Main Branch
+        run: git fetch origin main:main
+      - name: Set Up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - name: Check Out 'package.json' From Main
+        run: node run.js --only-checkout
+      - name: Cache Files
+        uses: actions/cache@v4
+        with:
+          key: antora-${{ hashFiles('package-lock.json', 'antora-playbook.yml') }}
+          path: |
+            ~/.npm
+            ~/.cache/antora
+      - name: Install and Run Antora
+        env:
+          ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }}
+          ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }}
+          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
+          BUILD_VERSION: ${{ github.event.inputs.build-version }}
+        run: node run.js --no-checkout
+      - name: Sync Documentation
+        uses: spring-io/spring-doc-actions/rsync-antora-reference@v0.0.20
+        with:
+          docs-username: ${{ secrets.DOCS_USERNAME }}
+          docs-host: ${{ secrets.DOCS_HOST }}
+          docs-ssh-key: ${{ secrets.DOCS_SSH_KEY }}
+          docs-ssh-host-key: ${{ secrets.DOCS_SSH_HOST_KEY }}
+          httpdocs-path: /spring-boot/antora
+        env:
+          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
+          BUILD_VERSION: ${{ github.event.inputs.build-version }}
+      - name: Bust Cloudflare Cache
+        uses: spring-io/spring-doc-actions/bust-cloudflare-antora-cache@v0.0.20
+        with:
+          context-root: spring-boot
+          context-path: /
+          cloudflare-zone-id: ${{ secrets.CLOUDFLARE_ZONE_ID }}
+          cloudflare-cache-token: ${{ secrets.CLOUDFLARE_CACHE_TOKEN }}
+      - name: Send Notification
+        if: failure()
+        uses: ./.github/actions/send-notification
+        with:
+          run-name: ${{ format('{0} | Build and Deploy Docs', github.ref_name) }}
+          status: ${{ job.status }}
+          webhook-url: ${{ secrets.GOOGLE_CHAT_WEBHOOK_URL }}

@@ -0,0 +1,285 @@
diff --git a/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js b/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js
index 17d902d..0448dec 100644
--- a/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js
+++ b/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js
@@ -1,135 +1,157 @@
-'use strict';
-
-var fs = require('fs');
-var constants = fs.constants;
-var yauzl = require('yauzl');
-var File = require('../vinyl-zip');
-var queue = require('queue');
-var through = require('through');
-var map = require('through2').obj;
-
-function modeFromEntry(entry) {
- var attr = entry.externalFileAttributes >> 16 || 33188;
-
- // The following constants are not available on all platforms:
- // 448 = constants.S_IRWXU, 56 = constants.S_IRWXG, 7 = constants.S_IRWXO
- return [448, 56, 7]
- .map(function (mask) { return attr & mask; })
- .reduce(function (a, b) { return a + b; }, attr & constants.S_IFMT);
+'use strict'
+
+// This is a fork of vinyl-zip with the following updates:
+// - unzipFile has an additional `.on('error'` handler
+// - toStream has an additional `zip.on('error'` handler
+
+const fs = require('fs')
+const constants = fs.constants
+const yauzl = require('yauzl')
+const File = require('vinyl')
+const queue = require('queue')
+const through = require('through')
+const map = require('through2').obj
+
+function modeFromEntry (entry) {
+ const attr = entry.externalFileAttributes >> 16 || 33188
+ return [448, 56, 7]
+ .map(function (mask) {
+ return attr & mask
+ })
+ .reduce(function (a, b) {
+ return a + b
+ }, attr & constants.S_IFMT)
}
-function mtimeFromEntry(entry) {
- return yauzl.dosDateTimeToDate(entry.lastModFileDate, entry.lastModFileTime);
+function mtimeFromEntry (entry) {
+ return yauzl.dosDateTimeToDate(entry.lastModFileDate, entry.lastModFileTime)
}
-function toStream(zip) {
- var result = through();
- var q = queue();
- var didErr = false;
-
- q.on('error', function (err) {
- didErr = true;
- result.emit('error', err);
- });
-
- zip.on('entry', function (entry) {
- if (didErr) { return; }
-
- var stat = new fs.Stats();
- stat.mode = modeFromEntry(entry);
- stat.mtime = mtimeFromEntry(entry);
-
- // directories
- if (/\/$/.test(entry.fileName)) {
- stat.mode = (stat.mode & ~constants.S_IFMT) | constants.S_IFDIR;
- }
-
- var file = {
- path: entry.fileName,
- stat: stat
- };
-
- if (stat.isFile()) {
- stat.size = entry.uncompressedSize;
- if (entry.uncompressedSize === 0) {
- file.contents = Buffer.alloc(0);
- result.emit('data', new File(file));
- } else {
- q.push(function (cb) {
- zip.openReadStream(entry, function (err, readStream) {
- if (err) { return cb(err); }
- file.contents = readStream;
- result.emit('data', new File(file));
- cb();
- });
- });
-
- q.start();
- }
- } else if (stat.isSymbolicLink()) {
- stat.size = entry.uncompressedSize;
- q.push(function (cb) {
- zip.openReadStream(entry, function (err, readStream) {
- if (err) { return cb(err); }
- file.symlink = '';
- readStream.on('data', function (c) { file.symlink += c; });
- readStream.on('error', cb);
- readStream.on('end', function () {
- result.emit('data', new File(file));
- cb();
- });
- });
- });
-
- q.start();
- } else if (stat.isDirectory()) {
- result.emit('data', new File(file));
- } else {
- result.emit('data', new File(file));
- }
- });
-
- zip.on('end', function () {
- if (didErr) {
- return;
- }
-
- if (q.length === 0) {
- result.end();
- } else {
- q.on('end', function () {
- result.end();
- });
- }
- });
-
- return result;
+function toStream (zip) {
+ const result = through()
+ const q = queue()
+ let didErr = false
+
+ q.on('error', function (err) {
+ didErr = true
+ result.emit('error', err)
+ })
+
+ zip.on('error', function (err) {
+ didErr = true
+ result.emit('error', err)
+ })
+
+ zip.on('entry', function (entry) {
+ if (didErr) {
+ return
+ }
+
+ const stat = new fs.Stats()
+ stat.mode = modeFromEntry(entry)
+ stat.mtime = mtimeFromEntry(entry)
+
+ // directories
+ if (/\/$/.test(entry.fileName)) {
+ stat.mode = (stat.mode & ~constants.S_IFMT) | constants.S_IFDIR
+ }
+
+ const file = {
+ path: entry.fileName,
+ stat,
+ }
+
+ if (stat.isFile()) {
+ stat.size = entry.uncompressedSize
+ if (entry.uncompressedSize === 0) {
+ file.contents = Buffer.alloc(0)
+ result.emit('data', new File(file))
+ } else {
+ q.push(function (cb) {
+ zip.openReadStream(entry, function (err, readStream) {
+ if (err) {
+ return cb(err)
+ }
+ file.contents = readStream
+ result.emit('data', new File(file))
+ cb()
+ })
+ })
+
+ q.start()
+ }
+ } else if (stat.isSymbolicLink()) {
+ stat.size = entry.uncompressedSize
+ q.push(function (cb) {
+ zip.openReadStream(entry, function (err, readStream) {
+ if (err) {
+ return cb(err)
+ }
+ file.symlink = ''
+ readStream.on('data', function (c) {
+ file.symlink += c
+ })
+ readStream.on('error', cb)
+ readStream.on('end', function () {
+ result.emit('data', new File(file))
+ cb()
+ })
+ })
+ })
+
+ q.start()
+ } else if (stat.isDirectory()) {
+ result.emit('data', new File(file))
+ } else {
+ result.emit('data', new File(file))
+ }
+ })
+
+ zip.on('end', function () {
+ if (didErr) {
+ return
+ }
+
+ if (q.length === 0) {
+ result.end()
+ } else {
+ q.on('end', function () {
+ result.end()
+ })
+ }
+ })
+
+ return result
}
-function unzipFile(zipPath) {
- var result = through();
- yauzl.open(zipPath, function (err, zip) {
- if (err) { return result.emit('error', err); }
- toStream(zip).pipe(result);
- });
- return result;
+function unzipFile (zipPath) {
+ const result = through()
+ yauzl.open(zipPath, function (err, zip) {
+ if (err) {
+ return result.emit('error', err)
+ }
+ toStream(zip)
+ .on('error', (err) => result.emit('error', err))
+ .pipe(result)
+ })
+ return result
}
-function unzip() {
- return map(function (file, enc, next) {
- if (!file.isBuffer()) return next(new Error('Only supports buffers'));
- yauzl.fromBuffer(file.contents, (err, zip) => {
- if (err) return this.emit('error', err);
- toStream(zip)
- .on('error', next)
- .on('data', (data) => this.push(data))
- .on('end', next);
- });
- });
+function unzip () {
+ return map(function (file, enc, next) {
+ if (!file.isBuffer()) return next(new Error('Only supports buffers'))
+ yauzl.fromBuffer(file.contents, (err, zip) => {
+ if (err) return this.emit('error', err)
+ toStream(zip)
+ .on('error', next)
+ .on('data', (data) => this.push(data))
+ .on('end', next)
+ })
+ })
}
-function src(zipPath) {
- return zipPath ? unzipFile(zipPath) : unzip();
+function src (zipPath) {
+ return zipPath ? unzipFile(zipPath) : unzip()
}
-module.exports = src;
+module.exports = src
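
Why the patch adds the extra 'error' handlers: Node's pipe() does not forward 'error' events from the source stream, so without them a corrupt or unreadable archive would surface as an unhandled yauzl/zip error instead of failing the consuming Gulp task cleanly. A minimal sketch of how the patched reader is typically consumed, assuming the package keeps upstream gulp-vinyl-zip's public src() API (the bundle path, vinyl-fs usage, and function name below are illustrative, not taken from this commit):

// sketch only - paths and wiring are assumptions
const zip = require('@vscode/gulp-vinyl-zip')
const vfs = require('vinyl-fs')

function extractUiBundle () {
  return zip.src('build/ui-bundle.zip')      // vinyl stream of zip entries
    .on('error', function (err) {            // reached via the handlers added in this patch
      console.error('failed to read ui-bundle.zip:', err.message)
    })
    .pipe(vfs.dest('build/ui'))              // write the extracted entries to disk
}

extractUiBundle()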