mirror of https://github.com/goharbor/harbor.git
Merge 2c3bdbbd24 into c004f2d3e6 (commit 7f84024446)
@@ -1,18 +1,18 @@
 name: CI
 env:
   POSTGRESQL_HOST: localhost
   POSTGRESQL_PORT: 5432
   POSTGRESQL_USR: postgres
   POSTGRESQL_PWD: root123
   POSTGRESQL_DATABASE: registry
   DOCKER_COMPOSE_VERSION: 2.27.1
   HARBOR_ADMIN: admin
   HARBOR_ADMIN_PASSWD: Harbor12345
   CORE_SECRET: tempString
   KEY_PATH: "/data/secret/keys/secretkey"
   REDIS_HOST: localhost
   REG_VERSION: v2.7.1-patch-2819-2553
   UI_BUILDER_VERSION: 1.6.0

 on:
   # the paths-ignore is the same as the paths in pass-CI.yml, they should be synced together
@@ -43,21 +43,24 @@ on:
 jobs:
   UTTEST:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
     env:
       UTTEST: true
-    runs-on:
-      #- self-hosted
-      - ubuntu-latest
+      ARCH: ${{ matrix.arch }}
     timeout-minutes: 100
     steps:
       - name: Set up Go 1.23
         uses: actions/setup-go@v5
         with:
           go-version: 1.23.2
         id: go
       - uses: actions/checkout@v5
         with:
           path: src/github.com/goharbor/harbor
       - name: setup env
         run: |
           cd src/github.com/goharbor/harbor
@@ -101,14 +104,17 @@ jobs:
         uses: codecov/codecov-action@v5
         with:
           files: ./src/github.com/goharbor/harbor/profile.cov
-          flags: unittests
+          flags: unittests,linux-${{ matrix.arch }}

   APITEST_DB:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
     env:
       APITEST_DB: true
-    runs-on:
-      #- self-hosted
-      - ubuntu-latest
+      ARCH: ${{ matrix.arch }}
     timeout-minutes: 100
     steps:
       - name: Set up Go 1.23
@@ -164,11 +170,14 @@ jobs:
           path: /home/runner/work/harbor/harbor/src/github.com/goharbor/harbor/integration_logs.tar.gz
           retention-days: 5
   APITEST_DB_PROXY_CACHE:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
     env:
       APITEST_DB: true
-    runs-on:
-      #- self-hosted
-      - ubuntu-latest
+      ARCH: ${{ matrix.arch }}
     timeout-minutes: 100
     steps:
       - name: Set up Go 1.23
@@ -224,11 +233,14 @@ jobs:
           path: /home/runner/work/harbor/harbor/src/github.com/goharbor/harbor/integration_logs.tar.gz
           retention-days: 5
   APITEST_LDAP:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
     env:
       APITEST_LDAP: true
-    runs-on:
-      #- self-hosted
-      - ubuntu-latest
+      ARCH: ${{ matrix.arch }}
     timeout-minutes: 100
     steps:
       - name: Set up Go 1.23
@@ -268,6 +280,16 @@ jobs:
           env
           df -h
           bash ./tests/showtime.sh ./tests/ci/api_common_install.sh $IP LDAP
+      - name: Wait for LDAP config to be active (arm64 only)
+        if: matrix.arch == 'arm64'
+        run: |
+          echo "Waiting for Harbor LDAP config to become active..."
+          for i in {1..30}; do
+            out=$(curl -sk -u admin:Harbor12345 https://127.0.0.1/api/v2.0/systeminfo || true)
+            echo "$out" | grep -q '"auth_mode":"ldap_auth"' && { echo "LDAP active"; break; }
+            echo "Retry $i: LDAP not active yet"
+            sleep 5
+          done
       - name: script
         run: |
           echo IP: $IP
@@ -282,11 +304,14 @@ jobs:
           path: /home/runner/work/harbor/harbor/src/github.com/goharbor/harbor/integration_logs.tar.gz
           retention-days: 5
   OFFLINE:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
     env:
       OFFLINE: true
-    runs-on:
-      #- self-hosted
-      - ubuntu-latest
+      ARCH: ${{ matrix.arch }}
     timeout-minutes: 100
     steps:
       - name: Set up Go 1.23
@@ -334,11 +359,14 @@ jobs:
           df -h

   UI_UT:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
     env:
       UI_UT: true
-    runs-on:
-      #- self-hosted
-      - ubuntu-latest
+      ARCH: ${{ matrix.arch }}
     timeout-minutes: 100
     steps:
       - uses: actions/setup-node@v5
@@ -347,6 +375,15 @@ jobs:
       - uses: actions/checkout@v5
         with:
           path: src/github.com/goharbor/harbor
+
+      - name: Install Chromium (arm64 only)
+        if: matrix.arch == 'arm64'
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y chromium-browser || sudo apt-get install -y chromium
+          which chromium-browser || which chromium
+          echo "CHROME_BIN=$(which chromium-browser || which chromium)" >> $GITHUB_ENV
+
       - name: script
         run: |
           echo IP: $IP
@@ -358,4 +395,4 @@ jobs:
         uses: codecov/codecov-action@v5
         with:
           files: ./src/github.com/goharbor/harbor/src/portal/coverage/lcov.info
-          flags: unittests
+          flags: unittests,linux-${{ matrix.arch }}
@@ -10,10 +10,14 @@ on:

 jobs:
   BUILD_PACKAGE:
+    strategy:
+      fail-fast: false
+      matrix:
+        arch: [amd64, arm64]
     env:
       BUILD_PACKAGE: true
-    runs-on:
-      - ubuntu-22.04
+      ARCH: ${{ matrix.arch }}
+    runs-on: ${{ matrix.arch == 'arm64' && 'ubuntu-24.04-arm' || 'ubuntu-22.04' }}
     steps:
       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v5.0.0
@@ -21,16 +25,19 @@ jobs:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: us-east-1
-      - name: Set up Go 1.22
+      - name: Set up Go
         uses: actions/setup-go@v5
         with:
           go-version: 1.23.2
         id: go

       - name: Setup Docker
         uses: docker-practice/actions-setup-docker@master
         with:
           docker_version: 20.10
           docker_channel: stable

       - uses: actions/checkout@v5
       - uses: jitterbit/get-changed-files@v1
         id: changed-files
@@ -40,15 +47,16 @@ jobs:
       - uses: actions/checkout@v5
         with:
           path: src/github.com/goharbor/harbor

       - name: Build Base Image
         if: |
           contains(steps.changed-files.outputs.modified, 'Dockerfile.base') ||
           contains(steps.changed-files.outputs.modified, 'VERSION') ||
           contains(steps.changed-files.outputs.modified, '.buildbaselog') ||
           github.ref == 'refs/heads/main'
         run: |
-          set -x
           echo "BUILD_BASE=true" >> $GITHUB_ENV

       - name: Build Package
         run: |
           set -x
@@ -62,7 +70,6 @@ jobs:
           harbor_builds_bucket="harbor-builds"
           harbor_releases_bucket="harbor-releases"
           harbor_ci_pipeline_store_bucket="harbor-ci-pipeline-store/latest"
-          # the target release version is the version of next release(RC or GA). It needs to be updated on creating new release branch.
           target_release_version=$(cat ./VERSION)
           Harbor_Package_Version=$target_release_version-'build.'$GITHUB_RUN_NUMBER

@@ -82,25 +89,126 @@ jobs:

           build_base_params=" BUILD_BASE=false"
           cd src/github.com/goharbor/harbor
-          if [ -z "$BUILD_BASE" ] || [ "$BUILD_BASE" != "true" ]; then
-            echo "Do not need to build base images!"
-          else
+          if [ -n "$BUILD_BASE" ] && [ "$BUILD_BASE" = "true" ]; then
             build_base_params=" BUILD_BASE=true PULL_BASE_FROM_DOCKERHUB=true PUSHBASEIMAGE=true REGISTRYUSER=\"${{ secrets.DOCKER_HUB_USERNAME }}\" REGISTRYPASSWORD=\"${{ secrets.DOCKER_HUB_PASSWORD }}\""
+          else
+            echo "Do not need to build base images!"
           fi
-          sudo make package_offline GOBUILDTAGS="include_oss include_gcs" BASEIMAGETAG=${Harbor_Build_Base_Tag} VERSIONTAG=${Harbor_Assets_Version} PKGVERSIONTAG=${Harbor_Package_Version} TRIVYFLAG=true EXPORTERFLAG=true HTTPPROXY= ${build_base_params}
-          sudo make package_online GOBUILDTAGS="include_oss include_gcs" BASEIMAGETAG=${Harbor_Build_Base_Tag} VERSIONTAG=${Harbor_Assets_Version} PKGVERSIONTAG=${Harbor_Package_Version} TRIVYFLAG=true EXPORTERFLAG=true HTTPPROXY= ${build_base_params}
+          # Build per-arch
+          sudo make package_offline ARCH=${ARCH} GOBUILDTAGS="include_oss include_gcs" \
+            BASEIMAGETAG=${Harbor_Build_Base_Tag} VERSIONTAG=${Harbor_Assets_Version} \
+            PKGVERSIONTAG=${Harbor_Package_Version} TRIVYFLAG=true EXPORTERFLAG=true HTTPPROXY= ${build_base_params}
+          sudo make package_online ARCH=${ARCH} GOBUILDTAGS="include_oss include_gcs" \
+            BASEIMAGETAG=${Harbor_Build_Base_Tag} VERSIONTAG=${Harbor_Assets_Version} \
+            PKGVERSIONTAG=${Harbor_Package_Version} TRIVYFLAG=true EXPORTERFLAG=true HTTPPROXY= ${build_base_params}
+
+          # Add arch suffix to artifacts
           harbor_offline_build_bundle=$(basename harbor-offline-installer-*.tgz)
           harbor_online_build_bundle=$(basename harbor-online-installer-*.tgz)
-          echo "Package name is: $harbor_offline_build_bundle"
-          echo "Package name is: $harbor_online_build_bundle"
+          mv "${harbor_offline_build_bundle}" "harbor-offline-installer-${Harbor_Assets_Version}-${ARCH}.tgz"
+          mv "${harbor_online_build_bundle}" "harbor-online-installer-${Harbor_Assets_Version}-${ARCH}.tgz"
+          harbor_offline_build_bundle="harbor-offline-installer-${Harbor_Assets_Version}-${ARCH}.tgz"
+          harbor_online_build_bundle="harbor-online-installer-${Harbor_Assets_Version}-${ARCH}.tgz"
+
+          echo "Package (offline): $harbor_offline_build_bundle"
+          echo "Package (online) : $harbor_online_build_bundle"
+
           source tests/ci/build_util.sh
-          cp ${harbor_offline_build_bundle} harbor-offline-installer-latest.tgz
-          cp ${harbor_online_build_bundle} harbor-online-installer-latest.tgz
-          uploader ${harbor_offline_build_bundle} $harbor_target_bucket
-          uploader ${harbor_online_build_bundle} $harbor_target_bucket
-          uploader harbor-offline-installer-latest.tgz $harbor_target_bucket
-          uploader harbor-online-installer-latest.tgz $harbor_target_bucket
-          echo "BUILD_BUNDLE=$harbor_offline_build_bundle" >> $GITHUB_ENV
-
-          publishImage $target_branch $Harbor_Assets_Version "${{ secrets.DOCKER_HUB_USERNAME }}" "${{ secrets.DOCKER_HUB_PASSWORD }}"
+          cp "${harbor_offline_build_bundle}" "harbor-offline-installer-latest-${ARCH}.tgz"
+          cp "${harbor_online_build_bundle}" "harbor-online-installer-latest-${ARCH}.tgz"
+          uploader "${harbor_offline_build_bundle}" $harbor_target_bucket
+          uploader "${harbor_online_build_bundle}" $harbor_target_bucket
+          uploader "harbor-offline-installer-latest-${ARCH}.tgz" $harbor_target_bucket
+          uploader "harbor-online-installer-latest-${ARCH}.tgz" $harbor_target_bucket
+
+          echo "BUILD_BUNDLE_${ARCH}=${harbor_offline_build_bundle}" >> $GITHUB_ENV
+
+          # Publish images for both amd64 and arm64
+          publishImage $target_branch ${Harbor_Assets_Version} "${{ secrets.DOCKER_HUB_USERNAME }}" "${{ secrets.DOCKER_HUB_PASSWORD }}" ${ARCH}
+      - name: Save repo list for this arch
+        run: |
+          set -e
+          source tests/ci/build_util.sh
+          source_file="$GITHUB_WORKSPACE/_repos_${ARCH}.txt"
+          saveRepoList "$source_file"
+
+      - name: Upload repo list artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: repos-${{ matrix.arch }}
+          path: ${{ github.workspace }}/_repos_${{ matrix.arch }}.txt
+          retention-days: 7
+
+  CREATE_MANIFEST:
+    needs: [BUILD_PACKAGE]
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Setup Docker
+        uses: docker-practice/actions-setup-docker@master
+        with:
+          docker_version: 20.10
+          docker_channel: stable
+
+      - name: Checkout repo (for VERSION file)
+        uses: actions/checkout@v5
+
+      - name: Download repo lists
+        uses: actions/download-artifact@v4
+        with:
+          name: repos-amd64
+          path: .
+      - name: Download repo lists (arm64)
+        uses: actions/download-artifact@v4
+        with:
+          name: repos-arm64
+          path: .
+
+      - name: Merge repo lists
+        id: repos
+        run: |
+          set -e
+          cat _repos_amd64.txt _repos_arm64.txt | sort -u > _repos_all.txt
+          echo "count=$(wc -l < _repos_all.txt)" >> $GITHUB_OUTPUT
+          echo "---- repos to manifest ----"
+          cat _repos_all.txt
+
+      - name: Login to Docker Hub
+        run: |
+          docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" -p "${{ secrets.DOCKER_HUB_PASSWORD }}"
+
+      - name: Create multi-arch manifests
+        run: |
+          set -euo pipefail
+
+          branch="${GITHUB_REF#refs/heads/}"
+          version="$(cat VERSION)"
+          if [[ "$branch" == "main" ]]; then
+            image_tag="dev"
+          elif [[ "$branch" == release-* ]]; then
+            image_tag="${version}-dev"
+          else
+            image_tag="${version}"
+          fi
+
+          echo "Creating multi-arch manifests for tag: ${image_tag}"
+
+          # Ensure buildx imagetools is ready (usually is via the action)
+          docker buildx version || true
+
+          while IFS= read -r repo; do
+            # Only manifest if both arch tags exist in the registry
+            if docker buildx imagetools inspect "${repo}:${image_tag}-amd64" >/dev/null 2>&1 \
+               && docker buildx imagetools inspect "${repo}:${image_tag}-arm64" >/dev/null 2>&1; then
+              echo "→ ${repo}:${image_tag}"
+              docker buildx imagetools create \
+                -t "${repo}:${image_tag}" \
+                "${repo}:${image_tag}-amd64" \
+                "${repo}:${image_tag}-arm64"
+            else
+              echo "SKIP ${repo}:${image_tag} (missing one of: -amd64 / -arm64)"
+            fi
+          done < _repos_all.txt
+
+          docker logout
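For reference, one way to confirm that a manifest list produced by the CREATE_MANIFEST job really covers both architectures is to inspect it from any machine with buildx. The repository and tag below are only placeholders, not names taken from this change:

# Inspect a pushed manifest list; substitute a repo/tag the workflow actually published.
docker buildx imagetools inspect goharbor/harbor-core:dev
# The output should list one manifest entry per platform, e.g.
#   Platform: linux/amd64
#   Platform: linux/arm64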
@@ -10,6 +10,7 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v5

       - name: Setup env
         run: |
           echo "CUR_TAG=${{ github.ref_name }}" >> $GITHUB_ENV
@@ -19,66 +20,129 @@ jobs:
           echo "PRE_TAG=$(echo $release | jq -r '.body' | jq -r '.preTag')" >> $GITHUB_ENV
           echo "BRANCH=$(echo $release | jq -r '.target_commitish')" >> $GITHUB_ENV
           echo "PRERELEASE=$(echo $release | jq -r '.prerelease')" >> $GITHUB_ENV

       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v5.0.0
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: us-east-1
-      - name: Prepare Assets
+      - name: Prepare Assets (amd64 + arm64)
         run: |
-          if [ ! ${{ env.BUILD_NO }} -o ${{ env.BUILD_NO }} = "null" ]
-          then
-            echo "missing required parameter buildNo."
-            exit 1
+          if [ -z "${{ env.BUILD_NO }}" ] || [ "${{ env.BUILD_NO }}" = "null" ]; then
+            echo "missing required parameter buildNo."; exit 1
           fi
           echo "buildNo:${{ env.BUILD_NO }}"
           echo "preTag:${{ env.PRE_TAG }}"

-          src_offline_package=harbor-offline-installer-${{ env.BASE_TAG }}-${{ env.BUILD_NO }}.tgz
-          src_online_package=harbor-online-installer-${{ env.BASE_TAG }}-${{ env.BUILD_NO }}.tgz
-          dst_offline_package=harbor-offline-installer-${{ env.CUR_TAG }}.tgz
-          dst_online_package=harbor-online-installer-${{ env.CUR_TAG }}.tgz
-          aws s3 cp s3://${{ secrets.HARBOR_RELEASE_BUILD }}/${{ env.BRANCH }}/${src_offline_package} s3://${{ secrets.HARBOR_RELEASE_BUILD }}/${{ env.BRANCH }}/${dst_offline_package}
-          aws s3 cp s3://${{ secrets.HARBOR_RELEASE_BUILD }}/${{ env.BRANCH }}/${src_online_package} s3://${{ secrets.HARBOR_RELEASE_BUILD }}/${{ env.BRANCH }}/${dst_online_package}
+          base=${{ env.BASE_TAG }}
+          cur=${{ env.CUR_TAG }}
+          branch=${{ env.BRANCH }}
+          bucket=${{ secrets.HARBOR_RELEASE_BUILD }}
+
+          # Source names produced by build-package (per-arch)
+          src_offline_amd64="harbor-offline-installer-${base}-${{ env.BUILD_NO }}-amd64.tgz"
+          src_online_amd64="harbor-online-installer-${base}-${{ env.BUILD_NO }}-amd64.tgz"
+          src_offline_arm64="harbor-offline-installer-${base}-${{ env.BUILD_NO }}-arm64.tgz"
+          src_online_arm64="harbor-online-installer-${base}-${{ env.BUILD_NO }}-arm64.tgz"
+
+          # Destination names attached to GitHub release
+          dst_offline_amd64="harbor-offline-installer-${cur}-amd64.tgz"
+          dst_online_amd64="harbor-online-installer-${cur}-amd64.tgz"
+          dst_offline_arm64="harbor-offline-installer-${cur}-arm64.tgz"
+          dst_online_arm64="harbor-online-installer-${cur}-arm64.tgz"
+
+          # Copy/rename in S3
+          aws s3 cp s3://$bucket/$branch/$src_offline_amd64 s3://$bucket/$branch/$dst_offline_amd64
+          aws s3 cp s3://$bucket/$branch/$src_online_amd64 s3://$bucket/$branch/$dst_online_amd64
+          aws s3 cp s3://$bucket/$branch/$src_offline_arm64 s3://$bucket/$branch/$dst_offline_arm64
+          aws s3 cp s3://$bucket/$branch/$src_online_arm64 s3://$bucket/$branch/$dst_online_arm64
+
           assets_path=$(pwd)/assets
-          source tools/release/release_utils.sh && getAssets ${{ secrets.HARBOR_RELEASE_BUILD }} ${{ env.BRANCH }} $dst_offline_package $dst_online_package ${{ env.PRERELEASE }} $assets_path
-          echo "OFFLINE_PACKAGE_PATH=$assets_path/$dst_offline_package" >> $GITHUB_ENV
-          echo "ONLINE_PACKAGE_PATH=$assets_path/$dst_online_package" >> $GITHUB_ENV
+          mkdir -p "$assets_path"
+          source tools/release/release_utils.sh
+          # Pull down both arch sets into ./assets and generate md5s
+          getAssets $bucket $branch $dst_offline_amd64 $dst_online_amd64 ${{ env.PRERELEASE }} $assets_path
+          getAssets $bucket $branch $dst_offline_arm64 $dst_online_arm64 ${{ env.PRERELEASE }} $assets_path
+
+          echo "ASSETS_DIR=$assets_path" >> $GITHUB_ENV
+          # Back-compat (points to amd64)
+          echo "OFFLINE_PACKAGE_PATH=$assets_path/$dst_offline_amd64" >> $GITHUB_ENV
+          echo "ONLINE_PACKAGE_PATH=$assets_path/$dst_online_amd64" >> $GITHUB_ENV
           echo "MD5SUM_PATH=$assets_path/md5sum" >> $GITHUB_ENV

       - name: Setup Docker
         uses: docker-practice/actions-setup-docker@master
         with:
           docker_version: 20.10
           docker_channel: stable
-      - name: Publish Images
+      - name: Publish Images (amd64 + arm64)
         run: |
-          tar -zxf ${{ env.OFFLINE_PACKAGE_PATH }}
-          docker load -i ./harbor/harbor.${{ env.BASE_TAG }}.tar.gz
-          images="$(docker images --format "{{.Repository}}" --filter=reference='goharbor/*:${{ env.BASE_TAG }}' | xargs)"
-          source tools/release/release_utils.sh
-          publishImages ${{ env.CUR_TAG }} ${{ env.BASE_TAG }} "${{ secrets.DOCKER_HUB_USERNAME }}" "${{ secrets.DOCKER_HUB_PASSWORD }}" $images
-          publishPackages ${{ env.CUR_TAG }} ${{ env.BASE_TAG }} ${{ github.actor }} ${{ secrets.GITHUB_TOKEN }} $images
+          set -euo pipefail
+          base=${{ env.BASE_TAG }}
+          cur=${{ env.CUR_TAG }}
+          : > "$GITHUB_WORKSPACE/_images_all.txt"
+
+          for arch in amd64 arm64; do
+            export ARCH="$arch"
+            pkg="$PWD/assets/harbor-offline-installer-${cur}-${arch}.tgz"
+            echo "Processing $pkg"
+            tar -zxf "$pkg"
+            docker load -i ./harbor/harbor.${base}.tar.gz
+            images="$(docker images --format "{{.Repository}}" --filter=reference="goharbor/*:${base}" | xargs)"
+            # keep a union of repo names for the manifest step
+            echo "$images" | tr ' ' '\n' >> "$GITHUB_WORKSPACE/_images_all.txt"
+            source tools/release/release_utils.sh
+            publishImages "$cur" "$base" "${{ secrets.DOCKER_HUB_USERNAME }}" "${{ secrets.DOCKER_HUB_PASSWORD }}" $images
+            publishPackages "$cur" "$base" "${{ github.actor }}" "${{ secrets.GITHUB_TOKEN }}" $images
+            rm -rf harbor
+          done
+
+      - name: Create multi-arch manifests (Docker Hub)
+        run: |
+          set -euo pipefail
+          cur=${{ env.CUR_TAG }}
+          # unique repo list
+          mapfile -t repos < <(sort -u "$GITHUB_WORKSPACE/_images_all.txt")
+          echo "Repos to manifest: ${#repos[@]}"
+          docker login -u "${{ secrets.DOCKER_HUB_USERNAME }}" -p "${{ secrets.DOCKER_HUB_PASSWORD }}"
+          docker buildx create --use --name harbor-multi || docker buildx use harbor-multi
+          for repo in "${repos[@]}"; do
+            echo "Creating manifest ${repo}:${cur} (amd64+arm64)"
+            docker buildx imagetools create \
+              -t ${repo}:${cur} \
+              ${repo}:${cur}-amd64 \
+              ${repo}:${cur}-arm64
+          done
+          docker logout

       - name: Generate release notes
         run: |
           release_notes_path=$(pwd)/release-notes.txt
           source tools/release/release_utils.sh && generateReleaseNotes ${{ env.CUR_TAG }} ${{ env.PRE_TAG }} ${{ secrets.GITHUB_TOKEN }} $release_notes_path
           echo "RELEASE_NOTES_PATH=$release_notes_path" >> $GITHUB_ENV

       - name: RC Release
         uses: softprops/action-gh-release@v2
         if: ${{ env.PRERELEASE == 'true' }}
         with:
           body_path: ${{ env.RELEASE_NOTES_PATH }}
           files: |
-            ${{ env.OFFLINE_PACKAGE_PATH }}
-            ${{ env.MD5SUM_PATH }}
+            ${{ env.ASSETS_DIR }}/harbor-offline-installer-*-amd64.tgz
+            ${{ env.ASSETS_DIR }}/harbor-offline-installer-*-arm64.tgz
+            ${{ env.ASSETS_DIR }}/md5sum

       - name: GA Release
         uses: softprops/action-gh-release@v2
         if: ${{ env.PRERELEASE == 'false' }}
         with:
           body_path: ${{ env.RELEASE_NOTES_PATH }}
           files: |
-            ${{ env.OFFLINE_PACKAGE_PATH }}
-            ${{ env.ONLINE_PACKAGE_PATH }}
-            ${{ env.MD5SUM_PATH }}
+            ${{ env.ASSETS_DIR }}/harbor-offline-installer-*-amd64.tgz
+            ${{ env.ASSETS_DIR }}/harbor-offline-installer-*-arm64.tgz
+            ${{ env.ASSETS_DIR }}/harbor-online-installer-*-amd64.tgz
+            ${{ env.ASSETS_DIR }}/harbor-online-installer-*-arm64.tgz
+            ${{ env.ASSETS_DIR }}/md5sum
Makefile
@@ -72,7 +72,10 @@ TOOLSPATH=$(BUILDPATH)/tools
 CORE_PATH=$(BUILDPATH)/src/core
 PORTAL_PATH=$(BUILDPATH)/src/portal
 CHECKENVCMD=checkenv.sh
+ARCH ?= $(shell uname -m)
+ifeq ($(ARCH), aarch64)
+  ARCH := arm64
+endif
 # parameters
 REGISTRYSERVER=
 REGISTRYPROJECTNAME=goharbor
@@ -122,9 +125,15 @@ DISTRIBUTION_SRC=https://github.com/goharbor/distribution.git

 # dependency binaries
 REGISTRYURL=https://storage.googleapis.com/harbor-builds/bin/registry/release-${REGISTRYVERSION}/registry
-TRIVY_DOWNLOAD_URL=https://github.com/aquasecurity/trivy/releases/download/$(TRIVYVERSION)/trivy_$(TRIVYVERSION:v%=%)_Linux-64bit.tar.gz
+ifeq ($(ARCH), arm64)
+TRIVY_DOWNLOAD_URL = https://github.com/aquasecurity/trivy/releases/download/$(TRIVYVERSION)/trivy_$(TRIVYVERSION:v%=%)_Linux-ARM64.tar.gz
+else
+TRIVY_DOWNLOAD_URL = https://github.com/aquasecurity/trivy/releases/download/$(TRIVYVERSION)/trivy_$(TRIVYVERSION:v%=%)_Linux-64bit.tar.gz
+endif
+
 TRIVY_ADAPTER_DOWNLOAD_URL=https://github.com/goharbor/harbor-scanner-trivy/archive/refs/tags/$(TRIVYADAPTERVERSION).tar.gz
 define VERSIONS_FOR_PREPARE
 VERSION_TAG: $(VERSIONTAG)
 REGISTRY_VERSION: $(REGISTRYVERSION)
@@ -613,4 +622,4 @@ clean:
 	@echo " make cleandockercomposefile: remove specific version docker-compose"
 	@echo " make cleanpackage: remove online and offline install package"

 all: install
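As a minimal sketch of what the new Makefile logic does, the ARCH variable defaults to the host architecture and aarch64 is normalized to arm64; an explicit ARCH on the command line still overrides it. The shell equivalent, with example values only:

# Same normalization the Makefile performs via `ARCH ?= $(shell uname -m)`.
arch="$(uname -m)"            # x86_64 on Intel/AMD runners, aarch64 on ARM runners
if [ "$arch" = "aarch64" ]; then
  arch="arm64"                # matches the ARCH=arm64 value the workflows pass in
fi
echo "ARCH resolves to: $arch"
# An explicit override still wins, e.g.:
#   make package_offline ARCH=arm64 ...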
@@ -15,10 +15,13 @@ SEDCMD=$(shell which sed)
 WGET=$(shell which wget)
 CURL=$(shell which curl)
 TIMESTAMP=$(shell date +"%Y%m%d")
+ARCH ?= $(shell uname -m)
+PLATFORM = $(if $(filter $(ARCH),aarch64 arm64),linux/arm64,linux/amd64)
+
 # docker parameters
 DOCKERCMD=$(shell which docker)
-DOCKERBUILD=$(DOCKERCMD) build --no-cache --network=$(DOCKERNETWORK)
+DOCKERBUILD=$(DOCKERCMD) build --no-cache --network=$(DOCKERNETWORK) --platform=$(PLATFORM)
 DOCKERBUILD_WITH_PULL_PARA=$(DOCKERBUILD) --pull=$(PULL_BASE_FROM_DOCKERHUB)
 DOCKERRMIMAGE=$(DOCKERCMD) rmi
 DOCKERIMAGES=$(DOCKERCMD) images
@@ -194,7 +197,20 @@ _build_registryctl:
 _build_redis:
 	@$(call _build_base,$(REDIS),$(DOCKERFILEPATH_REDIS))
 	@echo "building redis container for photon..."
-	@$(DOCKERBUILD_WITH_PULL_PARA) --build-arg harbor_base_image_version=$(BASEIMAGETAG) --build-arg harbor_base_namespace=$(BASEIMAGENAMESPACE) -f $(DOCKERFILEPATH_REDIS)/$(DOCKERFILENAME_REDIS) -t $(DOCKERIMAGENAME_REDIS):$(VERSIONTAG) .
+ifneq ($(PUSHBASEIMAGE), true)
+	@$(DOCKERBUILD_WITH_PULL_PARA) \
+		--build-arg harbor_base_image_version=$(BASEIMAGETAG) \
+		--build-arg harbor_base_namespace=$(BASEIMAGENAMESPACE) \
+		-f $(DOCKERFILEPATH_REDIS)/$(DOCKERFILENAME_REDIS) \
+		-t $(DOCKERIMAGENAME_REDIS):$(VERSIONTAG) .
+else
+	docker buildx build --platform linux/amd64,linux/arm64 \
+		--build-arg harbor_base_image_version=$(BASEIMAGETAG) \
+		--build-arg harbor_base_namespace=$(BASEIMAGENAMESPACE) \
+		-f $(DOCKERFILEPATH_REDIS)/$(DOCKERFILENAME_REDIS) \
+		-t $(DOCKERIMAGENAME_REDIS):$(VERSIONTAG) \
+		--push .
+endif
 	@echo "Done."

 _build_standalone_db_migrator:
@@ -17,4 +17,4 @@ RUN tdnf install -y gzip postgresql15-server findutils bc >> /dev/null \
     && sed -i "s|#unix_socket_directories = '/tmp'.*|unix_socket_directories = '/run/postgresql'|g" /usr/pgsql/15/share/postgresql/postgresql.conf.sample \
     && tdnf clean all

 RUN tdnf erase -y toybox && tdnf install -y util-linux net-tools
@@ -1,12 +1,13 @@
 ARG build_image
 ARG harbor_base_image_version
 ARG harbor_base_namespace
+ARG TARGETARCH

 FROM ${build_image} AS build

 ENV CGO_ENABLED=0
 ENV GOOS=linux
-ENV GOARCH=amd64
+ENV GOARCH=${TARGETARCH}

 COPY src /harbor/src
 WORKDIR /harbor/src/cmd/exporter
@@ -1,6 +1,7 @@
 services:
   log:
     image: goharbor/harbor-log:{{version}}
+    platform: {{platform}}
     container_name: harbor-log
     restart: always
     cap_drop:
@@ -24,6 +25,7 @@ services:
       - harbor
   registry:
     image: goharbor/registry-photon:{{reg_version}}
+    platform: {{platform}}
    container_name: registry
     restart: always
     cap_drop:
@@ -68,6 +70,7 @@ services:
         tag: "registry"
   registryctl:
     image: goharbor/harbor-registryctl:{{version}}
+    platform: {{platform}}
     container_name: registryctl
     env_file:
       - ./common/config/registryctl/env
@@ -112,6 +115,7 @@ services:
   {% if external_database == False %}
   postgresql:
     image: goharbor/harbor-db:{{version}}
+    platform: {{platform}}
     container_name: harbor-db
     restart: always
     cap_drop:
@@ -138,6 +142,7 @@ services:
   {% endif %}
   core:
     image: goharbor/harbor-core:{{version}}
+    platform: {{platform}}
     container_name: harbor-core
     env_file:
       - ./common/config/core/env
@@ -194,6 +199,7 @@ services:
         tag: "core"
   portal:
     image: goharbor/harbor-portal:{{version}}
+    platform: {{platform}}
     container_name: harbor-portal
     restart: always
     cap_drop:
@@ -227,6 +233,7 @@ services:

   jobservice:
     image: goharbor/harbor-jobservice:{{version}}
+    platform: {{platform}}
     container_name: harbor-jobservice
     env_file:
       - ./common/config/jobservice/env
@@ -265,6 +272,7 @@ services:
   {% if external_redis == False %}
   redis:
     image: goharbor/redis-photon:{{redis_version}}
+    platform: {{platform}}
     container_name: redis
     restart: always
     cap_drop:
@@ -287,6 +295,7 @@ services:
   {% endif %}
   proxy:
     image: goharbor/nginx-photon:{{version}}
+    platform: {{platform}}
     container_name: nginx
     restart: always
     cap_drop:
@@ -336,6 +345,7 @@ services:
   trivy-adapter:
     container_name: trivy-adapter
     image: goharbor/trivy-adapter-photon:{{trivy_adapter_version}}
+    platform: {{platform}}
     restart: always
     cap_drop:
       - ALL
@@ -375,6 +385,7 @@ services:
   {% if metric.enabled %}
   exporter:
     image: goharbor/harbor-exporter:{{version}}
+    platform: {{platform}}
     container_name: harbor-exporter
     env_file:
       - ./common/config/exporter/env
@@ -1,5 +1,5 @@
 import os
+import platform
 from g import templates_dir
 from .configs import parse_versions
 from .jinja import render_jinja
@@ -58,5 +58,11 @@ def prepare_docker_compose(configs, with_trivy):
     metric = configs.get('metric')
     if metric:
         rendering_variables['metric'] = metric
+
+    arch = platform.machine()
+    if arch in ("arm64", "aarch64"):
+        rendering_variables['platform'] = "linux/arm64"
+    else:
+        rendering_variables['platform'] = "linux/amd64"
+
     render_jinja(docker_compose_template_path, docker_compose_yml_path, mode=0o644, **rendering_variables)
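After prepare runs on the target host, each service in the rendered compose file should carry the platform value chosen from platform.machine(). A quick check, assuming a default install layout (the path below is a placeholder; adjust it to where your docker-compose.yml was generated):

# Verify the rendered platform entries on the host that ran prepare.
grep -n 'platform:' /path/to/harbor/docker-compose.yml
# On an arm64 host every service is expected to show: platform: linux/arm64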
@@ -18,6 +18,18 @@ function uploader {
 set +e

 docker ps
+# Ensure E2E engine is available on arm64
+if [ "$ARCH" = "arm64" ]; then
+  echo "Forcing use of locally built E2E engine on arm64"
+  if ! docker images | grep -q "harbor-e2e-engine.*latest-api"; then
+    echo "No local e2e image found, building..."
+    cd $DIR/../test-engine-image
+    chmod +x build.sh
+    ./build.sh api latest --load
+    docker tag goharbor/harbor-e2e-engine:dev-api goharbor/harbor-e2e-engine:latest-api
+    cd -
+  fi
+fi
 # run db auth api cases
 if [ "$1" = 'DB' ]; then
   docker run -i --privileged -v $DIR/../../:/drone -v $DIR/../:/ca -w /drone $E2E_IMAGE robot --exclude proxy_cache -v DOCKER_USER:"${DOCKER_USER}" -v DOCKER_PWD:${DOCKER_PWD} -v ip:$2 -v ip1: -v http_get_ca:false -v HARBOR_PASSWORD:${HARBOR_ADMIN_PASSWD} -v HARBOR_ADMIN:${HARBOR_ADMIN} /drone/tests/robot-cases/Group1-Nightly/Setup.robot /drone/tests/robot-cases/Group0-BAT/API_DB.robot
@@ -1,16 +1,15 @@
 #!/bin/bash
 set -x

 set -e

 function s3_to_https() {
     local s3_url="$1"

     if [[ "$s3_url" =~ ^s3://([^/]+)/(.+)$ ]]; then
         local bucket="${BASH_REMATCH[1]}"
         local path="${BASH_REMATCH[2]}"
-        # current s3 bucket is create in this region
         local region="us-west-1"
         echo "https://${bucket}.s3.${region}.amazonaws.com/${path}"
     else
         echo "Invalid S3 URL: $s3_url" >&2
@@ -20,26 +19,58 @@ function s3_to_https() {

 function uploader {
     converted_url=$(s3_to_https "s3://$2/$1")
     echo "download url $converted_url"
-    aws s3 cp $1 s3://$2/$1
+    aws s3 cp "$1" "s3://$2/$1"
 }

+# NEW: persist the repo list from this runner
+# Writes one line per repo (no tags), e.g. goharbor/harbor-core
+function saveRepoList() {
+    local outfile="$1"
+    docker images --format "{{.Repository}}" \
+        | grep '^goharbor/' \
+        | grep -v '\-base' \
+        | sort -u > "$outfile"
+    echo "Saved repo list to $outfile"
+    cat "$outfile"
+}
+
+# UPDATED: arch-aware publish
+# Usage: publishImage <branch> <version> <docker_user> <docker_pass> <arch>
+#   - main      - base_tag=dev
+#   - release-* - base_tag=<version>-dev
+# Pushes: <repo>:<base_tag>-<arch> (e.g. core:dev-amd64 / core:dev-arm64)
 function publishImage {
-    echo "Publishing images to Docker Hub..."
-    echo "The images on the host:"
-    # for main, will use 'dev' as the tag name
-    # for release-*, will use 'release-*-dev' as the tag name, like release-v1.8.0-dev
-    if [[ $1 == "main" ]]; then
-        image_tag=dev
-    fi
-    if [[ $1 == "release-"* ]]; then
-        image_tag=$2-dev
-    fi
-    # rename the images with tag "dev" and push to Docker Hub
-    docker images
-    docker login -u $3 -p $4
-    docker images | grep goharbor | grep -v "\-base" | sed -n "s|\(goharbor/[-._a-z0-9]*\)\s*\(.*$2\).*|docker tag \1:\2 \1:$image_tag;docker push \1:$image_tag|p" | bash
-    echo "Images are published successfully"
-    docker images
+    branch=$1
+    version=$2
+    user=$3
+    pass=$4
+    arch=$5
+
+    if [[ "$branch" == "main" ]]; then
+        base_tag="dev"
+    elif [[ "$branch" == release-* ]]; then
+        base_tag="${version}-dev"
+    else
+        base_tag="${version}"
+    fi
+
+    arch_tag="${base_tag}-${arch}"
+    echo "Publishing images for arch=${arch}; tag=${arch_tag}"
+
+    docker login -u "$user" -p "$pass"
+
+    # Retag & push every non-base goharbor image we built on this runner that already carries $version
+    docker images --format '{{.Repository}}:{{.Tag}}' \
+        | grep '^goharbor/' \
+        | grep -v '\-base' \
+        | awk -F: -v version="$version" -v tag="$arch_tag" '
+            $2 ~ version {
+                printf("docker tag %s:%s %s:%s\n", $1, $2, $1, tag);
+                printf("docker push %s:%s\n", $1, tag);
+            }' | bash
+
+    docker logout
+    echo "Done pushing arch tag: ${arch_tag}"
 }
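Illustrative invocations of the updated helpers, with placeholder values only (version, credentials, and output path are not taken from this change):

# Sketch: how the workflow uses the helpers from tests/ci/build_util.sh.
source tests/ci/build_util.sh

saveRepoList /tmp/_repos_arm64.txt   # writes e.g. "goharbor/harbor-core", one repo per line

# On an arm64 runner for the main branch this retags goharbor/* images that
# carry the build version and pushes them as <repo>:dev-arm64.
publishImage main 2.13.0 "$DOCKER_USER" "$DOCKER_PWD" arm64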
@@ -27,6 +27,12 @@ sudo -E env "PATH=$PATH" make go_check
 sudo ./tests/hostcfg.sh
 sudo ./tests/generateCerts.sh
 sudo make build -e BUILDTARGET="_build_db _build_registry _build_prepare" -e PULL_BASE_FROM_DOCKERHUB=false -e BUILDREG=true -e BUILDTRIVYADP=true
+if [ "$(uname -m)" = "aarch64" ] || [ "$(uname -m)" = "arm64" ]; then
+  echo "Building redis-photon locally for arm64"
+  sudo make -f make/photon/Makefile _build_redis \
+    -e BASEIMAGETAG=dev -e VERSIONTAG=dev \
+    -e PULL_BASE_FROM_DOCKERHUB=false -e BUILD_BASE=true
+fi
 docker run --rm -v /:/hostfs:z goharbor/prepare:dev gencert -p /etc/harbor/tls/internal
 sudo MAKEPATH=$(pwd)/make ./make/prepare
 sudo mkdir -p "/data/redis"
@@ -1,43 +1,59 @@
 FROM ubuntu:20.04 as tool_builder
+ARG TARGETARCH
 ENV TZ=Asia/Shanghai \
     DEBIAN_FRONTEND=noninteractive
 ENV LANG C.UTF-8
 WORKDIR /tool

-#RUN tdnf install -y \
+# base deps
 RUN apt-get update && apt-get install -y \
     build-essential \
     wget \
     git \
     tar \
-    #go
-    #ubuntu
     curl \
     libssl-dev \
-    uuid-dev
-    #ubuntu
+    uuid-dev \
+    && rm -rf /var/lib/apt/lists/*

+# Install Go (needed for cnab-to-oci and wasm-to-oci on arm64)
 RUN apt-get update && apt-get install -y software-properties-common && \
     add-apt-repository -y ppa:longsleep/golang-backports && \
     apt-get install -y golang-go

+ENV ARCH_LINUX=amd64
+ENV ARCH_UNDERSCORE=amd64
+
+# Allow overriding from buildx TARGETARCH, or detect via dpkg
+ARG TARGETARCH
+RUN set -eux; \
+    arch="${TARGETARCH:-$(dpkg --print-architecture)}"; \
+    case "$arch" in \
+        amd64|x86_64) ARCH_LINUX=amd64; ARCH_UNDERSCORE=amd64 ;; \
+        arm64|aarch64) ARCH_LINUX=arm64; ARCH_UNDERSCORE=arm64 ;; \
+        *) echo "Unsupported arch: $arch" >&2; exit 1 ;; \
+    esac; \
+    echo "ARCH_LINUX=$ARCH_LINUX" > /arch.env; \
+    echo "ARCH_UNDERSCORE=$ARCH_UNDERSCORE" >> /arch.env
+# Persist values into environment for all later layers
+ENV $(cat /arch.env | xargs)
 RUN pwd && mkdir /tool/binary && \
     # Install CONTAINERD
     CONTAINERD_VERSION=1.7.20 && \
-    wget https://github.com/containerd/containerd/releases/download/v$CONTAINERD_VERSION/containerd-$CONTAINERD_VERSION-linux-amd64.tar.gz && \
-    tar zxvf containerd-$CONTAINERD_VERSION-linux-amd64.tar.gz && \
-    cd bin && cp -f containerd ctr /tool/binary/ && \
-    # docker compose
+    wget https://github.com/containerd/containerd/releases/download/v$CONTAINERD_VERSION/containerd-$CONTAINERD_VERSION-linux-${ARCH_LINUX}.tar.gz && \
+    tar zxvf containerd-$CONTAINERD_VERSION-linux-${ARCH_LINUX}.tar.gz && \
+    cd bin && cp -f containerd ctr /tool/binary/ && cd .. && \
+    # docker compose (multi-arch from upstream)
     curl -L "https://github.com/docker/compose/releases/download/v2.29.1/docker-compose-$(uname -s)-$(uname -m)" -o /tool/binary/docker-compose && \
     chmod +x /tool/binary/docker-compose && \
     # Install helm
-    HELM_VERSION=3.15.3 && wget https://get.helm.sh/helm-v$HELM_VERSION-linux-amd64.tar.gz && \
-    tar zxvf helm-v$HELM_VERSION-linux-amd64.tar.gz && \
-    ls || pwd && \
-    mv linux-amd64/helm /tool/binary/helm && \
+    HELM_VERSION=3.15.3 && wget https://get.helm.sh/helm-v$HELM_VERSION-linux-${ARCH_LINUX}.tar.gz && \
+    tar zxvf helm-v$HELM_VERSION-linux-${ARCH_LINUX}.tar.gz && \
+    mv linux-${ARCH_LINUX}/helm /tool/binary/helm && \
     # Install ORAS
-    ORAS_VERSION=1.2.0 && curl -LO https://github.com/deislabs/oras/releases/download/v$ORAS_VERSION/oras_${ORAS_VERSION}_linux_amd64.tar.gz && \
+    ORAS_VERSION=1.2.0 && curl -LO https://github.com/deislabs/oras/releases/download/v$ORAS_VERSION/oras_${ORAS_VERSION}_linux_${ARCH_UNDERSCORE}.tar.gz && \
     mkdir -p oras-install/ && \
-    tar -zxf oras_${ORAS_VERSION}_*.tar.gz -C oras-install/ && \
+    tar -zxf oras_${ORAS_VERSION}_linux_${ARCH_UNDERSCORE}.tar.gz -C oras-install/ && \
     mv oras-install/oras /tool/binary/ && \
     # Install CNAB
     CNAB_PATH=$(go env GOPATH)/src/github.com/cnabio && \
@@ -48,22 +64,29 @@ RUN pwd && mkdir /tool/binary && \
     mv bin/cnab-to-oci /tool/binary/ && \
     # Install DIND
     DIND_COMMIT=3b5fac462d21ca164b3778647420016315289034 && \
-    wget "https://raw.githubusercontent.com/docker/docker/${DIND_COMMIT}/hack/dind" -O /tool/binary/dind \
-    && chmod +x /tool/binary/dind && \
+    wget "https://raw.githubusercontent.com/docker/docker/${DIND_COMMIT}/hack/dind" -O /tool/binary/dind && \
+    chmod +x /tool/binary/dind && \
     # Install wasm-to-oci
-    WASM_TO_OCI_VERSION=0.1.2 && wget https://github.com/engineerd/wasm-to-oci/releases/download/v${WASM_TO_OCI_VERSION}/linux-amd64-wasm-to-oci && \
-    chmod +x linux-amd64-wasm-to-oci && mv linux-amd64-wasm-to-oci /tool/binary/wasm-to-oci && \
+    WASM_TO_OCI_VERSION=0.1.2 && \
+    if [ "$ARCH_LINUX" = "amd64" ]; then \
+        wget https://github.com/engineerd/wasm-to-oci/releases/download/v${WASM_TO_OCI_VERSION}/linux-amd64-wasm-to-oci && \
+        chmod +x linux-amd64-wasm-to-oci && mv linux-amd64-wasm-to-oci /tool/binary/wasm-to-oci; \
+    else \
+        git clone --depth 1 --branch v${WASM_TO_OCI_VERSION} https://github.com/engineerd/wasm-to-oci.git /tmp/wasm-to-oci && \
+        cd /tmp/wasm-to-oci && GOOS=linux GOARCH=${ARCH_LINUX} go build -o /tool/binary/wasm-to-oci ./cmd/wasm-to-oci && \
+        cd /tool && rm -rf /tmp/wasm-to-oci; \
+    fi && \
     # Install imgpkg
-    IMGPKG_VERSION=0.43.0 && wget https://github.com/vmware-tanzu/carvel-imgpkg/releases/download/v$IMGPKG_VERSION/imgpkg-linux-amd64 && \
-    mv imgpkg-linux-amd64 /tool/binary/imgpkg && chmod +x /tool/binary/imgpkg && \
+    IMGPKG_VERSION=0.43.0 && wget https://github.com/vmware-tanzu/carvel-imgpkg/releases/download/v$IMGPKG_VERSION/imgpkg-linux-${ARCH_LINUX} && \
+    mv imgpkg-linux-${ARCH_LINUX} /tool/binary/imgpkg && chmod +x /tool/binary/imgpkg && \
     # Install cosign
-    COSIGN_VERSION=2.4.0 && wget https://github.com/sigstore/cosign/releases/download/v$COSIGN_VERSION/cosign-linux-amd64 && \
-    mv cosign-linux-amd64 /tool/binary/cosign && chmod +x /tool/binary/cosign && \
-    # # Install notation
-    NOTATION_VERSION=1.1.1 && wget https://github.com/notaryproject/notation/releases/download/v$NOTATION_VERSION/notation_${NOTATION_VERSION}_linux_amd64.tar.gz && \
-    tar zxvf notation_${NOTATION_VERSION}_linux_amd64.tar.gz && \
+    COSIGN_VERSION=2.4.0 && wget https://github.com/sigstore/cosign/releases/download/v$COSIGN_VERSION/cosign-linux-${ARCH_LINUX} && \
+    mv cosign-linux-${ARCH_LINUX} /tool/binary/cosign && chmod +x /tool/binary/cosign && \
+    # Install notation
+    NOTATION_VERSION=1.1.1 && wget https://github.com/notaryproject/notation/releases/download/v$NOTATION_VERSION/notation_${NOTATION_VERSION}_linux_${ARCH_UNDERSCORE}.tar.gz && \
+    tar zxvf notation_${NOTATION_VERSION}_linux_${ARCH_UNDERSCORE}.tar.gz && \
     mv notation /tool/binary/notation && chmod +x /tool/binary/notation && \
     pwd

 RUN cd /tool/binary/ && tar czvf tools.tar.gz * && cp tools.tar.gz /tool
 # --- End of base file ---
@@ -28,5 +28,12 @@ $CMD_BASE $SRC_FILE >> $DST_FILE

 echo "Starting to build image ..."
 TARGET_IMAGE=goharbor/harbor-e2e-engine:${VERSION}-${IMAGE_FOR}
-docker build -t $TARGET_IMAGE .
+
+ARCH=$(uname -m)
+if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then
+  echo "Detected ARM64 host, building image for linux/arm64"
+  docker buildx build --platform linux/arm64 --load -t $TARGET_IMAGE .
+else
+  echo "Detected AMD64 host (or default), building image normally"
+  docker build -t $TARGET_IMAGE .
+fi
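An optional sanity check after build.sh finishes on an ARM host; the tag below is only illustrative (it mirrors the latest-api tag used by the CI script above), so adjust it to the TARGET_IMAGE your arguments produced:

# Confirm the locally loaded E2E engine image is actually arm64.
docker image inspect --format '{{.Os}}/{{.Architecture}}' goharbor/harbor-e2e-engine:latest-api
# Expected: linux/arm64 when built with --platform linux/arm64 --load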
@@ -1,15 +1,24 @@
 FROM python:3.8.5-slim

-ENV HELM_EXPERIMENTAL_OCI=1
-ENV REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
+ENV HELM_EXPERIMENTAL_OCI=1 \
+    REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
+
+ARG TARGETARCH
 COPY ./migrate_chart.py ./migrate_chart.sh /
-ADD https://get.helm.sh/helm-v3.9.1-linux-amd64.tar.gz /

-RUN tar zxvf /helm-v3.9.1-linux-amd64.tar.gz && \
-    pip install click==7.1.2 && \
-    pip install requests==2.24.0 && \
-    pip install pyyaml && \
+# Fetch correct Helm binary for the architecture
+RUN set -e; \
+    case "${TARGETARCH:-amd64}" in \
+        amd64) HELM_URL="https://get.helm.sh/helm-v3.9.1-linux-amd64.tar.gz" ;; \
+        arm64) HELM_URL="https://get.helm.sh/helm-v3.9.1-linux-arm64.tar.gz" ;; \
+        *) echo "Unsupported arch: ${TARGETARCH}" >&2; exit 1 ;; \
+    esac; \
+    curl -fsSL "$HELM_URL" -o /tmp/helm.tgz; \
+    tar -xzf /tmp/helm.tgz -C /tmp; \
+    cp /tmp/linux-*/helm /usr/local/bin/helm; \
+    chmod +x /usr/local/bin/helm; \
+    rm -rf /tmp/helm*; \
+    pip install --no-cache-dir click==7.1.2 requests==2.24.0 pyyaml; \
     chmod +x /migrate_chart.sh ./migrate_chart.py

-ENTRYPOINT [ "/migrate_chart.py" ]
+ENTRYPOINT ["/migrate_chart.py"]
@@ -7,12 +7,23 @@ import os
 from pathlib import Path
 import tarfile
 import yaml
+import platform
+import shutil
 import click
 import requests

 MIGRATE_CHART_SCRIPT = '/migrate_chart.sh'
-HELM_CMD = '/linux-amd64/helm'
+_helm_env = os.environ.get('HELM_CMD')
+if _helm_env:
+    HELM_CMD = _helm_env
+else:
+    _arch = platform.machine().lower()
+    _candidates = ['/linux-amd64/helm', 'helm']
+    if _arch in ('arm64', 'aarch64'):
+        _candidates = ['/linux-arm64/helm', 'helm']
+    HELM_CMD = next((c for c in _candidates
+                     if (shutil.which(c) is not None) or Path(c).exists()), _candidates[-1])
+
 CA_UPDATE_CMD = 'update-ca-certificates'
 CHART_URL_PATTERN = "https://{host}/api/v2.0/projects/{project}/repositories/{name}/artifacts/{version}"
 CHART_SOURCE_DIR = Path('/chart_storage')
@@ -50,34 +50,39 @@ function generateReleaseNotes {
 }

 function publishImages {
-    # Create curTag and push it to the goharbor namespace of dockerhub
+    # Push images to Docker Hub; if $ARCH is set, push :${curTag}-$ARCH (per-arch)
     local curTag=$1
     local baseTag=$2
     local dockerHubUser=$3
     local dockerHubPassword=$4
     local images=${@:5}
+    local suffix=""
+    if [ -n "${ARCH:-}" ]; then suffix="-$ARCH"; fi
     docker login -u $dockerHubUser -p $dockerHubPassword
     for image in $images
     do
         echo "push image: $image"
-        docker tag $image:$baseTag $image:$curTag
-        retry 5 docker push $image:$curTag
+        docker tag $image:$baseTag $image:${curTag}${suffix}
+        retry 5 docker push $image:${curTag}${suffix}
     done
     docker logout
 }

 function publishPackages {
+    # Push images to GHCR; if $ARCH is set, push :${curTag}-$ARCH (per-arch)
     local curTag=$1
     local baseTag=$2
     local ghcrUser=$3
     local ghcrPassword=$4
     local images=${@:5}
+    local suffix=""
+    if [ -n "${ARCH:-}" ]; then suffix="-$ARCH"; fi
     docker login ghcr.io -u $ghcrUser -p $ghcrPassword
     for image in $images
     do
         echo "push image: $image"
-        docker tag $image:$baseTag "ghcr.io/"$image:$curTag
-        retry 5 docker push "ghcr.io/"$image:$curTag
+        docker tag $image:$baseTag "ghcr.io/${image}:${curTag}${suffix}"
+        retry 5 docker push "ghcr.io/${image}:${curTag}${suffix}"
     done
     docker logout ghcr.io
 }
@@ -85,7 +90,7 @@ function publishPackages {
 function retry {
     local -r -i max="$1"; shift
     local -i n=1
-    until "$@"
+    until "$?"
     do
         if ((n==max))
         then
@@ -1,17 +1,21 @@
+# tools/spectral/Dockerfile
+
 ARG NODE
 FROM ${NODE}

 ARG SPECTRAL_VERSION

+# Map arch → Spectral binary name and fetch
 RUN case "$(dpkg --print-architecture)" in \
         amd64) ARCH="x64" ;; \
         arm64) ARCH="arm64" ;; \
-        *) echo "Unsupported architecture" && exit 1 ;; \
+        *) echo "Unsupported architecture: $(dpkg --print-architecture)" && exit 1 ;; \
     esac && \
     echo "Architecture: $ARCH" && \
     echo "Spectral version: $SPECTRAL_VERSION" && \
     URL="https://github.com/stoplightio/spectral/releases/download/$SPECTRAL_VERSION/spectral-linux-$ARCH" && \
     echo "URL: $URL" && \
-    curl -fsSL -o /usr/bin/spectral $URL && chmod +x /usr/bin/spectral
+    curl -fsSL -o /usr/bin/spectral "$URL" && chmod +x /usr/bin/spectral

 ENTRYPOINT ["/usr/bin/spectral"]
 CMD ["--version"]