Remove unused GitHub Actions workflow files
This commit is contained in:
parent
80845dec41
commit
ea54e2e330
|
@ -1,175 +0,0 @@
|
|||
## The contents of this file are subject to the Mozilla Public License
|
||||
## Version 1.1 (the "License"); you may not use this file except in
|
||||
## compliance with the License. You may obtain a copy of the License
|
||||
## at http://www.mozilla.org/MPL/
|
||||
#
|
||||
## Software distributed under the License is distributed on an "AS IS"
|
||||
## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
|
||||
## the License for the specific language governing rights and
|
||||
## limitations under the License.
|
||||
#
|
||||
## The Original Code is RabbitMQ.
|
||||
#
|
||||
## The Initial Developer of the Original Code is GoPivotal, Inc.
|
||||
## Copyright (c) 2018-2019 Pivotal Software, Inc. All rights reserved.
|
||||
|
||||
FROM buildpack-deps:stretch
|
||||
|
||||
ENV LANG='C.UTF-8'
|
||||
|
||||
# Enable backports.
|
||||
RUN echo 'deb http://httpredir.debian.org/debian stretch-backports main' \
|
||||
>> /etc/apt/sources.list.d/backports.list
|
||||
|
||||
# Prerequisites to mess with packages.
|
||||
RUN apt-get clean && \
|
||||
apt-get update && \
|
||||
apt-get install -y -V --no-install-recommends \
|
||||
apt-transport-https \
|
||||
ca-certificates \
|
||||
wget \
|
||||
debconf-utils \
|
||||
gnupg
|
||||
|
||||
# Our own rabbitmq-erlang repository on Bintray to take Erlang and Elixir.
|
||||
RUN echo 'deb https://dl.bintray.com/rabbitmq-erlang/debian stretch erlang' > /etc/apt/sources.list.d/rabbitmq-erlang.list && \
|
||||
wget -O- https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc | apt-key add -
|
||||
|
||||
# We need to set an APT preference to make sure $ERLANG_VERSION is
|
||||
# used for all erlang* packages. Without this, apt-get(1) would try to
|
||||
# install dependencies using the latest version. This would conflict
|
||||
# with the strict pinning in all packages, and thus fail.
|
||||
RUN ERLANG_VERSION=1:22.3.4.2-1 && \
|
||||
echo 'Package: erlang*' > /etc/apt/preferences.d/erlang && \
|
||||
echo "Pin: version $ERLANG_VERSION" >> /etc/apt/preferences.d/erlang && \
|
||||
echo 'Pin-Priority: 1000' >> /etc/apt/preferences.d/erlang
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Packages to build RabbitMQ.
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
RUN apt-get clean && \
|
||||
apt-get update && \
|
||||
apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
build-essential \
|
||||
curl \
|
||||
erlang-base-hipe \
|
||||
erlang-nox \
|
||||
erlang-dev \
|
||||
erlang-src \
|
||||
erlang-common-test \
|
||||
erlang-dialyzer \
|
||||
libcurl3-gnutls \
|
||||
man \
|
||||
mandoc \
|
||||
openssh-client \
|
||||
rsync \
|
||||
xmlto \
|
||||
xsltproc \
|
||||
zip \
|
||||
unzip
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
-t stretch-backports \
|
||||
git
|
||||
|
||||
# Verify the version of the installed Erlang packages.
|
||||
# The version was pinned above, but if that specific version is unavailable,
|
||||
# the latest version will be installed, which we don't want.
|
||||
RUN set -xe \
|
||||
&& installed_version=$(dpkg -s erlang-nox | grep -E '^Version:' | awk '{ print $2; }') \
|
||||
&& wanted_version=$(awk '/^Pin:/ { print $3; }' < /etc/apt/preferences.d/erlang) \
|
||||
&& test "$installed_version" = "$wanted_version"
|
||||
|
||||
RUN set -xe \
|
||||
&& REBAR3_VERSION="3.14.1" \
|
||||
&& REBAR3_DOWNLOAD_URL="https://github.com/erlang/rebar3/archive/${REBAR3_VERSION}.tar.gz" \
|
||||
&& REBAR3_DOWNLOAD_SHA256="b01275b6cbdb354dcf9ed686fce2b5f9dfdd58972ded9e970e31b9215a8521f2" \
|
||||
&& mkdir -p /usr/src/rebar3-src \
|
||||
&& curl -fSL -o rebar3-src.tar.gz "$REBAR3_DOWNLOAD_URL" \
|
||||
&& echo "$REBAR3_DOWNLOAD_SHA256 rebar3-src.tar.gz" | sha256sum -c - \
|
||||
&& tar -xzf rebar3-src.tar.gz -C /usr/src/rebar3-src --strip-components=1 \
|
||||
&& rm rebar3-src.tar.gz \
|
||||
&& cd /usr/src/rebar3-src \
|
||||
&& HOME=$PWD ./bootstrap \
|
||||
&& install -v ./rebar3 /usr/local/bin/ \
|
||||
&& rm -rf /usr/src/rebar3-src
|
||||
|
||||
RUN set -xe \
|
||||
&& ELIXIR_VERSION="v1.10.4" \
|
||||
&& ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/releases/download/${ELIXIR_VERSION}/Precompiled.zip" \
|
||||
&& ELIXIR_DOWNLOAD_SHA256="2ec9891ec75a7cbd22396c6e7874b912b526d5a4bfd3c27206eee2a198b250a5" \
|
||||
&& curl -fSL -o elixir-precompiled.zip $ELIXIR_DOWNLOAD_URL \
|
||||
&& echo "$ELIXIR_DOWNLOAD_SHA256 elixir-precompiled.zip" | sha256sum -c - \
|
||||
&& unzip -d /usr/local elixir-precompiled.zip \
|
||||
&& rm elixir-precompiled.zip
|
||||
|
||||
# Put erl_call(1) in the $PATH.
|
||||
RUN ln -fs /usr/lib/erlang/lib/erl_interface-*/bin/erl_call /usr/bin/erl_call
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Packages used to test RabbitMQ.
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
RUN echo 'slapd slapd/internal/generated_adminpw password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/internal/adminpw password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/password2 password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/password1 password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/backend select HDB' | debconf-set-selections
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
ldap-utils \
|
||||
netcat \
|
||||
python-dev \
|
||||
python-simplejson \
|
||||
python3 \
|
||||
slapd \
|
||||
daemonize
|
||||
|
||||
RUN git clone --depth 1 https://github.com/bats-core/bats-core.git && \
|
||||
cd bats-core && \
|
||||
./install.sh /usr && \
|
||||
cd .. && \
|
||||
rm -rf bats-core
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
ca-certificates-java \
|
||||
openjdk-8-jre-headless \
|
||||
openjdk-8-jdk-headless
|
||||
|
||||
# Install Java tools separately to be sure it picks the version of
|
||||
# OpenJDK installed above.
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
maven
|
||||
|
||||
# .NET Core 2.0 requirements (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
apt-transport-https
|
||||
|
||||
# .NET Core 2.0 (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.gpg && \
|
||||
mv microsoft.gpg /etc/apt/trusted.gpg.d/microsoft.gpg && \
|
||||
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/dotnetdev.list'
|
||||
|
||||
# .NET Core 2.0 (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN apt-get update && \
|
||||
apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
gettext \
|
||||
libunwind8 \
|
||||
dotnet-sdk-5.*
|
||||
|
||||
# .NET Core 2.0 warmup
|
||||
RUN mkdir warmup \
|
||||
&& cd warmup \
|
||||
&& dotnet new console \
|
||||
&& cd .. \
|
||||
&& rm -rf warmup \
|
||||
&& rm -rf /tmp/NuGetScratch
|
||||
|
||||
# Terraform, used to run some testsuites on AWS.
|
||||
RUN TERRAFORM_VERSION=0.12.24 && \
|
||||
wget -O terraform.zip "https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip" && \
|
||||
unzip terraform.zip && \
|
||||
mv terraform /usr/bin && \
|
||||
rm -f terraform.zip && \
|
||||
terraform --version
|
|
@ -1,175 +0,0 @@
|
|||
## The contents of this file are subject to the Mozilla Public License
|
||||
## Version 1.1 (the "License"); you may not use this file except in
|
||||
## compliance with the License. You may obtain a copy of the License
|
||||
## at http://www.mozilla.org/MPL/
|
||||
#
|
||||
## Software distributed under the License is distributed on an "AS IS"
|
||||
## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
|
||||
## the License for the specific language governing rights and
|
||||
## limitations under the License.
|
||||
#
|
||||
## The Original Code is RabbitMQ.
|
||||
#
|
||||
## The Initial Developer of the Original Code is GoPivotal, Inc.
|
||||
## Copyright (c) 2018-2019 Pivotal Software, Inc. All rights reserved.
|
||||
|
||||
FROM buildpack-deps:stretch
|
||||
|
||||
ENV LANG='C.UTF-8'
|
||||
|
||||
# Enable backports.
|
||||
RUN echo 'deb http://httpredir.debian.org/debian stretch-backports main' \
|
||||
>> /etc/apt/sources.list.d/backports.list
|
||||
|
||||
# Prerequisites to mess with packages.
|
||||
RUN apt-get clean && \
|
||||
apt-get update && \
|
||||
apt-get install -y -V --no-install-recommends \
|
||||
apt-transport-https \
|
||||
ca-certificates \
|
||||
wget \
|
||||
debconf-utils \
|
||||
gnupg
|
||||
|
||||
# Our own rabbitmq-erlang repository on Bintray to take Erlang and Elixir.
|
||||
RUN echo 'deb https://dl.bintray.com/rabbitmq-erlang/debian stretch erlang' > /etc/apt/sources.list.d/rabbitmq-erlang.list && \
|
||||
wget -O- https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc | apt-key add -
|
||||
|
||||
# We need to set an APT preference to make sure $ERLANG_VERSION is
|
||||
# used for all erlang* packages. Without this, apt-get(1) would try to
|
||||
# install dependencies using the latest version. This would conflict
|
||||
# with the strict pinning in all packages, and thus fail.
|
||||
RUN ERLANG_VERSION=1:23.1-1 && \
|
||||
echo 'Package: erlang*' > /etc/apt/preferences.d/erlang && \
|
||||
echo "Pin: version $ERLANG_VERSION" >> /etc/apt/preferences.d/erlang && \
|
||||
echo 'Pin-Priority: 1000' >> /etc/apt/preferences.d/erlang
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Packages to build RabbitMQ.
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
RUN apt-get clean && \
|
||||
apt-get update && \
|
||||
apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
build-essential \
|
||||
curl \
|
||||
erlang-base-hipe \
|
||||
erlang-nox \
|
||||
erlang-dev \
|
||||
erlang-src \
|
||||
erlang-common-test \
|
||||
erlang-dialyzer \
|
||||
libcurl3-gnutls \
|
||||
man \
|
||||
mandoc \
|
||||
openssh-client \
|
||||
rsync \
|
||||
xmlto \
|
||||
xsltproc \
|
||||
zip \
|
||||
unzip
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
-t stretch-backports \
|
||||
git
|
||||
|
||||
# Verify the version of the installed Erlang packages.
|
||||
# The version was pinned above, but if that specific version is unavailable,
|
||||
# the latest version will be installed, which we don't want.
|
||||
RUN set -xe \
|
||||
&& installed_version=$(dpkg -s erlang-nox | grep -E '^Version:' | awk '{ print $2; }') \
|
||||
&& wanted_version=$(awk '/^Pin:/ { print $3; }' < /etc/apt/preferences.d/erlang) \
|
||||
&& test "$installed_version" = "$wanted_version"
|
||||
|
||||
RUN set -xe \
|
||||
&& REBAR3_VERSION="3.14.1" \
|
||||
&& REBAR3_DOWNLOAD_URL="https://github.com/erlang/rebar3/archive/${REBAR3_VERSION}.tar.gz" \
|
||||
&& REBAR3_DOWNLOAD_SHA256="b01275b6cbdb354dcf9ed686fce2b5f9dfdd58972ded9e970e31b9215a8521f2" \
|
||||
&& mkdir -p /usr/src/rebar3-src \
|
||||
&& curl -fSL -o rebar3-src.tar.gz "$REBAR3_DOWNLOAD_URL" \
|
||||
&& echo "$REBAR3_DOWNLOAD_SHA256 rebar3-src.tar.gz" | sha256sum -c - \
|
||||
&& tar -xzf rebar3-src.tar.gz -C /usr/src/rebar3-src --strip-components=1 \
|
||||
&& rm rebar3-src.tar.gz \
|
||||
&& cd /usr/src/rebar3-src \
|
||||
&& HOME=$PWD ./bootstrap \
|
||||
&& install -v ./rebar3 /usr/local/bin/ \
|
||||
&& rm -rf /usr/src/rebar3-src
|
||||
|
||||
RUN set -xe \
|
||||
&& ELIXIR_VERSION="v1.10.4" \
|
||||
&& ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/releases/download/${ELIXIR_VERSION}/Precompiled.zip" \
|
||||
&& ELIXIR_DOWNLOAD_SHA256="2ec9891ec75a7cbd22396c6e7874b912b526d5a4bfd3c27206eee2a198b250a5" \
|
||||
&& curl -fSL -o elixir-precompiled.zip $ELIXIR_DOWNLOAD_URL \
|
||||
&& echo "$ELIXIR_DOWNLOAD_SHA256 elixir-precompiled.zip" | sha256sum -c - \
|
||||
&& unzip -d /usr/local elixir-precompiled.zip \
|
||||
&& rm elixir-precompiled.zip
|
||||
|
||||
# Put erl_call(1) in the $PATH.
|
||||
RUN ln -fs /usr/lib/erlang/lib/erl_interface-*/bin/erl_call /usr/bin/erl_call
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Packages used to test RabbitMQ.
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
RUN echo 'slapd slapd/internal/generated_adminpw password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/internal/adminpw password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/password2 password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/password1 password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/backend select HDB' | debconf-set-selections
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
ldap-utils \
|
||||
netcat \
|
||||
python-dev \
|
||||
python-simplejson \
|
||||
python3 \
|
||||
slapd \
|
||||
daemonize
|
||||
|
||||
RUN git clone --depth 1 https://github.com/bats-core/bats-core.git && \
|
||||
cd bats-core && \
|
||||
./install.sh /usr && \
|
||||
cd .. && \
|
||||
rm -rf bats-core
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
ca-certificates-java \
|
||||
openjdk-8-jre-headless \
|
||||
openjdk-8-jdk-headless
|
||||
|
||||
# Install Java tools separately to be sure it picks the version of
|
||||
# OpenJDK installed above.
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
maven
|
||||
|
||||
# .NET Core 2.0 requirements (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
apt-transport-https
|
||||
|
||||
# .NET Core 2.0 (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.gpg && \
|
||||
mv microsoft.gpg /etc/apt/trusted.gpg.d/microsoft.gpg && \
|
||||
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/dotnetdev.list'
|
||||
|
||||
# .NET Core 2.0 (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN apt-get update && \
|
||||
apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
gettext \
|
||||
libunwind8 \
|
||||
dotnet-sdk-5.*
|
||||
|
||||
# .NET Core 2.0 warmup
|
||||
RUN mkdir warmup \
|
||||
&& cd warmup \
|
||||
&& dotnet new console \
|
||||
&& cd .. \
|
||||
&& rm -rf warmup \
|
||||
&& rm -rf /tmp/NuGetScratch
|
||||
|
||||
# Terraform, used to run some testsuites on AWS.
|
||||
RUN TERRAFORM_VERSION=0.12.24 && \
|
||||
wget -O terraform.zip "https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip" && \
|
||||
unzip terraform.zip && \
|
||||
mv terraform /usr/bin && \
|
||||
rm -f terraform.zip && \
|
||||
terraform --version
|
|
@ -1,43 +0,0 @@
|
|||
ARG ERLANG_VERSION
|
||||
|
||||
FROM eu.gcr.io/cf-rabbitmq-core/ci-base:${ERLANG_VERSION}
|
||||
|
||||
ARG GITHUB_RUN_ID
|
||||
ARG BUILDEVENT_APIKEY
|
||||
|
||||
ARG GITHUB_SHA
|
||||
|
||||
ARG base_rmq_ref
|
||||
ARG current_rmq_ref
|
||||
|
||||
ARG RABBITMQ_VERSION
|
||||
|
||||
ENV GITHUB_RUN_ID=${GITHUB_RUN_ID}
|
||||
ENV GITHUB_SHA=${GITHUB_SHA}
|
||||
|
||||
ENV base_rmq_ref=${base_rmq_ref}
|
||||
ENV current_rmq_ref=${current_rmq_ref}
|
||||
|
||||
ENV ERLANG_VERSION=${ERLANG_VERSION}
|
||||
ENV RABBITMQ_VERSION=${RABBITMQ_VERSION}
|
||||
|
||||
ENV BUILDEVENT_CIPROVIDER=GitHubActions
|
||||
|
||||
WORKDIR /workspace/rabbitmq
|
||||
|
||||
COPY . .
|
||||
|
||||
ENV UNPRIVILEGED_USER=rabbitmq
|
||||
RUN useradd \
|
||||
--create-home \
|
||||
--comment 'CI unprivileged user' \
|
||||
${UNPRIVILEGED_USER}
|
||||
|
||||
RUN chown --recursive ${UNPRIVILEGED_USER} /workspace
|
||||
|
||||
USER ${UNPRIVILEGED_USER}
|
||||
|
||||
RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-prepare deps -- \
|
||||
make deps test-deps \
|
||||
RABBITMQ_VERSION=${RABBITMQ_VERSION}
|
|
@ -1,22 +0,0 @@
|
|||
ARG ERLANG_VERSION
|
||||
|
||||
FROM eu.gcr.io/cf-rabbitmq-core/erlang_elixir:${ERLANG_VERSION}
|
||||
|
||||
ENV ERLANG_VERSION=${ERLANG_VERSION}
|
||||
|
||||
RUN apt-get update && apt-get install -y rsync zip
|
||||
|
||||
RUN curl -L -o buildevents https://github.com/honeycombio/buildevents/releases/latest/download/buildevents-linux-amd64
|
||||
RUN chmod 755 buildevents
|
||||
RUN mv buildevents /usr/bin/
|
||||
|
||||
WORKDIR /workspace
|
||||
|
||||
COPY ci/scripts/fetch_secondary_umbrellas.sh .
|
||||
|
||||
# If we clone the monorepo at a ref when the monorepo was still rabbitmq-server,
|
||||
# then we just get rabbitmq-server (not the monorepo as it would have looked, had
|
||||
# it existed at that time). So for the time being, secondary umbrellas will derive
|
||||
# from rabbitmq-public-umbrella (as they always have)
|
||||
ARG SECONDARY_UMBRELLA_GITREFS
|
||||
RUN bash fetch_secondary_umbrellas.sh ${SECONDARY_UMBRELLA_GITREFS}
|
|
@ -1,19 +0,0 @@
|
|||
ARG IMAGE_TAG
|
||||
|
||||
FROM eu.gcr.io/cf-rabbitmq-core/ci:${IMAGE_TAG}
|
||||
|
||||
ARG BUILDEVENT_APIKEY
|
||||
ARG project
|
||||
|
||||
WORKDIR /workspace/rabbitmq/deps/${project}
|
||||
|
||||
RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} test-build -- \
|
||||
make test-build
|
||||
|
||||
RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} tests -- \
|
||||
make eunit \
|
||||
FULL= \
|
||||
FAIL_FAST=1 \
|
||||
SKIP_AS_ERROR=1
|
|
@ -1,198 +0,0 @@
|
|||
## The contents of this file are subject to the Mozilla Public License
|
||||
## Version 1.1 (the "License"); you may not use this file except in
|
||||
## compliance with the License. You may obtain a copy of the License
|
||||
## at http://www.mozilla.org/MPL/
|
||||
#
|
||||
## Software distributed under the License is distributed on an "AS IS"
|
||||
## basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
|
||||
## the License for the specific language governing rights and
|
||||
## limitations under the License.
|
||||
#
|
||||
## The Original Code is RabbitMQ.
|
||||
#
|
||||
## The Initial Developer of the Original Code is GoPivotal, Inc.
|
||||
## Copyright (c) 2018 Pivotal Software, Inc. All rights reserved.
|
||||
|
||||
FROM buildpack-deps:stretch
|
||||
|
||||
# We'll install the build dependencies for erlang-odbc along with the erlang
|
||||
# build process:
|
||||
ADD erlang-git-master/ /usr/src/erlang/
|
||||
RUN set -xe \
|
||||
&& ERLANG_VERSION=master \
|
||||
&& runtimeDeps='libodbc1 \
|
||||
libsctp1 \
|
||||
libwxgtk3.0-0v5' \
|
||||
&& buildDeps='unixodbc-dev \
|
||||
libsctp-dev \
|
||||
libwxgtk3.0-dev' \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y --no-install-recommends $runtimeDeps \
|
||||
&& apt-get install -y --no-install-recommends $buildDeps \
|
||||
&& export ERL_TOP="/usr/src/erlang" \
|
||||
&& cd $ERL_TOP \
|
||||
&& ./otp_build autoconf \
|
||||
&& ./configure \
|
||||
--enable-dirty-schedulers \
|
||||
&& make -j$(nproc) \
|
||||
&& make install \
|
||||
&& find /usr/local -name examples | xargs rm -rf \
|
||||
&& apt-get purge -y --auto-remove $buildDeps \
|
||||
&& rm -rf $ERL_TOP /var/lib/apt/lists/* \
|
||||
&& erl -version
|
||||
|
||||
CMD ["erl"]
|
||||
|
||||
# extra useful tools here: rebar & rebar3
|
||||
|
||||
RUN set -xe \
|
||||
&& REBAR_VERSION="2.6.4" \
|
||||
&& REBAR_DOWNLOAD_URL="https://github.com/rebar/rebar/archive/${REBAR_VERSION}.tar.gz" \
|
||||
&& REBAR_DOWNLOAD_SHA256="577246bafa2eb2b2c3f1d0c157408650446884555bf87901508ce71d5cc0bd07" \
|
||||
&& mkdir -p /usr/src/rebar-src \
|
||||
&& curl -fSL -o rebar-src.tar.gz "$REBAR_DOWNLOAD_URL" \
|
||||
&& echo "$REBAR_DOWNLOAD_SHA256 rebar-src.tar.gz" | sha256sum -c - \
|
||||
&& tar -xzf rebar-src.tar.gz -C /usr/src/rebar-src --strip-components=1 \
|
||||
&& rm rebar-src.tar.gz \
|
||||
&& cd /usr/src/rebar-src \
|
||||
&& ./bootstrap \
|
||||
&& install -v ./rebar /usr/local/bin/ \
|
||||
&& rm -rf /usr/src/rebar-src
|
||||
|
||||
RUN set -xe \
|
||||
&& REBAR3_VERSION="3.14.2" \
|
||||
&& REBAR3_DOWNLOAD_URL="https://github.com/erlang/rebar3/archive/${REBAR3_VERSION}.tar.gz" \
|
||||
&& REBAR3_DOWNLOAD_SHA256="19fcdc73debb90d405864f728e188cbc5b61c3939b911e58c0b59bf1619c4810" \
|
||||
&& mkdir -p /usr/src/rebar3-src \
|
||||
&& curl -fSL -o rebar3-src.tar.gz "$REBAR3_DOWNLOAD_URL" \
|
||||
&& echo "$REBAR3_DOWNLOAD_SHA256 rebar3-src.tar.gz" | sha256sum -c - \
|
||||
&& tar -xzf rebar3-src.tar.gz -C /usr/src/rebar3-src --strip-components=1 \
|
||||
&& rm rebar3-src.tar.gz \
|
||||
&& cd /usr/src/rebar3-src \
|
||||
&& HOME=$PWD ./bootstrap \
|
||||
&& install -v ./rebar3 /usr/local/bin/ \
|
||||
&& rm -rf /usr/src/rebar3-src
|
||||
|
||||
ENV LANG='C.UTF-8'
|
||||
|
||||
# Enable backports.
|
||||
RUN echo 'deb http://httpredir.debian.org/debian stretch-backports main' \
|
||||
>> /etc/apt/sources.list.d/backports.list
|
||||
|
||||
# Prerequisites to mess with packages.
|
||||
RUN apt-get clean && \
|
||||
apt-get update && \
|
||||
apt-get install -y -V --no-install-recommends \
|
||||
ca-certificates \
|
||||
wget \
|
||||
debconf-utils
|
||||
|
||||
# We need to set an APT preference to make sure $ERLANG_VERSION is
|
||||
# used for all erlang* packages. Without this, apt-get(1) would try to
|
||||
# install dependencies using the latest version. This would conflict
|
||||
# with the strict pinning in all packages, and thus fail.
|
||||
RUN echo 'Package: erlang*' > /etc/apt/preferences.d/erlang && \
|
||||
echo "Pin: version $ERLANG_VERSION" >> /etc/apt/preferences.d/erlang && \
|
||||
echo 'Pin-Priority: 1000' >> /etc/apt/preferences.d/erlang
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Packages to build RabbitMQ.
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
RUN apt-get clean && \
|
||||
apt-get update && \
|
||||
apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
build-essential \
|
||||
curl \
|
||||
git \
|
||||
libcurl3-gnutls \
|
||||
man \
|
||||
mandoc \
|
||||
openssh-client \
|
||||
rsync \
|
||||
unzip \
|
||||
xmlto \
|
||||
xsltproc \
|
||||
zip
|
||||
|
||||
RUN set -xe \
|
||||
&& ELIXIR_VERSION="v1.10.4" \
|
||||
&& ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/releases/download/${ELIXIR_VERSION}/Precompiled.zip" \
|
||||
&& ELIXIR_DOWNLOAD_SHA256="2ec9891ec75a7cbd22396c6e7874b912b526d5a4bfd3c27206eee2a198b250a5" \
|
||||
&& curl -fSL -o elixir-precompiled.zip $ELIXIR_DOWNLOAD_URL \
|
||||
&& echo "$ELIXIR_DOWNLOAD_SHA256 elixir-precompiled.zip" | sha256sum -c - \
|
||||
&& unzip -d /usr/local elixir-precompiled.zip \
|
||||
&& rm elixir-precompiled.zip
|
||||
|
||||
# Put erl_call(1) in the $PATH.
|
||||
RUN ln -fs /usr/local/lib/erlang/lib/erl_interface-*/bin/erl_call /usr/local/bin/erl_call
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Packages used to test RabbitMQ.
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
RUN echo 'slapd slapd/internal/generated_adminpw password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/internal/adminpw password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/password2 password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/password1 password rabbitmq' | debconf-set-selections && \
|
||||
echo 'slapd slapd/backend select HDB' | debconf-set-selections
|
||||
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
daemonize \
|
||||
ldap-utils \
|
||||
netcat \
|
||||
python-dev \
|
||||
python-simplejson \
|
||||
python3 \
|
||||
slapd
|
||||
|
||||
RUN git clone --depth 1 https://github.com/bats-core/bats-core.git && \
|
||||
cd bats-core && \
|
||||
./install.sh /usr && \
|
||||
cd .. && \
|
||||
rm -rf bats-core
|
||||
|
||||
# We take OpenJDK from the backports. We need to be explicit
|
||||
# on the repository and with ca-certificates-java, otherwise,
|
||||
# ca-certificates-java pulls OpenJDK 7 in.
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
ca-certificates-java \
|
||||
openjdk-8-jre-headless \
|
||||
openjdk-8-jdk-headless
|
||||
|
||||
# Install Java tools separately to be sure it picks the version of
|
||||
# OpenJDK installed above.
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
maven
|
||||
|
||||
# .NET Core 2.0 requirements (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
apt-transport-https
|
||||
|
||||
# .NET Core 2.0 (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN curl https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > microsoft.gpg && \
|
||||
mv microsoft.gpg /etc/apt/trusted.gpg.d/microsoft.gpg && \
|
||||
sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/dotnetdev.list'
|
||||
|
||||
# .NET Core 2.0 (https://www.microsoft.com/net/core#linuxdebian).
|
||||
RUN apt-get update && \
|
||||
apt-get install -y -V --fix-missing --no-install-recommends \
|
||||
gettext \
|
||||
libunwind8 \
|
||||
dotnet-sdk-5.*
|
||||
|
||||
# .NET Core 2.0 warmup
|
||||
RUN mkdir warmup \
|
||||
&& cd warmup \
|
||||
&& dotnet new console \
|
||||
&& cd .. \
|
||||
&& rm -rf warmup \
|
||||
&& rm -rf /tmp/NuGetScratch
|
||||
|
||||
# Terraform, used to run some testsuites on AWS.
|
||||
RUN TERRAFORM_VERSION=0.12.24 && \
|
||||
wget -O terraform.zip "https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip" && \
|
||||
unzip terraform.zip && \
|
||||
mv terraform /usr/bin && \
|
||||
rm -f terraform.zip && \
|
||||
terraform --version
|
|
@ -1,7 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
echo "Recording buildevents step finish for ${project} started at ${STEP_START}..."
|
||||
buildevents step ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} ${STEP_START} ${project}
|
||||
echo "done."
|
|
@ -1,28 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd /workspace/rabbitmq/deps/$project
|
||||
|
||||
! test -d ebin || touch ebin/*
|
||||
|
||||
trap 'catch $?' EXIT
|
||||
|
||||
catch() {
|
||||
if [ "$1" != "0" ]; then
|
||||
make ct-logs-archive && mv *-ct-logs-*.tar.xz /workspace/ct-logs/
|
||||
fi
|
||||
}
|
||||
|
||||
CMD=ct-${CT_SUITE}
|
||||
SECONDARY_UMBRELLA_ARGS=""
|
||||
if [[ "${SECONDARY_UMBRELLA_VERSION:-}" != "" ]]; then
|
||||
CMD=ct-${CT_SUITE}-mixed-${SECONDARY_UMBRELLA_VERSION}
|
||||
SECONDARY_UMBRELLA_ARGS="SECONDARY_UMBRELLA=/workspace/rabbitmq-${SECONDARY_UMBRELLA_VERSION} RABBITMQ_FEATURE_FLAGS="
|
||||
fi
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-${project} ${CMD} -- \
|
||||
make ct-${CT_SUITE} \
|
||||
FULL= \
|
||||
FAIL_FAST=1 \
|
||||
SKIP_AS_ERROR=1 ${SECONDARY_UMBRELLA_ARGS}
|
|
@ -1,8 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd /workspace/rabbitmq/deps/$project
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-dialyze ${project} -- \
|
||||
make dialyze
|
|
@ -1,45 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
refs="$@"
|
||||
|
||||
for version in ${refs}; do
|
||||
umbrella="umbrellas/$version"
|
||||
if ! test -d "$umbrella" ||
|
||||
! make -C "$umbrella/deps/rabbit" test-dist; then
|
||||
rm -rf "$umbrella"
|
||||
git config --global advice.detachedHead false
|
||||
git clone \
|
||||
https://github.com/rabbitmq/rabbitmq-public-umbrella.git \
|
||||
"$umbrella"
|
||||
# `make co` in the public umbrella will use files from rabbitmq-server
|
||||
# to know what to fetch, and these are now different post monorepo. So,
|
||||
# we must clone rabbitmq-server manually and check out $version before
|
||||
# we run `make co`
|
||||
mkdir -p "$umbrella"/deps
|
||||
git clone \
|
||||
https://github.com/rabbitmq/rabbitmq-server.git \
|
||||
"$umbrella"/deps/rabbit
|
||||
git -C "$umbrella"/deps/rabbit checkout "$version"
|
||||
make -C "$umbrella" co
|
||||
make -C "$umbrella" up BRANCH="$version"
|
||||
# To remove third-party deps which were checked out when the
|
||||
# projects were on the `master` branch. Thus, possibly not the
|
||||
# version pinning we expect. We update the Umbrella one last time
|
||||
# to fetch the correct third-party deps.
|
||||
make -C "$umbrella" clean-3rd-party-repos
|
||||
make -C "$umbrella" up
|
||||
make -C "$umbrella/deps/rabbit" test-dist
|
||||
rm -rf "$umbrella"/deps/rabbitmq_website
|
||||
rm -rf "$umbrella"/deps/rabbitmq_prometheus/docker
|
||||
rm -rf "$umbrella"/deps/*/{.git,test} "$umbrella"/.git
|
||||
fi
|
||||
done
|
||||
|
||||
for version in ${refs}; do
|
||||
umbrella="umbrellas/$version"
|
||||
mv ${umbrella} rabbitmq-${version}
|
||||
done
|
||||
|
||||
rm -fr umbrellas
|
|
@ -1,5 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
buildevents build ${GITHUB_RUN_ID} ${BUILD_START} ${BUILD_RESULT}
|
|
@ -1,49 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd /workspace/rabbitmq/deps/$project
|
||||
|
||||
trap 'catch $?' EXIT
|
||||
|
||||
SPAN_ID=${GITHUB_RUN_ID}-${project}
|
||||
|
||||
catch() {
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} stop-node -- \
|
||||
make stop-node -C ../.. \
|
||||
DEPS_DIR=/workspace/rabbitmq/deps \
|
||||
PLUGINS='rabbitmq_federation rabbitmq_stomp'
|
||||
|
||||
if [ "$1" != "0" ]; then
|
||||
tar -c -f - /tmp/rabbitmq-test-instances/*/log | \
|
||||
xz > /workspace/broker-logs/broker-logs.tar.xz
|
||||
fi
|
||||
|
||||
buildevents step ${GITHUB_RUN_ID} ${SPAN_ID} ${STEP_START} ${project}
|
||||
}
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} make -- \
|
||||
make DEPS_DIR=/workspace/rabbitmq/deps
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} start-background-broker -- \
|
||||
make start-background-broker \
|
||||
-C ../.. \
|
||||
DEPS_DIR=/workspace/rabbitmq/deps \
|
||||
PLUGINS='rabbitmq_federation rabbitmq_stomp'
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} rebar -- \
|
||||
mix local.rebar --force
|
||||
|
||||
# due to https://github.com/elixir-lang/elixir/issues/7699 we
|
||||
# "run" the tests, but skip them all, in order to trigger
|
||||
# compilation of all *_test.exs files before we actually run themq
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} compile-tests -- \
|
||||
make tests \
|
||||
MIX_TEST_OPTS="--exclude test" \
|
||||
DEPS_DIR=/workspace/rabbitmq/deps
|
||||
|
||||
# rabbitmq-diagnostics erlang-cookie-sources reads USER from then env
|
||||
export USER=$(whoami)
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} tests -- \
|
||||
make tests \
|
||||
DEPS_DIR=/workspace/rabbitmq/deps
|
|
@ -1,26 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd /workspace/rabbitmq/deps/$project
|
||||
|
||||
trap 'catch $?' EXIT
|
||||
|
||||
SPAN_ID=${GITHUB_RUN_ID}-${project}
|
||||
|
||||
catch() {
|
||||
if [ "$1" != "0" ]; then
|
||||
make ct-logs-archive && mv *-ct-logs-*.tar.xz /workspace/ct-logs/
|
||||
fi
|
||||
|
||||
buildevents step ${GITHUB_RUN_ID} ${SPAN_ID} ${STEP_START} ${project}
|
||||
}
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} test-build -- \
|
||||
make test-build
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${SPAN_ID} tests -- \
|
||||
make tests \
|
||||
FULL= \
|
||||
FAIL_FAST=1 \
|
||||
SKIP_AS_ERROR=1
|
|
@ -1,26 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd deps/${project}
|
||||
|
||||
trap 'catch $?' EXIT
|
||||
|
||||
catch() {
|
||||
rm expected_suites.txt actual_suites.txt
|
||||
}
|
||||
|
||||
touch expected_suites.txt
|
||||
for arg in "$@"; do
|
||||
echo "test/${arg}_SUITE.erl" >> expected_suites.txt
|
||||
done
|
||||
sort -o expected_suites.txt expected_suites.txt
|
||||
|
||||
touch actual_suites.txt
|
||||
for f in test/*_SUITE.erl; do
|
||||
echo "$f" >> actual_suites.txt
|
||||
done
|
||||
sort -o actual_suites.txt actual_suites.txt
|
||||
|
||||
set -x
|
||||
diff actual_suites.txt expected_suites.txt
|
|
@ -1,8 +0,0 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd /workspace/rabbitmq/deps/$project
|
||||
|
||||
buildevents cmd ${GITHUB_RUN_ID} ${GITHUB_RUN_ID}-xref ${project} -- \
|
||||
make xref
|
|
@ -1,8 +1,3 @@
|
|||
YTT ?= /usr/local/bin/ytt
|
||||
|
||||
$(YTT):
|
||||
$(error Please install ytt from https://get-ytt.io/)
|
||||
|
||||
VENDORED_COMPONENTS = rabbit_common \
|
||||
rabbit \
|
||||
amqp_client \
|
||||
|
@ -47,116 +42,6 @@ VENDORED_COMPONENTS = rabbit_common \
|
|||
rabbitmq_web_stomp \
|
||||
rabbitmq_web_stomp_examples
|
||||
|
||||
DEPS_YAML_FILE = workflow_sources/deps.yml
|
||||
|
||||
define dep_yaml_chunk
|
||||
$(eval SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(wildcard deps/$(1)/test/*_SUITE.erl)))))
|
||||
echo "\n- name: $(1)\n suites:$(if $(SUITES),$(foreach suite,$(SUITES),\n - name: $(suite)), [])" >> $(DEPS_YAML_FILE);
|
||||
endef
|
||||
|
||||
$(DEPS_YAML_FILE):
|
||||
@echo "#@data/values\n---\n#@overlay/match missing_ok=True\ndeps:" > $@
|
||||
@$(foreach dep,$(VENDORED_COMPONENTS),$(call dep_yaml_chunk,$(dep)))
|
||||
@cat $@ | git stripspace > $@.fixed && mv $@.fixed $@
|
||||
|
||||
.github/workflows/base-images.yaml: $(YTT) $(wildcard workflow_sources/base_image/*)
|
||||
ytt -f workflow_sources/base_image \
|
||||
-f workflow_sources/base_values.yml \
|
||||
--output-files /tmp
|
||||
cat /tmp/workflow.yml | sed s/a_magic_string_that_we_will_sed_to_on/on/ \
|
||||
> $@
|
||||
|
||||
.github/workflows/test-erlang-otp-%.yaml: \
|
||||
$(YTT) $(DEPS_YAML_FILE) workflow_sources/test-erlang-otp-%.yml $(wildcard workflow_sources/test/*)
|
||||
ytt -f workflow_sources/test \
|
||||
-f workflow_sources/base_values.yml \
|
||||
-f $(DEPS_YAML_FILE) \
|
||||
-f workflow_sources/test-erlang-otp-$*.yml \
|
||||
--output-files /tmp
|
||||
cat /tmp/test-erlang-otp-$*.yml | sed s/a_magic_string_that_we_will_sed_to_on/on/ \
|
||||
> $@
|
||||
|
||||
monorepo-actions: \
|
||||
.github/workflows/base-images.yaml \
|
||||
$(patsubst workflow_sources/%.yml,.github/workflows/%.yaml,$(wildcard workflow_sources/test-erlang-otp-*.yml))
|
||||
|
||||
DOCKER_REPO ?= eu.gcr.io/cf-rabbitmq-core
|
||||
|
||||
.PHONY: erlang-elixir-image-%
|
||||
erlang-elixir-image-%:
|
||||
docker build . \
|
||||
-f ci/dockerfiles/$*/erlang_elixir \
|
||||
-t $(DOCKER_REPO)/erlang_elixir:$*
|
||||
|
||||
.PHONY: ci-base-image-%
|
||||
ci-base-image-%:
|
||||
docker build . \
|
||||
-f ci/dockerfiles/ci-base \
|
||||
-t $(DOCKER_REPO)/ci-base:$* \
|
||||
--build-arg ERLANG_VERSION=$*
|
||||
|
||||
.PHONY: ci-base-images
|
||||
ci-base-images: ci-base-image-23.1
|
||||
|
||||
PUSHES = $(foreach v,$(ERLANG_VERSIONS),push-base-image-$(v))
|
||||
.PHONY: $(PUSHES)
|
||||
$(PUSHES):
|
||||
docker push $(DOCKER_REPO)/ci-base:$(subst push-base-image-,,$@)
|
||||
|
||||
.PHONY: push-base-images
|
||||
push-base-images: $(PUSHES)
|
||||
|
||||
LOCAL_CI_GOALS = $(foreach dep,$(filter-out rabbitmq_cli,$(VENDORED_COMPONENTS)),ci-$(dep))
|
||||
ERLANG_VERSION ?= 23.1
|
||||
SKIP_DIALYZE ?= False
|
||||
|
||||
TAG = erlang-$(ERLANG_VERSION)-rabbitmq-$(shell git rev-parse HEAD)$(shell git diff-index --quiet HEAD -- || echo -dirty)
|
||||
LOCAL_IMAGE = $(DOCKER_REPO)/ci:$(TAG)
|
||||
|
||||
.PHONY: local-ci-image
|
||||
local-ci-image:
|
||||
docker build . \
|
||||
-f ci/dockerfiles/ci \
|
||||
-t $(LOCAL_IMAGE) \
|
||||
--build-arg ERLANG_VERSION=$(ERLANG_VERSION) \
|
||||
--build-arg GITHUB_RUN_ID=none \
|
||||
--build-arg BUILDEVENT_APIKEY=$(BUILDEVENT_APIKEY) \
|
||||
--build-arg GITHUB_SHA=$$(git rev-parse HEAD) \
|
||||
--build-arg base_rmq_ref=master \
|
||||
--build-arg current_rmq_ref=$$(git rev-parse --abbrev-ref HEAD) \
|
||||
--build-arg RABBITMQ_VERSION=3.9.0
|
||||
|
||||
.PHONY: $(LOCAL_CI_GOALS)
|
||||
$(LOCAL_CI_GOALS): local-ci-image
|
||||
docker run --rm \
|
||||
--env project=$(subst ci-,,$@) \
|
||||
--env SKIP_DIALYZE=$(SKIP_DIALYZE) \
|
||||
--env GITHUB_RUN_ID=none \
|
||||
--env BUILDEVENT_APIKEY=$(BUILDEVENT_APIKEY) \
|
||||
--env STEP_START=$$(date +%s) \
|
||||
--volume /tmp/ct-logs:/ct-logs \
|
||||
--oom-score-adj -500 \
|
||||
$(LOCAL_IMAGE) \
|
||||
/workspace/rabbitmq/ci/scripts/tests.sh
|
||||
|
||||
ci-rabbitmq_cli: local-ci-image
|
||||
docker run --rm \
|
||||
--env project=$(subst ci-,,$@) \
|
||||
--env SKIP_DIALYZE=$(SKIP_DIALYZE) \
|
||||
--env GITHUB_RUN_ID=none \
|
||||
--env BUILDEVENT_APIKEY=$(BUILDEVENT_APIKEY) \
|
||||
--env STEP_START=$$(date +%s) \
|
||||
--volume /tmp/broker-logs:/broker-logs \
|
||||
$(LOCAL_IMAGE) \
|
||||
/workspace/rabbitmq/ci/scripts/rabbitmq_cli.sh
|
||||
|
||||
.PHONY: docker
|
||||
docker: local-ci-image
|
||||
docker run --rm -it \
|
||||
--oom-score-adj -500 \
|
||||
$(LOCAL_IMAGE) \
|
||||
/bin/bash
|
||||
|
||||
.PHONY: distclean-%
|
||||
distclean-%:
|
||||
$(MAKE) -C deps/$* distclean || echo "Failed to distclean $*"
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
# Overview
|
||||
|
||||
These are files used to build GitHub Actions workflows.
|
||||
|
||||
## Build
|
||||
|
||||
To generate the full workflow files in the `.github/` subdirectory:
|
||||
```
|
||||
# Change to base dir of the rabbitmq/rabbitmq-server clone
|
||||
cd ..
|
||||
make monorepo-actions
|
||||
```
|
||||
|
||||
## Customization
|
||||
|
||||
Sometimes when diagnosing a failed test suite, you only wish to run tests for that suite in GitHub Actions. To do so, follow these steps:
|
||||
|
||||
* Check out a new branch if you haven't already.
|
||||
* Remove everything but the suites you wish to run from `worflow_sources/deps.yml`. For instance, the following will only run `deps/rabbit/test/cluster_rename_SUITE.erl`:
|
||||
```
|
||||
#@data/values
|
||||
---
|
||||
#@overlay/match missing_ok=True
|
||||
deps:
|
||||
- name: rabbit
|
||||
test_suites_in_parallel: true
|
||||
suites:
|
||||
- name: cluster_rename
|
||||
time: 284
|
||||
```
|
||||
* Re-generate the workflow definitions:
|
||||
```
|
||||
make monorepo-actions
|
||||
```
|
||||
* Commit and push the changes.
|
|
@ -1,58 +0,0 @@
|
|||
#@ load("@ytt:data", "data")
|
||||
|
||||
#@ def to_build_args(d):
|
||||
#@ return ",".join(['{0}={1}'.format(k,d[k]) for k in d.keys()])
|
||||
#@ end
|
||||
|
||||
---
|
||||
name: Workflow Base Images
|
||||
#! https://github.com/k14s/ytt/issues/189
|
||||
a_magic_string_that_we_will_sed_to_on: [workflow_dispatch]
|
||||
jobs:
|
||||
ci-base:
|
||||
name: ci-base
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
erlang_version: #@ data.values.erlang_versions
|
||||
fail-fast: false
|
||||
steps:
|
||||
- name: CHECKOUT REPOSITORY
|
||||
uses: actions/checkout@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Cache Docker layers
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-buildx-
|
||||
- name: Login to GCR
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: eu.gcr.io
|
||||
username: _json_key
|
||||
password: ${{ secrets.GCR_JSON_KEY }}
|
||||
- name: CREATE ERLANG+ELIXIR IMAGE (${{ matrix.erlang_version }})
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: true
|
||||
file: ci/dockerfiles/${{ matrix.erlang_version }}/erlang_elixir
|
||||
tags: eu.gcr.io/cf-rabbitmq-core/erlang_elixir:${{ matrix.erlang_version }}
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache
|
||||
- name: CREATE BASE CI IMAGE (${{ matrix.erlang_version }})
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: true
|
||||
file: ci/dockerfiles/ci-base
|
||||
#@yaml/text-templated-strings
|
||||
build-args: |
|
||||
ERLANG_VERSION=${{ matrix.erlang_version }}
|
||||
SECONDARY_UMBRELLA_GITREFS=(@= ' '.join(data.values.secondary_umbrella_gitrefs) @)
|
||||
tags: eu.gcr.io/cf-rabbitmq-core/ci-base:${{ matrix.erlang_version }}
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache
|
|
@ -1,9 +0,0 @@
|
|||
#@data/values
|
||||
---
|
||||
base_rmq_ref: master
|
||||
secondary_umbrella_gitrefs:
|
||||
- v3.7.28
|
||||
- v3.8.9
|
||||
erlang_versions:
|
||||
- "22.3"
|
||||
- "23.1"
|
|
@ -1,393 +0,0 @@
|
|||
#@data/values
|
||||
---
|
||||
#@overlay/match missing_ok=True
|
||||
deps:
|
||||
|
||||
- name: rabbit_common
|
||||
suites:
|
||||
- name: rabbit_env
|
||||
- name: supervisor2
|
||||
- name: unit
|
||||
- name: unit_priority_queue
|
||||
- name: worker_pool
|
||||
|
||||
- name: rabbit
|
||||
#! 2021-01-19 setting 'test_suites_in_parallel' false due to lack of cache space - PJK
|
||||
test_suites_in_parallel: false
|
||||
suites:
|
||||
- name: amqqueue_backward_compatibility
|
||||
- name: backing_queue
|
||||
- name: channel_interceptor
|
||||
- name: channel_operation_timeout
|
||||
- name: cluster
|
||||
- name: cluster_rename
|
||||
- name: clustering_management
|
||||
- name: config_schema
|
||||
- name: confirms_rejects
|
||||
- name: consumer_timeout
|
||||
- name: crashing_queues
|
||||
- name: dead_lettering
|
||||
- name: definition_import
|
||||
- name: disconnect_detected_during_alarm
|
||||
- name: dynamic_ha
|
||||
- name: dynamic_qq
|
||||
- name: eager_sync
|
||||
- name: feature_flags
|
||||
- name: lazy_queue
|
||||
- name: list_consumers_sanity_check
|
||||
- name: list_queues_online_and_offline
|
||||
- name: maintenance_mode
|
||||
- name: many_node_ha
|
||||
- name: message_size_limit
|
||||
- name: metrics
|
||||
- name: mirrored_supervisor
|
||||
- name: msg_store
|
||||
- name: peer_discovery_classic_config
|
||||
- name: peer_discovery_dns
|
||||
- name: per_user_connection_channel_limit
|
||||
- name: per_user_connection_channel_limit_partitions
|
||||
- name: per_user_connection_channel_tracking
|
||||
- name: per_user_connection_tracking
|
||||
- name: per_vhost_connection_limit
|
||||
- name: per_vhost_connection_limit_partitions
|
||||
- name: per_vhost_msg_store
|
||||
- name: per_vhost_queue_limit
|
||||
- name: policy
|
||||
- name: priority_queue
|
||||
- name: priority_queue_recovery
|
||||
- name: product_info
|
||||
- name: proxy_protocol
|
||||
- name: publisher_confirms_parallel
|
||||
- name: queue_length_limits
|
||||
- name: queue_master_location
|
||||
- name: queue_parallel
|
||||
- name: queue_type
|
||||
- name: quorum_queue
|
||||
- name: rabbit_confirms
|
||||
- name: rabbit_core_metrics_gc
|
||||
- name: rabbit_fifo
|
||||
- name: rabbit_fifo_int
|
||||
- name: rabbit_fifo_prop
|
||||
- name: rabbit_fifo_v0
|
||||
- name: rabbit_msg_record
|
||||
- name: rabbit_stream_queue
|
||||
- name: rabbitmq_queues_cli_integration
|
||||
- name: rabbitmqctl_integration
|
||||
- name: rabbitmqctl_shutdown
|
||||
- name: signal_handling
|
||||
- name: simple_ha
|
||||
- name: single_active_consumer
|
||||
- name: sync_detection
|
||||
- name: term_to_binary_compat_prop
|
||||
- name: topic_permission
|
||||
- name: unit_access_control
|
||||
- name: unit_access_control_authn_authz_context_propagation
|
||||
- name: unit_access_control_credential_validation
|
||||
- name: unit_amqp091_content_framing
|
||||
- name: unit_amqp091_server_properties
|
||||
- name: unit_app_management
|
||||
- name: unit_cluster_formation_locking_mocks
|
||||
- name: unit_collections
|
||||
- name: unit_config_value_encryption
|
||||
- name: unit_connection_tracking
|
||||
- name: unit_credit_flow
|
||||
- name: unit_disk_monitor
|
||||
- name: unit_disk_monitor_mocks
|
||||
- name: unit_file_handle_cache
|
||||
- name: unit_gen_server2
|
||||
- name: unit_gm
|
||||
- name: unit_log_config
|
||||
- name: unit_log_management
|
||||
- name: unit_operator_policy
|
||||
- name: unit_pg_local
|
||||
- name: unit_plugin_directories
|
||||
- name: unit_plugin_versioning
|
||||
- name: unit_policy_validators
|
||||
- name: unit_priority_queue
|
||||
- name: unit_queue_consumers
|
||||
- name: unit_stats_and_metrics
|
||||
- name: unit_supervisor2
|
||||
- name: unit_vm_memory_monitor
|
||||
- name: upgrade_preparation
|
||||
- name: vhost
|
||||
|
||||
- name: amqp_client
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: system
|
||||
- name: unit
|
||||
|
||||
- name: amqp10_client
|
||||
suites:
|
||||
- name: msg
|
||||
- name: system
|
||||
|
||||
- name: amqp10_common
|
||||
suites:
|
||||
- name: binary_generator
|
||||
|
||||
- name: rabbitmq_amqp1_0
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: amqp10_client
|
||||
- name: command
|
||||
- name: proxy_protocol
|
||||
- name: system
|
||||
- name: unit
|
||||
|
||||
- name: rabbitmq_auth_backend_cache
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: rabbit_auth_backend_cache
|
||||
- name: rabbit_auth_cache
|
||||
|
||||
- name: rabbitmq_auth_backend_http
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: auth
|
||||
- name: config_schema
|
||||
- name: unit
|
||||
|
||||
- name: rabbitmq_auth_backend_ldap
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: system
|
||||
- name: unit
|
||||
|
||||
- name: rabbitmq_auth_backend_oauth2
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: add_uaa_key_command
|
||||
- name: scope
|
||||
- name: system
|
||||
- name: unit
|
||||
- name: wildcard_match
|
||||
|
||||
- name: rabbitmq_auth_mechanism_ssl
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_aws
|
||||
skip_dialyzer: true
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_cli
|
||||
skip_dialyzer: true
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_codegen
|
||||
skip_xref: true
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_consistent_hash_exchange
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbit_exchange_type_consistent_hash
|
||||
|
||||
- name: rabbitmq_event_exchange
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: system
|
||||
- name: unit
|
||||
|
||||
- name: rabbitmq_federation
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: exchange
|
||||
- name: federation_status_command
|
||||
- name: queue
|
||||
- name: rabbit_federation_status
|
||||
- name: restart_federation_link_command
|
||||
- name: unit
|
||||
- name: unit_inbroker
|
||||
|
||||
- name: rabbitmq_federation_management
|
||||
suites:
|
||||
- name: federation_mgmt
|
||||
|
||||
- name: rabbitmq_jms_topic_exchange
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rjms_topic_selector
|
||||
- name: rjms_topic_selector_unit
|
||||
- name: sjx_evaluation
|
||||
|
||||
- name: rabbitmq_management
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: cache
|
||||
- name: clustering
|
||||
- name: clustering_prop
|
||||
- name: config_schema
|
||||
- name: listener_config
|
||||
- name: rabbit_mgmt_http
|
||||
- name: rabbit_mgmt_http_health_checks
|
||||
- name: rabbit_mgmt_only_http
|
||||
- name: rabbit_mgmt_rabbitmqadmin
|
||||
- name: rabbit_mgmt_stats
|
||||
- name: rabbit_mgmt_test_db
|
||||
- name: rabbit_mgmt_test_unit
|
||||
- name: stats
|
||||
|
||||
- name: rabbitmq_management_agent
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: exometer_slide
|
||||
- name: metrics
|
||||
- name: rabbit_mgmt_gc
|
||||
- name: rabbit_mgmt_slide
|
||||
|
||||
- name: rabbitmq_mqtt
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: auth
|
||||
- name: cluster
|
||||
- name: command
|
||||
- name: config_schema
|
||||
- name: java
|
||||
- name: mqtt_machine
|
||||
- name: processor
|
||||
- name: proxy_protocol
|
||||
- name: reader
|
||||
- name: retainer
|
||||
- name: util
|
||||
|
||||
- name: rabbitmq_peer_discovery_common
|
||||
suites:
|
||||
- name: config_schema
|
||||
|
||||
- name: rabbitmq_peer_discovery_aws
|
||||
skip_dialyzer: true
|
||||
use_terraform: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: integration
|
||||
- name: rabbitmq_peer_discovery_aws
|
||||
|
||||
- name: rabbitmq_peer_discovery_k8s
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: rabbitmq_peer_discovery_k8s
|
||||
|
||||
- name: rabbitmq_peer_discovery_consul
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: rabbitmq_peer_discovery_consul
|
||||
|
||||
- name: rabbitmq_peer_discovery_etcd
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: system
|
||||
- name: unit
|
||||
|
||||
- name: rabbitmq_prometheus
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: rabbit_prometheus_http
|
||||
|
||||
- name: rabbitmq_random_exchange
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_recent_history_exchange
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: system
|
||||
|
||||
- name: rabbitmq_sharding
|
||||
skip_dialyzer: true
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_shovel
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: amqp10
|
||||
- name: amqp10_dynamic
|
||||
- name: amqp10_shovel
|
||||
- name: config
|
||||
- name: configuration
|
||||
- name: delete_shovel_command
|
||||
- name: dynamic
|
||||
- name: parameters
|
||||
- name: shovel_status_command
|
||||
|
||||
- name: rabbitmq_shovel_management
|
||||
suites:
|
||||
- name: http
|
||||
|
||||
- name: rabbitmq_stomp
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: amqqueue
|
||||
- name: command
|
||||
- name: config_schema
|
||||
- name: connections
|
||||
- name: frame
|
||||
- name: proxy_protocol
|
||||
- name: python
|
||||
- name: topic
|
||||
- name: util
|
||||
|
||||
- name: rabbitmq_stream
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: command
|
||||
- name: config_schema
|
||||
- name: rabbit_stream
|
||||
|
||||
- name: rabbitmq_stream_management
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: http
|
||||
|
||||
- name: rabbitmq_stream_prometheus
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: prometheus_http
|
||||
|
||||
- name: rabbitmq_top
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_tracing
|
||||
suites:
|
||||
- name: rabbit_tracing
|
||||
|
||||
- name: rabbitmq_trust_store
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: system
|
||||
|
||||
- name: rabbitmq_web_dispatch
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbit_web_dispatch
|
||||
- name: rabbit_web_dispatch_unit
|
||||
|
||||
- name: rabbitmq_web_mqtt
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: config_schema
|
||||
- name: proxy_protocol
|
||||
|
||||
- name: rabbitmq_web_mqtt_examples
|
||||
suites: []
|
||||
|
||||
- name: rabbitmq_web_stomp
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: amqp_stomp
|
||||
- name: config_schema
|
||||
- name: cowboy_websocket
|
||||
- name: proxy_protocol
|
||||
- name: raw_websocket
|
||||
- name: unit
|
||||
|
||||
- name: rabbitmq_web_stomp_examples
|
||||
suites: []
|
|
@ -1,18 +0,0 @@
|
|||
#@ load("@ytt:data", "data")
|
||||
#@ load("@ytt:template", "template")
|
||||
#@ load("util.star", "merge")
|
||||
#@ load("prepare.lib.yml", "prepare_jobs")
|
||||
#@ load("dep.star", "dep_jobs")
|
||||
#@ load("finish.lib.yml", "finish_jobs")
|
||||
|
||||
#@ erlang_version = data.values.erlang_versions[-1]
|
||||
|
||||
#@ prepare = prepare_jobs(erlang_version=erlang_version, build_base_image=False)
|
||||
#@ deps = [dep_jobs(dep, erlang_version=erlang_version) for dep in data.values.deps]
|
||||
#@ finish = finish_jobs([prepare[k]['name'] for k in prepare], erlang_version=erlang_version)
|
||||
|
||||
---
|
||||
name: #@ "Test - Erlang " + erlang_version
|
||||
#! https://github.com/k14s/ytt/issues/189
|
||||
a_magic_string_that_we_will_sed_to_on: push
|
||||
jobs: #@ merge([prepare] + deps + [finish])
|
|
@ -1,21 +0,0 @@
|
|||
#@ load("@ytt:data", "data")
|
||||
#@ load("@ytt:template", "template")
|
||||
#@ load("util.star", "merge")
|
||||
#@ load("prepare.lib.yml", "prepare_jobs")
|
||||
#@ load("dep.star", "dep_jobs")
|
||||
#@ load("finish.lib.yml", "finish_jobs")
|
||||
|
||||
#@ erlang_version = "git"
|
||||
|
||||
#@ prepare = prepare_jobs(erlang_version=erlang_version, build_base_image=True)
|
||||
#@ deps = [dep_jobs(dep, erlang_version=erlang_version) for dep in data.values.deps]
|
||||
#@ finish = finish_jobs([prepare[k]['name'] for k in prepare], erlang_version=erlang_version)
|
||||
|
||||
---
|
||||
name: Test - Erlang git master
|
||||
#! https://github.com/k14s/ytt/issues/189
|
||||
a_magic_string_that_we_will_sed_to_on:
|
||||
schedule:
|
||||
- cron: '0 3 * * *'
|
||||
workflow_dispatch:
|
||||
jobs: #@ merge([prepare] + deps + [finish])
|
|
@@ -1,144 +0,0 @@
#@ load("@ytt:data", "data")
#@ load("@ytt:assert", "assert")
#@ load("util.star", "is_unique", "to_build_args")
#@ load("helpers.star", "ci_image_tag", "ci_dep_image", "skip_ci_condition")

#@ def checks_job(dep, erlang_version=None):
name: #@ dep.name + "-checks"
needs: [prepare]
runs-on: ubuntu-18.04
if: #@ skip_ci_condition()
outputs:
  step_start: ${{ steps.buildevents.outputs.step_start }}
#@yaml/text-templated-strings
steps:
- name: RECORD STEP START
  id: buildevents
  run: |
    echo "::set-output name=step_start::$(date +%s)"
- name: CHECKOUT REPOSITORY
  uses: actions/checkout@v2
- name: VALIDATE KNOWN CT SUITES
  env:
    project: #@ dep.name
  #@ suite_names = [suite.name for suite in dep.suites]
  #@ None if is_unique(suite_names) else assert.fail('{} suite names are not unique'.format(dep.name))
  run: |
    ci/scripts/validate-workflow.sh (@= " ".join(suite_names) @)
- name: FETCH ci DOCKER IMAGE CACHE
  uses: actions/cache@v2
  with:
    key: #@ ci_image_tag(erlang_version)
    path: ci.tar
- name: LOAD ci DOCKER IMAGE FROM CACHE
  run: |
    docker load --input ci.tar
- name: RUN CHECKS
  run: |
    docker build . \
      --file ci/dockerfiles/ci-dep \
      --build-arg IMAGE_TAG=(@= ci_image_tag(erlang_version) @) \
      --build-arg BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
      --build-arg project=(@= dep.name @) \
      --tag eu.gcr.io/cf-rabbitmq-core/ci-(@= dep.name @):(@= ci_image_tag(erlang_version) @)
- name: FETCH ci-(@= dep.name @) DOCKER IMAGE CACHE
  uses: actions/cache@v2
  with:
    key: #@ ci_image_tag(erlang_version) + "+" + dep.name
    path: ci-(@= dep.name @).tar
- name: SAVE CI DOCKER IMAGE IN CACHE
  run: |
    docker save --output ci-(@= dep.name @).tar (@= ci_dep_image(erlang_version, dep.name) @)
#@ end

#@ def ct_suites_job(dep, suite_name, erlang_version=None):
name: #@ dep.name + "-ct-" + suite_name
needs:
- prepare
- #@ dep.name + "-checks"
runs-on: ubuntu-18.04
if: #@ skip_ci_condition()
#@yaml/text-templated-strings
steps:
- uses: actions/cache@v2
  with:
    key: #@ ci_image_tag(erlang_version) + "+" + dep.name
    path: ci-(@= dep.name @).tar
- name: LOAD CI DOCKER IMAGE FROM CACHE
  run: |
    docker load --input ci-(@= dep.name @).tar
- name: #@ "RUN ct-" + suite_name
  id: tests
  run: |
    mkdir ct-(@= suite_name @)-logs && chmod 777 ct-(@= suite_name @)-logs
    docker run \
      --env project=(@= dep.name @) \
      --env CT_SUITE=(@= suite_name @) \
      --env GITHUB_RUN_ID=${{ github.run_id }} \
      --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
      --volume ${PWD}/ct-(@= suite_name @)-logs:/workspace/ct-logs \
      (@= ci_dep_image(erlang_version, dep.name) @) \
      /workspace/rabbitmq/ci/scripts/ct-suite.sh
- name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
  #! https://github.com/marketplace/actions/upload-artifact
  uses: actions/upload-artifact@v2-preview
  if: failure() && steps.tests.outcome == 'failure'
  with:
    name: (@= dep.name @)-ct-(@= suite_name @)-logs
    path: "ct-(@= suite_name @)-logs/*-ct-logs-*.tar.xz"
#@ if erlang_version == data.values.erlang_versions[0]:
#@ for version in data.values.secondary_umbrella_gitrefs:
#@ logs_dir = 'ct-{}-logs-{}'.format(suite_name, version)
#@ step_id = "tests-{}".format(version.replace(".","_"))
- name: #@ "RUN ct-{} [mixed {}]".format(suite_name, version)
  id: #@ step_id
  run: |
    mkdir (@= logs_dir @) && chmod 777 (@= logs_dir @)
    docker run \
      --env project=(@= dep.name @) \
      --env CT_SUITE=(@= suite_name @) \
      --env GITHUB_RUN_ID=${{ github.run_id }} \
      --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
      --env SECONDARY_UMBRELLA_VERSION=(@= version @) \
      --volume ${PWD}/(@= logs_dir @):/workspace/ct-logs \
      (@= ci_dep_image(erlang_version, dep.name) @) \
      /workspace/rabbitmq/ci/scripts/ct-suite.sh
- name: #@ 'ON FAILURE UPLOAD TESTS LOGS ARTIFACT [mixed {}]'.format(version)
  #! https://github.com/marketplace/actions/upload-artifact
  uses: actions/upload-artifact@v2-preview
  #! For whatever reason, "if: steps.(@= suite @).outcome == 'failure'" never executes,
  #! so just run always
  if: failure() && steps.(@= step_id @).outcome == 'failure'
  with:
    name: (@= dep.name @)-ct-(@= suite_name @)-logs-mixed-(@= version @)
    path: "(@= logs_dir @)/*-ct-logs-*.tar.xz"
#@ end
#@ end
#@ end

#@ def collect_job(dep, erlang_version=None):
name: #@ dep.name
needs: #@ [dep.name + "-checks"] + [dep.name + "-ct-" + suite.name for suite in dep.suites]
runs-on: ubuntu-18.04
#! See https://docs.github.com/en/free-pro-team@latest/actions/reference/context-and-expression-syntax-for-github-actions#job-status-check-functions
#! as to why '(success() || failure())' is needed
if: #@ skip_ci_condition() + " && (success() || failure())"
#@yaml/text-templated-strings
steps:
- uses: actions/cache@v2
  with:
    key: #@ ci_image_tag(erlang_version) + "+" + dep.name
    path: ci-(@= dep.name @).tar
- name: LOAD CI DOCKER IMAGE FROM CACHE
  run: |
    docker load --input ci-(@= dep.name @).tar
- name: RECORD STEP FINISH
  run: |
    docker run \
      --env project=(@= dep.name @) \
      --env GITHUB_RUN_ID=${{ github.run_id }} \
      --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
      --env STEP_START=${{ needs.(@= dep.name @)-checks.outputs.step_start }} \
      (@= ci_dep_image(erlang_version, dep.name) @) \
      /workspace/rabbitmq/ci/scripts/collect.sh
#@ end

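For context (not part of the removed file): the three fragments above fan a single dependency out into a "<dep>-checks" job, one "<dep>-ct-<suite>" job per CT suite, and a final "<dep>" collect job whose needs list ties them back together. A plain-Python sketch of that name/needs derivation, using a hypothetical dependency:

# Sketch of the job names and needs edges derived for one dependency.
# "rabbit" and the suite names are made-up examples.
dep = {"name": "rabbit", "suites": ["queue_SUITE", "policy_SUITE"]}

checks_job = dep["name"] + "-checks"                          # rabbit-checks
ct_jobs = [dep["name"] + "-ct-" + s for s in dep["suites"]]   # rabbit-ct-queue_SUITE, ...
collect_needs = [checks_job] + ct_jobs                        # collect_job()'s needs list

print(collect_needs)
# ['rabbit-checks', 'rabbit-ct-queue_SUITE', 'rabbit-ct-policy_SUITE']
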
@@ -1,21 +0,0 @@
load("rabbitmq_cli.lib.yml", "rabbitmq_cli_job")
load("ct.lib.yml", "checks_job", "ct_suites_job", "collect_job")
load("tests.lib.yml", "tests_job")

def dep_jobs(dep, erlang_version=None):
  jobs = {}
  if not getattr(dep, "skip_tests", False):
    if dep.name == "rabbitmq_cli":
      jobs[dep.name] = rabbitmq_cli_job(dep, erlang_version=erlang_version)
    elif getattr(dep, "test_suites_in_parallel", False):
      jobs[dep.name + "-checks"] = checks_job(dep, erlang_version=erlang_version)
      for suite in dep.suites:
        jobs[dep.name + "-ct-" + suite.name] = ct_suites_job(dep, suite.name, erlang_version=erlang_version)
      end
      jobs[dep.name] = collect_job(dep, erlang_version=erlang_version)
    else:
      jobs[dep.name] = tests_job(dep, erlang_version=erlang_version)
    end
  end
  return jobs
end

@@ -1,47 +0,0 @@
#@ load("@ytt:data", "data")
#@ load("helpers.star", "ci_image", "ci_image_tag", "skip_ci_condition")

#@ def gcs_path():
#@ c = ['monorepo_github_actions_conclusions']
#@ c.append('${{ github.sha }}')
#@ c.append('${{ github.workflow }}')
#@ return '/'.join(c)
#@ end

#@ def finish_jobs(prepare_jobs_names, erlang_version=None):
finish:
  name: finish
  needs: #@ prepare_jobs_names + [dep.name for dep in data.values.deps if not getattr(dep, "skip_tests", False)]
  runs-on: ubuntu-18.04
  #! See https://docs.github.com/en/free-pro-team@latest/actions/reference/context-and-expression-syntax-for-github-actions#job-status-check-functions
  #! as to why '(success() || failure())' is needed
  if: #@ skip_ci_condition() + " && (success() || failure())"
  #@yaml/text-templated-strings
  steps:
  - uses: technote-space/workflow-conclusion-action@v1
  - uses: google-github-actions/setup-gcloud@master
    with:
      service_account_key: ${{ secrets.GCR_JSON_KEY }}
      export_default_credentials: true
  - uses: actions/cache@v2
    with:
      key: #@ ci_image_tag(erlang_version)
      path: ci.tar
  - name: LOAD CI DOCKER IMAGE FROM CACHE
    run: |
      docker load --input ci.tar
  - name: RECORD BUILD FINISH
    run: |
      echo -n "${{ env.WORKFLOW_CONCLUSION }}" > conclusion

      gsutil cp conclusion \
        'gs://(@= gcs_path() @)'

      docker run \
        --env GITHUB_RUN_ID=${{ github.run_id }} \
        --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
        --env BUILD_START=${{ needs.prepare.outputs.build_start }} \
        --env BUILD_RESULT=${{ env.WORKFLOW_CONCLUSION }} \
        (@= ci_image(erlang_version) @) \
        ci/scripts/finish.sh
#@ end

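A side note on the file above (not part of it): the finish job publishes the overall workflow conclusion to a GCS object whose path gcs_path() builds from a fixed prefix, the commit SHA, and the workflow name. In the template the last two are literal ${{ ... }} expressions expanded by GitHub Actions; a plain-Python sketch with placeholder values:

# Rough equivalent of gcs_path(); the SHA and workflow name are placeholders here,
# whereas the template splices in ${{ github.sha }} and ${{ github.workflow }}.
def gcs_path(sha, workflow):
    return "/".join(["monorepo_github_actions_conclusions", sha, workflow])

print("gs://" + gcs_path("0123abcd", "Test - Erlang git master"))
# gs://monorepo_github_actions_conclusions/0123abcd/Test - Erlang git master
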
@@ -1,17 +0,0 @@
load("@ytt:data", "data")

def ci_image_tag(erlang_version):
  return "erlang-" + erlang_version + "-rabbitmq-${{ github.sha }}"
end

def ci_image(erlang_version):
  return "eu.gcr.io/cf-rabbitmq-core/ci:" + ci_image_tag(erlang_version)
end

def ci_dep_image(erlang_version, dep_name):
  return "eu.gcr.io/cf-rabbitmq-core/ci-" + dep_name + ":" + ci_image_tag(erlang_version)
end

def skip_ci_condition():
  return "!contains(github.event.head_commit.message, '[ci skip]')"
end

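For context (not part of the removed helpers): every image reference in these workflows is derived from a single tag of the form erlang-<version>-rabbitmq-<sha>. A plain-Python equivalent, with a made-up Erlang version and SHA standing in for the ${{ github.sha }} expression:

# Plain-Python rendering of the image-name helpers; version and SHA are examples.
def ci_image_tag(erlang_version, sha):
    return "erlang-" + erlang_version + "-rabbitmq-" + sha

def ci_dep_image(erlang_version, dep_name, sha):
    return "eu.gcr.io/cf-rabbitmq-core/ci-" + dep_name + ":" + ci_image_tag(erlang_version, sha)

print(ci_dep_image("23.0.3", "rabbit", "0123abcd"))
# eu.gcr.io/cf-rabbitmq-core/ci-rabbit:erlang-23.0.3-rabbitmq-0123abcd
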
@@ -1,214 +0,0 @@
#@ load("@ytt:data", "data")
#@ load("util.star", "to_build_args")
#@ load("helpers.star", "ci_image", "ci_image_tag", "skip_ci_condition")

#@ def prepare_jobs(erlang_version=None, build_base_image=False):
prepare:
  name: prepare
  runs-on: ubuntu-18.04
  if: #@ skip_ci_condition()
  outputs:
    build_start: ${{ steps.buildevents.outputs.build_start }}
    branch_or_tag_name: ${{ steps.buildevents.outputs.branch_or_tag_name }}
  #@yaml/text-templated-strings
  steps:
  - name: RECORD BUILD START
    id: buildevents
    run: |
      echo "::set-output name=build_start::$(date +%s)"
      branch_or_tag_name=${GITHUB_REF#refs/*/}
      echo "::set-output name=branch_or_tag_name::$branch_or_tag_name"
  - name: CHECKOUT REPOSITORY
    uses: actions/checkout@v2
  - name: Set up QEMU
    uses: docker/setup-qemu-action@v1
  - name: Set up Docker Buildx
    uses: docker/setup-buildx-action@v1
  - name: Cache Docker layers
    uses: actions/cache@v2
    with:
      path: /tmp/.buildx-cache
      key: ${{ runner.os }}-buildx-${{ github.sha }}
      restore-keys: |
        ${{ runner.os }}-buildx-
  - name: Login to GCR
    uses: docker/login-action@v1
    with:
      registry: eu.gcr.io
      username: _json_key
      password: ${{ secrets.GCR_JSON_KEY }}
  #@ if build_base_image:
  - name: CHECKOUT ERLANG/OTP MASTER
    uses: actions/checkout@v2
    with:
      repository: erlang/otp
      path: erlang-git-master
  - name: DETERMINE ERLANG SHA
    id: erlang_sha
    run: |
      cd erlang-git-master
      erlang_sha=$(git rev-parse HEAD)
      echo "::set-output name=erlang_sha::$erlang_sha"
  - name: #@ 'CREATE ERLANG+ELIXIR IMAGE ({})'.format(erlang_version)
    uses: docker/build-push-action@v2
    with:
      push: true
      file: #@ 'ci/dockerfiles/{}/erlang_elixir'.format(erlang_version)
      tags: #@ 'eu.gcr.io/cf-rabbitmq-core/erlang_elixir:{}'.format(erlang_version)
      build-args: |
        ERLANG_VERSION=${{ steps.erlang_sha.outputs.erlang_sha }}
      cache-from: type=local,src=/tmp/.buildx-cache
      cache-to: type=local,dest=/tmp/.buildx-cache
  - name: #@ 'CREATE BASE CI IMAGE ({})'.format(erlang_version)
    uses: docker/build-push-action@v2
    with:
      push: true
      file: ci/dockerfiles/ci-base
      tags: #@ 'eu.gcr.io/cf-rabbitmq-core/ci-base:{}'.format(erlang_version)
      build-args: |
        ERLANG_VERSION=(@= erlang_version @)
        SECONDARY_UMBRELLA_GITREFS=(@= ' '.join(data.values.secondary_umbrella_gitrefs) @)
      cache-from: type=local,src=/tmp/.buildx-cache
      cache-to: type=local,dest=/tmp/.buildx-cache
  - name: CLEANUP ERLANG/OTP MASTER
    run: |
      rm -rf erlang-git-master
  #@ end
  - name: PREPARE BUILD IMAGE
    uses: docker/build-push-action@v2
    with:
      load: true
      file: ci/dockerfiles/ci
      tags: #@ ci_image(erlang_version)
      #@ rabbitmq_version = data.values.base_rmq_ref.replace('master', '3.9.x').replace('.x', '.0')
      build-args: |
        ERLANG_VERSION=(@= erlang_version @)
        GITHUB_RUN_ID=${{ github.run_id }}
        BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }}
        GITHUB_SHA=${{ github.sha }}
        base_rmq_ref=(@= data.values.base_rmq_ref @)
        current_rmq_ref=${{ steps.buildevents.outputs.branch_or_tag_name }}
        RABBITMQ_VERSION=(@= rabbitmq_version @)
      cache-from: type=local,src=/tmp/.buildx-cache
      cache-to: type=local,dest=/tmp/.buildx-cache
  - uses: actions/cache@v2
    with:
      key: #@ ci_image_tag(erlang_version)
      path: ci.tar
  - name: SAVE CI DOCKER IMAGE IN CACHE
    run: |
      docker save --output ci.tar (@= ci_image(erlang_version) @)
  - uses: google-github-actions/setup-gcloud@master
    with:
      service_account_key: ${{ secrets.GCR_JSON_KEY }}
      export_default_credentials: true
  - name: RECORD STEP FINISH
    run: |
      docker run \
        --env project=prepare \
        --env GITHUB_RUN_ID=${{ github.run_id }} \
        --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
        --env STEP_START=${{ steps.buildevents.outputs.build_start }} \
        (@= ci_image(erlang_version) @) \
        ci/scripts/collect.sh
xref:
  name: xref
  #@ if build_base_image:
  needs: [prepare]
  #@ end
  runs-on: ubuntu-18.04
  if: #@ skip_ci_condition()
  #@yaml/text-templated-strings
  steps:
  - name: RECORD XREF START
    id: buildevents
    run: |
      echo "::set-output name=step_start::$(date +%s)"
  - name: CHECKOUT REPOSITORY
    uses: actions/checkout@v2
  - name: Login to GCR
    uses: docker/login-action@v1
    with:
      registry: eu.gcr.io
      username: _json_key
      password: ${{ secrets.GCR_JSON_KEY }}
  - name: PULL IMAGE
    #@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + erlang_version
    run: |
      docker pull (@= base_image @)
  #@ for dep in [d for d in data.values.deps if not getattr(d, "skip_xref", False)]:
  - name: RUN XREF (@= dep.name @)
    run: |
      docker run \
        --env project=(@= dep.name @) \
        --env GITHUB_RUN_ID=${{ github.run_id }} \
        --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
        --volume ${PWD}:/workspace/rabbitmq \
        --workdir /workspace/rabbitmq \
        (@= base_image @) \
        ci/scripts/xref.sh
  #@ end
  - name: RECORD STEP FINISH
    if: always()
    run: |
      docker run \
        --env project=xref \
        --env GITHUB_RUN_ID=${{ github.run_id }} \
        --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
        --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
        --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
        --workdir /workspace/rabbitmq \
        (@= base_image @) \
        ci/scripts/collect.sh

#@ if/end erlang_version == data.values.erlang_versions[-1]:
dialyze:
  name: dialyze
  #@ if build_base_image:
  needs: [prepare]
  #@ end
  runs-on: ubuntu-18.04
  if: #@ skip_ci_condition()
  #@yaml/text-templated-strings
  steps:
  - name: RECORD DIALYZE START
    id: buildevents
    run: |
      echo "::set-output name=step_start::$(date +%s)"
  - name: CHECKOUT REPOSITORY
    uses: actions/checkout@v2
  - name: Login to GCR
    uses: docker/login-action@v1
    with:
      registry: eu.gcr.io
      username: _json_key
      password: ${{ secrets.GCR_JSON_KEY }}
  - name: PULL IMAGE
    #@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + erlang_version
    run: |
      docker pull (@= base_image @)
  #@ for dep in [d for d in data.values.deps if not getattr(d, "skip_dialyzer", False)]:
  - name: RUN DIALYZE (@= dep.name @)
    run: |
      docker run \
        --env project=(@= dep.name @) \
        --env GITHUB_RUN_ID=${{ github.run_id }} \
        --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
        --volume ${PWD}:/workspace/rabbitmq \
        --workdir /workspace/rabbitmq \
        (@= base_image @) \
        ci/scripts/dialyze.sh
  #@ end
  - name: RECORD STEP FINISH
    if: always()
    run: |
      docker run \
        --env project=dialyze \
        --env GITHUB_RUN_ID=${{ github.run_id }} \
        --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
        --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
        --volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
        --workdir /workspace/rabbitmq \
        (@= base_image @) \
        ci/scripts/collect.sh
#@ end

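A note on the PREPARE BUILD IMAGE step above (not part of the removed file): the RABBITMQ_VERSION build argument is derived from base_rmq_ref with two chained string replacements, so a branch-style ref becomes a version-like string. The same logic in plain Python, with illustrative refs:

# The two-step replace used for RABBITMQ_VERSION; example refs are illustrative.
def rabbitmq_version(base_rmq_ref):
    return base_rmq_ref.replace("master", "3.9.x").replace(".x", ".0")

print(rabbitmq_version("master"))  # 3.9.0
print(rabbitmq_version("v3.8.x"))  # v3.8.0
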
@@ -1,41 +0,0 @@
#@ load("@ytt:data", "data")
#@ load("helpers.star", "ci_image", "ci_image_tag", "skip_ci_condition")

#@ def rabbitmq_cli_job(dep, erlang_version=None):
name: #@ dep.name
needs:
- prepare
runs-on: ubuntu-18.04
if: #@ skip_ci_condition()
#@yaml/text-templated-strings
steps:
- name: RECORD STEP START
  id: buildevents
  run: |
    echo "::set-output name=step_start::$(date +%s)"
- uses: actions/cache@v2
  with:
    key: #@ ci_image_tag(erlang_version)
    path: ci.tar
- name: LOAD CI DOCKER IMAGE FROM CACHE
  run: |
    docker load --input ci.tar
- name: RUN TESTS
  run: |
    mkdir broker-logs && chmod 777 broker-logs
    docker run \
      --env project=(@= dep.name @) \
      --env GITHUB_RUN_ID=${{ github.run_id }} \
      --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
      --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
      --volume ${PWD}/broker-logs:/workspace/broker-logs \
      (@= ci_image(erlang_version) @) \
      ci/scripts/rabbitmq_cli.sh
- name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
  #! https://github.com/marketplace/actions/upload-artifact
  uses: actions/upload-artifact@v2-preview
  if: failure()
  with:
    name: (@= dep.name @)-broker-logs
    path: "broker-logs/broker-logs.tar.xz"
#@ end

@@ -1,64 +0,0 @@
#@ load("@ytt:data", "data")
#@ load("util.star", "to_build_args")
#@ load("helpers.star", "ci_image", "ci_image_tag", "skip_ci_condition")

#@ def tests_job(dep, erlang_version=None):
name: #@ dep.name
needs: [prepare]
runs-on: ubuntu-18.04
if: #@ skip_ci_condition()
#@yaml/text-templated-strings
steps:
- name: RECORD STEP START
  id: buildevents
  run: |
    echo "::set-output name=step_start::$(date +%s)"
- uses: actions/cache@v2
  with:
    key: #@ ci_image_tag(erlang_version)
    path: ci.tar
- name: LOAD CI DOCKER IMAGE FROM CACHE
  run: |
    docker load --input ci.tar
#@ if getattr(dep, 'use_terraform', False):
- name: PREPARE TERRAFORM SECRETS
  run: |
    mkdir terraform && chmod 777 terraform
    echo "${{ secrets.TERRAFORM_SSH_KEY }}" > terraform/id_rsa_terraform
    echo "${{ secrets.TERRAFORM_SSH_KEY_PUB }}" > terraform/id_rsa_terraform.pub
#@ end
- name: RUN TESTS
  run: |
    mkdir ct-logs && chmod 777 ct-logs
    docker run \
      --env project=(@= dep.name @) \
      --env GITHUB_RUN_ID=${{ github.run_id }} \
      --env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
      --env STEP_START=${{ steps.buildevents.outputs.step_start }} \
      (@= additional_docker_args(dep, erlang_version=erlang_version) @) \
      --volume ${PWD}/ct-logs:/workspace/ct-logs \
      --oom-kill-disable \
      (@= ci_image(erlang_version) @) \
      ci/scripts/tests.sh
- name: ON FAILURE UPLOAD TESTS LOGS ARTIFACT
  #! https://github.com/marketplace/actions/upload-artifact
  uses: actions/upload-artifact@v2-preview
  if: failure()
  with:
    name: (@= dep.name @)-ct-logs
    path: "ct-logs/*-ct-logs-*.tar.xz"
#@ end

#@ def additional_docker_args(dep, erlang_version=None):
#@ if getattr(dep, 'use_terraform', False):
#@ args = []
#@ args.append("--env ERLANG_VERSION={}".format(erlang_version))
#@ args.append("--env AWS_ACCESS_KEY_ID=${{ secrets.CONCOURSE_AWS_ACCESS_KEY_ID }}")
#@ args.append("--env AWS_SECRET_ACCESS_KEY=${{ secrets.CONCOURSE_AWS_SECRET_ACCESS_KEY }}")
#@ args.append("--env SSH_KEY=/workspace/terraform/id_rsa_terraform")
#@ args.append("--volume ${PWD}/terraform:/workspace/terraform")
#@ return " \\\n ".join(args)
#@ else:
#@ return ""
#@ end
#@ end

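One detail from additional_docker_args() worth spelling out (this note is not part of the file): the extra flags are returned as a single string joined with a backslash and newline, so the fragment splices cleanly into the multi-line docker run command in the RUN TESTS step. A plain-Python sketch with placeholder flag values:

# How the terraform-specific flags are joined for splicing into a shell command.
# The flag values below are placeholders, not real settings or secrets.
args = [
    "--env ERLANG_VERSION=23.0.3",
    "--env SSH_KEY=/workspace/terraform/id_rsa_terraform",
    "--volume ${PWD}/terraform:/workspace/terraform",
]
print(" \\\n ".join(args))
# --env ERLANG_VERSION=23.0.3 \
#  --env SSH_KEY=/workspace/terraform/id_rsa_terraform \
#  --volume ${PWD}/terraform:/workspace/terraform
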
@@ -1,47 +0,0 @@
def is_unique(l):
  return len(l) == len(set(l))
end

def merge(dicts):
  r = {}
  for d in dicts:
    r.update(**d)
  end
  return r
end

def name(suites):
  if len(suites) == 1:
    return suites[0].name
  else:
    return suites[0].name + "-plus-" + str(len(suites) - 1) + "-more"
  end
end

def sum(ints):
  s = 0
  for i in ints:
    s += i
  end
  return s
end

def partition(target, groups, suites):
  if len(suites) == 0:
    return groups
  end
  group = []
  rest = []
  for suite in sorted(suites, key=lambda suite: suite.time):
    if sum([suite2.time for suite2 in group]) + suite.time <= target:
      group.append(suite)
    else:
      rest.append(suite)
    end
  end
  return partition(target, groups + [group], rest)
end

def to_build_args(d):
  return ",".join(['{0}={1}'.format(k,d[k]) for k in d.keys()])
end

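The most involved helper above is partition(): a recursive greedy pass that fills one group with suites (shortest first) until the summed runtimes would exceed the target, then recurses on whatever did not fit. A plain-Python re-implementation with made-up suite timings, just to show the grouping behaviour:

# Plain-Python version of the partition() logic above; suite names and times are invented.
def partition(target, groups, suites):
    if not suites:
        return groups
    group, rest = [], []
    for suite in sorted(suites, key=lambda s: s["time"]):
        if sum(s["time"] for s in group) + suite["time"] <= target:
            group.append(suite)
        else:
            rest.append(suite)
    return partition(target, groups + [group], rest)

suites = [{"name": "a", "time": 50}, {"name": "b", "time": 30},
          {"name": "c", "time": 40}, {"name": "d", "time": 90}]
print([[s["name"] for s in g] for g in partition(100, [], suites)])
# [['b', 'c'], ['a'], ['d']]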