Correct use of dialyze in GitHub Actions
Dialyze now runs independently for each component, against the uncompiled sources, to maximize the number of detectable issues.
This commit is contained in:
parent 934127933b
commit 9fabae11a5
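In practice the change means each component gets its own dialyzer pass, driven straight from the checked-out (uncompiled) sources. A minimal local sketch of the same idea, assuming the monorepo root as the working directory and the component names below only as examples:

    #!/bin/bash
    # Run dialyzer per component against its own sources, mirroring what the new
    # ci/scripts/dialyze.sh does in CI (minus the buildevents instrumentation).
    set -euo pipefail
    for project in rabbit_common rabbit amqp_client; do
        ( cd "deps/${project}" && make dialyze )
    done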
@@ -17,8 +17,8 @@ jobs:
repository: cf-rabbitmq-core/ci-base
dockerfile: ci/dockerfiles/22.3/base
tags: "22.3"
ci-base-23_0:
name: ci-base-23_0
ci-base-23_1:
name: ci-base-23_1
runs-on: ubuntu-18.04
steps:
- name: CHECKOUT REPOSITORY
@@ -30,5 +30,5 @@ jobs:
password: ${{ secrets.GCR_JSON_KEY }}
registry: eu.gcr.io
repository: cf-rabbitmq-core/ci-base
dockerfile: ci/dockerfiles/23.0/base
tags: "23.0"
dockerfile: ci/dockerfiles/23.1/base
tags: "23.1"

Two file diffs suppressed because they are too large.
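Since the templated base-images workflow further down now triggers on workflow_dispatch only, rebuilding the base images becomes an on-demand action; one way to kick it off, assuming an authenticated GitHub CLI:

    # Manually dispatch the generated base-images workflow.
    gh workflow run base-images.yaml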
@@ -1,24 +0,0 @@
FROM erlang:21.3

ENV ERLANG_VERSION 21.3

# elixir expects utf8.
ENV ELIXIR_VERSION="v1.8.2" \
    LANG=C.UTF-8

RUN set -xe \
    && ELIXIR_DOWNLOAD_URL="https://github.com/elixir-lang/elixir/archive/${ELIXIR_VERSION}.tar.gz" \
    && ELIXIR_DOWNLOAD_SHA256="cf9bf0b2d92bc4671431e3fe1d1b0a0e5125f1a942cc4fdf7914b74f04efb835" \
    && curl -fSL -o elixir-src.tar.gz $ELIXIR_DOWNLOAD_URL \
    && echo "$ELIXIR_DOWNLOAD_SHA256 elixir-src.tar.gz" | sha256sum -c - \
    && mkdir -p /usr/local/src/elixir \
    && tar -xzC /usr/local/src/elixir --strip-components=1 -f elixir-src.tar.gz \
    && rm elixir-src.tar.gz \
    && cd /usr/local/src/elixir \
    && make install clean

RUN apt-get update && apt-get install -y rsync

RUN curl -L -o buildevents https://github.com/honeycombio/buildevents/releases/latest/download/buildevents-linux-amd64
RUN chmod 755 buildevents
RUN mv buildevents /usr/bin/
@@ -1,6 +1,6 @@
FROM erlang:23
FROM erlang:23.1

ENV ERLANG_VERSION 23
ENV ERLANG_VERSION 23.1

# elixir expects utf8.
ENV ELIXIR_VERSION="v1.10.4" \
@@ -4,21 +4,9 @@ FROM eu.gcr.io/cf-rabbitmq-core/ci:${IMAGE_TAG}

ARG BUILDEVENT_APIKEY
ARG project
ARG SKIP_DIALYZE

WORKDIR /workspace/rabbitmq/deps/${project}

RUN if [ "${SKIP_DIALYZE}" != "True" ]; then \
        BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
        buildevents cmd ${GITHUB_RUN_ID} ${project} dialyze -- \
        make dialyze \
        FULL=; \
    fi

RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
    buildevents cmd ${GITHUB_RUN_ID} ${project} xref -- \
    make xref

RUN BUILDEVENT_APIKEY=${BUILDEVENT_APIKEY} \
    buildevents cmd ${GITHUB_RUN_ID} ${project} test-build -- \
    make test-build
@@ -2,4 +2,6 @@

set -euo pipefail

echo "Recording buildevents step finish for ${project} started at ${STEP_START}..."
buildevents step ${GITHUB_RUN_ID} ${project} ${STEP_START} ${project}
echo "done."
@@ -0,0 +1,8 @@
#!/bin/bash

set -euo pipefail

cd /workspace/rabbitmq/deps/$project

buildevents cmd ${GITHUB_RUN_ID} dialyze ${project} -- \
    make dialyze
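The generated dialyze job (shown later in this diff) runs this script inside the ci-base image; the same invocation can be reproduced locally, roughly as below, with the run id and API key treated as placeholder values:

    docker run \
        --env project=rabbit_common \
        --env GITHUB_RUN_ID=local-run \
        --env BUILDEVENT_APIKEY=placeholder \
        --volume ${PWD}:/workspace/rabbitmq \
        --workdir /workspace/rabbitmq \
        eu.gcr.io/cf-rabbitmq-core/ci-base:23.1 \
        ci/scripts/dialyze.sh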
@@ -14,12 +14,6 @@ catch() {
    buildevents step ${GITHUB_RUN_ID} ${project} ${STEP_START} ${project}
}

if [ $SKIP_DIALYZE != True ]; then
    buildevents cmd ${GITHUB_RUN_ID} ${project} dialyze -- \
        make dialyze \
        FULL=
fi

buildevents cmd ${GITHUB_RUN_ID} ${project} test-build -- \
    make test-build
@@ -0,0 +1,26 @@
#!/bin/bash

set -euo pipefail

cd deps/${project}

trap 'catch $?' EXIT

catch() {
    rm expected_suites.txt actual_suites.txt
}

touch expected_suites.txt
for arg in "$@"; do
    echo "test/${arg}_SUITE.erl" >> expected_suites.txt
done
sort -o expected_suites.txt expected_suites.txt

touch actual_suites.txt
for f in test/*_SUITE.erl; do
    echo "$f" >> actual_suites.txt
done
sort -o actual_suites.txt actual_suites.txt

set -x
diff actual_suites.txt expected_suites.txt
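The script's job is to fail the workflow when the *_SUITE.erl files on disk drift from the suite list the workflow was generated from. A hedged example, using the suite names that deps.yml declares for rabbitmq_event_exchange:

    # Run from the monorepo root; $project selects the component to check.
    project=rabbitmq_event_exchange \
        ci/scripts/validate-workflow.sh unit system config_schema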
@@ -46,7 +46,7 @@ VENDORED_COMPONENTS = rabbit_common \
rabbitmq_web_stomp \
rabbitmq_web_stomp_examples

DEPS_YAML_FILE = workflow_sources/test/deps.yml
DEPS_YAML_FILE = workflow_sources/deps.yml

define dep_yaml_chunk
$(eval SUITES := $(sort $(subst _SUITE.erl,,$(notdir $(wildcard deps/$(1)/test/*_SUITE.erl)))))
@@ -58,28 +58,35 @@ $(DEPS_YAML_FILE):
@$(foreach dep,$(VENDORED_COMPONENTS),$(call dep_yaml_chunk,$(dep)))
@cat $@ | git stripspace > $@.fixed && mv $@.fixed $@

.PHONY: monorepo-actions
monorepo-actions: $(YTT) $(DEPS_YAML_FILE)
ERLANG_VERSIONS = 22.3 23.1
ERLANG_VERSIONS_YAML1 = [$(foreach v,$(ERLANG_VERSIONS),,"$(v)")]
UNFIXED := [,"
ERLANG_VERSIONS_YAML2 = $(subst $(UNFIXED),[",$(ERLANG_VERSIONS_YAML1))

.github/workflows/base-images.yaml: $(YTT) $(wildcard workflow_sources/base_image/*)
ytt -f workflow_sources/base_image \
| sed s/a_magic_string_that_we_will_sed_to_on/on/ \
> .github/workflows/base-images.yaml
-f workflow_sources/base_values.yml \
--data-value-yaml erlang_versions='$(ERLANG_VERSIONS_YAML2)' \
--output-files /tmp
cat /tmp/workflow.yml | sed s/a_magic_string_that_we_will_sed_to_on/on/ \
> $@

.github/workflows/test-erlang-otp-%.yaml: $(YTT) $(DEPS_YAML_FILE) $(wildcard workflow_sources/test/*)
ytt -f workflow_sources/test \
--data-value versions.erlang=23.0 \
--data-value versions.elixir=1.8.0 \
| sed s/a_magic_string_that_we_will_sed_to_on/on/ \
> .github/workflows/test-erlang-otp-23.0.yaml
-f workflow_sources/base_values.yml \
-f $(DEPS_YAML_FILE) \
--data-value-yaml erlang_versions='$(ERLANG_VERSIONS_YAML2)' \
--data-value erlang_version=$* \
--output-files /tmp
cat /tmp/workflow.yml | sed s/a_magic_string_that_we_will_sed_to_on/on/ \
> $@

ytt -f workflow_sources/test \
--data-value versions.erlang=22.3 \
--data-value versions.elixir=1.10.3 \
| sed s/a_magic_string_that_we_will_sed_to_on/on/ \
> .github/workflows/test-erlang-otp-22.3.yaml
monorepo-actions: \
.github/workflows/base-images.yaml \
$(foreach v,$(ERLANG_VERSIONS), .github/workflows/test-erlang-otp-$(v).yaml)

DOCKER_REPO ?= eu.gcr.io/cf-rabbitmq-core

ERLANG_VERSIONS = 23.0 22.3

CI_BASE_IMAGES = $(foreach v,$(ERLANG_VERSIONS),ci-base-image-$(v))
.PHONY: $(CI_BASE_IMAGES)
$(CI_BASE_IMAGES):
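With the pattern rule above, one test workflow is generated per entry in ERLANG_VERSIONS instead of two hard-coded ones. Regeneration is then, roughly (assuming ytt is available via the $(YTT) prerequisite):

    # Rebuild every generated workflow:
    make monorepo-actions

    # Or just the workflow for one Erlang version, via the pattern rule:
    make .github/workflows/test-erlang-otp-23.1.yaml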
@@ -99,7 +106,7 @@ $(PUSHES):
push-base-images: $(PUSHES)

LOCAL_CI_GOALS = $(foreach dep,$(filter-out rabbitmq_cli,$(VENDORED_COMPONENTS)),ci-$(dep))
ERLANG_VERSION ?= 23.0
ERLANG_VERSION ?= 23.1
SKIP_DIALYZE ?= False

TAG = erlang-$(ERLANG_VERSION)-rabbitmq-$(shell git rev-parse HEAD)$(shell git diff-index --quiet HEAD -- || echo -dirty)
@@ -148,3 +155,14 @@ docker: local-ci-image
--oom-score-adj -500 \
$(LOCAL_IMAGE) \
/bin/bash

# A literal space.
space :=
space +=

comma := ,

# Joins elements of the list in arg 2 with the given separator.
# 1. Element separator.
# 2. The list.
join-with = $(subst $(space),$1,$(strip $2))
@@ -1,3 +1,5 @@
#@ load("@ytt:data", "data")

#@ def jobs_map_from_list(list_of_jobs):
#@ jobs = {}
#@ for job in list_of_jobs:
@@ -27,4 +29,4 @@ steps:
name: Base Images
#! https://github.com/k14s/ytt/issues/189
a_magic_string_that_we_will_sed_to_on: [workflow_dispatch]
jobs: #@ jobs_map_from_list([base_image_job(v) for v in ["22.3", "23.0"]])
jobs: #@ jobs_map_from_list([base_image_job(v) for v in data.values.erlang_versions])
@@ -0,0 +1,7 @@
#@data/values
---
base_rmq_ref: master
#! these values are injected at template time from the cli,
#! but must be declared first here
erlang_versions: []
erlang_version: null
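These placeholders are filled in from the command line when the Makefile templates each workflow; the invocation looks roughly like the following (paths as used elsewhere in this commit, the version values only illustrative):

    ytt -f workflow_sources/test \
        -f workflow_sources/base_values.yml \
        -f workflow_sources/deps.yml \
        --data-value-yaml erlang_versions='["22.3","23.1"]' \
        --data-value erlang_version=23.1 \
        --output-files /tmp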
@@ -5,429 +5,366 @@ deps:
|
|||
- name: rabbit_common
|
||||
suites:
|
||||
- name: unit_priority_queue
|
||||
time: 0
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: worker_pool
|
||||
time: 0
|
||||
- name: rabbit_env
|
||||
time: 0
|
||||
- name: rabbit
|
||||
suites:
|
||||
- name: unit_operator_policy
|
||||
time: 66
|
||||
- name: dynamic_qq
|
||||
time: 238
|
||||
- name: unit_log_config
|
||||
time: 75
|
||||
- name: unit_pg_local
|
||||
time: 68
|
||||
- name: per_vhost_msg_store
|
||||
time: 66
|
||||
- name: unit_config_value_encryption
|
||||
time: 99
|
||||
- name: vhost
|
||||
time: 211
|
||||
- name: queue_parallel
|
||||
time: 260
|
||||
- name: eager_sync
|
||||
time: 452
|
||||
- name: cluster_rename
|
||||
time: 284
|
||||
- name: peer_discovery_classic_config
|
||||
time: 136
|
||||
- name: unit_cluster_formation_locking_mocks
|
||||
time: 56
|
||||
- name: many_node_ha
|
||||
time: 112
|
||||
- name: metrics
|
||||
time: 160
|
||||
- name: sup_delayed_restart
|
||||
time: 73
|
||||
- name: upgrade_preparation
|
||||
time: 117
|
||||
- name: mirrored_supervisor
|
||||
time: 82
|
||||
- name: rabbitmqctl_shutdown
|
||||
time: 67
|
||||
- name: definition_import
|
||||
time: 84
|
||||
- name: unit_policy_validators
|
||||
time: 68
|
||||
- name: lazy_queue
|
||||
time: 125
|
||||
- name: unit_disk_monitor
|
||||
time: 80
|
||||
- name: confirms_rejects
|
||||
time: 138
|
||||
- name: list_consumers_sanity_check
|
||||
time: 86
|
||||
- name: sync_detection
|
||||
time: 125
|
||||
- name: unit_gm
|
||||
time: 65
|
||||
- name: rabbit_fifo_prop
|
||||
time: 169
|
||||
- name: signal_handling
|
||||
time: 164
|
||||
- name: clustering_management
|
||||
time: 708
|
||||
- name: backing_queue
|
||||
time: 320
|
||||
- name: product_info
|
||||
time: 93
|
||||
- name: unit_access_control_authn_authz_context_propagation
|
||||
time: 87
|
||||
- name: amqqueue_backward_compatibility
|
||||
time: 63
|
||||
- name: channel_operation_timeout
|
||||
time: 104
|
||||
- name: rabbit_fifo
|
||||
time: 79
|
||||
- name: per_vhost_connection_limit_partitions
|
||||
time: 123
|
||||
- name: dead_lettering
|
||||
time: 173
|
||||
- name: unit_connection_tracking
|
||||
time: 0
|
||||
- name: dynamic_ha
|
||||
time: 900
|
||||
- name: unit_file_handle_cache
|
||||
time: 93
|
||||
- name: unit_gen_server2
|
||||
time: 80
|
||||
- name: priority_queue
|
||||
time: 243
|
||||
- name: unit_access_control_credential_validation
|
||||
time: 74
|
||||
- name: unit_plugin_versioning
|
||||
time: 69
|
||||
- name: unit_credit_flow
|
||||
time: 81
|
||||
- name: single_active_consumer
|
||||
time: 78
|
||||
- name: unit_log_management
|
||||
time: 104
|
||||
- name: unit_stats_and_metrics
|
||||
time: 88
|
||||
- name: message_size_limit
|
||||
time: 78
|
||||
- name: cluster
|
||||
time: 85
|
||||
- name: config_schema
|
||||
time: 81
|
||||
- name: per_vhost_connection_limit
|
||||
time: 252
|
||||
- name: topic_permission
|
||||
time: 77
|
||||
- name: unit_amqp091_content_framing
|
||||
time: 67
|
||||
- name: unit_disk_monitor_mocks
|
||||
time: 76
|
||||
- name: unit_queue_consumers
|
||||
time: 73
|
||||
- name: rabbitmqctl_integration
|
||||
time: 100
|
||||
- name: rabbit_fifo_int
|
||||
time: 86
|
||||
- name: unit_vm_memory_monitor
|
||||
time: 81
|
||||
- name: crashing_queues
|
||||
time: 124
|
||||
- name: rabbitmq_queues_cli_integration
|
||||
time: 85
|
||||
- name: publisher_confirms_parallel
|
||||
time: 126
|
||||
- name: quorum_queue
|
||||
time: 687
|
||||
- name: queue_length_limits
|
||||
time: 84
|
||||
- name: maintenance_mode
|
||||
time: 185
|
||||
- name: peer_discovery_dns
|
||||
time: 76
|
||||
- name: rabbit_core_metrics_gc
|
||||
time: 221
|
||||
- name: queue_master_location
|
||||
time: 401
|
||||
- name: unit_collections
|
||||
time: 61
|
||||
- name: backing_queue
|
||||
time: 320
|
||||
- name: channel_interceptor
|
||||
time: 103
|
||||
- name: per_user_connection_tracking
|
||||
time: 132
|
||||
- name: channel_operation_timeout
|
||||
time: 104
|
||||
- name: cluster
|
||||
time: 85
|
||||
- name: cluster_rename
|
||||
time: 284
|
||||
- name: clustering_management
|
||||
time: 708
|
||||
- name: config_schema
|
||||
time: 81
|
||||
- name: confirms_rejects
|
||||
time: 138
|
||||
- name: consumer_timeout
|
||||
time: 123
|
||||
- name: crashing_queues
|
||||
time: 124
|
||||
- name: dead_lettering
|
||||
time: 173
|
||||
- name: definition_import
|
||||
time: 84
|
||||
- name: disconnect_detected_during_alarm
|
||||
time: 88
|
||||
- name: dynamic_ha
|
||||
time: 900
|
||||
- name: dynamic_qq
|
||||
time: 238
|
||||
- name: eager_sync
|
||||
time: 452
|
||||
- name: feature_flags
|
||||
time: 102
|
||||
- name: lazy_queue
|
||||
time: 125
|
||||
- name: list_consumers_sanity_check
|
||||
time: 86
|
||||
- name: list_queues_online_and_offline
|
||||
time: 85
|
||||
- name: maintenance_mode
|
||||
time: 185
|
||||
- name: many_node_ha
|
||||
time: 112
|
||||
- name: message_size_limit
|
||||
time: 78
|
||||
- name: metrics
|
||||
time: 160
|
||||
- name: mirrored_supervisor
|
||||
time: 82
|
||||
- name: msg_store
|
||||
time: 65
|
||||
- name: peer_discovery_classic_config
|
||||
time: 136
|
||||
- name: peer_discovery_dns
|
||||
time: 76
|
||||
- name: per_user_connection_channel_limit
|
||||
time: 0
|
||||
- name: per_user_connection_channel_limit_partitions
|
||||
time: 0
|
||||
- name: per_user_connection_channel_tracking
|
||||
time: 0
|
||||
- name: per_user_connection_tracking
|
||||
time: 132
|
||||
- name: per_vhost_connection_limit
|
||||
time: 252
|
||||
- name: per_vhost_connection_limit_partitions
|
||||
time: 123
|
||||
- name: per_vhost_msg_store
|
||||
time: 66
|
||||
- name: per_vhost_queue_limit
|
||||
time: 137
|
||||
- name: policy
|
||||
time: 86
|
||||
- name: proxy_protocol
|
||||
time: 78
|
||||
- name: unit_plugin_directories
|
||||
time: 52
|
||||
- name: consumer_timeout
|
||||
time: 123
|
||||
- name: term_to_binary_compat_prop
|
||||
time: 66
|
||||
- name: priority_queue
|
||||
time: 243
|
||||
- name: priority_queue_recovery
|
||||
time: 90
|
||||
- name: feature_flags
|
||||
time: 102
|
||||
- name: unit_amqp091_server_properties
|
||||
time: 95
|
||||
- name: disconnect_detected_during_alarm
|
||||
time: 88
|
||||
- name: product_info
|
||||
time: 93
|
||||
- name: proxy_protocol
|
||||
time: 78
|
||||
- name: publisher_confirms_parallel
|
||||
time: 126
|
||||
- name: queue_length_limits
|
||||
time: 84
|
||||
- name: queue_master_location
|
||||
time: 401
|
||||
- name: queue_parallel
|
||||
time: 260
|
||||
- name: queue_type
|
||||
time: 0
|
||||
- name: quorum_queue
|
||||
time: 687
|
||||
- name: rabbit_confirms
|
||||
time: 0
|
||||
- name: rabbit_core_metrics_gc
|
||||
time: 221
|
||||
- name: rabbit_fifo
|
||||
time: 79
|
||||
- name: rabbit_fifo_int
|
||||
time: 86
|
||||
- name: rabbit_fifo_prop
|
||||
time: 169
|
||||
- name: rabbit_fifo_v0
|
||||
time: 0
|
||||
- name: rabbit_msg_record
|
||||
time: 0
|
||||
- name: rabbit_stream_queue
|
||||
time: 0
|
||||
- name: rabbitmq_queues_cli_integration
|
||||
time: 85
|
||||
- name: rabbitmqctl_integration
|
||||
time: 100
|
||||
- name: rabbitmqctl_shutdown
|
||||
time: 67
|
||||
- name: signal_handling
|
||||
time: 164
|
||||
- name: simple_ha
|
||||
time: 426
|
||||
- name: list_queues_online_and_offline
|
||||
time: 85
|
||||
- name: unit_priority_queue
|
||||
time: 70
|
||||
- name: unit_app_management
|
||||
time: 79
|
||||
- name: single_active_consumer
|
||||
time: 78
|
||||
- name: sync_detection
|
||||
time: 125
|
||||
- name: term_to_binary_compat_prop
|
||||
time: 66
|
||||
- name: topic_permission
|
||||
time: 77
|
||||
- name: unit_access_control
|
||||
time: 90
|
||||
- name: unit_access_control_authn_authz_context_propagation
|
||||
time: 87
|
||||
- name: unit_access_control_credential_validation
|
||||
time: 74
|
||||
- name: unit_amqp091_content_framing
|
||||
time: 67
|
||||
- name: unit_amqp091_server_properties
|
||||
time: 95
|
||||
- name: unit_app_management
|
||||
time: 79
|
||||
- name: unit_cluster_formation_locking_mocks
|
||||
time: 56
|
||||
- name: unit_collections
|
||||
time: 61
|
||||
- name: unit_config_value_encryption
|
||||
time: 99
|
||||
- name: unit_connection_tracking
|
||||
time: 0
|
||||
- name: unit_credit_flow
|
||||
time: 81
|
||||
- name: unit_disk_monitor
|
||||
time: 80
|
||||
- name: unit_disk_monitor_mocks
|
||||
time: 76
|
||||
- name: unit_file_handle_cache
|
||||
time: 93
|
||||
- name: unit_gen_server2
|
||||
time: 80
|
||||
- name: unit_gm
|
||||
time: 65
|
||||
- name: unit_log_config
|
||||
time: 75
|
||||
- name: unit_log_management
|
||||
time: 104
|
||||
- name: unit_operator_policy
|
||||
time: 66
|
||||
- name: unit_pg_local
|
||||
time: 68
|
||||
- name: unit_plugin_directories
|
||||
time: 52
|
||||
- name: unit_plugin_versioning
|
||||
time: 69
|
||||
- name: unit_policy_validators
|
||||
time: 68
|
||||
- name: unit_priority_queue
|
||||
time: 70
|
||||
- name: unit_queue_consumers
|
||||
time: 73
|
||||
- name: unit_stats_and_metrics
|
||||
time: 88
|
||||
- name: unit_supervisor2
|
||||
time: 71
|
||||
- name: unit_vm_memory_monitor
|
||||
time: 81
|
||||
- name: upgrade_preparation
|
||||
time: 117
|
||||
- name: vhost
|
||||
time: 211
|
||||
- name: amqp_client
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: system
|
||||
time: 0
|
||||
- name: amqp10_client
|
||||
skip_tests: true
|
||||
suites:
|
||||
- name: system
|
||||
time: 0
|
||||
- name: msg
|
||||
time: 0
|
||||
- name: amqp10_common
|
||||
suites:
|
||||
- name: binary_generator
|
||||
time: 0
|
||||
- name: rabbitmq_amqp1_0
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites:
|
||||
- name: amqp10_client
|
||||
time: 0
|
||||
- name: proxy_protocol
|
||||
time: 0
|
||||
- name: command
|
||||
time: 0
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: system
|
||||
time: 0
|
||||
- name: rabbitmq_auth_backend_cache
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbit_auth_backend_cache
|
||||
time: 0
|
||||
- name: rabbit_auth_cache
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_auth_backend_http
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: auth
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_auth_backend_ldap
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites:
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: system
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_auth_backend_oauth2
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: wildcard_match
|
||||
time: 0
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: scope
|
||||
time: 0
|
||||
- name: system
|
||||
time: 0
|
||||
- name: add_uaa_key_command
|
||||
time: 0
|
||||
- name: rabbitmq_auth_mechanism_ssl
|
||||
suites: []
|
||||
- name: rabbitmq_aws
|
||||
skip_dialyzer: true
|
||||
suites: []
|
||||
- name: rabbitmq_cli
|
||||
skip_dialyzer: true
|
||||
suites: []
|
||||
- name: rabbitmq_codegen
|
||||
skip_xref: true
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites: []
|
||||
- name: rabbitmq_consistent_hash_exchange
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbit_exchange_type_consistent_hash
|
||||
time: 0
|
||||
- name: rabbitmq_event_exchange
|
||||
suites:
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: system
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_federation
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: unit_inbroker
|
||||
time: 0
|
||||
- name: restart_federation_link_command
|
||||
time: 0
|
||||
- name: rabbit_federation_status
|
||||
time: 0
|
||||
- name: federation_status_command
|
||||
time: 0
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: queue
|
||||
time: 0
|
||||
- name: exchange
|
||||
time: 0
|
||||
- name: rabbitmq_federation_management
|
||||
suites:
|
||||
- name: federation_mgmt
|
||||
time: 0
|
||||
- name: rabbitmq_jms_topic_exchange
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: sjx_evaluation
|
||||
time: 0
|
||||
- name: rjms_topic_selector
|
||||
time: 0
|
||||
- name: rjms_topic_selector_unit
|
||||
time: 0
|
||||
- name: rabbitmq_management
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: cache
|
||||
time: 0
|
||||
- name: stats
|
||||
time: 0
|
||||
- name: rabbit_mgmt_rabbitmqadmin
|
||||
time: 0
|
||||
- name: listener_config
|
||||
time: 0
|
||||
- name: rabbit_mgmt_test_db
|
||||
time: 0
|
||||
- name: rabbit_mgmt_only_http
|
||||
time: 0
|
||||
- name: rabbit_mgmt_stats
|
||||
time: 0
|
||||
- name: rabbit_mgmt_test_unit
|
||||
time: 0
|
||||
- name: rabbit_mgmt_http
|
||||
time: 0
|
||||
- name: clustering_prop
|
||||
time: 0
|
||||
- name: clustering
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_management_agent
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: metrics
|
||||
time: 0
|
||||
- name: exometer_slide
|
||||
time: 0
|
||||
- name: rabbit_mgmt_gc
|
||||
time: 0
|
||||
- name: rabbit_mgmt_slide
|
||||
time: 0
|
||||
- name: rabbitmq_mqtt
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites:
|
||||
- name: util
|
||||
time: 0
|
||||
- name: retainer
|
||||
time: 0
|
||||
- name: proxy_protocol
|
||||
time: 0
|
||||
- name: command
|
||||
time: 0
|
||||
- name: reader
|
||||
time: 0
|
||||
- name: mqtt_machine
|
||||
time: 0
|
||||
- name: processor
|
||||
time: 0
|
||||
- name: java
|
||||
time: 0
|
||||
- name: auth
|
||||
time: 0
|
||||
- name: cluster
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_peer_discovery_common
|
||||
suites:
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_peer_discovery_aws
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites:
|
||||
- name: rabbitmq_peer_discovery_aws
|
||||
time: 0
|
||||
- name: integration
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_peer_discovery_k8s
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbitmq_peer_discovery_k8s
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_peer_discovery_consul
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbitmq_peer_discovery_consul
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_peer_discovery_etcd
|
||||
skip_dialyzer: true
|
||||
skip_tests: true
|
||||
suites:
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: system
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_prometheus
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbit_prometheus_http
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_random_exchange
|
||||
suites: []
|
||||
- name: rabbitmq_recent_history_exchange
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: system
|
||||
time: 0
|
||||
- name: rabbitmq_sharding
|
||||
skip_dialyzer: true
|
||||
suites: []
|
||||
|
@@ -435,91 +372,59 @@ deps:
|
|||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: amqp10_shovel
|
||||
time: 0
|
||||
- name: configuration
|
||||
time: 0
|
||||
- name: dynamic
|
||||
time: 0
|
||||
- name: parameters
|
||||
time: 0
|
||||
- name: config
|
||||
time: 0
|
||||
- name: shovel_status_command
|
||||
time: 0
|
||||
- name: delete_shovel_command
|
||||
time: 0
|
||||
- name: amqp10_dynamic
|
||||
time: 0
|
||||
- name: amqp10
|
||||
time: 0
|
||||
- name: rabbitmq_shovel_management
|
||||
suites:
|
||||
- name: http
|
||||
time: 0
|
||||
- name: rabbitmq_stomp
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: util
|
||||
time: 0
|
||||
- name: proxy_protocol
|
||||
time: 0
|
||||
- name: amqqueue
|
||||
time: 0
|
||||
- name: command
|
||||
time: 0
|
||||
- name: topic
|
||||
time: 0
|
||||
- name: frame
|
||||
time: 0
|
||||
- name: python
|
||||
time: 0
|
||||
- name: connections
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_top
|
||||
suites: []
|
||||
- name: rabbitmq_tracing
|
||||
suites:
|
||||
- name: rabbit_tracing
|
||||
time: 0
|
||||
- name: rabbitmq_trust_store
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: system
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_web_dispatch
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: rabbit_web_dispatch
|
||||
time: 0
|
||||
- name: rabbit_web_dispatch_unit
|
||||
time: 0
|
||||
- name: rabbitmq_web_mqtt
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: proxy_protocol
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: rabbitmq_web_mqtt_examples
|
||||
suites: []
|
||||
- name: rabbitmq_web_stomp
|
||||
skip_dialyzer: true
|
||||
suites:
|
||||
- name: proxy_protocol
|
||||
time: 0
|
||||
- name: raw_websocket
|
||||
time: 0
|
||||
- name: cowboy_websocket
|
||||
time: 0
|
||||
- name: unit
|
||||
time: 0
|
||||
- name: config_schema
|
||||
time: 0
|
||||
- name: amqp_stomp
|
||||
time: 0
|
||||
- name: rabbitmq_web_stomp_examples
|
||||
suites: []
|
|
@@ -1,6 +0,0 @@
#@data/values
---
base_rmq_ref: master
versions:
  erlang: "23.0"
  elixir: "1.8.0"
@@ -1,6 +1,7 @@
#@ load("@ytt:data", "data")
#@ load("util.star", "group_by_time", "to_build_args")
#@ load("helpers.star", "should_skip_dialyze", "ci_image_tag", "ci_dep_image")
#@ load("@ytt:assert", "assert")
#@ load("util.star", "is_unique", "group_by_time", "to_build_args")
#@ load("helpers.star", "ci_image_tag", "ci_dep_image")

#@ def checks_job(dep):
name: #@ dep.name + "-checks"
@@ -16,6 +17,13 @@ steps:
echo "::set-output name=step_start::$(date +%s)"
- name: CHECKOUT REPOSITORY
uses: actions/checkout@v2
- name: VALIDATE KNOWN CT SUITES
env:
project: #@ dep.name
#@ suite_names = [suite.name for suite in dep.suites]
#@ None if is_unique(suite_names) else assert.fail('{} suite names are not unique'.format(dep.name))
run: |
ci/scripts/validate-workflow.sh (@= " ".join(suite_names) @)
- name: RUN CHECKS
uses: docker/build-push-action@v1
with:
@@ -28,7 +36,6 @@ steps:
#@ build_args["IMAGE_TAG"] = ci_image_tag()
#@ build_args["BUILDEVENT_APIKEY"] = '${{ secrets.HONEYCOMB_API_KEY }}'
#@ build_args["project"] = dep.name
#@ build_args["SKIP_DIALYZE"] = should_skip_dialyze(dep)
build_args: #@ to_build_args(build_args)
tags: (@= ci_image_tag() @)
#@ end
@@ -1,11 +1,11 @@
load("util.star", "group_by_time")
load("rabbitmq_cli.lib.yml", "rabbitmq_cli_job")
load("ct.lib.yml", "checks_job", "ct_suites_job", "collect_job")
load("tests.lib.yml", "tests_job")
load("util.star", "group_by_time")

def dep_jobs(dep):
jobs = {}
if not hasattr(dep, "skip_tests") or not dep.skip_tests:
if not getattr(dep, "skip_tests", False):
if dep.name == "rabbitmq_cli":
jobs[dep.name] = rabbitmq_cli_job(dep)
elif len(dep.suites) > 20:
@@ -1,7 +1,7 @@
#@ load("@ytt:data", "data")
#@ load("helpers.star", "ci_image")

#@ def finish_jobs():
#@ def finish_jobs(prepare_jobs_names):
package-generic-unix:
name: package-generic-unix
needs: [prepare]
@@ -46,7 +46,7 @@ package-generic-unix:
ci/scripts/collect.sh
finish:
name: finish
needs: #@ ["prepare"] + [dep.name for dep in data.values.deps if not hasattr(dep, "skip_tests") or not dep.skip_tests]
needs: #@ prepare_jobs_names + [dep.name for dep in data.values.deps if not getattr(dep, "skip_tests", False)] + ['package-generic-unix']
runs-on: ubuntu-18.04
if: always()
#@yaml/text-templated-strings
@@ -1,16 +1,7 @@
load("@ytt:data", "data")

def should_skip_dialyze(dep):
return (data.values.versions.erlang != "23.0"
or (hasattr(dep, "skip_dialyzer") and dep.skip_dialyzer))
end

def should_skip_xref(dep):
return hasattr(dep, "skip_xref") and dep.skip_xref
end

def ci_image_tag():
return "erlang-" + data.values.versions.erlang + "-rabbitmq-${{ github.sha }}"
return "erlang-" + data.values.erlang_version + "-rabbitmq-${{ github.sha }}"
end

def ci_image():
@@ -1,6 +1,6 @@
#@ load("@ytt:data", "data")
#@ load("util.star", "to_build_args")
#@ load("helpers.star", "ci_image", "ci_image_tag", "should_skip_xref")
#@ load("helpers.star", "ci_image", "ci_image_tag")

#@ def prepare_jobs():
prepare:
@@ -28,7 +28,7 @@ prepare:
repository: cf-rabbitmq-core/ci
dockerfile: ci/dockerfiles/ci
#@ build_args = {}
#@ build_args["ERLANG_VERSION"] = data.values.versions.erlang
#@ build_args["ERLANG_VERSION"] = data.values.erlang_version
#@ build_args["GITHUB_RUN_ID"] = '${{ github.run_id }}'
#@ build_args["BUILDEVENT_APIKEY"] = '${{ secrets.HONEYCOMB_API_KEY }}'
#@ build_args["GITHUB_SHA"] = '${{ github.sha }}'
@@ -65,12 +65,14 @@ xref:
with:
service_account_key: ${{ secrets.GCR_JSON_KEY }}
export_default_credentials: true
#@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + data.values.versions.erlang
#@ for dep in [d for d in data.values.deps if not should_skip_xref(d)]:
- name: RUN XREF (@= dep.name @)
- name: PULL IMAGE
#@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + data.values.erlang_version
run: |
gcloud auth configure-docker
docker pull (@= base_image @)
#@ for dep in [d for d in data.values.deps if not getattr(d, "skip_xref", False)]:
- name: RUN XREF (@= dep.name @)
run: |
docker run \
--env project=(@= dep.name @) \
--env GITHUB_RUN_ID=${{ github.run_id }} \
@@ -92,4 +94,50 @@ xref:
--workdir /workspace/rabbitmq \
(@= base_image @) \
ci/scripts/collect.sh

#@ if/end data.values.erlang_version == data.values.erlang_versions[-1]:
dialyze:
name: dialyze
runs-on: ubuntu-18.04
#@yaml/text-templated-strings
steps:
- name: RECORD DIALYZE START
id: buildevents
run: |
echo "::set-output name=step_start::$(date +%s)"
- name: CHECKOUT REPOSITORY
uses: actions/checkout@v2
- uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
with:
service_account_key: ${{ secrets.GCR_JSON_KEY }}
export_default_credentials: true
- name: PULL IMAGE
#@ base_image = "eu.gcr.io/cf-rabbitmq-core/ci-base:" + data.values.erlang_version
run: |
gcloud auth configure-docker
docker pull (@= base_image @)
#@ for dep in [d for d in data.values.deps if not getattr(d, "skip_dialyzer", False)]:
- name: RUN DIALYZE (@= dep.name @)
run: |
docker run \
--env project=(@= dep.name @) \
--env GITHUB_RUN_ID=${{ github.run_id }} \
--env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
--volume ${PWD}:/workspace/rabbitmq \
--workdir /workspace/rabbitmq \
(@= base_image @) \
ci/scripts/dialyze.sh
#@ end
- name: RECORD STEP FINISH
if: always()
run: |
docker run \
--env project=dialyze \
--env GITHUB_RUN_ID=${{ github.run_id }} \
--env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
--env STEP_START=${{ steps.buildevents.outputs.step_start }} \
--volume ${PWD}/ci/scripts:/workspace/rabbitmq/ci/scripts \
--workdir /workspace/rabbitmq \
(@= base_image @) \
ci/scripts/collect.sh
#@ end
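The #@ if/end guard above ties the dialyze job to the last entry of erlang_versions, so dialyzer only runs in the workflow generated for the newest Erlang series. A quick, informal way to confirm which generated workflow carries it:

    # Only one generated workflow should contain the dialyze job.
    grep -l "name: dialyze" .github/workflows/test-erlang-otp-*.yaml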
@@ -1,6 +1,6 @@
#@ load("@ytt:data", "data")
#@ load("util.star", "to_build_args")
#@ load("helpers.star", "should_skip_dialyze", "ci_image")
#@ load("helpers.star", "ci_image")

#@ def tests_job(dep):
name: #@ dep.name
@@ -23,8 +23,6 @@ steps:
mkdir ct-logs && chmod 777 ct-logs
docker run \
--env project=(@= dep.name @) \
--env SKIP_DIALYZE=(@= str(should_skip_dialyze(dep)) @) \
--env SKIP_XREF=(@= str(hasattr(dep, "skip_xref") and dep.skip_xref) @) \
--env GITHUB_RUN_ID=${{ github.run_id }} \
--env BUILDEVENT_APIKEY=${{ secrets.HONEYCOMB_API_KEY }} \
--env STEP_START=${{ steps.buildevents.outputs.step_start }} \
@@ -1,3 +1,7 @@
def is_unique(l):
return len(l) == len(set(l))
end

def merge(dicts):
r = {}
for d in dicts:
@@ -4,8 +4,13 @@
#@ load("prepare.lib.yml", "prepare_jobs")
#@ load("dep.star", "dep_jobs")
#@ load("finish.lib.yml", "finish_jobs")

#@ prepare = prepare_jobs()
#@ deps = [dep_jobs(dep) for dep in data.values.deps]
#@ finish = finish_jobs([prepare[k]['name'] for k in prepare])

---
name: #@ "Test - Erlang " + data.values.versions.erlang
name: #@ "Test - Erlang " + data.values.erlang_version
#! https://github.com/k14s/ytt/issues/189
a_magic_string_that_we_will_sed_to_on: push
jobs: #@ merge([prepare_jobs()] + [dep_jobs(dep) for dep in data.values.deps] + [finish_jobs()])
jobs: #@ merge([prepare] + deps + [finish])