CI experiments
lhoguin committed Sep 2, 2024
1 parent f0932e3 commit 5efb311
Showing 3 changed files with 311 additions and 2 deletions.
170 changes: 170 additions & 0 deletions .github/workflows/test-plugin-make.yaml
@@ -0,0 +1,170 @@
name: Test Plugin
on:
workflow_call:
inputs:
repo_cache_key:
required: true
type: string
plugin:
required: true
type: string
secrets:
REMOTE_CACHE_BUCKET_NAME:
required: true
REMOTE_CACHE_CREDENTIALS_JSON:
required: true
jobs:
test:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
erlang_version:
- 26
metadata_store:
- mnesia
- khepri
include:
- erlang_version: 26
elixir_version: 1.15
timeout-minutes: 120
steps:
# - name: LOAD REPO CACHE
# uses: actions/cache/restore@v4
# with:
# key: ${{ inputs.repo_cache_key }}
# path: /home/runner/repo-cache/
- name: CHECKOUT REPOSITORY
uses: actions/checkout@v4
- name: CONFIGURE OTP & ELIXIR
uses: erlef/setup-beam@v1.17
with:
otp-version: ${{ matrix.erlang_version }}
elixir-version: ${{ matrix.elixir_version }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
# - name: AUTHENTICATE TO GOOGLE CLOUD
# uses: google-github-actions/auth@v2.1.5
# with:
# credentials_json: ${{ secrets.REMOTE_CACHE_CREDENTIALS_JSON }}
# - name: CONFIGURE BAZEL
# run: |
# if [ -n "${{ secrets.REMOTE_CACHE_BUCKET_NAME }}" ]; then
# cat << EOF >> user.bazelrc
# build --remote_cache=https://storage.googleapis.com/${{ secrets.REMOTE_CACHE_BUCKET_NAME }}
# build --google_default_credentials
#
# build --experimental_guard_against_concurrent_changes
# EOF
# fi
# cat << EOF >> user.bazelrc
# build --repository_cache=/home/runner/repo-cache/
# build --color=yes
# EOF
#
# bazelisk info release
# #! - name: Setup tmate session
# #! uses: mxschmitt/action-tmate@v3
- uses: actions/setup-dotnet@v4
if: inputs.plugin == 'rabbit'
with:
dotnet-version: '3.1.x'
# - name: deps/amqp10_client SETUP
# if: inputs.plugin == 'amqp10_client'
# run: |
# # reduce sandboxing so that activemq works
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: deps/rabbit SETUP
# if: inputs.plugin == 'rabbit'
# run: |
# # reduce sandboxing so that maven works
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
- name: deps/rabbitmq_auth_backend_ldap SETUP
if: inputs.plugin == 'rabbitmq_auth_backend_ldap'
run: |
sudo apt-get update && \
sudo apt-get install -y \
apparmor-utils \
ldap-utils \
slapd
sudo aa-complain `which slapd`
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: deps/rabbitmq_mqtt SETUP
# if: inputs.plugin == 'rabbitmq_mqtt'
# run: |
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: deps/rabbitmq_peer_discovery_consul SETUP
# if: inputs.plugin == 'rabbitmq_peer_discovery_consul'
# run: |
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: deps/rabbitmq_stream SETUP
# if: inputs.plugin == 'rabbitmq_stream'
# run: |
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: deps/rabbitmq_stream_management SETUP
# if: inputs.plugin == 'rabbitmq_stream_management'
# run: |
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: deps/rabbitmq_tracing SETUP
# if: inputs.plugin == 'rabbitmq_tracing'
# run: |
# cat << EOF >> user.bazelrc
# build --strategy=TestRunner=local
# EOF
# - name: CLI COMPILE WARNINGS AS ERRORS
# if: inputs.plugin == 'rabbitmq_cli'
# run: |
# bazel build //deps/rabbitmq_cli:compile_warnings_as_errors \
# --verbose_failures
# - name: COMPUTE TESTS IN SHARD
# id: shard
# run: |
# bazelisk cquery \
# 'tests(//deps/${{ inputs.plugin }}/...) except attr("tags", "manual|mixed-version-cluster", //deps/...)' \
# --output=label \
# | awk '{print $1;}' > tests.log
# split -da 3 -l $((`wc -l < tests.log`/${{ inputs.shard_count }})) tests.log shard
# printf -v padded_index "%03d" ${{ inputs.shard_index }}
# echo "file=shard$padded_index" | tee -a $GITHUB_OUTPUT
- name: RUN TESTS
if: inputs.plugin != 'rabbitmq_peer_discovery_aws'
run: |
make -C deps/${{ inputs.plugin }} parallel-ct RABBITMQ_METADATA_STORE=${{ matrix.metadata_store }}
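# With the values wired in via test.yaml below, this expands to, e.g.:
#   make -C deps/rabbit parallel-ct RABBITMQ_METADATA_STORE=khepri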
# echo "Tests in shard:"
# cat ${{ steps.shard.outputs.file }}
# echo ""

## WARNING:
## Secrets must not be set in --test_env or --action_env;
## if they are, logs must not be saved as artifacts, because
## rabbit_ct_helpers or other code may log portions of the
## env vars and leak them.
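## For illustration only (a hypothetical call, nothing in this workflow runs it):
## a single line such as
##   ct:pal("env: ~p", [os:getenv()])
## in any suite would copy the entire OS environment, secrets included,
## into the saved CT logs.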

# bazelisk test $(< ${{ steps.shard.outputs.file }}) \
# --test_env RABBITMQ_METADATA_STORE=${{ matrix.metadata_store }} \
# --build_tests_only \
# --verbose_failures
# - name: UPLOAD TEST LOGS
# if: always()
# uses: actions/upload-artifact@v4
# with:
# name: bazel-testlogs-${{ inputs.plugin }}-${{ inputs.shard_index }}-${{ matrix.erlang_version }}-${{ matrix.metadata_store }}
# path: |
# bazel-testlogs/deps/${{ inputs.plugin }}/*
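Both metadata_store values from the matrix are handed to make as RABBITMQ_METADATA_STORE. Assuming the build exports that variable to the test environment, test code can branch on it roughly as sketched here (a hypothetical module, not the actual rabbit_ct_helpers API):

-module(metadata_store_env_sketch).
-export([configured_store/0]).

%% Reads the variable exported by the RUN TESTS step above; defaults to
%% mnesia when it is unset, mirroring the workflow's matrix values.
configured_store() ->
    case os:getenv("RABBITMQ_METADATA_STORE") of
        "khepri" -> khepri;
        _ -> mnesia
    end.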
17 changes: 17 additions & 0 deletions .github/workflows/test.yaml
@@ -141,6 +141,23 @@ jobs:
repo_cache_key: ${{ needs.check-workflow.outputs.repo_cache_key }}
plugin: trust_store_http
secrets: inherit
test-rabbit-make:
# needs:
# - check-workflow
# - test-amqp10_client
# - test-amqp10_common
# - test-amqp_client
# - test-oauth2_client
# - test-rabbit_common
# - test-rabbitmq_ct_client_helpers
# - test-rabbitmq_ct_helpers
# - test-rabbitmq_stream_common
# - test-trust_store_http
uses: ./.github/workflows/test-plugin-make.yaml
with:
repo_cache_key: ${{ needs.check-workflow.outputs.repo_cache_key }}
plugin: rabbit
secrets: inherit
test-rabbit-0:
needs:
- check-workflow
126 changes: 124 additions & 2 deletions deps/rabbit/Makefile
Expand Up @@ -243,9 +243,131 @@ define ct_master.erl
halt()
endef

ct-master: test-build
# @todo We must have a way of running only a single shard or a single set
# @todo We must have a way of ensuring all test suites are covered in sets
# @todo We must ensure that the CT_OPTS also apply to ct-master
# @todo We should probably refactor ct_master.erl to have node init in a separate .erl
# @todo We would benefit from having rabbit nodes started with peer (no leftovers)
# @todo We need ct-master to be expanded to all components and not just rabbit
# @todo Start using ct-master in CI
# @todo Generate ct.test.spec from Makefile variables instead of hardcoding it (see the spec sketch below)
# @todo We must figure out how much a GH worker can manage
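#
# The ct.test.spec referenced above is not part of this diff. A generated
# spec would have to emit Common Test terms of roughly this shape (node
# names follow PARALLEL_CT_NODE_NAME below; suite modules assume the usual
# _SUITE suffix; the grouping shown is purely illustrative):
#
#   {node, shard1, 'rabbit_shard1@localhost'}.
#   {node, shard2, 'rabbit_shard2@localhost'}.
#   {logdir, all_nodes, "logs/"}.
#   {suites, shard1, "test/", [amqp_auth_SUITE, bindings_SUITE]}.
#   {suites, shard2, "test/", [quorum_queue_SUITE, routing_SUITE]}.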


#PARALLEL_CT_NUM_NODES ?= 4
#PARALLEL_CT_NODE_NAME = rabbit_shard$1@localhost
#PARALLEL_CT_NODE_INIT_FUN = fun(Pid, Num) -> peer:call(Pid, net_kernel, set_net_ticktime, [5]), peer:call(Pid, persistent_term, put, [rabbit_ct_tcp_port_base, 21000 + 2000 * Num]) end
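#
# A rough sketch (not the committed ct_master.erl) of starting one shard
# node with peer and applying the init fun above; the module and function
# names are illustrative assumptions:
#
#   -module(parallel_ct_peer_sketch).
#   -export([start_shard/1]).
#
#   start_shard(Num) ->
#       Name = list_to_atom("rabbit_shard" ++ integer_to_list(Num)),
#       {ok, Pid, Node} = peer:start_link(#{name => Name, host => "localhost"}),
#       peer:call(Pid, net_kernel, set_net_ticktime, [5]),
#       peer:call(Pid, persistent_term, put,
#                 [rabbit_ct_tcp_port_base, 21000 + 2000 * Num]),
#       {Pid, Node}.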
#
#PARALLEL_CT_NUM_SETS = 8
#
#PARALLEL_CT_SET_1 = amqp_address amqp_auth amqp_client amqp_credit_api_v2 amqp_proxy_protocol \
# amqp_system amqpl_consumer_ack amqpl_direct_reply_to amqqueue_backward_compatibility \
# backing_queue bindings channel_interceptor channel_operation_timeout classic_queue classic_queue_prop
#
#PARALLEL_CT_SET_2 = cluster config_schema confirms_rejects consumer_timeout crashing_queues \
# deprecated_features direct_exchange_routing_v2 disconnect_detected_during_alarm \
# disk_monitor dynamic_qq exchanges rabbit_stream_queue
#
#PARALLEL_CT_SET_3 = cli_forget_cluster_node feature_flags feature_flags_v2 feature_flags_with_unpriveleged_user \
# list_consumers_sanity_check list_queues_online_and_offline logging lqueue maintenance_mode \
# mc_unit message_containers_deaths_v2 message_size_limit metadata_store_migration \
# metadata_store_phase1 metrics mirrored_supervisor msg_store peer_discovery_classic_config
#
#PARALLEL_CT_SET_4 = peer_discovery_dns peer_discovery_tmp_hidden_node per_node_limit per_user_connection_channel_limit \
# per_user_connection_channel_tracking per_user_connection_tracking per_vhost_connection_limit \
# per_vhost_msg_store per_vhost_queue_limit policy priority_queue priority_queue_recovery \
# product_info proxy_protocol publisher_confirms_parallel
#
#PARALLEL_CT_SET_5 =
# clustering_recovery
#, metadata_store_clustering
#, queue_length_limits
#, queue_parallel
#, quorum_queue
#, rabbit_access_control
#, rabbit_confirms
#, rabbit_core_metrics_gc
#, rabbit_cuttlefish
#, rabbit_db_binding
#, rabbit_db_exchange
#, rabbit_db_maintenance
#, rabbit_db_msup
#, rabbit_db_policy
#, rabbit_db_queue
#, rabbit_db_topic_exchange
#, rabbit_direct_reply_to_prop
#
#PARALLEL_CT_SET_6 =
# queue_type
#, quorum_queue_member_reconciliation
#, rabbit_fifo
#, rabbit_fifo_dlx
#, rabbit_fifo_dlx_integration
#, rabbit_fifo_int
#, rabbit_fifo_prop
#, rabbit_fifo_v0
#, rabbit_local_random_exchange
#, rabbit_message_interceptor
#, rabbit_stream_coordinator
#, rabbit_stream_sac_coordinator
#, rabbitmq_4_0_deprecations
#, rabbitmq_queues_cli_integration
#, rabbitmqctl_integration
#, rabbitmqctl_shutdown
#, routing
#, runtime_parameters
#
#PARALLEL_CT_SET_7 =
# cluster_limit
#, cluster_minority
#, clustering_management
#, signal_handling
#, single_active_consumer
#, term_to_binary_compat_prop
#, topic_permission
#, transactions
#, unicode
#, unit_access_control
#, unit_access_control_authn_authz_context_propagation
#, unit_access_control_credential_validation
#, unit_amqp091_content_framing
#, unit_amqp091_server_properties
#, unit_app_management
#, unit_cluster_formation_locking_mocks
#, unit_cluster_formation_sort_nodes
#, unit_collections
#, unit_config_value_encryption
#, unit_connection_tracking
#
#PARALLEL_CT_SET_8 =
# dead_lettering
#, definition_import
#, per_user_connection_channel_limit_partitions
#, per_vhost_connection_limit_partitions
#, unit_credit_flow
#, unit_disk_monitor
#, unit_file_handle_cache
#, unit_gen_server2
#, unit_log_management
#, unit_operator_policy
#, unit_pg_local
#, unit_plugin_directories
#, unit_plugin_versioning
#, unit_policy_validators
#, unit_priority_queue
#, unit_queue_consumers
#, unit_queue_location
#, unit_quorum_queue
#, unit_stats_and_metrics
#, unit_supervisor2
#, unit_vm_memory_monitor
#, upgrade_preparation
#, vhost
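#
# The parallel-ct target below drives the ct_master.erl snippet defined
# earlier: it creates $(CT_LOGS_DIR) and starts a hidden
# parallel_ct_$(PROJECT)@localhost controller node with a 5-second net
# tick time.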


parallel-ct: test-build
$(verbose) mkdir -p $(CT_LOGS_DIR)
$(call erlang,$(ct_master.erl),-sname rabbit_master@localhost -hidden -kernel net_ticktime 5)
$(call erlang,$(ct_master.erl),-sname parallel_ct_$(PROJECT)@localhost -hidden -kernel net_ticktime 5)

# --------------------------------------------------------------------
# Compilation.
