diff --git a/cdc/Makefile b/cdc/Makefile
index 884a6789..bf630689 100644
--- a/cdc/Makefile
+++ b/cdc/Makefile
@@ -263,7 +263,7 @@ integration_test_build: check_failpoint_ctl rawkv_data cdc_state_checker
 	$(FAILPOINT_DISABLE)
 
 integration_test: prepare_test_binaries check_third_party_binary integration_test_build
-	tests/integration_tests/run.sh
+	tests/integration_tests/run.sh "$(CASE)"
 	@bash <(curl -s https://codecov.io/bash) -F cdc -s /tmp/tikv_cdc_test -f *.out -t $(TIKV_MIGRATION_CODECOV_TOKEN)
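With the `CASE` pass-through above, a specific case or list of cases can be selected straight from the `make` command line. A minimal usage sketch (what `run.sh` does when `CASE` is empty is not shown in this patch, so treating it as "run everything" is an assumption):

```bash
# Run the whole suite (assumes an empty CASE makes run.sh fall back to all cases).
make integration_test

# Run a single case.
make integration_test CASE=autorandom

# Run an explicit list of cases; run.sh receives them as one space-separated argument.
make integration_test CASE="cli cli_tls"
```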
diff --git a/cdc/tests/.unmodify/README.md b/cdc/tests/.unmodify/README.md
index 67eb5992..2b825ff9 100644
--- a/cdc/tests/.unmodify/README.md
+++ b/cdc/tests/.unmodify/README.md
@@ -2,33 +2,23 @@
 
 ### Run integration tests locally
 
-1. The following executables must be copied or generated or linked into these locations, `sync_diff_inspector` can be
-   downloaded
-   from [tidb-community-toolkit](https://download.pingcap.org/tidb-community-toolkit-v4.0.2-linux-amd64.tar.gz)
-   , `tidb-server` related binaries can be downloaded
-   from [tidb-community-server](https://download.pingcap.org/tidb-community-server-v4.0.2-linux-amd64.tar.gz):
+Assume that you are in the root directory of the source code (`github.com/tikv/migration/cdc`).
 
-   * `bin/tidb-server` # version >= 4.0.0-rc.1
-   * `bin/tikv-server` # version >= 4.0.0-rc.1
-   * `bin/pd-server` # version >= 4.0.0-rc.1
-   * `bin/pd-ctl` # version >= 4.0.0-rc.1
-   * `bin/tiflash` # needs tiflash binary and some necessary so files
-   * `bin/sync_diff_inspector`
-   * [bin/go-ycsb](https://github.com/pingcap/go-ycsb)
-   * [bin/etcdctl](https://github.com/etcd-io/etcd/tree/master/etcdctl)
-   * [bin/jq](https://stedolan.github.io/jq/)
-   * [bin/minio](https://github.com/minio/minio)
+1. The following executables must be copied, generated, or linked into these locations:
 
-   > If you are running tests on MacOS, tidb related binaries can be downloaded from tiup mirrors, such as https://tiup-mirrors.pingcap.com/tidb-v4.0.2-darwin-amd64.tar.gz. And `sync_diff_inspector` can be compiled by yourself from source [tidb-tools](https://github.com/pingcap/tidb-tools)
+   * `scripts/bin/tidb-server` # version >= 6.2.0
+   * `scripts/bin/tikv-server` # version >= 6.2.0
+   * `scripts/bin/pd-server` # version >= 6.2.0
+   * `scripts/bin/pd-ctl` # version >= 6.2.0
+   * [scripts/bin/go-ycsb](https://github.com/pingcap/go-ycsb)
+   * [scripts/bin/etcdctl](https://github.com/etcd-io/etcd/tree/master/etcdctl)
+   * [scripts/bin/jq](https://stedolan.github.io/jq/)
 
-   > All Tiflash required files can be found in [tidb-community-server](https://download.pingcap.org/tidb-community-server-v4.0.2-linux-amd64.tar.gz) packages. You should put `flash_cluster_manager`, `libtiflash_proxy.so` and `tiflash` into `bin` directory in TiCDC code base.
+   > If you are running tests on Linux x86-64, you can run `make prepare_test_binaries` to get all necessary binaries.
+   >
+   > If you are running tests on macOS, tidb-related binaries can be downloaded from tiup mirrors, such as https://tiup-mirrors.pingcap.com/tidb-v6.2.0-darwin-amd64.tar.gz.
 
-2. The following programs must be installed:
-
-   * [mysql](https://dev.mysql.com/doc/mysql-installation-excerpt/5.7/en/) (the MySQL cli client,
-     currently [mysql client 8.0 is not supported](https://github.com/pingcap/tidb/issues/14021))
-
-3. The user used to execute the tests must have permission to create the folder /tmp/tidb_cdc_test. All test artifacts
+2. The user used to execute the tests must have permission to create the folder /tmp/tikv_cdc_test. All test artifacts
    will be written into this folder.
 
 ### Run integration tests in docker
@@ -44,60 +34,44 @@ We recommend that you provide docker with at least 6+ cores and 8G+ memory. Of c
 
 ### Unit Test
 
-1. Unit test does not need any dependencies, just running `make unit_test` in root dir of source code, or cd into
+1. Unit tests do not need any dependencies; just run `make unit_test` in the root dir of source code, or `cd` into
    directory of a test case and run single case via `GO111MODULE=on go test -check.f TestXXX`.
 
 ### Integration Test
 
 #### Run integration tests locally
 
-1. Run `make integration_test_build` to generate TiCDC related binaries for integration test
+1. Run `make integration_test_build` to generate `tikv-cdc` binaries for the integration tests
 
 2. Run `make integration_test` to execute the integration tests. This command will
 
    1. Check that all required executables exist.
   2. Execute `tests/integration_tests/run.sh`
 
-   > If want to run one integration test case only, just pass the CASE parameter, such as `make integration_test CASE=simple`.
-
-   > There exists some environment variables that you can set by yourself, variable details can be found in [test_prepare](_utils/test_prepare).
+   > If you want to run only one integration test case, just pass the `CASE` parameter, e.g. `make integration_test CASE=autorandom`.
+   >
+   > There are some environment variables that you can set yourself; see [test_prepare](./integration_tests/_utils/test_prepare).
 
-   > `MySQL sink` will be used by default, if you want to test `Kafka sink`, please run with `make integration_test_kafka CASE=simple`.
+#### Run integration tests in docker
 
-3. After executing the tests, run `make coverage` to get a coverage report at `/tmp/tidb_cdc_test/all_cov.html`.
+1. Run `tests/up.sh`. This script will set up a container with some tools ready, and run `/bin/bash` interactively in the container.
 
-#### Run integration tests in docker
+2. Run `make integration_test` or `make integration_test CASE=[test name]` to execute the integration tests.
 
 > **Warning:**
 > These scripts and files may not work under the arm architecture,
 > and we have not tested against it.
-> Also, we currently use the PingCAP intranet address in our download scripts,
-> so if you do not have access to the PingCAP intranet you will not be able to use these scripts.
-> We will try to resolve these issues as soon as possible.
-
-1. If you want to run kafka tests,
-   run `CASE="clustered_index" docker-compose -f ./deployments/ticdc/docker-compose/docker-compose-kafka-integration.yml up --build`
-
-2. If you want to run MySQL tests,
-   run `CASE="clustered_index" docker-compose -f ./deployments/ticdc/docker-compose/docker-compose-mysql-integration.yml up --build`
-
-3. Use the command `docker-compose -f ./deployments/ticdc/docker-compose/docker-compose-kafka-integration.yml down -v`
-   to clean up the corresponding environment.
 
 Some useful tips:
 
-1. The log files for the test are mounted in the `./deployments/ticdc/docker-compose/logs` directory.
-
-2. You can specify multiple tests to run in CASE, for example: `CASE="clustered_index kafka_messages"`. You can even
+- You can specify multiple tests to run in CASE, for example: `CASE="cli cli_tls"`. You can even
   use `CASE="*"` to indicate that you are running all tests。
 
-3. You can specify in the [integration-test.Dockerfile](../../deployments/ticdc/docker/integration-test.Dockerfile)
-   the version of other dependencies that you want to download, such as tidb, tikv, pd, etc.
-   > For example, you can change `RUN ./download-integration-test-binaries.sh master` to `RUN ./download-integration-test-binaries.sh release-5.2`
-   > to use the release-5.2 dependency.
-   > Then rebuild the image with the [--no-cache](https://docs.docker.com/compose/reference/build/) flag.
-
 ## Writing new tests
 
-New integration tests can be written as shell scripts in `tests/integration_tests/TEST_NAME/run.sh`. The script should
+1. Write new integration tests as shell scripts in `tests/integration_tests/TEST_NAME/run.sh`. The script should
 exit with a nonzero error code on failure.
+
+2. Add TEST_NAME to an existing group in [run_group.sh](./integration_tests/run_group.sh), or add a new group for it.
+
+3. If you add a new group, the name of the new group must be added to [CI](https://github.com/PingCAP-QE/ci/blob/main/pipelines/tikv/migration/latest/pull_integration_test.groovy).
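For the "copied, generated, or linked" step in the README above, a hypothetical way to stage the binaries under `scripts/bin` is sketched below; all source paths are illustrative placeholders and depend on where your own builds or downloads live:

```bash
# Illustrative only: symlink pre-built binaries into scripts/bin (source paths are placeholders).
mkdir -p scripts/bin
ln -sf "$HOME/tidb/bin/tidb-server"            scripts/bin/tidb-server   # >= v6.2.0
ln -sf "$HOME/tikv/target/release/tikv-server" scripts/bin/tikv-server   # >= v6.2.0
ln -sf "$HOME/pd/bin/pd-server"                scripts/bin/pd-server     # >= v6.2.0
ln -sf "$HOME/pd/bin/pd-ctl"                   scripts/bin/pd-ctl        # >= v6.2.0
ln -sf "$(go env GOPATH)/bin/go-ycsb"          scripts/bin/go-ycsb
```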
diff --git a/cdc/tests/README.md b/cdc/tests/README.md
new file mode 100644
index 00000000..6a918b08
--- /dev/null
+++ b/cdc/tests/README.md
@@ -0,0 +1,77 @@
+## Preparations
+
+### Run integration tests locally
+
+Assume that you are in the root directory of the source code (`github.com/tikv/migration/cdc`).
+
+1. The following executables must be copied, generated, or linked into these locations:
+
+   * `scripts/bin/tidb-server` # version >= 6.2.0
+   * `scripts/bin/tikv-server` # version >= 6.2.0
+   * `scripts/bin/pd-server` # version >= 6.2.0
+   * `scripts/bin/pd-ctl` # version >= 6.2.0
+   * [scripts/bin/go-ycsb](https://github.com/pingcap/go-ycsb)
+   * [scripts/bin/etcdctl](https://github.com/etcd-io/etcd/tree/master/etcdctl)
+   * [scripts/bin/jq](https://stedolan.github.io/jq/)
+
+   > If you are running tests on Linux x86-64, you can run `make prepare_test_binaries` to get all necessary binaries.
+   >
+   > If you are running tests on macOS, tidb-related binaries can be downloaded from tiup mirrors, such as https://tiup-mirrors.pingcap.com/tidb-v6.2.0-darwin-amd64.tar.gz.
+
+2. The user used to execute the tests must have permission to create the folder /tmp/tikv_cdc_test. All test artifacts
+   will be written into this folder.
+
+### Run integration tests in docker
+
+The following programs must be installed:
+
+* [docker](https://docs.docker.com/get-docker/)
+* [docker-compose](https://docs.docker.com/compose/install/)
+
+We recommend that you provide docker with at least 6 cores and 8 GB of memory. Of course, the more resources, the better.
+
+## Running
+
+### Unit Test
+
+1. Unit tests do not need any dependencies; just run `make unit_test` in the root dir of source code, or `cd` into
+   directory of a test case and run single case via `GO111MODULE=on go test -check.f TestXXX`.
+
+### Integration Test
+
+#### Run integration tests locally
+
+1. Run `make integration_test` to execute the integration tests. This command will
+
+   1. Download all required executables.
+   2. Check that all required executables exist.
+   3. Generate `tikv-cdc` binaries for the integration tests.
+   4. Execute `tests/integration_tests/run.sh`
+
+   > If you want to run only one integration test case, just pass the `CASE` parameter, e.g. `make integration_test CASE=autorandom`.
+   >
+   > There are some environment variables that you can set yourself; see [test_prepare](./integration_tests/_utils/test_prepare).
+
+#### Run integration tests in docker
+
+1. Run `tests/up.sh`. This script will set up a container with some tools ready, and run `/bin/bash` interactively in the container.
+
+2. Run `make integration_test` or `make integration_test CASE=[test name]` to execute the integration tests.
+
+> **Warning:**
+> These scripts and files may not work under the arm architecture,
+> and we have not tested against it.
+
+Some useful tips:
+
+- You can specify multiple tests to run in CASE, for example: `CASE="cli cli_tls"`. You can even
+  use `CASE="*"` to indicate that you are running all tests.
+
+## Writing new tests
+
+1. Write new integration tests as shell scripts in `tests/integration_tests/TEST_NAME/run.sh`. The script should
+exit with a nonzero error code on failure.
+
+2. Add TEST_NAME to an existing group in [run_group.sh](./integration_tests/run_group.sh), or add a new group for it.
+
+3. If you add a new group, the name of the new group must be added to [CI](https://github.com/PingCAP-QE/ci/blob/main/pipelines/tikv/migration/latest/pull_integration_test.groovy).
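To complement the "Writing new tests" section above, here is a hypothetical skeleton for `tests/integration_tests/TEST_NAME/run.sh`; the shared helpers that real cases source from `_utils` are not part of this patch, so only the exit-code convention is taken from the README:

```bash
#!/bin/bash
# Hypothetical skeleton of tests/integration_tests/TEST_NAME/run.sh.
# Real cases use shared helpers from ../_utils (not shown in this patch); the one
# hard requirement illustrated here is that any failure must make the script
# exit with a nonzero code, which `set -eu` guarantees.
set -eu

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
TEST_NAME=$(basename "$CUR")

echo "[$(date)] running case ${TEST_NAME}"
# ... start the upstream TiKV/PD cluster, create a changefeed with tikv-cdc,
#     write data with go-ycsb, then verify the data on the downstream cluster ...
echo "[$(date)] case ${TEST_NAME} passed"
```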
Run `make integration_test` or `make integration_test CASE=[test name]` to execute the integration tests.
diff --git a/cdc/tests/integration_tests/run_group.sh b/cdc/tests/integration_tests/run_group.sh
new file mode 100755
index 00000000..24063b74
--- /dev/null
+++ b/cdc/tests/integration_tests/run_group.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+set -eo pipefail
+
+CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+
+group=$1
+
+# Define groups
+# Note: If a new group is added, the group name must also be added to CI
+# https://github.com/PingCAP-QE/ci/blob/main/pipelines/tikv/migration/latest/pull_integration_test.groovy
+declare -A groups
+groups=(
+	["G00"]='autorandom'
+	["G01"]='capture_session_done_during_task cdc_hang_on'
+	["G02"]='changefeed_auto_stop changefeed_error changefeed_fast_fail'
+	["G03"]='changefeed_finish changefeed_pause_resume changefeed_reconstruct'
+	["G04"]='cli cli_tls http_api http_proxies'
+	["G05"]='disk_full flow_control'
+	["G06"]='gc_safepoint kill_owner'
+	["G07"]='kv_client_stream_reconnect multi_capture'
+	["G08"]='processor_err_chan processor_panic'
+	["G09"]='processor_resolved_ts_fallback processor_stop_delay'
+	["G10"]='sink_hang sink_tls'
+	["G11"]='sorter stop_downstream'
+	["G12"]='availability' # heavy test case
+)
+
+# Collect cases that are not assigned to any group, to avoid missing any case
+others=()
+for script in "$CUR"/*/run.sh; do
+	test_name="$(basename "$(dirname "$script")")"
+	# shellcheck disable=SC2076
+	if [[ ! " ${groups[*]} " =~ " ${test_name} " ]]; then
+		others=("${others[@]}" "${test_name}")
+	fi
+done
+
+# Resolve the group name into a list of test names
+test_names=""
+# shellcheck disable=SC2076
+if [[ "$group" == "others" ]]; then
+	test_names="${others[*]}"
+elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then
+	test_names="${groups[${group}]}"
+else
+	echo "Error: invalid group name: ${group}"
+	exit 1
+fi
+
+# Run test cases
+if [[ -n $test_names ]]; then
+	echo "Run cases: ${test_names}"
+	"${CUR}"/run.sh "${test_names}"
+fi
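For reference, the new grouping script is invoked with a single group name (this is how CI is expected to call it, and it can also be run by hand from `cdc/tests/integration_tests`); the behavior below follows directly from the script:

```bash
# Run all cases assigned to group G04 (cli cli_tls http_api http_proxies).
./run_group.sh G04

# Run every case that is not listed in any group.
./run_group.sh others

# An unknown group name fails fast.
./run_group.sh G99   # prints "Error: invalid group name: G99" and exits with 1
```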