From 31033d8b83d355d5c77565eac17ae283dbbc49c8 Mon Sep 17 00:00:00 2001
From: XieYunshen <1084314248@qq.com>
Date: Tue, 16 Sep 2025 16:46:46 +0800
Subject: [PATCH 1/2] coverage merge

---
 .github/workflows/_ci_xpu.yml             | 135 ++++++++++++++++++
 .github/workflows/_coverage_combine.yml   | 104 ++++++++++++++
 .github/workflows/_unit_test_coverage.yml |  99 ++-----------
 .github/workflows/ci_xpu.yml              |  88 ------------
 .github/workflows/pr_build_and_test.yml   | 106 ++++++++------
 .../layers/attention/attention.py          |   2 +-
 scripts/.coveragerc                        |   2 +
 scripts/codecov.yml                        |   5 +
 scripts/run_ci_xpu.sh                      |  13 +-
 9 files changed, 329 insertions(+), 225 deletions(-)
 create mode 100644 .github/workflows/_ci_xpu.yml
 create mode 100644 .github/workflows/_coverage_combine.yml
 delete mode 100644 .github/workflows/ci_xpu.yml
 create mode 100644 scripts/codecov.yml

diff --git a/.github/workflows/_ci_xpu.yml b/.github/workflows/_ci_xpu.yml
new file mode 100644
index 0000000000..9bef1a60c6
--- /dev/null
+++ b/.github/workflows/_ci_xpu.yml
@@ -0,0 +1,135 @@
+name: CI_XPU
+
+on:
+  workflow_call:
+    inputs:
+      DOCKER_IMAGE:
+        description: "Build Images"
+        required: true
+        type: string
+        default: "ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/fastdeploy-xpu:2.1.0"
+      FASTDEPLOY_ARCHIVE_URL:
+        description: "URL of the compressed FastDeploy code archive."
+        required: true
+        type: string
+    outputs:
+      xpu_cov_file_url:
+        description: "URL of the XPU coverage data archive produced by the XPU tests."
+        value: ${{ jobs.CI_XPU.outputs.xpu_cov_file_url }}
+
+concurrency:
+  group: ${{ github.event.pull_request.number }}-xpu-ci
+  cancel-in-progress: true
+
+jobs:
+  CI_XPU:
+    runs-on: [self-hosted, XPU-P800-8Card]
+    outputs:
+      xpu_cov_file_url: ${{ steps.set_output.outputs.xpu_cov_file_url }}
+    steps:
+      - name: Print current runner name
+        run: |
+          echo "Current runner name: ${{ runner.name }}"
+
+      - name: Code Checkout
+        env:
+          docker_image: ${{ inputs.DOCKER_IMAGE }}
+          fd_archive_url: ${{ inputs.FASTDEPLOY_ARCHIVE_URL }}
+        run: |
+          REPO="https://github.com/${{ github.repository }}.git"
+          FULL_REPO="${{ github.repository }}"
+          REPO_NAME="${FULL_REPO##*/}"
+          BASE_BRANCH="${{ github.base_ref }}"
+          # Clean the repository directory before starting
+          docker run --rm --net=host -v $(pwd):/workspace -w /workspace \
+            -e "REPO_NAME=${REPO_NAME}" \
+            -e "BASE_BRANCH=${BASE_BRANCH}" \
+            -e "fd_archive_url=${fd_archive_url}" \
+            ${docker_image} /bin/bash -c '
+            if [ -d ${REPO_NAME} ]; then
+              echo "Directory ${REPO_NAME} exists, removing it..."
+              rm -rf ${REPO_NAME}
+            fi
+            wget -q ${fd_archive_url}
+            tar -xf FastDeploy.tar.gz
+            rm -rf FastDeploy.tar.gz
+            set -x
+            cd FastDeploy
+            git config --global --add safe.directory "$(pwd)"
+            git config --global user.name "FastDeployCI"
+            git config --global user.email "fastdeploy_ci@example.com"
+            git log -n 3 --oneline
+            '
+
+
+      - name: Run CI unittest
+        id: set_output
+        env:
+          docker_image: ${{ inputs.DOCKER_IMAGE }}
+          IS_PR: ${{ github.event_name == 'pull_request' }}
+        run: |
+          runner_name="${{ runner.name }}"
+          last_char="${runner_name: -1}"
+
+          if [[ "$last_char" =~ [0-3] ]]; then
+            gpu_id="$last_char"
+          else
+            gpu_id="0"
+          fi
+          FD_API_PORT=$((9180 + gpu_id * 100))
+          FD_ENGINE_QUEUE_PORT=$((9150 + gpu_id * 100))
+          FD_METRICS_PORT=$((9170 + gpu_id * 100))
+
+          commit_id=${{ github.event.pull_request.head.sha }}
+          pr_num=${{ github.event.pull_request.number }}
+
+          PARENT_DIR=$(dirname "$WORKSPACE")
+          echo "PARENT_DIR:$PARENT_DIR"
+          docker run --rm --net=host --cap-add=SYS_PTRACE --privileged --shm-size=64G \
+            -v $(pwd):/workspace -w /workspace \
+            -v "/ssd3:/ssd3" \
+            -e "MODEL_PATH=/ssd3/model" \
+            -e "http_proxy=$(git config --global --get http.proxy)" \
+            -e "https_proxy=$(git config --global --get https.proxy)" \
+            -e "no_proxy=bcebos.com,mirrors.tuna.tsinghua.edu.cn,127.0.0.1,localhost" \
+            -e "FD_API_PORT=${FD_API_PORT}" \
+            -e "FD_ENGINE_QUEUE_PORT=${FD_ENGINE_QUEUE_PORT}" \
+            -e "FD_METRICS_PORT=${FD_METRICS_PORT}" \
+            -e "IS_PR=${IS_PR}" \
+            -e "commit_id=${commit_id}" \
+            -e "pr_num=${pr_num}" \
+            ${docker_image} /bin/bash -c '
+            git config --global --add safe.directory /workspace/FastDeploy
+            chown -R $(whoami) /workspace/FastDeploy
+            cd FastDeploy
+            python -m pip install coverage
+            export COVERAGE_FILE=/workspace/FastDeploy/coveragedata/.coverage.xpu
+            export COVERAGE_RCFILE=/workspace/FastDeploy/scripts/.coveragerc
+            TEST_EXIT_CODE=0
+            bash scripts/run_ci_xpu.sh || TEST_EXIT_CODE=8
+            echo "TEST_EXIT_CODE=${TEST_EXIT_CODE}" >> exit_code.env
+            cat exit_code.env
+            coverage combine coveragedata/ || echo "No data to combine"
+            tar -cvf xpu_coverage.tar -C coveragedata .
+            coverage report
+            # coverage data upload
+            target_path=paddle-github-action/PR/FastDeploy/${pr_num}/${commit_id}/XPU
+            wget -q --no-proxy --no-check-certificate https://paddle-qa.bj.bcebos.com/CodeSync/develop/PaddlePaddle/PaddleTest/tools/bos_tools.py -O bos_tools.py
+            push_file=$(realpath bos_tools.py)
+            python -m pip install bce-python-sdk==0.9.29
+            cov_file="xpu_coverage.tar"
+            if [ -f ${cov_file} ];then
+              python ${push_file} ${cov_file} ${target_path}/CoverageData
+              target_path_stripped="${target_path#paddle-github-action/}"
+              XPU_COV_FILE_URL=https://paddle-github-action.bj.bcebos.com/${target_path_stripped}/CoverageData/${cov_file}
+              echo "xpu_cov_file_url=${XPU_COV_FILE_URL}" >> github.output
+            fi
+            '
+          if [ -f FastDeploy/github.output ];then
+            cat FastDeploy/github.output >> $GITHUB_OUTPUT
+          fi
+          if [ -f FastDeploy/exit_code.env ]; then
+            cat FastDeploy/exit_code.env >> $GITHUB_ENV
+            source FastDeploy/exit_code.env
+          fi
+          exit "$TEST_EXIT_CODE"
diff --git a/.github/workflows/_coverage_combine.yml b/.github/workflows/_coverage_combine.yml
new file mode 100644
index 0000000000..4c01b794d6
--- /dev/null
+++ b/.github/workflows/_coverage_combine.yml
@@ -0,0 +1,104 @@
+name: Coverage Combine
+description: "Coverage Combine And Check"
+
+on:
+  workflow_call:
+    inputs:
+      GPU_COV_FILE_URL:
+        description: "URL of the compressed GPU Coverage Data archive."
+        required: true
+        type: string
+      XPU_COV_FILE_URL:
+        description: "URL of the compressed XPU Coverage Data archive."
+        required: true
+        type: string
+    secrets:
+      github-token:
+        required: true
+
+
+jobs:
+  coverage_combine:
+    name: Coverage Combine And Check
+    env:
+      gpu_cov_file_url: ${{ inputs.GPU_COV_FILE_URL }}
+      xpu_cov_file_url: ${{ inputs.XPU_COV_FILE_URL }}
+      IS_PR: ${{ github.event_name == 'pull_request' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Clone FastDeploy
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
+          submodules: recursive
+          fetch-depth: 0
+      - name: Fetch base branch
+        if: ${{ github.event_name == 'pull_request' }}
+        run: |
+          git fetch origin ${{ github.event.pull_request.base.ref }} --depth=1000
+          MERGE_BASE=$(git merge-base origin/${{ github.event.pull_request.base.ref }} ${{ github.event.pull_request.head.sha }})
+          git diff ${MERGE_BASE} ${{ github.event.pull_request.head.sha }} --unified=0 > diff.txt
+      - name: Python Setup
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.10'
+      - name: coverage file download and combine
+        shell: bash
+        env:
+          BASE_REF: ${{ github.event.pull_request.base.ref }}
+        run: |
+          git log -n 3
+          python -m pip install coverage diff-cover
+          mkdir coveragedata
+          if [ -z "${gpu_cov_file_url}" ]; then
+            echo "No GPU coverage file URL provided."
+          else
+            wget -q ${gpu_cov_file_url}
+            gpu_cov_file=$(basename "$gpu_cov_file_url")
+            tar -xf ${gpu_cov_file} -C coveragedata
+          fi
+
+          if [ -z "${xpu_cov_file_url}" ]; then
+            echo "No XPU coverage file URL provided."
+          else
+            wget -q ${xpu_cov_file_url}
+            xpu_cov_file=$(basename "$xpu_cov_file_url")
+            tar -xf ${xpu_cov_file} -C coveragedata
+          fi
+          export COVERAGE_FILE=coveragedata/.coverage
+          export COVERAGE_RCFILE=./scripts/.coveragerc
+          export COVERAGE_IGNORE_ERRORS=True
+          coverage combine coveragedata/
+          coverage report --ignore-errors
+          coverage xml -o python_coverage_all.xml --ignore-errors
+          COVERAGE_EXIT_CODE=0
+          set -x
+          if [[ "$IS_PR" == "true" ]]; then
+            diff-cover python_coverage_all.xml --diff-file=diff.txt --fail-under=80 --json-report diff_coverage.json || COVERAGE_EXIT_CODE=9
+            python scripts/generate_diff_coverage_xml.py diff.txt python_coverage_all.xml
+            filename=diff_coverage.json
+          else
+            echo "Not a PR, skipping diff-cover"
+          fi
+
+          if [[ -f diff_coverage.json ]]; then
+            echo "====== Diff Coverage JSON ======"
+            if command -v jq >/dev/null 2>&1; then
+              jq . diff_coverage.json
+            else
+              cat diff_coverage.json
+            fi
+            echo "================================"
+          fi
+
+          exit "$COVERAGE_EXIT_CODE"
+      - name: Upload diff coverage report
+        if: always() && hashFiles('diff_coverage.xml') != ''
+        uses: codecov/codecov-action@v5
+        with:
+          files: ./diff_coverage.xml
+          codecov_yml_path: ./scripts/codecov.yml
+          disable_search: true
+          name: python diff coverage
+          verbose: true
+          flags: diff
diff --git a/.github/workflows/_unit_test_coverage.yml b/.github/workflows/_unit_test_coverage.yml
index 09da48351e..6486f3bcd3 100644
--- a/.github/workflows/_unit_test_coverage.yml
+++ b/.github/workflows/_unit_test_coverage.yml
@@ -27,6 +27,10 @@ on:
         required: false
         type: string
         default: ""
+    outputs:
+      gpu_cov_file_url:
+        description: "URL of the GPU coverage data archive produced by the GPU tests."
+        value: ${{ jobs.run_tests_with_coverage.outputs.gpu_cov_file_url }}
     secrets:
       github-token:
         required: true
@@ -45,9 +49,8 @@ jobs:
     needs: check_cov_skip
     if: needs.check_cov_skip.outputs.can-skip != 'true'
     outputs:
-      diff_cov_file_url: ${{ steps.cov_upload.outputs.diff_cov_file_url }}
+      gpu_cov_file_url: ${{ steps.cov_upload.outputs.gpu_cov_file_url }}
       unittest_failed_url: ${{ steps.cov_upload.outputs.unittest_failed_url }}
-      diff_cov_result_json_url: ${{ steps.cov_upload.outputs.diff_cov_result_json_url }}
     steps:
       - name: Code Prepare
         shell: bash
@@ -184,22 +187,14 @@ jobs:
           else
             echo "Warning: tests/plugins directory not found, skipping setup.py install"
           fi
-          export COVERAGE_FILE=/workspace/FastDeploy/coveragedata/.coverage
+          export COVERAGE_FILE=/workspace/FastDeploy/coveragedata/.coverage.gpu
           export COVERAGE_RCFILE=/workspace/FastDeploy/scripts/.coveragerc
           TEST_EXIT_CODE=0
           bash scripts/coverage_run.sh || TEST_EXIT_CODE=8
           echo "TEST_EXIT_CODE=${TEST_EXIT_CODE}" >> exit_code.env
           coverage combine coveragedata/ || echo "No data to combine"
+          tar -cvf gpu_coverage.tar -C coveragedata .
           coverage report
-          coverage xml -o python_coverage_all.xml
-          COVERAGE_EXIT_CODE=0
-          if [[ "$IS_PR" == "true" ]]; then
-            diff-cover python_coverage_all.xml --diff-file=diff.txt --fail-under=80 --json-report diff_coverage.json || COVERAGE_EXIT_CODE=9
-            python scripts/generate_diff_coverage_xml.py diff.txt python_coverage_all.xml
-          else
-            echo "Not a PR, skipping diff-cover"
-          fi
-          echo "COVERAGE_EXIT_CODE=${COVERAGE_EXIT_CODE}" >> exit_code.env
           '
         if [ -f FastDeploy/exit_code.env ]; then
           cat FastDeploy/exit_code.env >> $GITHUB_ENV
@@ -211,26 +206,18 @@ jobs:
           cd FastDeploy
           commit_id=${{ github.event.pull_request.head.sha }}
           pr_num=${{ github.event.pull_request.number }}
-          target_path=paddle-github-action/PR/FastDeploy/${pr_num}/${commit_id}/SM${compile_arch//,/_}
+          target_path=paddle-github-action/PR/FastDeploy/${pr_num}/${commit_id}/GPU/
          wget -q --no-proxy --no-check-certificate https://paddle-qa.bj.bcebos.com/CodeSync/develop/PaddlePaddle/PaddleTest/tools/bos_tools.py -O bos_tools.py
           push_file=$(realpath bos_tools.py)
           python -m pip install bce-python-sdk==0.9.29
-          diff_cov_file="diff_coverage.xml"
-          if [ -f ${diff_cov_file} ];then
-            python ${push_file} ${diff_cov_file} ${target_path}/CoverageData
+          cov_file="gpu_coverage.tar"
+          if [ -f ${cov_file} ];then
+            python ${push_file} ${cov_file} ${target_path}/CoverageData
             target_path_stripped="${target_path#paddle-github-action/}"
-            DIFF_COV_FILE_URL=https://paddle-github-action.bj.bcebos.com/${target_path_stripped}/CoverageData/${diff_cov_file}
-            echo "diff_cov_file_url=${DIFF_COV_FILE_URL}" >> $GITHUB_OUTPUT
-            echo "diff_cov_file_url=${DIFF_COV_FILE_URL}" >> $GITHUB_ENV
-          fi
-          diff_cov_result_json="diff_coverage.json"
-          if [ -f ${diff_cov_result_json} ];then
-            python ${push_file} ${diff_cov_result_json} ${target_path}/CoverageData
-            target_path_stripped="${target_path#paddle-github-action/}"
-            DIFF_COV_JSON_URL=https://paddle-github-action.bj.bcebos.com/${target_path_stripped}/CoverageData/${diff_cov_result_json}
-            echo "diff_cov_result_json_url=${DIFF_COV_JSON_URL}" >> $GITHUB_OUTPUT
-            echo "diff_cov_result_json_url=${DIFF_COV_JSON_URL}" >> $GITHUB_ENV
+            GPU_COV_FILE_URL=https://paddle-github-action.bj.bcebos.com/${target_path_stripped}/CoverageData/${cov_file}
+            echo "gpu_cov_file_url=${GPU_COV_FILE_URL}" >> $GITHUB_OUTPUT
           fi
+
           unittest_result="failed_tests.log"
           if [ -s ${unittest_result} ];then
             python ${push_file} ${unittest_result} ${target_path}/UnitTestResult
@@ -259,61 +246,3 @@ jobs:
             exit "$TEST_EXIT_CODE"
           fi
           echo "All tests passed"
-
-      - name: Verify Code Coverage Threshold (80%)
-        if: ${{ github.event_name == 'pull_request' }}
-        shell: bash
-        run: |
-          cd FastDeploy
-          if [ "$COVERAGE_EXIT_CODE" -eq 9 ]; then
-            echo "Coverage generation failed (exit code 9)"
-            filename=$(basename "$diff_cov_result_json_url")
-            if [ -z "${diff_cov_result_json_url}" ]; then
-              echo "No diff cov result file URL provided."
-            else
-              rm -rf "${filename}"
-              wget -O ${filename} ${diff_cov_result_json_url} || echo "Download cov json file failed, but continuing..."
-            fi
-            if [ -f "${filename}" ];then
-              echo "Failed test cases:"
-              if command -v jq >/dev/null 2>&1; then
-                jq . "${filename}"
-              else
-                cat "${filename}"
-              fi
-            fi
-            exit "$COVERAGE_EXIT_CODE"
-          fi
-          echo "coverage passed"
-          exit 0
-
-  diff_coverage_report:
-    needs: run_tests_with_coverage
-    if: always()
-    runs-on: ubuntu-latest
-    env:
-      fd_archive_url: ${{ inputs.FASTDEPLOY_ARCHIVE_URL }}
-    steps:
-      - name: coverage diff file download
-        shell: bash
-        env:
-          diff_cov_file_url: ${{ needs.run_tests_with_coverage.outputs.diff_cov_file_url }}
-        run: |
-          wget ${fd_archive_url}
-          tar -xf FastDeploy.tar.gz
-          cd FastDeploy
-          if [ -z "${diff_cov_file_url}" ]; then
-            echo "No diff coverage file URL provided."
-            exit 0
-          fi
-          wget "${diff_cov_file_url}" -O ./diff_coverage.xml || echo "Download cov file failed, but continuing..."
-      - name: Upload diff coverage report
-        if: ${{ needs.run_tests_with_coverage.outputs.diff_cov_file_url != null && needs.run_tests_with_coverage.outputs.diff_cov_file_url != '' }}
-        uses: codecov/codecov-action@v5
-        with:
-          files: ./FastDeploy/diff_coverage.xml
-          name: python diff coverage
-          verbose: true
-          disable_search: true
-          commit_parent: false
-          flags: diff
diff --git a/.github/workflows/ci_xpu.yml b/.github/workflows/ci_xpu.yml
deleted file mode 100644
index f99ca7d172..0000000000
--- a/.github/workflows/ci_xpu.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-name: CI_XPU
-
-on:
-  pull_request:
-    branches:
-      - develop
-      - 'release/*'
-  workflow_dispatch:
-
-concurrency:
-  group: ${{ github.event.pull_request.number }}-xpu-ci
-  cancel-in-progress: true
-
-jobs:
-  CI_XPU:
-    runs-on: [self-hosted, XPU-P800-8Card]
-    steps:
-      - name: Print current runner name
-        run: |
-          echo "Current runner name: ${{ runner.name }}"
-      # Because the system version is lower than 2.23, the checkout cannot be used.
-      # - name: Checkout code
-      #   uses: actions/checkout@v4
-
-      - name: Code Checkout
-        env:
-          docker_image: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/fastdeploy-xpu:2.1.0
-        run: |
-          REPO="https://github.com/${{ github.repository }}.git"
-          FULL_REPO="${{ github.repository }}"
-          REPO_NAME="${FULL_REPO##*/}"
-          BASE_BRANCH="${{ github.base_ref }}"
-          # Clean the repository directory before starting
-          docker run --rm --net=host -v $(pwd):/workspace -w /workspace \
-            -e "REPO_NAME=${REPO_NAME}" \
-            -e "BASE_BRANCH=${BASE_BRANCH}" \
-            ${docker_image} /bin/bash -c '
-            if [ -d ${REPO_NAME} ]; then
-              echo "Directory ${REPO_NAME} exists, removing it..."
-              rm -rf ${REPO_NAME}
-            fi
-            '
-          git config --global user.name "FastDeployCI"
-          git config --global user.email "fastdeploy_ci@example.com"
-          git clone ${REPO} ${REPO_NAME} -b ${BASE_BRANCH}
-          cd FastDeploy
-          if [ "${{ github.event_name }}" = "pull_request" ]; then
-            git fetch origin pull/${{ github.event.pull_request.number }}/head:pr/${{ github.event.pull_request.number }}
-            git merge pr/${{ github.event.pull_request.number }}
-            git log -n 3 --oneline
-          else
-            git checkout ${{ github.sha }}
-            git log -n 3 --oneline
-          fi
-
-      - name: Run CI unittest
-        env:
-          docker_image: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/fastdeploy-xpu:2.1.0
-        run: |
-          runner_name="${{ runner.name }}"
-          last_char="${runner_name: -1}"
-
-          if [[ "$last_char" =~ [0-3] ]]; then
-            gpu_id="$last_char"
-          else
-            gpu_id="0"
-          fi
-          FD_API_PORT=$((9180 + gpu_id * 100))
-          FD_ENGINE_QUEUE_PORT=$((9150 + gpu_id * 100))
-          FD_METRICS_PORT=$((9170 + gpu_id * 100))
-
-          PARENT_DIR=$(dirname "$WORKSPACE")
-          echo "PARENT_DIR:$PARENT_DIR"
-          docker run --rm --net=host --cap-add=SYS_PTRACE --privileged --shm-size=64G \
-            -v $(pwd):/workspace -w /workspace \
-            -v "/ssd3:/ssd3" \
-            -e "MODEL_PATH=/ssd3/model" \
-            -e "http_proxy=$(git config --global --get http.proxy)" \
-            -e "https_proxy=$(git config --global --get https.proxy)" \
-            -e "no_proxy=bcebos.com,mirrors.tuna.tsinghua.edu.cn,127.0.0.1,localhost" \
-            -e "FD_API_PORT=${FD_API_PORT}" \
-            -e "FD_ENGINE_QUEUE_PORT=${FD_ENGINE_QUEUE_PORT}" \
-            -e "FD_METRICS_PORT=${FD_METRICS_PORT}" \
-            ${docker_image} /bin/bash -c "
-            git config --global --add safe.directory /workspace/FastDeploy
-            cd FastDeploy
-            bash scripts/run_ci_xpu.sh
-            "
diff --git a/.github/workflows/pr_build_and_test.yml b/.github/workflows/pr_build_and_test.yml
index 23eb2fefa8..759d415a43 100644
--- a/.github/workflows/pr_build_and_test.yml
+++ b/.github/workflows/pr_build_and_test.yml
@@ -34,8 +34,8 @@ jobs:
       run: |
         echo "The built wheel is located at: ${{ needs.build.outputs.wheel_path }}"
 
-  unittest_coverage:
-    name: Run FastDeploy Unit Tests and Coverage
+  gpu_unittest:
+    name: Run FastDeploy Unit Tests with Coverage
     needs: [clone,build]
     uses: ./.github/workflows/_unit_test_coverage.yml
     with:
@@ -46,52 +46,70 @@ jobs:
     secrets:
       github-token: ${{ secrets.GITHUB_TOKEN }}
 
-  logprob_test:
-    name: Run FastDeploy LogProb Tests
-    needs: [build]
-    uses: ./.github/workflows/_logprob_test_linux.yml
-    with:
-      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
-      PADDLETEST_ARCHIVE_URL: "https://xly-devops.bj.bcebos.com/PaddleTest/PaddleTest.tar.gz"
-      FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
-      MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
+  # logprob_test:
+  #   name: Run FastDeploy LogProb Tests
+  #   needs: [build]
+  #   uses: ./.github/workflows/_logprob_test_linux.yml
+  #   with:
+  #     DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
+  #     PADDLETEST_ARCHIVE_URL: "https://xly-devops.bj.bcebos.com/PaddleTest/PaddleTest.tar.gz"
+  #     FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
+  #     MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
 
-  pre_ce_test:
-    name: Extracted partial CE model tasks to run in CI.
-    needs: [clone,build]
-    uses: ./.github/workflows/_pre_ce_test.yml
-    with:
-      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
-      FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
-      FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
-      MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
+  # pre_ce_test:
+  #   name: Extracted partial CE model tasks to run in CI.
+  #   needs: [clone,build]
+  #   uses: ./.github/workflows/_pre_ce_test.yml
+  #   with:
+  #     DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
+  #     FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
+  #     FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
+  #     MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
 
-  base_test:
-    name: Run Base Tests
-    needs: [clone,build]
-    uses: ./.github/workflows/_base_test.yml
-    with:
-      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
-      FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
-      FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
-      MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
+  # base_test:
+  #   name: Run Base Tests
+  #   needs: [clone,build]
+  #   uses: ./.github/workflows/_base_test.yml
+  #   with:
+  #     DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
+  #     FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
+  #     FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
+  #     MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
 
-  accuracy_test:
-    name: Run Accuracy Tests
-    needs: [clone,build]
-    uses: ./.github/workflows/_accuracy_test.yml
+  # accuracy_test:
+  #   name: Run Accuracy Tests
+  #   needs: [clone,build]
+  #   uses: ./.github/workflows/_accuracy_test.yml
+  #   with:
+  #     DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
+  #     FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
+  #     FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
+  #     MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
+
+  # stable_test:
+  #   name: Run Stable Tests
+  #   needs: [clone,build]
+  #   uses: ./.github/workflows/_stable_test.yml
+  #   with:
+  #     DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
+  #     FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
+  #     FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
+  #     MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
+
+  xpu_unittest:
+    name: Run XPU Test with Coverage
+    needs: [clone]
+    uses: ./.github/workflows/_ci_xpu.yml
     with:
-      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
+      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/fastdeploy-xpu-ci:latest
       FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
-      FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
-      MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
 
-  stable_test:
-    name: Run Stable Tests
-    needs: [clone,build]
-    uses: ./.github/workflows/_stable_test.yml
+  coverage_combine:
+    name: Coverage Combine And Check
+    uses: ./.github/workflows/_coverage_combine.yml
+    needs: [clone, gpu_unittest, xpu_usnittest]
     with:
-      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:fastdeploy-ciuse-cuda126-dailyupdate
-      FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
-      FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
-      MODEL_CACHE_DIR: "/ssd2/actions-runner/ModelData"
+      GPU_COV_FILE_URL: ${{ needs.gpu_unittest.outputs.gpu_cov_file_url }}
+      XPU_COV_FILE_URL: ${{ needs.xpu_unittest.outputs.xpu_cov_file_url }}
+    secrets:
+      github-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/fastdeploy/model_executor/layers/attention/attention.py b/fastdeploy/model_executor/layers/attention/attention.py
index 4c33528683..ad3768ceee 100644
--- a/fastdeploy/model_executor/layers/attention/attention.py
+++ b/fastdeploy/model_executor/layers/attention/attention.py
@@ -101,7 +101,7 @@ def __init__(
         self.out_scale: float = out_scale
         self.use_neox_rotary_style: bool = use_neox_rotary_style
 
-
+        print("cov report debug!")
         if fd_config.quant_config and hasattr(fd_config.quant_config, "kv_cache_quant_type"):
             self.quant_method: QuantMethodBase = fd_config.quant_config.get_quant_method(self)
         else:
diff --git a/scripts/.coveragerc b/scripts/.coveragerc
index d3760bf799..f38b88ef31 100644
--- a/scripts/.coveragerc
+++ b/scripts/.coveragerc
@@ -18,3 +18,5 @@ omit =
     */site-packages/setuptools/*
     */dist-packages/*
     */site-packages/*/fastdeploy/model_executor/ops/gpu*
+
+ignore_errors = True
diff --git a/scripts/codecov.yml b/scripts/codecov.yml
new file mode 100644
index 0000000000..29ffcf6b61
--- /dev/null
+++ b/scripts/codecov.yml
@@ -0,0 +1,5 @@
+codecov:
+  require_ci_to_pass: false
+
+fixes:
+  - "/workspace/FastDeploy::/home/runner/work/FastDeploy/FastDeploy/FastDeploy"
diff --git a/scripts/run_ci_xpu.sh b/scripts/run_ci_xpu.sh
index 597fa34802..7810a433b9 100644
--- a/scripts/run_ci_xpu.sh
+++ b/scripts/run_ci_xpu.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 echo "$DIR"
-
+export COVERAGE_RCFILE=${COVERAGE_RCFILE:-$DIR/../scripts/.coveragerc}
 #安装lsof工具
 apt install -y lsof
 
@@ -29,7 +29,7 @@ export CLANG_PATH=$(pwd)/custom_ops/xpu_ops/third_party/xtdk
 wget https://klx-sdk-release-public.su.bcebos.com/xinfer/daily/eb/20250921/output.tar.gz --no-proxy && tar xf output.tar.gz && mv output xvllm
 export XVLLM_PATH=${PWD}/xvllm
 bash build.sh || exit 1
-
+export PYTHONPATH=./:${PYTHONPATH}
 echo "pip others"
 python -m pip install openai -U
 python -m pip uninstall -y triton
@@ -49,8 +49,7 @@ ipcrm --all=msg
 echo "============================开始V0模式测试!============================"
 export ENABLE_V1_KVCACHE_SCHEDULER=0
 export XPU_VISIBLE_DEVICES="0,1,2,3,4,5,6,7"
-
-python -m fastdeploy.entrypoints.openai.api_server \
+python -m coverage run -m fastdeploy.entrypoints.openai.api_server \
     --model ${model_path} \
     --port 8188 \
     --tensor-parallel-size 8 \
@@ -95,7 +94,7 @@ done
 cat server.log
 
 # 执行服务化推理
-python -m pytest tests/ci_use/XPU_45T/run_45T.py
+python -m coverage run tests/ci_use/XPU_45T/run_45T.py
 exit_code=$?
 echo exit_code is ${exit_code}
 
@@ -122,7 +121,7 @@ ipcrm --all=msg
 echo "============================开始V1模式测试!============================"
 export ENABLE_V1_KVCACHE_SCHEDULER=1
 export XPU_VISIBLE_DEVICES="0,1,2,3,4,5,6,7"
-python -m fastdeploy.entrypoints.openai.api_server \
+python -m coverage run -m fastdeploy.entrypoints.openai.api_server \
     --model ${model_path} \
     --port 8188 \
     --tensor-parallel-size 8 \
@@ -164,7 +163,7 @@ done
 cat server.log
 
 # 执行服务化推理
-python -m pytest tests/ci_use/XPU_45T/run_45T.py
+python -m coverage run tests/ci_use/XPU_45T/run_45T.py
 kv_block_test_exit_code=$?
 echo kv_block_test_exit_code is ${kv_block_test_exit_code}
 

From 2e56128a30e023d18b2774a2de94f34b056fbd7c Mon Sep 17 00:00:00 2001
From: XieYunshen <1084314248@qq.com>
Date: Fri, 26 Sep 2025 14:23:44 +0800
Subject: [PATCH 2/2] update

---
 .github/workflows/pr_build_and_test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/pr_build_and_test.yml b/.github/workflows/pr_build_and_test.yml
index 759d415a43..16e0945e7e 100644
--- a/.github/workflows/pr_build_and_test.yml
+++ b/.github/workflows/pr_build_and_test.yml
@@ -107,7 +107,7 @@ jobs:
   coverage_combine:
     name: Coverage Combine And Check
     uses: ./.github/workflows/_coverage_combine.yml
-    needs: [clone, gpu_unittest, xpu_usnittest]
+    needs: [clone, gpu_unittest, xpu_unittest]
     with:
       GPU_COV_FILE_URL: ${{ needs.gpu_unittest.outputs.gpu_cov_file_url }}
       XPU_COV_FILE_URL: ${{ needs.xpu_unittest.outputs.xpu_cov_file_url }}
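
A minimal local sketch of the combine-and-check flow that the coverage_combine job above performs, assuming gpu_coverage.tar and xpu_coverage.tar (the archives produced by the GPU and XPU jobs) and a diff.txt from "git diff --unified=0" are already in the repository root; the file names, COVERAGE_RCFILE path, and 80% threshold are taken from the workflows, everything else here is illustrative and not part of the patch:

    # Unpack the per-device coverage fragments into one directory
    mkdir -p coveragedata
    tar -xf gpu_coverage.tar -C coveragedata
    tar -xf xpu_coverage.tar -C coveragedata
    # Merge .coverage.gpu / .coverage.xpu and build a combined XML report
    export COVERAGE_FILE=coveragedata/.coverage
    export COVERAGE_RCFILE=./scripts/.coveragerc
    coverage combine coveragedata/
    coverage xml -o python_coverage_all.xml --ignore-errors
    # Gate on coverage of the changed lines only, as the PR check does
    diff-cover python_coverage_all.xml --diff-file=diff.txt --fail-under=80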