diff --git a/.github/problem-matchers/compiler-non-source.json b/.github/problem-matchers/compiler-non-source.json
new file mode 100644
index 000000000000..5a21a7dcbb59
--- /dev/null
+++ b/.github/problem-matchers/compiler-non-source.json
@@ -0,0 +1,17 @@
+{
+  "problemMatcher": [
+    {
+      "__comment_owner": "match compiler warning/error lines not from source",
+      "owner": "compiler-non-source",
+      "pattern": [
+        {
+          "__comment_regexp1": "clang: warning: argument unused during compilation: '-march=armv7-a' [-Wunused-command-line-argument]",
+          "__comment_regexp2": "ld.lld: warning: lld uses blx instruction, no object with architecture supporting feature detected",
+          "regexp": "^(?:[^:]+): (?:fatal\\s+)?(warning|error):\\s+(.*)$",
+          "severity": 1,
+          "message": 2
+        }
+      ]
+    }
+  ]
+}
diff --git a/.github/problem-matchers/compiler-source.json b/.github/problem-matchers/compiler-source.json
new file mode 100644
index 000000000000..e719f671e8dc
--- /dev/null
+++ b/.github/problem-matchers/compiler-source.json
@@ -0,0 +1,17 @@
+{
+  "problemMatcher": [
+    {
+      "owner": "gcc-problem-matcher",
+      "pattern": [
+        {
+          "regexp": "^(?:/linux/)?(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
+          "file": 1,
+          "line": 2,
+          "column": 3,
+          "severity": 4,
+          "message": 5
+        }
+      ]
+    }
+  ]
+}
diff --git a/.github/problem-matchers/sparse.json b/.github/problem-matchers/sparse.json
new file mode 100644
index 000000000000..83c626e5448e
--- /dev/null
+++ b/.github/problem-matchers/sparse.json
@@ -0,0 +1,17 @@
+{
+  "problemMatcher": [
+    {
+      "owner": "powerpc-sparse",
+      "pattern": [
+        {
+          "regexp": "^\\+(?:/linux/)?(.*):(\\d+|XX):(\\d+|XX):\\s+(error|warning):\\s+(.*)$",
+          "file": 1,
+          "line": 2,
+          "column": 3,
+          "severity": 4,
+          "message": 5
+        }
+      ]
+    }
+  ]
+}
diff --git a/.github/workflows/powerpc-allconfig.yml b/.github/workflows/powerpc-allconfig.yml
new file mode 100644
index 000000000000..ef1f2b682308
--- /dev/null
+++ b/.github/workflows/powerpc-allconfig.yml
@@ -0,0 +1,79 @@
+name: powerpc/allconfig
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to merge-test only
+    branches:
+      - 'merge-test'
+
+jobs:
+  kernel:
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          # ppc64le allmod and allyes
+          - subarch: ppc64le
+            defconfig: ppc64le_allmodconfig
+            image: fedora-39
+          - subarch: ppc64le
+            defconfig: allyesconfig
+            image: fedora-39
+            merge_config: /linux/arch/powerpc/configs/le.config
+          # ppc64 allmod and allyes
+          - subarch: ppc64
+            defconfig: allmodconfig
+            image: fedora-39
+          - subarch: ppc64
+            defconfig: allyesconfig
+            image: fedora-39
+          # ppc32 allmod
+          - subarch: ppc
+            defconfig: ppc32_allmodconfig
+            image: fedora-39
+          # ppc64 book3e allmod
+          # Broken due to head_check.sh
+          #- subarch: ppc64
+          #  defconfig: ppc64_book3e_allmodconfig
+          #  image: fedora-39
+
+    env:
+      ARCH: powerpc
+      TARGET: kernel
+      CCACHE: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+      DEFCONFIG: ${{ matrix.defconfig }}
+      MERGE_CONFIG: /linux/arch/powerpc/configs/disable-werror.config,${{ matrix.merge_config }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
diff --git a/.github/workflows/powerpc-clang.yml b/.github/workflows/powerpc-clang.yml
new file mode 100644
index 000000000000..0f0abfd0de2c
--- /dev/null
+++ b/.github/workflows/powerpc-clang.yml
@@ -0,0 +1,84 @@
+name: powerpc/clang
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to any branch
+    branches:
+      - '**'
+    # As long as one of these paths matches
+    paths:
+      - '!tools/**'          # ignore tools
+      - '!samples/**'        # ignore samples
+      - '!Documentation/**'  # ignore Documentation
+      - '!arch/**'           # ignore arch changes
+      - 'arch/powerpc/**'    # but not arch/powerpc
+      - 'arch/Kconfig'       # or common bits in arch
+      - '**'                 # anything else triggers a build
+
+jobs:
+  kernel:
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        defconfig: [ppc64, corenet64_smp, pmac32, ppc40x, mpc885_ads]
+        image: [fedora-39]
+        subarch: [ppc64]
+        include:
+          - subarch: ppc64le
+            defconfig: ppc64le
+            image: fedora-39
+
+    env:
+      CLANG: 1
+      LLVM_IAS: 0
+      ARCH: powerpc
+      TARGET: kernel
+      CCACHE: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+      DEFCONFIG: ${{ matrix.defconfig }}
+      MERGE_CONFIG: /linux/arch/powerpc/configs/disable-werror.config
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.defconfig }}-${{ matrix.image }}
+          path: |
+            ~/output/vmlinux
+            ~/output/.config
+            ~/output/System.map
+            ~/output/modules.tar.bz2
+            ~/output/arch/powerpc/boot/zImage
+            ~/output/arch/powerpc/boot/uImage
diff --git a/.github/workflows/powerpc-extrawarn.yml b/.github/workflows/powerpc-extrawarn.yml
new file mode 100644
index 000000000000..d048b1af775b
--- /dev/null
+++ b/.github/workflows/powerpc-extrawarn.yml
@@ -0,0 +1,71 @@
+name: powerpc/extrawarn
+
+# Controls when the action will run.
+on:
+  # Only when triggered manually via the github UI.
+  workflow_dispatch:
+
+jobs:
+  kernel:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        defconfig: [ppc64, corenet64_smp, pmac32, ppc40x, ppc44x, mpc885_ads, corenet32_smp]
+        image: [fedora-39, korg-5.5.0]
+        subarch: [ppc64]
+        include:
+          - subarch: ppc64le
+            defconfig: ppc64le
+            image: korg-5.5.0
+          - subarch: ppc64le
+            defconfig: ppc64le
+            image: fedora-39
+
+    env:
+      ARCH: powerpc
+      TARGET: kernel
+      CCACHE: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+      DEFCONFIG: ${{ matrix.defconfig }}
+      MERGE_CONFIG: /linux/arch/powerpc/configs/disable-werror.config
+      KBUILD_EXTRA_WARN: 1
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.defconfig }}-${{ matrix.image }}
+          path: |
+            ~/output/vmlinux
+            ~/output/.config
+            ~/output/System.map
+            ~/output/modules.tar.bz2
+            ~/output/arch/powerpc/boot/zImage
+            ~/output/arch/powerpc/boot/uImage
diff --git a/.github/workflows/powerpc-kernel+qemu.yml b/.github/workflows/powerpc-kernel+qemu.yml
new file mode 100644
index 000000000000..6fe971bce495
--- /dev/null
+++ b/.github/workflows/powerpc-kernel+qemu.yml
@@ -0,0 +1,219 @@
+name: powerpc/kernel+qemu
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to any branch
+    branches:
+      - '**'
+    # As long as one of these paths matches
+    paths:
+      - '!tools/**'          # ignore tools
+      - '!samples/**'        # ignore samples
+      - '!Documentation/**'  # ignore Documentation
+      - '!arch/**'           # ignore arch changes
+      - 'arch/powerpc/**'    # but not arch/powerpc
+      - 'arch/Kconfig'       # or common bits in arch
+      - '**'                 # anything else triggers a build
+
+jobs:
+  kernel:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        defconfig: [ppc64_defconfig, ppc40x_defconfig, mpc885_ads_defconfig]
+        image: [fedora-39, korg-5.5.0]
+        include:
+          # ppc64le_guest_defconfig
+          - subarch: ppc64le
+            defconfig: ppc64le_guest_defconfig
+            image: fedora-39
+          - subarch: ppc64le
+            defconfig: ppc64le_guest_defconfig
+            image: korg-5.5.0
+
+          # ppc44x
+          - defconfig: ppc44x_defconfig
+            merge_config: /linux/arch/powerpc/configs/ppc44x-qemu.config
+            image: fedora-39
+          - defconfig: ppc44x_defconfig
+            merge_config: /linux/arch/powerpc/configs/ppc44x-qemu.config
+            image: korg-5.5.0
+
+          # corenet64_smp
+          - defconfig: corenet64_smp_defconfig
+            image: fedora-39
+          - defconfig: corenet64_smp_defconfig
+            image: korg-5.5.0
+
+          # g5
+          - defconfig: g5_defconfig
+            merge_config: /linux/arch/powerpc/configs/g5-qemu.config
+            image: fedora-39
+          - defconfig: g5_defconfig
+            merge_config: /linux/arch/powerpc/configs/g5-qemu.config
+            image: korg-5.5.0
+
+          # pmac32
+          - defconfig: pmac32_defconfig
+            merge_config: /linux/arch/powerpc/configs/pmac32-qemu.config
+            image: fedora-39
+          - defconfig: pmac32_defconfig
+            merge_config: /linux/arch/powerpc/configs/pmac32-qemu.config
+            image: korg-5.5.0
+
+          - defconfig: corenet32_smp_defconfig
+            image: fedora-39
+
+    env:
+      ARCH: powerpc
+      TARGET: kernel
+      CCACHE: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+      DEFCONFIG: ${{ matrix.defconfig }}
+      MERGE_CONFIG: ${{ matrix.merge_config }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.defconfig }}-${{ matrix.image }}
+          path: |
+            ~/output/vmlinux
+            ~/output/.config
+            ~/output/System.map
+            ~/output/modules.tar.bz2
+            ~/output/arch/powerpc/boot/zImage
+            ~/output/arch/powerpc/boot/uImage
+            ~/output/include/config/kernel.release
+
+  boot:
+    runs-on: ubuntu-latest
+    needs: kernel
+
+    strategy:
+      matrix:
+        include:
+          - defconfig: ppc64le_guest_defconfig
+            machine: pseries+p8+tcg
+            machine_2: pseries+p9+tcg
+            packages: qemu-system-ppc64
+            rootfs: ppc64le-rootfs.cpio.gz
+            old-image: korg-5.5.0
+            new-image: fedora-39
+
+          - defconfig: ppc64le_guest_defconfig
+            machine: powernv+p8+tcg
+            machine_2: powernv+p9+tcg
+            packages: qemu-system-ppc64
+            rootfs: ppc64le-rootfs.cpio.gz
+            old-image: korg-5.5.0
+            new-image: fedora-39
+
+          - defconfig: ppc44x_defconfig
+            machine: 44x
+            packages: qemu-system-ppc
+            rootfs: ppc-rootfs.cpio.gz
+            old-image: korg-5.5.0
+            new-image: fedora-39
+
+          - defconfig: corenet64_smp_defconfig
+            machine: ppc64e
+            machine_2: ppc64e+compat
+            packages: qemu-system-ppc64
+            rootfs: ppc64-novsx-rootfs.cpio.gz ppc-rootfs.cpio.gz
+            old-image: korg-5.5.0
+            new-image: fedora-39
+
+          - defconfig: g5_defconfig
+            machine: g5
+            packages: qemu-system-ppc64 openbios-ppc
+            rootfs: ppc64-rootfs.cpio.gz
+            old-image: korg-5.5.0
+            new-image: fedora-39
+
+          - defconfig: pmac32_defconfig
+            machine: mac99
+            packages: qemu-system-ppc openbios-ppc
+            rootfs: ppc-rootfs.cpio.gz
+            old-image: korg-5.5.0
+            new-image: fedora-39
+
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: linuxppc/ci-scripts
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Download root disk
+        run: make -C root-disks ${{ matrix.rootfs }}
+
+      - name: Set root disk path
+        run: echo "ROOT_DISK_PATH=$PWD/root-disks" >> $GITHUB_ENV
+
+      - name: APT update
+        run: sudo apt update
+
+      - name: Install dependencies
+        run: sudo apt install -y ${{ matrix.packages }} python3-pexpect python3-termcolor python3-yaml
+
+      - uses: actions/download-artifact@v3
+        with:
+          name: ${{ matrix.defconfig }}-${{ matrix.new-image }}
+
+      - name: Disable network tests
+        run: echo "QEMU_NET_TESTS=0" >> $GITHUB_ENV
+
+      - name: Run qemu-${{ matrix.machine }} with ${{ matrix.new-image }} build kernel
+        run: ./scripts/boot/qemu-${{ matrix.machine }}
+
+      - name: Run qemu-${{ matrix.machine_2 }} with ${{ matrix.new-image }} build kernel
+        run: ./scripts/boot/qemu-${{ matrix.machine_2 }}
+        if: matrix.machine_2 != ''
+
+      - uses: actions/download-artifact@v3
+        with:
+          name: ${{ matrix.defconfig }}-${{ matrix.old-image }}
+
+      - name: Run qemu-${{ matrix.machine }} with ${{ matrix.old-image }} build kernel
+        run: ./scripts/boot/qemu-${{ matrix.machine }}
+
+      - name: Run qemu-${{ matrix.machine_2 }} with ${{ matrix.old-image }} build kernel
+        run: ./scripts/boot/qemu-${{ matrix.machine_2 }}
+        if: matrix.machine_2 != ''
diff --git a/.github/workflows/powerpc-perf.yml b/.github/workflows/powerpc-perf.yml
new file mode 100644
index 000000000000..e25f6270f5ed
--- /dev/null
+++ b/.github/workflows/powerpc-perf.yml
@@ -0,0 +1,71 @@
+name: powerpc/perf
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to any branch
+    branches:
+      - '**'
+    # As long as one of these paths matches
+    paths:
+      # Only build if perf or other pieces it uses have been modified
+      - 'tools/perf/**'
+      - 'tools/arch/**'
+      - 'tools/build/**'
+      - 'tools/include/**'
+      - 'tools/lib/**'
+      - 'tools/scripts/**'
+      # Change to workflow triggers a build
+      - '.github/workflows/powerpc-perf.yml'
+
+jobs:
+  perf:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        image: [ubuntu-22.04, ubuntu-20.04, ubuntu-18.04]
+        subarch: [ppc64, ppc64le]
+
+    env:
+      ARCH: powerpc
+      TARGET: perf
+      CCACHE: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.subarch }}-${{ matrix.image }}
+          path: |
+            ~/output
diff --git a/.github/workflows/powerpc-ppctests.yml b/.github/workflows/powerpc-ppctests.yml
new file mode 100644
index 000000000000..9caa8fdddfc2
--- /dev/null
+++ b/.github/workflows/powerpc-ppctests.yml
@@ -0,0 +1,69 @@
+name: powerpc/ppctests
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to any branch
+    branches:
+      - '**'
+    # As long as one of these paths matches
+    paths:
+      # Generic selftests changes might affect us so match all of selftests
+      - 'tools/testing/selftests/**'
+      # Some files in arch are symlinked by selftests
+      - 'arch/powerpc/**'
+      # Change to workflow triggers a build
+      - '.github/workflows/powerpc-ppctests.yml'
+
+jobs:
+  ppctests:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        image: [ubuntu-22.04, ubuntu-20.04, ubuntu-18.04, ubuntu-16.04]
+        subarch: [ppc64, ppc64le]
+
+    env:
+      ARCH: powerpc
+      TARGET: ppctests
+      CCACHE: 1
+      INSTALL: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.subarch }}-${{ matrix.image }}
+          path: |
+            ~/output/install
diff --git a/.github/workflows/powerpc-selftests.yml b/.github/workflows/powerpc-selftests.yml
new file mode 100644
index 000000000000..64c1b4e7b7cf
--- /dev/null
+++ b/.github/workflows/powerpc-selftests.yml
@@ -0,0 +1,69 @@
+name: powerpc/selftests
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to any branch
+    branches:
+      - '**'
+    # As long as one of these paths matches
+    paths:
+      # Generic selftests changes might affect us so match all of selftests
+      - 'tools/testing/selftests/**'
+      # Some files in arch are symlinked by selftests
+      - 'arch/powerpc/**'
+      # Change to workflow triggers a build
+      - '.github/workflows/powerpc-selftests.yml'
+
+jobs:
+  selftests:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        image: [ubuntu-22.04, ubuntu-20.04, ubuntu-18.04, ubuntu-16.04]
+        subarch: [ppc64, ppc64le]
+
+    env:
+      ARCH: powerpc
+      TARGET: selftests
+      CCACHE: 1
+      INSTALL: 1
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Register problem matchers
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.subarch }}-${{ matrix.image }}
+          path: |
+            ~/output/install
diff --git a/.github/workflows/powerpc-sparse.yml b/.github/workflows/powerpc-sparse.yml
new file mode 100644
index 000000000000..1946e7099223
--- /dev/null
+++ b/.github/workflows/powerpc-sparse.yml
@@ -0,0 +1,103 @@
+name: powerpc/sparse
+
+# Controls when the action will run.
+on:
+  # This allows the build to be triggered manually via the github UI.
+  workflow_dispatch:
+
+  push:
+    # This triggers the build on a push to any branch
+    branches:
+      - '**'
+    # As long as one of these paths matches
+    paths:
+      - '!tools/**'          # ignore tools
+      - '!samples/**'        # ignore samples
+      - '!Documentation/**'  # ignore Documentation
+      - '!arch/**'           # ignore arch changes
+      - 'arch/powerpc/**'    # but not arch/powerpc
+      - 'arch/Kconfig'       # or common bits in arch
+      - '**'                 # anything else triggers a build
+
+jobs:
+  sparse:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        defconfig: [ppc64, pmac32, mpc885_ads_defconfig]
+        image: [fedora-39]
+        subarch: [ppc64]
+        include:
+          - subarch: ppc64le
+            defconfig: ppc64le
+            image: fedora-39
+
+    env:
+      ARCH: powerpc
+      TARGET: kernel
+      CCACHE: 1
+      SPARSE: 2
+      SUBARCH: ${{ matrix.subarch }}
+      IMAGE: ${{ matrix.image }}
+      DEFCONFIG: ${{ matrix.defconfig }}
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      # NB this is before the build on merge, so all errors are detected
+      # On other branches we ignore these and only report sparse diff results below
+      - name: Register problem matchers
+        if: github.ref_name == 'merge'
+        run: |
+          echo "::add-matcher::.github/problem-matchers/compiler-source.json"
+          echo "::add-matcher::.github/problem-matchers/compiler-non-source.json"
+
+      - name: Load ccache
+        uses: actions/cache@v3
+        with:
+          path: ~/.ccache
+          key: ${{ matrix.image }}-${{ matrix.subarch }}-${{ matrix.defconfig }}
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build
+        run: |
+          mkdir -p ~/.ccache
+          ./arch/powerpc/tools/ci-build.sh
+
+      - name: Get sparse results from base tree
+        if: github.ref_name != 'merge'
+        continue-on-error: true
+        uses: dawidd6/action-download-artifact@v2
+        with:
+          workflow: powerpc-sparse.yml
+          workflow_conclusion: success
+          branch: merge # Requires the merge branch to be built once before this will work
+          name: sparse-${{ matrix.defconfig }}-${{ matrix.image }}.log
+
+      # NB this is after the build on non-merge, so only errors from the sparse diff are detected
+      - name: Register problem matcher
+        if: github.ref_name != 'merge'
+        run: |
+          echo "::add-matcher::.github/problem-matchers/sparse.json"
+
+      - name: Compare sparse results with base
+        if: github.ref_name != 'merge'
+        run: |
+          bash -c 'if [ ! -f sparse.log ]; then cp ~/output/sparse.log .; fi'
+          ./arch/powerpc/tools/smart-sparse-diff.py sparse.log ~/output/sparse.log | tee -a ~/output/sparse-diff.log
+
+      - name: Archive artifacts
+        uses: actions/upload-artifact@v3
+        with:
+          name: sparse-${{ matrix.defconfig }}-${{ matrix.image }}.log
+          path: |
+            ~/output/sparse.log
+            ~/output/sparse-diff.log
diff --git a/.gitignore b/.gitignore
index c59dc60ba62e..3da6bed3ead8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -95,6 +95,7 @@ modules.order
 #
 # We don't want to ignore the following even if they are dot-files
 #
+!.github
 !.clang-format
 !.cocciconfig
 !.editorconfig
diff --git a/arch/powerpc/configs/disable-werror.config b/arch/powerpc/configs/disable-werror.config
index 7776b91da37f..ca1019e1e722 100644
--- a/arch/powerpc/configs/disable-werror.config
+++ b/arch/powerpc/configs/disable-werror.config
@@ -1,2 +1,3 @@
 # Help: Disable -Werror
 CONFIG_PPC_DISABLE_WERROR=y
+CONFIG_WERROR=n
diff --git a/arch/powerpc/configs/g5-qemu.config b/arch/powerpc/configs/g5-qemu.config
new file mode 100644
index 000000000000..3ae1c8a8bc61
--- /dev/null
+++ b/arch/powerpc/configs/g5-qemu.config
@@ -0,0 +1,2 @@
+CONFIG_SERIAL_PMACZILOG=y
+CONFIG_SERIAL_PMACZILOG_CONSOLE=y
diff --git a/arch/powerpc/configs/pmac32-qemu.config b/arch/powerpc/configs/pmac32-qemu.config
new file mode 100644
index 000000000000..81d3a24b2169
--- /dev/null
+++ b/arch/powerpc/configs/pmac32-qemu.config
@@ -0,0 +1,5 @@
+CONFIG_SERIAL_PMACZILOG=y
+CONFIG_SERIAL_PMACZILOG_CONSOLE=y
+CONFIG_DEVTMPFS=y
+CONFIG_DEVTMPFS_MOUNT=y
+CONFIG_DEBUG_ATOMIC_SLEEP=y
diff --git a/arch/powerpc/configs/ppc44x-qemu.config b/arch/powerpc/configs/ppc44x-qemu.config
new file mode 100644
index 000000000000..5e9cf983acc7
--- /dev/null
+++ b/arch/powerpc/configs/ppc44x-qemu.config
@@ -0,0 +1,2 @@
+CONFIG_DEVTMPFS=y
+CONFIG_DEVTMPFS_MOUNT=y
diff --git a/arch/powerpc/tools/ci-build.sh b/arch/powerpc/tools/ci-build.sh
new file mode 100755
index 000000000000..1b3f6a391b4f
--- /dev/null
+++ b/arch/powerpc/tools/ci-build.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+if [[ -z "$TARGET" || -z "$IMAGE" ]]; then
+    echo "Error: required environment variables not set!"
+    exit 1
+fi
+
+cmd="docker run --rm "
+cmd+="--network none "
+cmd+="-w /linux "
+
+linux_dir=$(realpath $(dirname $0))/../../../
+cmd+="-v $linux_dir:/linux:ro "
+
+cmd+="-e ARCH "
+cmd+="-e JFACTOR=$(nproc) "
+cmd+="-e KBUILD_BUILD_TIMESTAMP=$(date +%Y-%m-%d) "
+cmd+="-e CLANG "
+cmd+="-e LLVM_IAS "
+cmd+="-e SPARSE "
+
+if [[ -n "$MODULES" ]]; then
+    cmd+="-e MODULES=$MODULES "
+fi
+
+if [[ -n "$DEFCONFIG" ]]; then
+    if [[ $DEFCONFIG != *config ]]; then
+        DEFCONFIG=${DEFCONFIG}_defconfig
+    fi
+
+    cmd+="-e DEFCONFIG=${DEFCONFIG} "
+fi
+
+if [[ -n "$MERGE_CONFIG" ]]; then
+    cmd+="-e MERGE_CONFIG=$MERGE_CONFIG "
+fi
+
+if [[ "$SUBARCH" == "ppc64le" ]]; then
+    cross="powerpc64le-linux-gnu-"
+else
+    cross="powerpc-linux-gnu-"
+fi
+cmd+="-e CROSS_COMPILE=$cross "

+mkdir -p $HOME/output
+cmd+="-v $HOME/output:/output:rw "
+
+user=$(stat -c "%u:%g" $HOME/output)
+cmd+="-u $user "
+
+if [[ -n "$CCACHE" ]]; then
+    cmd+="-v $HOME/.ccache:/ccache:rw "
+    cmd+="-e CCACHE_DIR=/ccache "
+    cmd+="-e CCACHE=1 "
+fi
+
+if [[ -n "$TARGETS" ]]; then
+    cmd+="-e TARGETS=$TARGETS "
+fi
+
+if [[ -n "$INSTALL" ]]; then
+    cmd+="-e INSTALL=$INSTALL "
+fi
+
+if [[ "$TARGET" == "kernel" ]]; then
+    cmd+="-e QUIET=1 "
+fi
+
+if [[ -n $KBUILD_EXTRA_WARN ]]; then
+    cmd+="-e KBUILD_EXTRA_WARN=$KBUILD_EXTRA_WARN "
+fi
+
+cmd+="ghcr.io/linuxppc/build:$IMAGE-$(uname -m) "
+cmd+="/bin/container-build.sh $TARGET"
+
+(set -x; $cmd)
+
+rc=$?
+
+if [[ -n "$SPARSE" ]]; then
+    cat $HOME/output/sparse.log
+fi
+
+exit $rc
diff --git a/arch/powerpc/tools/smart-sparse-diff.py b/arch/powerpc/tools/smart-sparse-diff.py
new file mode 100755
index 000000000000..9991fde5a5a0
--- /dev/null
+++ b/arch/powerpc/tools/smart-sparse-diff.py
@@ -0,0 +1,314 @@
+#!/usr/bin/python3
+import sys
+from typing import Dict, List, Tuple, Any
+
+verbose = False
+def vprint(*args, **kwargs):
+    if verbose:
+        print(*args, **kwargs)
+
+def deinterleave_by_file(log: str) -> Dict[str, List[List[str]]]:
+    # zeroeth pass: things get interleaved with multiprocess compilation
+    # so deinterleave it first
+    lines_by_file = {}  # type: Dict[str, List[List[str]]]
+    for line in log.split("\n"):
+        parts = line.split(":")
+
+        filename = parts[0]
+
+        if filename not in lines_by_file:
+            lines_by_file[filename] = []
+
+        lines_by_file[filename] += [parts]
+
+    return lines_by_file
+
+def concat_multi_line_warnings(split_lines: List[List[str]]) -> List[List[str]]:
+    # first pass: concatenate irritating things like:
+    #drivers/scsi/lpfc/lpfc_scsi.c:5606:30: warning: incorrect type in assignment (different base types)
+    #drivers/scsi/lpfc/lpfc_scsi.c:5606:30: expected int [signed] memory_flags
+    #drivers/scsi/lpfc/lpfc_scsi.c:5606:30: got restricted gfp_t
+    lines = []  # type: List[List[str]]
+    last_column = ""
+    last_line = ""
+    for parts in split_lines:
+
+        if len(parts) < 4:
+            # this doesn't have enough parts to be a 'real' line.
+            # store it, don't attempt to process it now
+            # hopefully it will be removed in deduplication
+            lines += [parts]
+            continue
+
+        (linenum, columnnum) = parts[1:3]
+        final_mandatory_part = parts[3].strip()
+        final_parts = ":".join(parts[3:]).strip()
+
+        #vprint(line)
+        if (linenum != last_line) or \
+           (last_column != columnnum):
+            # this is a different line and column, it cannot be a concatenation
+            lines += [parts]
+            #vprint("different f/l/c")
+        elif (final_mandatory_part == "warning") or \
+             (final_mandatory_part == "error"):
+            # this has an explicit type: it is a new message
+            lines += [parts]
+            #vprint("explicit type")
+        else:
+            # looks like this is a continuation
+            last_line_parts = lines[-1]
+            last_line_parts[-1] += " " + final_parts
+            lines[-1] = last_line_parts
+            #vprint("concat: new last: " + str(lines[-1]))
+
+        last_line = linenum
+        last_column = columnnum
+
+    return lines
+
+def parse_log_by_file(log: str) -> Dict[str, List[List[str]]]:
+
+    lines_by_file = deinterleave_by_file(log)
+
+    concat_lines_by_file = {}
+    for filename in lines_by_file:
+        concat_lines_by_file[filename] = concat_multi_line_warnings(lines_by_file[filename])
+
+    return concat_lines_by_file
+
+def smart_filter(a: List[Any],
+                 b: List[Any]) -> List[Any]:
+    res = []  # type: List[Any]
+    # two reasons we'd want to keep a line:
+    # it does not appear in the other at all
+    # it appears an unequal number of times (think headers)
+    # (to manage this in the report, only include it where it appears
+    # more times)
+    for l in a:
+        if l not in b:
+            res += [l]
+        else:
+            if len([ll for ll in a if ll == l]) > \
+               len([ll for ll in b if ll == l]):
+
+                # save only once
+                if l not in res:
+                    res += [l]
+    return res
+
+def remove_exact_matching_lines(old_lines: List[List[str]],
+                                new_lines: List[List[str]]) \
+                                -> Tuple[List[List[str]], List[List[str]]]:
+
+    new_old = smart_filter(old_lines, new_lines)
+    new_new = smart_filter(new_lines, old_lines)
+
+    if new_old == []:
+        new_old = None
+
+    if new_new == []:
+        new_new = None
+
+    return (new_old, new_new)
+
+def remove_lines_diff_by_only_line_no(old_lines, new_lines):
+
+    # drop weird short lines
+    safe_old_lines = []
+    for parts in old_lines:
+        if len(parts) < 4:
+            # this doesn't have enough parts to be a 'real' line. warn and proceed.
+            print('Found odd line "%s" in old file, ignoring.' % ':'.join(parts))
+        else:
+            safe_old_lines += [parts]
+    safe_new_lines = []
+    for parts in new_lines:
+        if len(parts) < 4:
+            # this doesn't have enough parts to be a 'real' line. warn and proceed.
+            print('Found odd line "%s" in new file, ignoring.' % ':'.join(parts))
+        else:
+            safe_new_lines += [parts]
+
+    old_wo_line = [":".join([l[0]] + l[2:]) for l in safe_old_lines]
+    new_wo_line = [":".join([l[0]] + l[2:]) for l in safe_new_lines]
+
+    new_old = smart_filter(old_wo_line, new_wo_line)
+    new_new = smart_filter(new_wo_line, old_wo_line)
+
+    old_parts = [l.split(':') for l in new_old]
+    new_parts = [l.split(':') for l in new_new]
+
+    old_parts = [[l[0], 'XX'] + l[1:] for l in old_parts]
+    new_parts = [[l[0], 'XX'] + l[1:] for l in new_parts]
+
+    if old_parts == []:
+        old_parts = None
+    if new_parts == []:
+        new_parts = None
+
+    return (old_parts, new_parts)
+
+def format_one_warning(parts: List[str]) -> str:
+    return ":".join(parts)
+
+
+def smart_diff(old_log: str, new_log: str
+               ) -> Tuple[List[str], List[str]]:
+    old_by_file = parse_log_by_file(old_log)
+    new_by_file = parse_log_by_file(new_log)
+
+    # todo - this structure is helpful for progressive development and
+    # debugging, but is not very efficient
+
+    # we now have 2x { filename: [list of warnings] }
+    # go to 1x { filename: (old warnings, new warnings) }
+    combined_warnings = {}
+    for filename in set(old_by_file.keys()) | set(new_by_file.keys()):
+        olds = None
+        if filename in old_by_file:
+            olds = old_by_file[filename]
+
+        news = None
+        if filename in new_by_file:
+            news = new_by_file[filename]
+
+        combined_warnings[filename] = (olds, news)
+
+    only_new = {}
+    only_old = {}
+    # lets winnow out our lists a bit
+    changed = {}
+    for filename in combined_warnings:
+        (olds, news) = combined_warnings[filename]
+
+        if news and not olds:
+            only_new[filename] = (olds, news)
+        elif olds and not news:
+            only_old[filename] = (olds, news)
+        elif not olds and not news:
+            print("Something weird going on with: " + filename)
+        else:
+            changed[filename] = (olds, news)
+
+    vprint("After parsing:")
+    vprint("Only new warnings: " + str(len(only_new.keys())))
+    vprint("Only old warnings: " + str(len(only_old.keys())))
+    vprint("Changed: " + str(len(changed.keys())))
+
+
+    # remove entire duplicated files
+    changed_1 = {}
+    for filename in changed:
+        (olds, news) = changed[filename]
+        if olds == news:
+            vprint("exact complete match drops: " + filename)
+        else:
+            changed_1[filename] = (olds, news)
+
+    vprint("After removing exact file matches:")
+    vprint("Only new warnings: " + str(len(only_new.keys())))
+    vprint("Only old warnings: " + str(len(only_old.keys())))
+    vprint("Changed: " + str(len(changed_1.keys())))
+
+    # now, lets just try removing exact matching lines
+    changed_2 = {}
+    for filename in changed_1:
+        (olds, news) = changed_1[filename]
+        (olds, news) = remove_exact_matching_lines(olds, news)
+        if not olds and not news:
+            vprint("remove_exact_matching_lines completely matched: " + filename)
+        elif olds and not news:
+            only_old[filename] = (olds, news)
+        elif not olds and news:
+            only_new[filename] = (olds, news)
+        else:
+            changed_2[filename] = (olds, news)
+
+    vprint("After removing exact line matches:")
+    vprint("Only new warnings: " + str(len(only_new.keys())))
+    vprint("Only old warnings: " + str(len(only_old.keys())))
+    vprint("Changed: " + str(len(changed_2.keys())))
+
+    # now, lets just try removing lines w/ matching column, diff line
+    changed_3 = {}
+    for filename in changed_2:
+        (olds, news) = remove_lines_diff_by_only_line_no(*changed_2[filename])
+        if olds and news:
+            changed_3[filename] = (olds, news)
+        elif olds and not news:
+            only_old[filename] = (olds, news)
+        elif not olds and news:
+            only_new[filename] = (olds, news)
+        else:
+            vprint("diff by only line no removed: " + filename)
+
+    vprint("After removing warnings differing in line number only (same column, message):")
+    vprint("Only new warnings: " + str(len(only_new.keys())))
+    vprint("Only old warnings: " + str(len(only_old.keys())))
+    vprint("Changed: " + str(len(changed_3.keys())))
+
+    #fn = list(changed_3.keys())[0]
+    #ch = changed_3[fn]
+
+    # now lets format data for return
+    # I assume consumers (so far, just pretty-printing) are pretty unconcerned with
+    # getting the messages split up by file name. So let's flatten our dictionaries
+    # note that this doesn't flatten them properly yet - we get a list where each
+    # item represents a file, and each item is a list of warnings, and each warning
+    # is a list of parts.
+    removed_msgs = [only_old[fn][0] for fn in only_old]
+    added_msgs = [only_new[fn][1] for fn in only_new]
+
+    # also, the whole concept of 'changed' - files with changed messages -
+    # is pretty unique to our analysis, so just flatten them out too
+    removed_msgs += [changed_3[fn][0] for fn in changed_3]
+    added_msgs += [changed_3[fn][1] for fn in changed_3]
+
+    # lastly, rejoin on ":", flattening out the lists as we go.
+    removed_warns = []  # type: List[str]
+    for sublist in removed_msgs:
+        for msg in sublist:
+            removed_warns += [format_one_warning(msg)]
+    added_warns = []  # type: List[str]
+    for sublist in added_msgs:
+        for msg in sublist:
+            added_warns += [format_one_warning(msg)]
+
+    return (removed_warns, added_warns)
+
+
+def usage(exec_name: str) -> None:
+    print("Usage: %s <oldfile> <newfile>" % exec_name)
+    print(" attempt a smart diff between sparse logs in oldfile and newfile")
+    exit(1)
+
+if __name__ == '__main__':
+    if len(sys.argv) != 3:
+        usage(sys.argv[0])
+
+    try:
+        with open(sys.argv[1], 'r') as old_file:
+            old_log = old_file.read()
+    except:
+        # use sys.argv[1] here, old_file may be unbound if open() failed
+        print("Error reading old log file %s" % sys.argv[1])
+        exit(1)
+
+    try:
+        with open(sys.argv[2], 'r') as new_file:
+            new_log = new_file.read()
+    except:
+        # use sys.argv[2] here, new_file may be unbound if open() failed
+        print("Error reading new log file %s" % sys.argv[2])
+        exit(1)
+
+    (removed, added) = smart_diff(old_log, new_log)
+
+    lines = []  # type: List[str]
+    lines += ['-' + w for w in removed]
+    lines += ['+' + w for w in added]
+
+    # sort by message, not including +/-
+    lines.sort(key=lambda x: x[1:])
+    for l in lines:
+        print(l)