From 60492f328374f286e5bdac60a37abed2db5b108b Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Thu, 12 Oct 2023 15:40:18 +0200 Subject: [PATCH 1/2] gh: Backport actions to 25 --- .github/actions/build-base-image/action.yaml | 65 ++++ .github/dockerfiles/Dockerfile.32-bit | 2 +- .github/dockerfiles/Dockerfile.64-bit | 41 +- .github/dockerfiles/Dockerfile.clang | 4 +- .github/dockerfiles/Dockerfile.cross-compile | 2 +- .github/dockerfiles/Dockerfile.ubuntu-base | 99 +++-- .github/dockerfiles/init.sh | 5 +- .github/scripts/build-base-image.sh | 18 +- .github/scripts/build-macos.sh | 21 +- .github/scripts/get-pr-number.es | 35 +- .github/scripts/init-pre-release.sh | 31 +- .github/scripts/restore-from-prebuilt.sh | 162 ++++++++ .github/scripts/sync-github-prs.es | 97 ++++- .github/scripts/sync-github-releases.sh | 61 +-- .github/workflows/actions-updater.yaml | 29 ++ .github/workflows/add-to-project.yaml | 27 ++ .github/workflows/main.yaml | 387 +++++++++---------- .github/workflows/pr-comment.yaml | 39 +- .github/workflows/sync-github-prs.yaml | 45 +++ .github/workflows/sync-github-releases.yaml | 37 +- .github/workflows/update-base.yaml | 11 +- 21 files changed, 849 insertions(+), 369 deletions(-) create mode 100644 .github/actions/build-base-image/action.yaml create mode 100755 .github/scripts/restore-from-prebuilt.sh create mode 100644 .github/workflows/actions-updater.yaml create mode 100644 .github/workflows/add-to-project.yaml create mode 100644 .github/workflows/sync-github-prs.yaml diff --git a/.github/actions/build-base-image/action.yaml b/.github/actions/build-base-image/action.yaml new file mode 100644 index 000000000000..de9731c5de9a --- /dev/null +++ b/.github/actions/build-base-image/action.yaml @@ -0,0 +1,65 @@ +name: Build Base Image +description: 'Builds the base testing image (using cache if possible)' + +inputs: + BASE_BRANCH: + required: true + TYPE: + default: '64-bit' + BUILD_IMAGE: + default: true + github_token: + description: 'GITHUB_TOKEN' 
+ default: '${{ github.token }}' + +runs: + using: composite + steps: + - name: Cleanup GH Runner + shell: bash -euxo pipefail {0} + run: | + ## Delete large files from runner to get more disk space + ## See https://github.com/actions/runner-images/issues/2840 + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf "/usr/local/share/boost" + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + + - name: Cache BASE image + uses: actions/cache@v3 + if: inputs.TYPE == '64-bit' || inputs.TYPE == 'clang' + with: + path: otp_docker_base.tar + key: ${{ runner.os }}-${{ hashFiles('.github/dockerfiles/Dockerfile.ubuntu-base', '.github/scripts/build-base-image.sh') }}-${{ hashFiles('OTP_VERSION') }} + + - name: Docker login + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ inputs.github_token }} + + - name: Build BASE image + shell: bash + run: .github/scripts/build-base-image.sh "${{ inputs.BASE_BRANCH }}" "${{ inputs.TYPE }}" + + - name: Cache pre-built tar archives + if: inputs.BUILD_IMAGE == 'true' + uses: actions/cache@v3 + with: + path: | + otp_src.tar.gz + otp_cache.tar.gz + key: prebuilt-${{ github.job }}-${{ github.ref_name }}-${{ github.sha }} + restore-keys: | + prebuilt-${{ github.ref_name }}-${{ github.sha }} + - name: Build image + if: inputs.BUILD_IMAGE == 'true' + shell: bash -euxo pipefail {0} + run: | + .github/scripts/restore-from-prebuilt.sh `pwd` .github/otp.tar.gz + rm -f otp_{src,cache}.tar.gz + docker build --tag otp \ + --build-arg MAKEFLAGS=-j$(($(nproc) + 2)) \ + --file ".github/dockerfiles/Dockerfile.64-bit" \ + .github/ diff --git a/.github/dockerfiles/Dockerfile.32-bit b/.github/dockerfiles/Dockerfile.32-bit index f1ea96d5fbd9..b1df0e996cba 100644 --- a/.github/dockerfiles/Dockerfile.32-bit +++ b/.github/dockerfiles/Dockerfile.32-bit @@ -1,4 +1,4 @@ -ARG BASE=docker.pkg.github.com/erlang/otp/i386-debian-base +ARG BASE=ghcr.io/erlang/otp/i386-debian-base FROM $BASE ARG 
MAKEFLAGS=-j4 diff --git a/.github/dockerfiles/Dockerfile.64-bit b/.github/dockerfiles/Dockerfile.64-bit index 25aadc5e84e0..1cdb8cfd687c 100644 --- a/.github/dockerfiles/Dockerfile.64-bit +++ b/.github/dockerfiles/Dockerfile.64-bit @@ -1,10 +1,10 @@ -ARG BASE=docker.pkg.github.com/erlang/otp/ubuntu-base +ARG BASE=ghcr.io/erlang/otp/ubuntu-base FROM $BASE ARG MAKEFLAGS=$MAKEFLAGS ENV MAKEFLAGS=$MAKEFLAGS \ ERL_TOP=/buildroot/otp \ - PATH=/otp/bin:/buildroot/otp/bin:$PATH + PATH="/Erlang ∅⊤℞/bin":/buildroot/otp/bin:$PATH ARG ARCHIVE=./otp.tar.gz COPY $ARCHIVE /buildroot/otp.tar.gz @@ -14,35 +14,22 @@ WORKDIR /buildroot/otp/ ENV CFLAGS="-O2 -g -Werror" -## Configure, check that no application are disabled and then make -RUN ./configure --prefix=/otp && \ - if cat lib/*/CONF_INFO || cat lib/*/SKIP || cat lib/SKIP-APPLICATIONS; then exit 1; fi && \ - make && sudo make install +## Configure (if not cached), check that no application are disabled and then make +RUN if [ ! -f Makefile ]; then \ + touch README.md && \ + ./configure --prefix="/Erlang ∅⊤℞" && \ + if cat lib/*/CONF_INFO || cat lib/*/SKIP || cat lib/SKIP-APPLICATIONS; then exit 1; fi && \ + find . 
-type f -newer README.md | xargs tar --transform 's:^./:otp/:' -cf ../otp_cache.tar; \ + fi && \ + make && make docs DOC_TARGETS=chunks && \ + sudo make install install-docs DOC_TARGETS=chunks ## Disable -Werror as testcases do not compile with it on ENV CFLAGS="-O2 -g" -WORKDIR /buildroot/ - -## Install test tools rebar3, proper and jsx -RUN latest () { \ - local VSN=$(curl -sL "https://api.github.com/repos/$1/tags" | jq -r ".[] | .name" | grep -E '^v?[0-9]' | sort -V | tail -1); \ - curl -sL "https://github.com/$1/archive/$VSN.tar.gz" > $(basename $1).tar.gz; \ - } && \ - latest erlang/rebar3 && ls -la && \ - (tar xzf rebar3.tar.gz && cd rebar3-* && ./bootstrap && sudo cp rebar3 /usr/bin) && \ - latest proper-testing/proper && \ - (tar xzf proper.tar.gz && mv proper-* proper && cd proper && make) && \ - latest talentdeficit/jsx && \ - (tar xzf jsx.tar.gz && mv jsx-* jsx && cd jsx && rebar3 compile) - -ENV ERL_LIBS=/buildroot/proper:/buildroot/jsx - -WORKDIR /buildroot/otp/ - ## Update init.sh with correct env vars RUN echo "export MAKEFLAGS=$MAKEFLAGS" > /buildroot/env.sh && \ echo "export ERLC_USE_SERVER=$ERLC_USE_SERVER" >> /buildroot/env.sh && \ - echo "export ERL_TOP=$ERL_TOP" >> /buildroot/env.sh && \ - echo "export PATH=$PATH" >> /buildroot/env.sh && \ - echo "export ERL_LIBS=$ERL_LIBS" >> /buildroot/env.sh + echo "export ERL_TOP=\"$ERL_TOP\"" >> /buildroot/env.sh && \ + echo "export PATH=\"$PATH\"" >> /buildroot/env.sh && \ + echo "export ERL_LIBS=\"$ERL_LIBS\"" >> /buildroot/env.sh diff --git a/.github/dockerfiles/Dockerfile.clang b/.github/dockerfiles/Dockerfile.clang index 92607dd6bb67..2fe7f110d354 100644 --- a/.github/dockerfiles/Dockerfile.clang +++ b/.github/dockerfiles/Dockerfile.clang @@ -1,4 +1,4 @@ -ARG BASE=docker.pkg.github.com/erlang/otp/ubuntu-base +ARG BASE=ghcr.io/erlang/otp/ubuntu-base FROM $BASE ## We do a SSA lint check here ENV ERL_COMPILER_OPTIONS=ssalint @@ -9,7 +9,7 @@ ENV MAKEFLAGS=$MAKEFLAGS \ ERL_TOP=/buildroot/otp \ 
PATH=/otp/bin:/buildroot/otp/bin:$PATH -RUN sudo apt-get install -y clang +RUN sudo apt-get update && sudo apt-get install -y clang ARG ARCHIVE=./otp.tar.gz COPY $ARCHIVE /buildroot/otp.tar.gz diff --git a/.github/dockerfiles/Dockerfile.cross-compile b/.github/dockerfiles/Dockerfile.cross-compile index 75045f2c76ba..c9e9c4485538 100644 --- a/.github/dockerfiles/Dockerfile.cross-compile +++ b/.github/dockerfiles/Dockerfile.cross-compile @@ -1,7 +1,7 @@ ## ## This docker file will build Erlang on 32-bit to 64-bit x86 ## -ARG BASE=docker.pkg.github.com/erlang/otp/i386-debian-base +ARG BASE=ghcr.io/erlang/otp/i386-debian-base FROM $BASE as build ARG MAKEFLAGS=-j4 diff --git a/.github/dockerfiles/Dockerfile.ubuntu-base b/.github/dockerfiles/Dockerfile.ubuntu-base index c19537ef2a79..aa61012b4d39 100644 --- a/.github/dockerfiles/Dockerfile.ubuntu-base +++ b/.github/dockerfiles/Dockerfile.ubuntu-base @@ -6,47 +6,22 @@ FROM $BASE ENV INSTALL_LIBS="zlib1g-dev libncurses5-dev libssl-dev unixodbc-dev libsctp-dev lksctp-tools libgmp3-dev libwxbase3.0-dev libwxgtk3.0-gtk3-dev libwxgtk-webview3.0-gtk3-dev" -ARG EXTRA_LIBS="erlang erlang-doc" - USER root ENV DEBIAN_FRONTEND=noninteractive ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 +## Install build tools RUN apt-get update && apt-get -y upgrade && \ apt-get install -y build-essential m4 autoconf fop xsltproc \ - default-jdk libxml2-utils flex pkg-config \ - unixodbc odbc-postgresql postgresql \ - tzdata ssh openssh-server groff-base sudo gdb tinyproxy bind9 nsd expect vsftpd python \ - linux-tools-common linux-tools-generic linux-tools-`uname -r` curl jq \ - xvfb libgl1-mesa-dri \ - ${INSTALL_LIBS} && \ - for lib in ${EXTRA_LIBS}; do apt-get install -y ${lib}; done && \ - if [ ! 
-f /etc/apache2/apache2.conf ]; then apt-get install -y apache2; fi && \ + default-jdk libxml2-utils flex pkg-config locales tzdata sudo ${INSTALL_LIBS} && \ sed -i 's@# en_US.UTF-8@en_US.UTF-8@g' /etc/locale.gen && locale-gen && \ update-alternatives --set wx-config /usr/lib/x86_64-linux-gnu/wx/config/gtk3-unicode-3.0 -## EXTRA_LIBS are installed using a for loop because of bugs in the erlang-doc deb package -## Apache2 may already be installed, if so we do not want to install it again - ARG MAKEFLAGS=-j4 ENV MAKEFLAGS=$MAKEFLAGS \ ERLC_USE_SERVER=yes -## We install the latest version of the previous three releases in order to do -## backwards compatability testing of the Erlang distribution. -RUN apt-get install -y git && \ - curl -L https://raw.githubusercontent.com/kerl/kerl/master/kerl > /usr/bin/kerl && \ - chmod +x /usr/bin/kerl && \ - kerl update releases && \ - LATEST=$(kerl list releases | tail -1 | awk -F '.' '{print $1}') && \ - for release in $(seq $(( LATEST - 3 )) $(( LATEST - 1 ))); do \ - VSN=$(kerl list releases | grep "^$release" | tail -1); \ - kerl build ${VSN} ${VSN} && \ - kerl install ${VSN} /usr/local/lib/erlang-${VSN}; \ - done && \ - rm -rf ~/.kerl - ENV LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 ARG USER=gitpod @@ -56,11 +31,57 @@ ARG uid=421 RUN echo "Europe/Stockholm" > /etc/timezone && \ ln -snf /usr/share/zoneinfo/$(cat /etc/timezone) /etc/localtime && \ + if ! grep ":${gid}:$" /etc/group; then groupadd -g ${gid} localgroup; fi && \ if [ ! 
-d /home/${USER} ]; then useradd -rm -d /home/${USER} -s /bin/sh -g ${gid} -G ${gid},sudo -u ${uid} ${USER}; fi && \ echo "${USER} ALL=(ALL:ALL) NOPASSWD: ALL" > /etc/sudoers.d/${USER} && \ echo "/buildroot/** r," >> /etc/apparmor.d/local/usr.sbin.named && \ echo "/tests/** r," >> /etc/apparmor.d/local/usr.sbin.named +## Java and log4j are used by fop to build documentation +COPY --chown=${USER}:${GROUP} dockerfiles/log4j.properties /home/${USER}/ +ENV JAVA_ARGS="-Dlog4j.configuration=file://home/${USER}/log4j.properties" + +ENV OTP_STRICT_INSTALL=yes + +RUN mkdir /buildroot /tests /otp && chown ${USER}:${GROUP} /buildroot /tests /otp + +## We install the latest version of the previous three releases in order to do +## backwards compatability testing of Erlang. +RUN apt-get install -y git curl && \ + curl -L https://raw.githubusercontent.com/kerl/kerl/master/kerl > /usr/bin/kerl && \ + chmod +x /usr/bin/kerl && \ + kerl update releases && \ + LATEST=$(kerl list releases | tail -1 | awk -F '.' '{print $1}') && \ + for release in $(seq $(( LATEST - 2 )) $(( LATEST ))); do \ + VSN=$(kerl list releases | grep "^$release" | tail -1); \ + if [ $release = $LATEST ]; then \ + echo "/usr/local/lib/erlang-${VSN}/bin" > /home/${USER}/LATEST; \ + fi && \ + kerl build ${VSN} ${VSN} && \ + kerl install ${VSN} /usr/local/lib/erlang-${VSN}; \ + done && \ + rm -rf ~/.kerl + +## Install test tools +## EXTRA_LIBS are installed using a for loop because of bugs in the erlang-doc deb package +## Apache2 may already be installed, if so we do not want to install it again +ARG EXTRA_LIBS="erlang erlang-doc" +RUN apt-get install -y \ + unixodbc odbc-postgresql postgresql ssh openssh-server groff-base gdb \ + tinyproxy knot ldnsutils expect vsftpd python emacs nano vim \ + linux-tools-common linux-tools-generic jq \ + xvfb libgl1-mesa-dri && \ + for lib in ${EXTRA_LIBS}; do apt-get install -y ${lib}; done && \ + if [ ! 
-f /etc/apache2/apache2.conf ]; then apt-get install -y apache2; fi +RUN apt-get install -y linux-tools-$(uname -r) || true + +## We use tmux to test terminals +RUN apt-get install -y libevent-dev libutf8proc-dev && \ + cd /tmp && wget https://github.com/tmux/tmux/releases/download/3.2a/tmux-3.2a.tar.gz && \ + tar xvzf tmux-3.2a.tar.gz && cd tmux-3.2a && \ + ./configure --enable-static --enable-utf8proc && \ + make && make install + ## Setup progres so that the odbc test can run USER postgres @@ -79,15 +100,23 @@ ENV USER=${USER} RUN ssh-keygen -q -t rsa -N '' -f $HOME/.ssh/id_rsa && \ cp $HOME/.ssh/id_rsa.pub $HOME/.ssh/authorized_keys -## Java and log4j are used by fop to build documentation -COPY --chown=${USER}:${GROUP} dockerfiles/log4j.properties /home/${USER}/ -ENV OTP_STRICT_INSTALL=yes \ - JAVA_ARGS="-Dlog4j.configuration=file://home/${USER}/log4j.properties" - -RUN sudo mkdir /buildroot /tests /otp && sudo chown ${USER}:${GROUP} /buildroot /tests /otp - COPY --chown=${USER}:${GROUP} dockerfiles/init.sh /buildroot/ -## TODO: Build Erlang versions N, N-1 and N-2 for compatability testing. 
+WORKDIR /buildroot/ + +## Install test tools rebar3, proper and jsx +RUN export PATH="$(cat /home/${USER}/LATEST):${PATH}" && \ + latest () { \ + local VSN=$(curl -sL "https://api.github.com/repos/$1/tags" | jq -r ".[] | .name" | grep -E '^v?[0-9]' | sort -V | tail -1); \ + curl -sL "https://github.com/$1/archive/$VSN.tar.gz" > $(basename $1).tar.gz; \ + } && \ + latest erlang/rebar3 && ls -la && \ + (tar xzf rebar3.tar.gz && cd rebar3-* && ./bootstrap && sudo cp rebar3 /usr/bin) && \ + latest proper-testing/proper && \ + (tar xzf proper.tar.gz && mv proper-* proper && cd proper && make) && \ + latest talentdeficit/jsx && \ + (tar xzf jsx.tar.gz && mv jsx-* jsx && cd jsx && rebar3 compile) + +ENV ERL_LIBS=/buildroot/proper:/buildroot/jsx ENTRYPOINT ["/buildroot/init.sh"] diff --git a/.github/dockerfiles/init.sh b/.github/dockerfiles/init.sh index 8eb13abee214..3033e3351d03 100755 --- a/.github/dockerfiles/init.sh +++ b/.github/dockerfiles/init.sh @@ -10,11 +10,12 @@ sudo /usr/sbin/sshd sudo service postgresql start -sudo -E bash -c "apt-get update && apt-get install -y linux-tools-common linux-tools-generic linux-tools-`uname -r`" +sudo -E bash -c "apt-get update && apt-get install -y linux-tools-common linux-tools-generic" +sudo -E bash -c "apt-get install -y linux-tools-$(uname-r)" || true sudo bash -c "Xvfb :99 -ac -screen 0 1920x1080x24 -nolisten tcp" & export DISPLAY=:99 -PATH=$PATH:$(ls -1d /usr/local/lib/erlang-*/bin | tr '\n' ':') +PATH="$PATH:$(ls -1d /usr/local/lib/erlang-*/bin | tr '\n' ':')" exec /bin/bash -c "$1" diff --git a/.github/scripts/build-base-image.sh b/.github/scripts/build-base-image.sh index 543235e12626..5836efc30bd4 100755 --- a/.github/scripts/build-base-image.sh +++ b/.github/scripts/build-base-image.sh @@ -12,6 +12,10 @@ esac if [ -z "${BASE_TAG}" ]; then BASE_TAG=$(grep "ARG BASE=" ".github/dockerfiles/Dockerfile.${2}" | head -1 | tr '=' ' ' | awk '{print $3}') + ## If this script is used on pre 25 releases + if [ -z "${BASE_TAG}" 
]; then + BASE_TAG=$(grep "FROM " ".github/dockerfiles/Dockerfile.${2}" | head -1 | awk '{print $2}') + fi fi case "${BASE_TAG}" in @@ -29,16 +33,16 @@ case "${BASE_TAG}" in ;; esac -echo "::set-output name=BASE::${BASE}" -echo "::set-output name=BASE_TAG::${BASE_TAG}" -echo "::set-output name=BASE_TYPE::${BASE_TYPE}" +echo "BASE=${BASE}" >> $GITHUB_OUTPUT +echo "BASE_TAG=${BASE_TAG}" >> $GITHUB_OUTPUT +echo "BASE_TYPE=${BASE_TYPE}" >> $GITHUB_OUTPUT if [ -f "otp_docker_base.tar" ]; then docker load -i "otp_docker_base.tar" - echo "::set-output name=BASE_BUILD::loaded" + echo "BASE_BUILD=loaded" >> $GITHUB_OUTPUT elif [ -f "otp_docker_base/otp_docker_base.tar" ]; then docker load -i "otp_docker_base/otp_docker_base.tar" - echo "::set-output name=BASE_BUILD::loaded" + echo "BASE_BUILD=loaded" >> $GITHUB_OUTPUT else if [ "${BASE_USE_CACHE}" != "false" ]; then docker pull "${BASE_TAG}:${BASE_BRANCH}" @@ -58,9 +62,9 @@ else NEW_BASE_IMAGE_ID=$(docker images -q "${BASE_TAG}:latest") if [ "${BASE_IMAGE_ID}" = "${NEW_BASE_IMAGE_ID}" ]; then - echo "::set-output name=BASE_BUILD::cached" + echo "BASE_BUILD=cached" >> $GITHUB_OUTPUT else - echo "::set-output name=BASE_BUILD::re-built" + echo "BASE_BUILD=re-built" >> $GITHUB_OUTPUT docker save "${BASE_TAG}:latest" > "otp_docker_base.tar" fi fi diff --git a/.github/scripts/build-macos.sh b/.github/scripts/build-macos.sh index 82b07bac7b7c..73c35a6a22d3 100755 --- a/.github/scripts/build-macos.sh +++ b/.github/scripts/build-macos.sh @@ -1,12 +1,19 @@ #!/bin/sh -export MAKEFLAGS=-j$(getconf _NPROCESSORS_ONLN) -export ERL_TOP=`pwd` -export RELEASE_ROOT=$ERL_TOP/release +export MAKEFLAGS="-j$(getconf _NPROCESSORS_ONLN)" +export ERL_TOP="$(pwd)" export ERLC_USE_SERVER=true +export RELEASE_ROOT="$ERL_TOP/release" +BUILD_DOCS=false -./otp_build configure \ - --disable-dynamic-ssl-lib +if [ "$1" = "build_docs" ]; then + BUILD_DOCS=true + shift +fi + +./otp_build configure $* ./otp_build boot -a -./otp_build release -a $RELEASE_ROOT 
-make release_docs DOC_TARGETS=chunks +./otp_build release -a "$RELEASE_ROOT" +if $BUILD_DOCS; then + make release_docs DOC_TARGETS=chunks +fi diff --git a/.github/scripts/get-pr-number.es b/.github/scripts/get-pr-number.es index e925f956258a..a388e6107a68 100755 --- a/.github/scripts/get-pr-number.es +++ b/.github/scripts/get-pr-number.es @@ -11,19 +11,46 @@ main([Repo, HeadSha]) -> string:equal(HeadSha, Sha) end, AllOpenPrs) of {value, #{ <<"number">> := Number } } -> - io:format("::set-output name=result::~p~n", [Number]); + append_to_github_output("result=~p~n", [Number]); false -> - io:format("::set-output name=result::~ts~n", [""]) + append_to_github_output("result=~ts~n", [""]) + end. + +append_to_github_output(Fmt, Args) -> + case os:getenv("GITHUB_OUTPUT") of + false -> + io:format(standard_error, "GITHUB_OUTPUT env var missing?~n", []); + GitHubOutputFile -> + {ok, F} = file:open(GitHubOutputFile, [write, append]), + ok = io:fwrite(F, Fmt, Args), + ok = file:close(F) end. ghapi(CMD) -> - Data = cmd(CMD), - try jsx:decode(Data, [{return_maps,true}]) + decode(cmd(CMD)). + +decode(Data) -> + try jsx:decode(Data,[{return_maps, true}, return_tail]) of + {with_tail, Json, <<>>} -> + Json; + {with_tail, Json, Tail} -> + lists:concat([Json | decodeTail(Tail)]) catch E:R:ST -> io:format("Failed to decode: ~ts",[Data]), erlang:raise(E,R,ST) end. +decodeTail(Data) -> + try jsx:decode(Data,[{return_maps, true}, return_tail]) of + {with_tail, Json, <<>>} -> + [Json]; + {with_tail, Json, Tail} -> + [Json | decodeTail(Tail)] + catch E:R:ST -> + io:format(standard_error, "Failed to decode: ~ts",[Data]), + erlang:raise(E,R,ST) + end. 
+ cmd(CMD) -> ListCmd = unicode:characters_to_list(CMD), io:format("cmd: ~ts~n",[ListCmd]), diff --git a/.github/scripts/init-pre-release.sh b/.github/scripts/init-pre-release.sh index 7a86f3052c0c..c7b7cab617df 100755 --- a/.github/scripts/init-pre-release.sh +++ b/.github/scripts/init-pre-release.sh @@ -3,4 +3,33 @@ ## We create a tar ball that is used later by build-otp-tar ## to create the pre-built tar ball -git archive --prefix otp/ -o otp_src.tar.gz HEAD +AUTOCONF=0 +TARGET=otp_src.tar.gz + +if [ -n "$1" ]; then + TARGET="$1" +fi + +## This script is used to create archives for older releases +## so if configure does not exist in the git repo we need to +## create it. +if [ ! -f configure ]; then + ./otp_build autoconf + find . -name aclocal.m4 | xargs git add -f + find . -name configure | xargs git add -f + find . -name config.h.in | xargs git add -f + find . -name config.guess | xargs git add -f + find . -name config.sub | xargs git add -f + find . -name install-sh | xargs git add -f + if ! git config user.name; then + git config user.email "you@example.com" + git config user.name "Your Name" + fi + git commit --no-verify -m 'Add generated configure files' + AUTOCONF=1 +fi +git archive --prefix otp/ -o "$TARGET" HEAD + +if [ "$AUTOCONF" = 1 ]; then + git reset --hard HEAD~1 +fi diff --git a/.github/scripts/restore-from-prebuilt.sh b/.github/scripts/restore-from-prebuilt.sh new file mode 100755 index 000000000000..9fff27c28379 --- /dev/null +++ b/.github/scripts/restore-from-prebuilt.sh @@ -0,0 +1,162 @@ +#!/bin/bash + +set -xe + +CACHE_SOURCE_DIR="$1" +TARGET="$2" +ARCHIVE="$3" +EVENT="$4" +DELETED="$5" +CHANGES="$9" + +if [ ! 
-f "${CACHE_SOURCE_DIR}/otp_src.tar.gz" ] || [ "${NO_CACHE}" = "true" ]; then + cp "${ARCHIVE}" "${TARGET}" + cp "${ARCHIVE}" "${CACHE_SOURCE_DIR}/otp_src.tar.gz" + exit 0 +fi + +TMP_DIR=$(mktemp -d) +CACHE_DIR="${TMP_DIR}" +ARCHIVE_DIR="${TMP_DIR}/archive" + +mkdir "${ARCHIVE_DIR}" + +################################# +## START WORK ON THE CACHED FILES +################################# +echo "::group::{Restore cached files}" +tar -C "${CACHE_DIR}/" -xzf "${CACHE_SOURCE_DIR}/otp_src.tar.gz" + +## If configure scripts have NOT changed, we can restore configure and other C/java programs +if [ -z "${CONFIGURE}" ] || [ "${CONFIGURE}" = "false" ]; then + tar -C "${CACHE_DIR}/" -xzf "${CACHE_SOURCE_DIR}/otp_cache.tar.gz" +fi + +## If bootstrap has been changed, we do not use the cached .beam files +EXCLUDE_BOOTSTRAP=() +if [ "${BOOTSTRAP}" = "true" ]; then + find "${CACHE_DIR}/otp/lib" -name "*.beam" -exec rm -f {} \; +else + EXCLUDE_BOOTSTRAP=(--exclude "bootstrap") +fi + +## Make a copy of the cache for debugging +mkdir "${TMP_DIR}/cache" +cp -rp "${CACHE_DIR}/otp" "${TMP_DIR}/cache/" + +CACHE_DIR="${CACHE_DIR}/otp" + +echo "::group::{Delete files from PR}" +## Delete any files that this PR deletes +for delete in $DELETED; do + if [ -d "${CACHE_DIR}/${delete}" ]; then + rm -r "${CACHE_DIR}/${delete}" + elif [ -f "${CACHE_DIR}/${delete}" ]; then + rm "${CACHE_DIR}/${delete}" + else + echo "Could not find $delete to delete" + exit 1 + fi +done + +################################## +## START WORK ON THE UPDATED FILES +################################## + +echo "::group::{Extract changed files}" +if [ -n "${ARCHIVE}" ]; then + ## Extract with updated timestamp (the -m flag) so that any change will trigger a rebuild + tar -C "${ARCHIVE_DIR}/" -xzmf "${ARCHIVE}" + + ## Directory permissions in the archive and cache are for some reason different... 
+ chmod -R g-w "${ARCHIVE_DIR}/" + + ## rlpgoD is the same as --archive, but without --times + RSYNC_ARGS=(-rlpgoD --itemize-changes --verbose --checksum --update "${EXCLUDE_BOOTSTRAP[@]}" "${ARCHIVE_DIR}/otp/" "${CACHE_DIR}/") + + CHANGES="${TMP_DIR}/changes" + PREV_CHANGES="${TMP_DIR}/prev-changes" + + touch "${PREV_CHANGES}" + + ## Below follows some rules about when we do not want to use the cache + ## The rules are run multiple times so that if any rule triggeres a delte + ## we will re-run the rules again with the new changes. + for i in $(seq 1 10); do + + echo "::group::{Run ${i} at pruning cache}" + + ## First do a dry run to see if we need to delete anything from cache + rsync --dry-run "${RSYNC_ARGS[@]}" | grep '^\(>\|c\)' > "${TMP_DIR}/changes" + cat "${TMP_DIR}/changes" + + if cmp -s "${CHANGES}" "${PREV_CHANGES}"; then + break; + fi + + ### If any parse transform is changed we recompile everything as we have + ### no idea what it may change. If the parse transform calls any other + ### modules we really should delete the cache for those as well, but + ### it is impossible for us to know which modules are used by a pt so + ### this has to be somekind of best effort. 
+ echo "::group::{Run ${i}: parse transforms}" + PARSE_TRANSFORMS=$(grep -r '^parse_transform(' "${CACHE_DIR}/" | grep "/lib/[^/]*/src/" | awk -F ':' '{print $1}' | uniq) + for pt in $PARSE_TRANSFORMS; do + if grep "$(basename "${pt}")" "${CHANGES}"; then + echo "Deleting entire cache as a parse transform has changed" >&2 + rm -rf "${CACHE_DIR:?}/" + fi + done + + echo "::group::{Run ${i}: yecc}" + ### if yecc has changed, need to recompile all .yrl files + if grep "yecc.erl$" "${CHANGES}"; then + echo "Deleting all .yrl files as yecc has changed" >&2 + find "${CACHE_DIR}/" -name "*.yrl" -exec rm -f {} \; + fi + + echo "::group::{Run ${i}: asn1}" + ### If asn1 has changed, need to re-compile all .asn1 files + if grep lib/asn1 "${CHANGES}"; then + echo "Deleting all .asn1 files as asn1 has changed" >&2 + find "${CACHE_DIR}/" -name "*.asn1" -exec rm -f {} \; + fi + + echo "::group::{Run ${i}: docs}" + ### If any of the doc generating tools change, we need to re-compile the docs + if grep "lib/\(xmerl\|erl_docgen\|edoc\)" "${CHANGES}"; then + echo "Deleting all docs as documentation tools have changed" >&2 + rm -rf "${CACHE_DIR}"/lib/*/doc/ "${CACHE_DIR}/erts/doc/" "${CACHE_DIR}/system/" + fi + + ### Find all behaviours in OTP and check if any them as changed, we need to + ### rebuild all files that use them. 
+ echo "::group::{Run ${i}: behaviours}" + BEHAVIOURS=$(grep -r "^-callback" "${CACHE_DIR}/" | grep "/lib/[^/]*/src/" | awk -F ':' '{print $1}' | uniq | sed 's:.*/\([^/.]*\)[.]erl$:\1:') + for behaviour in $BEHAVIOURS; do + if grep "${behaviour}[.]erl\$" "${CHANGES}"; then + echo "Deleting files using ${behaviour} has it has changed" >&2 + FILES=$(grep -r "^-behaviour(${behaviour})" "${CACHE_DIR}/" | grep "/lib/[^/]*/src/" | awk -F ':' '{print $1}') + rm -f $FILES + fi + done + + if [ "$i" = "10" ]; then + echo "Deleting entire cache as it did not stabalize in trime" >&2 + rm -rf "${CACHE_DIR:?}" + else + mv "${CHANGES}" "${PREV_CHANGES}" + fi + done + + echo "::group::{Sync changes over cached data}" + + ## Now we do the actual sync + rsync "${RSYNC_ARGS[@]}" +fi + +tar -czf "${TARGET}" -C "${TMP_DIR}" otp + +rm -rf "${TMP_DIR}" + +echo "::endgroup::" diff --git a/.github/scripts/sync-github-prs.es b/.github/scripts/sync-github-prs.es index bc033c3efe58..a09beb901f3b 100755 --- a/.github/scripts/sync-github-prs.es +++ b/.github/scripts/sync-github-prs.es @@ -5,9 +5,14 @@ %% into the Target folder. It tries its best to not create too large %% files so that gh will still be happy with us when this is published to %% gh pages +-module('sync-github-prs'). -mode(compile). main([Repo, Target]) -> + + io:format("Updating PRs in ~ts, current PRs are: ~p~n", + [Target, filelib:wildcard(filename:join(Target,"*"))]), + AllOpenPrs = ghapi("gh api --paginate -X GET /repos/"++Repo++"/pulls -f state=open"), %% Download all updates, there really should not be any to download as they %% are updated when a PR is updated, but we do it anyways just to be safe. @@ -26,7 +31,10 @@ main([Repo, Target]) -> false -> cmd("rm -rf " ++ filename:join(Target,PRNo)) end - end, AllPrs); + end, AllPrs), + + purge_prs(Target); + main([Repo, Target, PRNo]) -> handle_prs(Repo, Target, [ghapi("gh api /repos/"++Repo++"/pulls/"++PRNo)]). 
@@ -62,10 +70,15 @@ handle_pr(_Repo, Target, string:equal(HeadSha, Sha) andalso string:equal(Status, <<"completed">>) end, maps:get(<<"workflow_runs">>, Runs)) of {value, Run} -> - Ident = integer_to_list(maps:get(<<"id">>,Run)), + Ident = integer_to_list( + erlang:phash2( + {maps:get(<<"id">>,Run), ?MODULE:module_info(md5)})), io:format("Checking for ~ts~n", [filename:join(PRDir, Ident)]), case file:read_file_info(filename:join(PRDir, Ident)) of {error, enoent} -> + io:format("Did not find ~ts. Files in dir are: ~p~n", + [filename:join(PRDir, Ident), + filelib:wildcard(filename:join(PRDir, "*"))]), cmd("rm -rf "++PRDir), ok = file:make_dir(PRDir), ok = file:write_file(filename:join(PRDir,Ident), integer_to_list(Number)), @@ -91,11 +104,17 @@ handle_pr(_Repo, Target, end, Artifacts), CTLogsIndex = filename:join([PRDir,"ct_logs","index.html"]), case file:read_file_info(CTLogsIndex) of - {ok, _} -> ok; + {ok, _} -> + CTSuiteFiles = filename:join([PRDir,"ct_logs","ct_run*","*.logs","run.*","suite.log"]), + lists:foreach(fun purge_suite/1, filelib:wildcard(CTSuiteFiles)); _ -> ok = filelib:ensure_dir(CTLogsIndex), ok = file:write_file(CTLogsIndex, ["No test logs found for ", Sha]) end, + %% If we ever want to de-duplicate the docs, this command will create a + %% stable md5sum. + %% (cd $dir && find doc lib erts-* -type f \! -path "lib/jinterface-*" \! -name erlresolvelinks.js \! -name index.html \! -name release_notes.html \! -name users_guide.html \! -name internal_docs.html \! -name "*.eix" -exec md5sum {} \;) | sort -k 2 | awk "{print $1}" | md5sum + %% where $dir is the pr directory. DocIndex = filename:join([PRDir,"doc","index.html"]), case file:read_file_info(DocIndex) of {ok, _} -> ok; @@ -109,6 +128,72 @@ handle_pr(_Repo, Target, ok end. 
+%% We truncate the logs of all testcases of any suite that did not have any failures +purge_suite(SuiteFilePath) -> + {ok, SuiteFile} = file:read_file(SuiteFilePath), + SuiteDir = filename:dirname(SuiteFilePath), + Placeholder = "github truncated successful testcase", + case re:run(SuiteFile,"^=failed\s*\([0-9]+\)$",[multiline,{capture,all_but_first,binary}]) of + {match,[<<"0">>]} -> + io:format("Purging logs from: ~ts~n",[SuiteDir]), + ok = file:del_dir_r(filename:join(SuiteDir,"log_private")), + lists:foreach( + fun(File) -> + case filename:basename(File) of + "suite" ++ _ -> + ok; + "unexpected_io" ++_ -> + ok; + "cover.html" -> + ok; + _Else -> + file:write_file(File,Placeholder) + end + end, filelib:wildcard(filename:join(SuiteDir,"*.html"))); + _FailedTestcases -> + io:format("Purging logs from: ~ts~n",[SuiteDir]), + lists:foreach( + fun(File) -> + {ok, B} = file:read_file(File), + case re:run(B,"^=== Config value:",[multiline]) of + {match,_} -> + case re:run(B,"^=== successfully completed test case",[multiline]) of + {match, _} -> + file:write_file(File,Placeholder); + nomatch -> + ok + end; + nomatch -> + ok + end + end, filelib:wildcard(filename:join(SuiteDir,"*.html"))) + end. + +%% If we have more the 10 GB of PR data we need to remove some otherwise +%% github actions will not work them. So we purge the largest files until we +%% reach the 10 GB limit. +purge_prs(Target) -> + %% Start by deleting all data from common_test test runs as they are huge. + os:cmd("rm -rf "++Target++"*/ct_logs/ct_run*/*common_test_test*/run*/log_private/ct_run*"), + Files = string:split(cmd("find " ++ Target ++ " -type f -a " + "-name \\! suite.log.html -exec du -a {} \\+"),"\n",all), + SortedFiles = + lists:sort(fun([A|_]=As,[B|_]=Bs) -> + binary_to_integer(A) >= binary_to_integer(B) + end, [string:split(F,"\t") || F <- Files, F =/= <<>>]), + purge_prs(SortedFiles, Target, get_directory_size(Target)). 
purge_prs(Files, Target, Size) when Size > 10_000_000_000, Files =/= [] ->
+    {H,T} = lists:split(min(10, length(Files)), Files),
- ;; - esac - docker run -v "$PWD":/github otp \ - "/github/scripts/build-otp-tar -o /github/otp_clean_src.tar.gz /github/otp_src.tar.gz -b /buildroot/otp/ /buildroot/otp.tar.gz" - .github/scripts/release-docs.sh - .github/scripts/create-artifacts.sh downloads "${name}" - - ## Delete any artifacts that we should not upload - for artifact in dowloads/*; do - if ! echo "${RI[@]}" | grep "${artifact}" 2> /dev/null > /dev/null; then - rm -f "downloads/${artifact}" +if [ ${UPLOADED} = false ]; then + for name in "${MISSING_PREBUILD[@]}"; do + stripped_name=$(_strip_name "${name}") + release=$(echo "${stripped_name}" | awk -F. '{print $1}') + if [[ $release < 24 ]]; then + ## Releases before 24 are no longer supported and are a bit different + ## from 24+ so I've removed support for them + echo "Skipping old release ${name}" + continue; fi + echo "Building pre-build and docs for ${name}" + git clone https://github.com/erlang/otp -b "${name}" otp_src + if [ -f otp_src/.github/scripts/init-pre-release.sh ]; then + (cd otp_src && ERL_TOP=$(pwd) .github/scripts/init-pre-release.sh) + else + (cd otp_src && ERL_TOP=$(pwd) ../.github/scripts/init-pre-release.sh) + fi + (cd otp_src && BASE_USE_CACHE=false GITHUB_OUTPUT=.tmp ../.github/scripts/build-base-image.sh maint-${release} 64-bit) + docker build -t otp --build-arg ARCHIVE=otp_src/otp_src.tar.gz \ + -f otp_src/.github/dockerfiles/Dockerfile.64-bit . + docker run -v "$PWD":/github otp \ + "/github/scripts/build-otp-tar -o /github/otp_clean_src.tar.gz /github/otp_src.tar.gz -b /buildroot/otp/ /buildroot/otp.tar.gz" + .github/scripts/release-docs.sh + .github/scripts/create-artifacts.sh downloads "${name}" + + ## Delete any artifacts that we should not upload + for artifact in dowloads/*; do + if ! 
echo "${RI[@]}" | grep "$(basename "${artifact}")" 2> /dev/null > /dev/null; then
+        rm -f "${artifact}"
b/.github/workflows/main.yaml index ce50c86a1ebd..41f8c78b2925 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -7,11 +7,10 @@ ## not possible so we need to rebuild all of Erlang/OTP multiple ## times. ## -## When ghcr.io support using the GITHUB_TOKEN we should migrate -## over to use it instead as that should allow us to use the +## Now that we have migrated to ghcr.io we use the ## built-in caching mechanisms of docker/build-push-action@v2. ## However as things are now we use docker directly to make things -## work. +## work due to historical reasons. ## name: Build and check Erlang/OTP @@ -19,8 +18,11 @@ name: Build and check Erlang/OTP on: push: pull_request: + schedule: + - cron: 0 1 * * * env: + ## Equivalent to github.event_name == 'pull_request' ? github.base_ref : github.ref_name BASE_BRANCH: ${{ github.event_name == 'pull_request' && github.base_ref || github.ref_name }} jobs: @@ -30,75 +32,115 @@ jobs: runs-on: ubuntu-latest outputs: BASE_BUILD: ${{ steps.base-build.outputs.BASE_BUILD }} + changes: ${{ steps.changes.outputs.changes }} + all: ${{ steps.apps.outputs.all }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 + - uses: ./.github/actions/build-base-image + with: + BASE_BRANCH: ${{ env.BASE_BRANCH }} + BUILD_IMAGE: false + - name: Get applications + id: apps + run: | + .github/scripts/path-filters.sh > .github/scripts/path-filters.yaml + ## Print path-filters for debug purposes + cat .github/scripts/path-filters.yaml + ALL_APPS=$(grep '^[a-z_]*:' .github/scripts/path-filters.yaml | sed 's/:.*$//') + ALL_APPS=$(jq -n --arg inarr "${ALL_APPS}" '$inarr | split("\n")' | tr '\n' ' ') + echo "all=${ALL_APPS}" >> $GITHUB_OUTPUT + - uses: dorny/paths-filter@v2.11.1 + id: app-changes + with: + filters: .github/scripts/path-filters.yaml + - name: Override changes + id: changes + env: + ALL_APPS: ${{ steps.apps.outputs.all }} + CHANGED_APPS: ${{ steps.app-changes.outputs.changes }} + run: | + if ${{ 
github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'full-build-and-check') }} || ${{ github.event_name == 'schedule' }}; then + echo "changes=${ALL_APPS}" >> "$GITHUB_OUTPUT" + else + echo "changes=${CHANGED_APPS}" >> "$GITHUB_OUTPUT" + fi - name: Create initial pre-release tar - run: .github/scripts/init-pre-release.sh + run: .github/scripts/init-pre-release.sh otp_archive.tar.gz - name: Upload source tar archive - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 with: name: otp_git_archive - path: otp_src.tar.gz - - name: Docker login - uses: docker/login-action@v1 - with: - registry: docker.pkg.github.com - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Build BASE image - id: base-build - run: .github/scripts/build-base-image.sh "${BASE_BRANCH}" 64-bit - - name: Save BASE image - if: steps.base-build.outputs.BASE_BUILD == 're-built' - uses: actions/upload-artifact@v2 - with: - name: otp_docker_base - path: otp_docker_base.tar + path: otp_archive.tar.gz + - name: Cache pre-built tar archives + id: pre-built-cache + uses: actions/cache@v3.3.2 + with: + path: | + otp_src.tar.gz + otp_cache.tar.gz + key: prebuilt-${{ github.ref_name }}-${{ github.sha }} + restore-keys: | + prebuilt-${{ github.base_ref }}-${{ github.event.pull_request.base.sha }} + - uses: dorny/paths-filter@v2.11.1 + id: cache + with: + filters: | + no-cache: + - '.github/**' + deleted: + - deleted: '**' + bootstrap: + - 'bootstrap/**' + configure: + - '**.ac' + - '**.in' + list-files: shell + - name: Restore from cache + env: + NO_CACHE: ${{ steps.cache.outputs.no-cache }} + BOOTSTRAP: ${{ steps.cache.outputs.bootstrap }} + CONFIGURE: ${{ steps.cache.outputs.configure }} + run: | + .github/scripts/restore-from-prebuilt.sh "`pwd`" \ + "`pwd`/.github/otp.tar.gz" \ + "`pwd`/otp_archive.tar.gz" \ + '${{ github.event_name }}' \ + '${{ steps.cache.outputs.deleted_files }}' \ + '${{ 
steps.changes.outputs.changes }}' + - name: Upload restored cache + uses: actions/upload-artifact@v3.1.3 + if: runner.debug == 1 + with: + name: restored-cache + path: .github/otp.tar.gz - name: Build image run: | - mv otp_src.tar.gz .github/otp.tar.gz docker build --tag otp \ --build-arg MAKEFLAGS=-j$(($(nproc) + 2)) \ --file ".github/dockerfiles/Dockerfile.64-bit" \ .github/ - - name: Save Erlang/OTP image - run: | - docker run -v $PWD:/github --entrypoint "" otp \ - tar czf /github/otp-ubuntu-20.04.tar.gz /buildroot/ /otp/ - - name: Upload otp ubuntu image - uses: actions/upload-artifact@v2 - with: - name: otp-ubuntu-20.04 - path: otp-ubuntu-20.04.tar.gz - name: Build pre-built tar archives run: | docker run -v $PWD:/github --entrypoint "" otp \ - scripts/build-otp-tar -o /github/otp_clean_src.tar.gz /github/otp_src.tar.gz -b /buildroot/otp/ /buildroot/otp.tar.gz - - name: Upload pre-built tar archive - uses: actions/upload-artifact@v2 - with: - name: otp_prebuilt_no_chunks - path: otp_src.tar.gz - - changed-apps: - name: Calculate changed applications - runs-on: ubuntu-latest - outputs: - changes: ${{ steps.changes.outputs.changes }} - all: ${{ steps.apps.outputs.all }} - steps: - - uses: actions/checkout@v2 - - name: Get applications - id: apps + scripts/build-otp-tar -o /github/otp_clean_src.tar.gz /github/otp_src.tar.gz -b /buildroot/otp/ /github/otp_src.tar.gz + - name: Build cache run: | - .github/scripts/path-filters.sh > .github/scripts/path-filters.yaml - ALL_APPS=$(grep '^[a-z_]*:' .github/scripts/path-filters.yaml | sed 's/:.*$//') - ALL_APPS=$(jq -n --arg inarr "${ALL_APPS}" '$inarr | split("\n")' | tr '\n' ' ') - echo "::set-output name=all::${ALL_APPS}" - - uses: dorny/paths-filter@v2 - id: changes + if [ -f otp_cache.tar.gz ]; then + gunzip otp_cache.tar.gz + else + docker run -v $PWD:/github --entrypoint "" otp \ + bash -c 'cp ../otp_cache.tar /github/' + fi + docker run -v $PWD:/github --entrypoint "" otp \ + bash -c 'set -x; C_APPS=$(ls -d 
./lib/*/c_src); find Makefile ./make ./erts ./bin/`erts/autoconf/config.guess` ./lib/erl_interface ./lib/jinterface ${C_APPS} `echo "${C_APPS}" | sed -e 's:c_src$:priv:'` -type f -newer README.md \! -name "*.beam" \! -path "*/doc/*" | xargs tar --transform "s:^./:otp/:" -uvf /github/otp_cache.tar' + gzip otp_cache.tar + - name: Upload pre-built tar archive + uses: actions/upload-artifact@v3.1.3 with: - filters: .github/scripts/path-filters.yaml + name: otp_prebuilt + path: | + otp_src.tar.gz + otp_cache.tar.gz build-macos: name: Build Erlang/OTP (macOS) @@ -107,16 +149,16 @@ jobs: env: WXWIDGETS_VERSION: 3.1.5 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 - name: Download source archive - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3.0.2 with: - name: otp_prebuilt_no_chunks + name: otp_prebuilt - name: Cache wxWidgets id: wxwidgets-cache - uses: actions/cache@v2 + uses: actions/cache@v3.3.2 with: path: wxWidgets key: wxWidgets-${{ env.WXWIDGETS_VERSION }}-${{ runner.os }}-12 @@ -130,7 +172,7 @@ jobs: tar -xzf ./otp_src.tar.gz export PATH=$PWD/wxWidgets/release/bin:$PATH cd otp - $GITHUB_WORKSPACE/.github/scripts/build-macos.sh + $GITHUB_WORKSPACE/.github/scripts/build-macos.sh build_docs --disable-dynamic-ssl-lib tar -czf otp_macos_$(cat OTP_VERSION)_x86-64.tar.gz -C release . - name: Test Erlang @@ -142,7 +184,7 @@ jobs: ./bin/erl -noshell -eval '{wx_ref,_,_,_} = wx:new(), io:format("wx ok~n"), halt().' 
- name: Upload tarball - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 with: name: otp_prebuilt_macos_x86-64 path: otp/otp_macos_*_x86-64.tar.gz @@ -155,21 +197,17 @@ jobs: runs-on: macos-12 needs: pack steps: + - uses: actions/checkout@v4.1.0 - name: Download source archive - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3.0.2 with: - name: otp_prebuilt_no_chunks + name: otp_prebuilt - name: Compile Erlang run: | tar -xzf ./otp_src.tar.gz cd otp - export ERL_TOP=`pwd` - export MAKEFLAGS="-j$(($(nproc) + 2)) -O" - export ERLC_USE_SERVER=true - ./otp_build configure --xcomp-conf=./xcomp/erl-xcomp-arm64-ios.conf --without-ssl - ./otp_build boot -a - ./otp_build release -a + $GITHUB_WORKSPACE/.github/scripts/build-macos.sh --xcomp-conf=./xcomp/erl-xcomp-arm64-ios.conf --without-ssl - name: Package .xcframework run: | @@ -180,7 +218,7 @@ jobs: xcodebuild -create-xcframework -output ./liberlang.xcframework -library liberlang.a - name: Upload framework - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 with: name: ios_framework_${{ env.TARGET_ARCH }} path: otp/liberlang.xcframework @@ -195,7 +233,7 @@ jobs: runs-on: windows-2022 needs: pack steps: - - uses: Vampire/setup-wsl@v1.2.1 + - uses: Vampire/setup-wsl@v2.0.1 with: distribution: Ubuntu-18.04 @@ -206,10 +244,10 @@ jobs: shell: cmd run: | choco install openssl - move "c:\\Program Files\\OpenSSL-Win64" "c:\\OpenSSL-Win64" + IF EXIST "c:\\Program Files\\OpenSSL-Win64" (move "c:\\Program Files\\OpenSSL-Win64" "c:\\OpenSSL-Win64") ELSE (move "c:\\Program Files\\OpenSSL" "c:\\OpenSSL-Win64") - name: Cache wxWidgets - uses: actions/cache@v2 + uses: actions/cache@v3.3.2 with: path: wxWidgets key: wxWidgets-${{ env.WXWIDGETS_VERSION }}-${{ runner.os }} @@ -217,11 +255,13 @@ jobs: # actions/cache on Windows sometimes does not set cache-hit even though there was one. Setting it manually. 
- name: Set wxWidgets cache id: wxwidgets-cache + env: + WSLENV: GITHUB_OUTPUT/p run: | if [ -d wxWidgets ]; then - echo "::set-output name=cache-hit::true" + echo "cache-hit=true" >> $GITHUB_OUTPUT else - echo "::set-output name=cache-hit::false" + echo "cache-hit=false" >> $GITHUB_OUTPUT fi - name: Download wxWidgets @@ -249,9 +289,9 @@ jobs: nmake TARGET_CPU=amd64 BUILD=release SHARED=0 DIR_SUFFIX_CPU= -f makefile.vc - name: Download source archive - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3.0.2 with: - name: otp_prebuilt_no_chunks + name: otp_prebuilt - name: Compile Erlang run: | @@ -265,18 +305,46 @@ jobs: export ERTS_SKIP_DEPEND=true eval `./otp_build env_win32 x64` ./otp_build configure - if cat erts/CONF_INFO || cat lib/*/CONF_INFO || cat lib/*/SKIP || cat lib/SKIP-APPLICATIONS; then exit 1; fi + if cat erts/CONF_INFO || + grep -v "Static linking with OpenSSL 3.0" lib/*/CONF_INFO || + cat lib/*/SKIP || + cat lib/SKIP-APPLICATIONS; then + exit 1 + fi ./otp_build boot -a ./otp_build release -a cp /mnt/c/opt/local64/pgm/wxWidgets-${{ env.WXWIDGETS_VERSION }}/3rdparty/webview2/runtimes/win-x64/native/WebView2Loader.dll $ERL_TOP/release/win32/erts-*/bin/ ./otp_build installer_win32 - name: Upload installer - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 with: name: otp_win32_installer path: otp/release/win32/otp*.exe + build-flavors: + name: Build Erlang/OTP (Types and Flavors) + runs-on: ubuntu-latest + needs: pack + if: contains(needs.pack.outputs.changes, 'emulator') + + steps: + - uses: actions/checkout@v4.1.0 + - uses: ./.github/actions/build-base-image + with: + BASE_BRANCH: ${{ env.BASE_BRANCH }} + - name: Build Erlang/OTP flavors and types + run: | + TYPES="opt debug lcnt" + FLAVORS="emu jit" + for TYPE in ${TYPES}; do + for FLAVOR in ${FLAVORS}; do + echo "::group::{TYPE=$TYPE FLAVOR=$FLAVOR}" + docker run otp "make TYPE=$TYPE FLAVOR=$FLAVOR" + echo "::endgroup::" + done + done + build: name: 
Build Erlang/OTP runs-on: ubuntu-latest @@ -288,19 +356,15 @@ jobs: fail-fast: false steps: - - uses: actions/checkout@v2 - - name: Download source archive - uses: actions/download-artifact@v2 + - uses: actions/checkout@v4.1.0 + - uses: ./.github/actions/build-base-image with: - name: otp_prebuilt_no_chunks - - name: Docker login - uses: docker/login-action@v1 + BASE_BRANCH: ${{ env.BASE_BRANCH }} + TYPE: ${{ matrix.type }} + - name: Download source archive + uses: actions/download-artifact@v3.0.2 with: - registry: docker.pkg.github.com - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Build base image - run: .github/scripts/build-base-image.sh "${BASE_BRANCH}" ${{ matrix.type }} + name: otp_prebuilt - name: Build ${{ matrix.type }} image run: | mv otp_src.tar.gz .github/otp.tar.gz @@ -312,65 +376,34 @@ jobs: runs-on: ubuntu-latest needs: pack steps: - - uses: actions/checkout@v2 - ## Download docker images - - name: Docker login - uses: docker/login-action@v1 - with: - registry: docker.pkg.github.com - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Download base build - if: needs.pack.outputs.BASE_BUILD == 're-built' - uses: actions/download-artifact@v2 - with: - name: otp_docker_base - - name: Download otp build - uses: actions/download-artifact@v2 - with: - name: otp-ubuntu-20.04 - - name: Restore docker image - run: .github/scripts/restore-otp-image.sh "${BASE_BRANCH}" - - ## Build pre-built tar with chunks - - name: Build doc chunks - run: | - docker build -t otp - < /proc/sys/kernel/core_pattern" docker run --ulimit core=-1 --ulimit nofile=5000:5000 --pids-limit 512 \ -e CTRUN_TIMEOUT=90 -e SPEC_POSTFIX=gh \ + -e TEST_NEEDS_RELEASE=true -e "RELEASE_ROOT=/buildroot/otp/Erlang ∅⊤℞" \ -e EXTRA_ARGS="-ct_hooks cth_surefire [{path,\"/buildroot/otp/$DIR/make_test_dir/${{ matrix.type }}_junit.xml\"}]" \ -v "$PWD/make_test_dir:/buildroot/otp/$DIR/make_test_dir" \ + -v 
"$PWD/scripts:/buildroot/otp/scripts" \ otp "make TYPE=${TYPE} && make ${APP}_test TYPE=${TYPE}" ## Rename os_mon to debug for debug build if [ "$APP" != "${{ matrix.type }}" ]; then @@ -479,7 +484,7 @@ jobs: sudo bash -c "chown -R `whoami` make_test_dir && chmod -R +r make_test_dir" tar czf ${{ matrix.type }}_test_results.tar.gz make_test_dir - name: Upload test results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 if: always() with: name: ${{ matrix.type }}_test_results @@ -491,18 +496,12 @@ jobs: if: always() # Run even if the need has failed needs: test steps: - - uses: actions/checkout@v2 - - name: Docker login - uses: docker/login-action@v1 + - uses: actions/checkout@v4.1.0 + - uses: ./.github/actions/build-base-image with: - registry: docker.pkg.github.com - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} + BASE_BRANCH: ${{ env.BASE_BRANCH }} - name: Download test results - uses: actions/download-artifact@v2 - - run: mv otp-ubuntu-20.04/otp-ubuntu-20.04.tar.gz . 
- - name: Restore docker image - run: .github/scripts/restore-otp-image.sh "${BASE_BRANCH}" + uses: actions/download-artifact@v3.0.2 - name: Merge test results run: | shopt -s nullglob @@ -532,14 +531,14 @@ jobs: -e 's:\(file="erts/\)make_test_dir/[^/]*:\1test:g' \ make_test_dir/*_junit.xml - name: Upload test results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 if: always() with: name: test_results path: test_results.tar.gz - name: Upload Test Results if: always() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 with: name: Unit Test Results path: | @@ -560,22 +559,22 @@ jobs: run: | TAG=${GITHUB_REF#refs/tags/} VSN=${TAG#OTP-} - echo "::set-output name=tag::${TAG}" - echo "::set-output name=vsn::${VSN}" + echo "tag=${TAG}" >> $GITHUB_OUTPUT + echo "vsn=${VSN}" >> $GITHUB_OUTPUT - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 ## Publish the pre-built archive and docs - name: Download source archive - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3.0.2 with: name: otp_prebuilt - name: Download html docs - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3.0.2 with: name: otp_doc_html - name: Download man docs - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3.0.2 with: name: otp_doc_man @@ -613,7 +612,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Upload - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3.1.3 with: name: Event File path: ${{ github.event_path }} diff --git a/.github/workflows/pr-comment.yaml b/.github/workflows/pr-comment.yaml index b139d8232b38..800ea07d040e 100644 --- a/.github/workflows/pr-comment.yaml +++ b/.github/workflows/pr-comment.yaml @@ -19,7 +19,7 @@ jobs: outputs: result: ${{ steps.pr-number.outputs.result }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 - name: Fetch PR number id: pr-number env: @@ -35,9 +35,9 @@ jobs: needs: pr-number if: 
github.event.action == 'requested' && needs.pr-number.outputs.result != '' steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 ## We create an initial comment with some useful help to the user - - uses: actions/github-script@v5 + - uses: actions/github-script@v6.4.1 with: script: | const script = require('./.github/scripts/pr-comment.js'); @@ -54,7 +54,7 @@ jobs: needs.pr-number.outputs.result != '' && github.event.workflow_run.conclusion != 'skipped' steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 - name: Download and Extract Artifacts id: extract env: @@ -74,19 +74,19 @@ jobs: done if [ -d "Unit Test Results" ]; then - echo "::set-output name=HAS_TEST_ARTIFACTS::true" + echo "HAS_TEST_ARTIFACTS=true" >> $GITHUB_OUTPUT else - echo "::set-output name=HAS_TEST_ARTIFACTS::false" + echo "HAS_TEST_ARTIFACTS=false" >> $GITHUB_OUTPUT fi - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 with: token: ${{ secrets.ERLANG_TOKEN }} repository: 'erlang/erlang.github.io' path: erlang.github.io - name: Publish CT Test Results - uses: EnricoMi/publish-unit-test-result-action@v1 + uses: EnricoMi/publish-unit-test-result-action@v2.10.0 if: steps.extract.outputs.HAS_TEST_ARTIFACTS == 'true' with: commit: ${{ github.event.workflow_run.head_sha }} @@ -107,17 +107,22 @@ jobs: "${{ needs.pr-number.outputs.result }}" - name: Deploy to github pages 🚀 - uses: JamesIves/github-pages-deploy-action@v4.2.2 - with: - token: ${{ secrets.ERLANG_TOKEN }} - branch: master # The branch the action should deploy to. - folder: erlang.github.io # The folder the action should deploy. - repository-name: erlang/erlang.github.io - single-commit: true + run: | + cd erlang.github.io + set -x + git config user.name github-actions + git config user.email github-actions@github.com + git add . + git add -u + git update-index --refresh + if ! 
git diff-index --quiet HEAD --; then + git commit -m "Update github pages content" + git push origin master + fi - ## Append some usefull links and tips to the test results posted by + ## Append some useful links and tips to the test results posted by ## Publish CT Test Results - - uses: actions/github-script@v5 + - uses: actions/github-script@v6.4.1 if: always() with: script: | diff --git a/.github/workflows/sync-github-prs.yaml b/.github/workflows/sync-github-prs.yaml new file mode 100644 index 000000000000..666e0e041f2d --- /dev/null +++ b/.github/workflows/sync-github-prs.yaml @@ -0,0 +1,45 @@ +name: Sync all github prs with erlang.github.io/prs/ + +## Sync all github prs twice a day +on: + workflow_dispatch: + schedule: + ## In UTC + - cron: '0 */4 * * *' + +jobs: + + sync-prs: + if: github.repository == 'erlang/otp' + concurrency: erlang.github.io-deploy + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v4.1.0 + - uses: actions/checkout@v4.1.0 + with: + token: ${{ secrets.ERLANG_TOKEN }} + repository: 'erlang/erlang.github.io' + path: erlang.github.io + - name: Update PRs + env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + run: | + git clone https://github.com/talentdeficit/jsx + (cd jsx && rebar3 compile) + mkdir -p "${GITHUB_WORKSPACE}/erlang.github.io/prs/" + touch "${GITHUB_WORKSPACE}/erlang.github.io/.nojekyll" + .github/scripts/sync-github-prs.es erlang/otp "${GITHUB_WORKSPACE}/erlang.github.io/prs/" + + - name: Deploy to github pages 🚀 + run: | + cd erlang.github.io + set -x + git config user.name github-actions + git config user.email github-actions@github.com + git add . + git add -u + git update-index --refresh + if ! 
git diff-index --quiet HEAD --; then + git commit -m "Update github pages content" + git push origin master + fi diff --git a/.github/workflows/sync-github-releases.yaml b/.github/workflows/sync-github-releases.yaml index af3245f1ba04..192cc81a5028 100644 --- a/.github/workflows/sync-github-releases.yaml +++ b/.github/workflows/sync-github-releases.yaml @@ -1,6 +1,6 @@ name: Sync all github releases with erlang.org -## Sync all github releases + prs every hour +## Sync all github releases every hour on: workflow_dispatch: schedule: @@ -15,45 +15,14 @@ jobs: concurrency: sync-github-releases runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4.1.0 ## We need to login to the package registry in order to pull ## the base debian image. - name: Docker login - run: docker login https://docker.pkg.github.com -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} + run: docker login https://ghcr.io -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} - name: Sync releases env: ERLANG_ORG_TOKEN: ${{ secrets.TRIGGER_ERLANG_ORG_BUILD }} run: > .github/scripts/sync-github-releases.sh ${{ github.repository }} "Bearer ${{ secrets.GITHUB_TOKEN }}" "^[2-9][1-9]\\..*" 25m - - sync-prs: - if: github.repository == 'erlang/otp' - concurrency: erlang.github.io-deploy - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v2 - with: - token: ${{ secrets.ERLANG_TOKEN }} - repository: 'erlang/erlang.github.io' - path: erlang.github.io - - uses: actions/checkout@v2 - - name: Update PRs - env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - run: | - git clone https://github.com/talentdeficit/jsx - (cd jsx && rebar3 compile) - rm -rf "${GITHUB_WORKSPACE}/erlang.github.io/.git" - mkdir -p "${GITHUB_WORKSPACE}/erlang.github.io/prs/" - touch "${GITHUB_WORKSPACE}/erlang.github.io/.nojekyll" - .github/scripts/sync-github-prs.es erlang/otp "${GITHUB_WORKSPACE}/erlang.github.io/prs/" - - - name: Deploy to github pages 🚀 - uses: 
JamesIves/github-pages-deploy-action@v4.2.2 - with: - token: ${{ secrets.ERLANG_TOKEN }} - branch: master # The branch the action should deploy to. - folder: erlang.github.io # The folder the action should deploy. - repository-name: erlang/erlang.github.io - single-commit: true diff --git a/.github/workflows/update-base.yaml b/.github/workflows/update-base.yaml index f043fab039c4..17e67faf8dd9 100644 --- a/.github/workflows/update-base.yaml +++ b/.github/workflows/update-base.yaml @@ -18,22 +18,23 @@ jobs: strategy: matrix: type: [debian-base,ubuntu-base,i386-debian-base] - branch: [master, maint, maint-25] + branch: [master, maint, maint-26] + fail-fast: false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4.1.0 with: ref: ${{ matrix.branch }} - name: Docker login - uses: docker/login-action@v2 + uses: docker/login-action@v3.0.0 with: - registry: docker.pkg.github.com + registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image id: base run: >- - BASE_TAG=docker.pkg.github.com/${{ github.repository_owner }}/otp/${{ matrix.type }} + BASE_TAG=ghcr.io/${{ github.repository_owner }}/otp/${{ matrix.type }} BASE_USE_CACHE=false .github/scripts/build-base-image.sh "${{ matrix.branch }}" - name: Push master image From af5d82af8c9ad41673d0c77db315d77e06af5272 Mon Sep 17 00:00:00 2001 From: Lukas Larsson Date: Fri, 27 Oct 2023 07:15:42 +0200 Subject: [PATCH 2/2] gh: Don't test unicode space paths in OTP 25 --- .github/dockerfiles/Dockerfile.64-bit | 4 ++-- .github/workflows/main.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/dockerfiles/Dockerfile.64-bit b/.github/dockerfiles/Dockerfile.64-bit index 1cdb8cfd687c..1c0636ef8bcf 100644 --- a/.github/dockerfiles/Dockerfile.64-bit +++ b/.github/dockerfiles/Dockerfile.64-bit @@ -4,7 +4,7 @@ FROM $BASE ARG MAKEFLAGS=$MAKEFLAGS ENV MAKEFLAGS=$MAKEFLAGS \ ERL_TOP=/buildroot/otp \ - PATH="/Erlang ∅⊤℞/bin":/buildroot/otp/bin:$PATH 
+ PATH="/Erlang/bin":/buildroot/otp/bin:$PATH ARG ARCHIVE=./otp.tar.gz COPY $ARCHIVE /buildroot/otp.tar.gz @@ -17,7 +17,7 @@ ENV CFLAGS="-O2 -g -Werror" ## Configure (if not cached), check that no application are disabled and then make RUN if [ ! -f Makefile ]; then \ touch README.md && \ - ./configure --prefix="/Erlang ∅⊤℞" && \ + ./configure --prefix="/Erlang" && \ if cat lib/*/CONF_INFO || cat lib/*/SKIP || cat lib/SKIP-APPLICATIONS; then exit 1; fi && \ find . -type f -newer README.md | xargs tar --transform 's:^./:otp/:' -cf ../otp_cache.tar; \ fi && \ diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index 41f8c78b2925..38ce3e637165 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -468,7 +468,7 @@ jobs: sudo bash -c "echo 'core.%p' > /proc/sys/kernel/core_pattern" docker run --ulimit core=-1 --ulimit nofile=5000:5000 --pids-limit 512 \ -e CTRUN_TIMEOUT=90 -e SPEC_POSTFIX=gh \ - -e TEST_NEEDS_RELEASE=true -e "RELEASE_ROOT=/buildroot/otp/Erlang ∅⊤℞" \ + -e TEST_NEEDS_RELEASE=true -e "RELEASE_ROOT=/buildroot/otp/Erlang" \ -e EXTRA_ARGS="-ct_hooks cth_surefire [{path,\"/buildroot/otp/$DIR/make_test_dir/${{ matrix.type }}_junit.xml\"}]" \ -v "$PWD/make_test_dir:/buildroot/otp/$DIR/make_test_dir" \ -v "$PWD/scripts:/buildroot/otp/scripts" \