diff --git a/.decent_ci-Linux.yaml b/.decent_ci-Linux.yaml new file mode 100644 index 00000000000..05031b75d59 --- /dev/null +++ b/.decent_ci-Linux.yaml @@ -0,0 +1,34 @@ +compilers: + - name: "gcc" + version: "11.4" + cmake_extra_flags: -DLINK_WITH_PYTHON:BOOL=ON -DPython_REQUIRED_VERSION:STRING=3.12.2 -DPython_ROOT_DIR:PATH=~/.pyenv/versions/3.12.2/ -DBUILD_FORTRAN:BOOL=ON -DBUILD_TESTING:BOOL=ON -DENABLE_REGRESSION_TESTING:BOOL=ON -DREGRESSION_BASELINE_PATH:PATH=$REGRESSION_BASELINE -DREGRESSION_SCRIPT_PATH:PATH=$REGRESSION_DIR -DREGRESSION_BASELINE_SHA:STRING=$REGRESSION_BASELINE_SHA -DCOMMIT_SHA:STRING=$COMMIT_SHA -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DBUILD_PERFORMANCE_TESTS:BOOL=ON -DVALGRIND_ANALYZE_PERFORMANCE_TESTS:BOOL=ON -DENABLE_PCH:BOOL=OFF + collect_performance_results: true + s3_upload_bucket: energyplus + + - name: "gcc" + version: "11.4" + build_type: Debug + cmake_extra_flags: -DLINK_WITH_PYTHON:BOOL=ON -DPython_REQUIRED_VERSION:STRING=3.12.2 -DPython_ROOT_DIR:PATH=~/.pyenv/versions/3.12.2/ -DBUILD_FORTRAN:BOOL=ON -DBUILD_TESTING:BOOL=ON -DENABLE_REGRESSION_TESTING:BOOL=OFF -DCOMMIT_SHA:STRING=$COMMIT_SHA -DENABLE_COVERAGE:BOOL=ON -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DENABLE_PCH:BOOL=OFF + coverage_enabled: true + coverage_base_dir: src/EnergyPlus + coverage_pass_limit: 41.0 + coverage_warn_limit: 40.0 + coverage_s3_bucket: energyplus + build_tag: UnitTestsCoverage + ctest_filter: -E "integration.*" + skip_regression: true + skip_packaging: true + + - name: "gcc" + version: "11.4" + build_type: Debug + cmake_extra_flags: -DLINK_WITH_PYTHON:BOOL=ON -DPython_REQUIRED_VERSION:STRING=3.12.2 -DPython_ROOT_DIR:PATH=~/.pyenv/versions/3.12.2/ -DBUILD_FORTRAN:BOOL=ON -DBUILD_TESTING:BOOL=ON -DENABLE_REGRESSION_TESTING:BOOL=OFF -DCOMMIT_SHA:STRING=$COMMIT_SHA -DENABLE_COVERAGE:BOOL=ON -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DENABLE_PCH:BOOL=OFF + coverage_enabled: true + coverage_base_dir: src/EnergyPlus + coverage_pass_limit: 66.0 + coverage_warn_limit: 67.0 + coverage_s3_bucket: energyplus + build_tag: IntegrationCoverage + ctest_filter: -R "integration.*" + skip_regression: true + skip_packaging: true diff --git a/.decent_ci-MacOS.yaml b/.decent_ci-MacOS.yaml new file mode 100644 index 00000000000..33ee61f4ab4 --- /dev/null +++ b/.decent_ci-MacOS.yaml @@ -0,0 +1,3 @@ +compilers: + - name: clang + cmake_extra_flags: -DCMAKE_OSX_DEPLOYMENT_TARGET=10.15 -DBUILD_FORTRAN=ON -DBUILD_TESTING:BOOL=ON -DENABLE_REGRESSION_TESTING:BOOL=ON -DREGRESSION_BASELINE_PATH:PATH=$REGRESSION_BASELINE -DREGRESSION_SCRIPT_PATH:PATH=$REGRESSION_DIR -DREGRESSION_BASELINE_SHA:STRING=$REGRESSION_BASELINE_SHA -DCOMMIT_SHA=$COMMIT_SHA -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DLINK_WITH_PYTHON=ON -DPython_REQUIRED_VERSION:STRING=3.12.2 diff --git a/.decent_ci-Windows.yaml b/.decent_ci-Windows.yaml new file mode 100644 index 00000000000..496d53df1f3 --- /dev/null +++ b/.decent_ci-Windows.yaml @@ -0,0 +1,6 @@ +compilers: + - name: Visual Studio + version: 16 + architecture: Win64 + cmake_extra_flags: -DBUILD_FORTRAN:BOOL=ON -DBUILD_TESTING:BOOL=ON -DCOMMIT_SHA=%COMMIT_SHA% -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DLINK_WITH_PYTHON=ON -DPython_EXECUTABLE:PATH=C:/Users/elee/AppData/Local/Programs/Python/Python312/python.exe + skip_regression: true diff --git a/.decent_ci.yaml b/.decent_ci.yaml new file mode 100644 index 00000000000..011ef0a2485 --- /dev/null +++ b/.decent_ci.yaml @@ -0,0 +1,12 @@ +results_repository : Myoldmopar/EnergyPlusBuildResults +results_path : _posts +results_base_url : 
https://myoldmopar.github.io/EnergyPlusBuildResults +regression_repository : NREL/EnergyPlusRegressionTool +regression_branch : BumpToBoto3 # this is the branch of NREL/EnergyPlusRegressionTool to use (usually main) +regression_baseline_default : develop # this is the NREL/EnergyPlus branch to use as the baseline for regressions +regression_baseline_develop : "" +regression_baseline_master : "" +notification_recipients: + - myoldmopar +aging_pull_requests_notification: true +aging_pull_requests_numdays: 28 diff --git a/.github/disabled-workflows/mac_test.yml b/.github/disabled-workflows/mac_test.yml new file mode 100644 index 00000000000..d0ea8050acc --- /dev/null +++ b/.github/disabled-workflows/mac_test.yml @@ -0,0 +1,95 @@ +name: Mac Testing + +on: [push] + +env: + FC: /usr/local/bin/gfortran-9 + DO_REGRESSIONS: false + INSTALL_DEPENDENCIES: true + +jobs: + build: + runs-on: macos-10.15 + + steps: + - name: Checkout Branch + uses: actions/checkout@v2 + with: + path: 'clone_branch' + + - name: Checkout Baseline + if: ${{ env.DO_REGRESSIONS == 'true' }} + uses: actions/checkout@v2 + with: + repository: 'NREL/EnergyPlus' + ref: 'develop' + path: 'clone_baseline' + fetch-depth: '1' + + - name: Checkout Regressions + if: ${{ env.DO_REGRESSIONS == 'true' }} + uses: actions/checkout@v2 + with: + repository: 'NREL/EnergyPlusRegressionTool' + ref: 'master' + path: 'clone_regressions' + fetch-depth: '1' + + - name: Set up Python 3.7 + if: ${{ env.INSTALL_DEPENDENCIES == 'true' }} + uses: actions/setup-python@v2 + with: + python-version: 3.7 + + - name: Install Python dependencies + if: ${{ env.INSTALL_DEPENDENCIES == 'true' && env.DO_REGRESSIONS == 'true'}} + run: | + python -m pip install --upgrade pip + pip install beautifulsoup4 soupsieve boto + + - name: Create Baseline Build Directory + if: ${{ env.DO_REGRESSIONS == 'true' }} + run: cmake -E make_directory ${{runner.workspace}}/EnergyPlus/clone_baseline/build + + - name: Configure Baseline + if: ${{ env.DO_REGRESSIONS == 'true' }} + shell: bash + working-directory: ${{runner.workspace}}/EnergyPlus/clone_baseline/build + run: cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_FORTRAN=ON -DBUILD_TESTING:BOOL=ON -DCOMMIT_SHA=$COMMIT_SHA -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DLINK_WITH_PYTHON=ON $GITHUB_WORKSPACE/clone_baseline + + - name: Build Baseline + if: ${{ env.DO_REGRESSIONS == 'true' }} + working-directory: ${{runner.workspace}}/EnergyPlus/clone_baseline/build + shell: bash + run: cmake --build . -j 2 + + - name: Test Baseline + if: ${{ env.DO_REGRESSIONS == 'true' }} + working-directory: ${{runner.workspace}}/EnergyPlus/clone_baseline/build + shell: bash + run: ctest -R 1ZoneUncontrolled + + - name: Create Branch Build Directory + run: cmake -E make_directory ${{runner.workspace}}/EnergyPlus/clone_branch/build + + - name: Configure Branch without Regressions + if: ${{ env.DO_REGRESSIONS != 'true' }} + shell: bash + working-directory: ${{runner.workspace}}/EnergyPlus/clone_branch/build + run: cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_FORTRAN=ON -DBUILD_TESTING:BOOL=ON -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DLINK_WITH_PYTHON=ON .. 
+ + - name: Configure Branch with Regressions + if: ${{ env.DO_REGRESSIONS == 'true' }} + shell: bash + working-directory: ${{runner.workspace}}/EnergyPlus/clone_branch/build + run: cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_FORTRAN=ON -DBUILD_TESTING:BOOL=ON -DENABLE_REGRESSION_TESTING:BOOL=ON -DREGRESSION_BASELINE_PATH:PATH=${{runner.workspace}}/clone_baseline/build -DREGRESSION_SCRIPT_PATH:PATH=${{runner.workspace}}/clone_regressions/build -DREGRESSION_BASELINE_SHA:STRING=UNNKOWN_SHA -DCOMMIT_SHA=${{github.sha}} -DENABLE_GTEST_DEBUG_MODE:BOOL=OFF -DLINK_WITH_PYTHON=ON .. + + - name: Build Branch + working-directory: ${{runner.workspace}}/EnergyPlus/clone_branch/build + shell: bash + run: cmake --build . -j 2 + + - name: Test Branch + working-directory: ${{runner.workspace}}/EnergyPlus/clone_branch/build + shell: bash + run: ctest -j 2 diff --git a/.github/disabled-workflows/release.yml b/.github/disabled-workflows/release.yml new file mode 100644 index 00000000000..d61aa7beebd --- /dev/null +++ b/.github/disabled-workflows/release.yml @@ -0,0 +1,211 @@ +name: Releases + +on: + push: + tags: + - '*' + +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_TYPE: Release + +jobs: + release: + runs-on: ${{ matrix.os }} + continue-on-error: ${{ matrix.allow_failure }} + strategy: + # fail-fast: Default is true, switch to false to allow one platform to fail and still run others + fail-fast: false + matrix: + build_name: [Windows_x64, Windows_x86, Ubuntu_18.04, Ubuntu_20.04, macOS_10.15] + include: + - build_name: Windows_x64 + os: windows-2019 + arch: x64 + allow_failure: false + CMAKE_GENERATOR_PLATFORM: x64 + package-arch: x86_64 + BINARY_EXT: exe + COMPRESSED_EXT: zip + QT_OS_NAME: windows + - build_name: Windows_x86 + os: windows-2019 + arch: x86 + allow_failure: false + CMAKE_GENERATOR_PLATFORM: Win32 + package-arch: i386 + BINARY_EXT: exe + COMPRESSED_EXT: zip + QT_OS_NAME: windows + - build_name: Ubuntu_18.04 + os: ubuntu-18.04 + arch: x64 + allow_failure: false + package-arch: x86_64 + BINARY_EXT: run + COMPRESSED_EXT: tar.gz + SH_EXT: sh + QT_OS_NAME: linux + - build_name: Ubuntu_20.04 + os: ubuntu-20.04 + arch: x64 + allow_failure: false + package-arch: x86_64 + BINARY_EXT: run + COMPRESSED_EXT: tar.gz + SH_EXT: sh + QT_OS_NAME: linux + - build_name: macOS_10.15 + os: macos-10.15 + arch: x64 + allow_failure: false + package-arch: x86_64 + BINARY_EXT: dmg + COMPRESSED_EXT: tar.gz + SH_EXT: sh + QT_OS_NAME: mac + MACOSX_DEPLOYMENT_TARGET: 10.15 + SDKROOT: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + architecture: ${{ matrix.arch }} + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install aqtinstall + + - name: Install System dependencies and LaTeX + shell: bash + run: | + set -x + if [ "$RUNNER_OS" == "Linux" ]; then + echo "Using Apt to install dependencies" + sudo apt update + sudo apt install texlive texlive-xetex texlive-science libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev + + elif [ "$RUNNER_OS" == "macOS" ]; then + echo "Setting up MACOSX_DEPLOYMENT_TARGET and SDKROOT" + echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.MACOSX_DEPLOYMENT_TARGET }} >> $GITHUB_ENV + echo SDKROOT=${{ matrix.SDKROOT }} >> $GITHUB_ENV + # The MACOSX_DEPLOYMENT_TARGET environment variable sets the default value for the CMAKE_OSX_DEPLOYMENT_TARGET variable. 
+ # echo CMAKE_MACOSX_DEPLOYMENT_TARGET='-DCMAKE_OSX_DEPLOYMENT_TARGET=$MACOSX_DEPLOYMENT_TARGET' >> $GITHUB_ENV + + echo "Using brew to install mactex and adding it to PATH" + brew cask install mactex + echo "/Library/TeX/texbin" >> $GITHUB_PATH + + echo "Setting FC (fortran compiler)" + echo FC=/usr/local/bin/gfortran-9 >> $GITHUB_ENV + + elif [ "$RUNNER_OS" == "Windows" ]; then + + echo "Setting CMAKE_GENERATOR options equivalent to ='-G \"Visual Studio 16 2019\" -A ${{ matrix.CMAKE_GENERATOR_PLATFORM }}'" + echo CMAKE_GENERATOR='Visual Studio 16 2019' >> $GITHUB_ENV + echo CMAKE_GENERATOR_PLATFORM=${{ matrix.CMAKE_GENERATOR_PLATFORM }} >> $GITHUB_ENV + + # echo FC="C:/msys64/mingw64/bin/x86_64-w64-mingw32-gfortran.exe" >> $GITHUB_ENV + + echo "Downloading MiKTeX CLI installer" + # We download from a specific miror already + curl -L -O https://ctan.math.illinois.edu/systems/win32/miktex/setup/windows-x64/miktexsetup-4.0-x64.zip + unzip miktexsetup-4.0-x64.zip + + echo "Setting up the local package directory via download" + ./miktexsetup --verbose \ + --local-package-repository=C:/ProgramData/MiKTeX-Repo \ + --remote-package-repository="https://ctan.math.illinois.edu/systems/win32/miktex/tm/packages/" \ + --package-set=basic \ + download + + echo "Installing from the local package directory previously set up" + ./miktexsetup --verbose \ + --local-package-repository=C:/ProgramData/MiKTeX-Repo \ + --package-set=basic \ + --shared \ + install + + echo "Adding MiKTeX bin folder to PATH and to GITHUB_PATH" + echo "C:/Program Files/MiKTeX/miktex/bin/x64/" >> $GITHUB_PATH + export PATH="/c/Program Files/MiKTeX/miktex/bin/x64/:$PATH" + + echo "Configuring MiKTeX to install missing packages on the fly" + initexmf --admin --verbose --set-config-value='[MPM]AutoInstall=1' + + echo "Configure default mirror for packages" + mpm --admin --set-repository="https://ctan.math.illinois.edu/systems/win32/miktex/tm/packages/" + # Avoid annoying warning: "xelatex: major issue: So far, you have not checked for updates as a MiKTeX user." + mpm --find-updates + mpm --admin --find-updates + fi; + + #- name: Build Test Document (will install missing packages) + #working-directory: ./doc/test + #shell: bash + #run: | + #set -x + #xelatex dependencies.tex + + - name: Install IFW + shell: bash + run: | + set -x + out_dir=${{ runner.workspace }}/Qt + if [ "$RUNNER_OS" == "Windows" ]; then + out_dir="C:/Qt" + fi; + + aqt tool ${{ matrix.QT_OS_NAME }} tools_ifw 3.2.2 qt.tools.ifw.32 --outputdir="$out_dir" + echo "$out_dir/Tools/QtInstallerFramework/3.2/bin" >> $GITHUB_PATH + + - name: Create Build Directory + run: cmake -E make_directory ./build/ + + - name: Configure CMake + working-directory: ./build + shell: bash + run: | + set -x + cmake -DLINK_WITH_PYTHON=ON -DBUILD_FORTRAN=ON -DBUILD_PACKAGE:BOOL=ON \ + -DDOCUMENTATION_BUILD="BuildWithAll" -DTEX_INTERACTION="batchmode" \ + ../ + + - name: Build Package + working-directory: ./build + shell: bash + run: cmake --build . 
--target package -j 2 --config $BUILD_TYPE + + - name: Upload Zip to release + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: build/EnergyPlus-*-${{ matrix.package-arch }}.${{ matrix.COMPRESSED_EXT }} + tag: ${{ github.ref }} + overwrite: true + file_glob: true + + - name: Upload IFW to release + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: build/EnergyPlus-*-${{ matrix.package-arch }}.${{ matrix.BINARY_EXT }} + tag: ${{ github.ref }} + overwrite: true + file_glob: true + + - name: Upload SH to release + if: runner.os == 'Linux' + uses: svenstaro/upload-release-action@v2 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: build/EnergyPlus-*-${{ matrix.package-arch }}.${{ matrix.SH_EXT }} + tag: ${{ github.ref }} + overwrite: true + file_glob: true + diff --git a/.github/disabled-workflows/windows_test.yml b/.github/disabled-workflows/windows_test.yml new file mode 100644 index 00000000000..8d31333d916 --- /dev/null +++ b/.github/disabled-workflows/windows_test.yml @@ -0,0 +1,36 @@ +name: Windows 64-bit Testing + +on: [push] + +env: + CMAKE_Fortran_COMPILER: "/c/msys64/mingw64/bin/x86_64-w64-mingw32-gfortran.exe" + +jobs: + windows_test_64: + runs-on: windows-2019 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + architecture: 'x64' + + - name: Create Build Directory + run: cmake -E make_directory ${{runner.workspace}}/EnergyPlus/build + + - name: Configure CMake + shell: bash + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: cmake -G "Visual Studio 16 2019" -A x64 -DLINK_WITH_PYTHON=ON -DBUILD_TESTING=ON -DBUILD_FORTRAN=ON .. + + - name: Build EnergyPlus + working-directory: ${{runner.workspace}}/EnergyPlus/build + shell: bash + run: cmake --build . -j 2 --config Release + + - name: Run Tests + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: ctest -j 2 -C Release diff --git a/.github/unfinished-workflows/linux_test.yml b/.github/unfinished-workflows/linux_test.yml new file mode 100644 index 00000000000..869b13c94c7 --- /dev/null +++ b/.github/unfinished-workflows/linux_test.yml @@ -0,0 +1,43 @@ +name: Linux Test - Release Mode + +on: [push] + +jobs: + build: + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + + - name: Install APT dependencies + # install valgrind and performance test stuff + run: sudo apt-get update && sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev + + - name: Create Build Directory + run: cmake -E make_directory ${{runner.workspace}}/EnergyPlus/build + + - name: Configure CMake + working-directory: ${{runner.workspace}}/EnergyPlus/build + # turn on performance testing + run: cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=ON -DLINK_WITH_PYTHON=ON -DDOCUMENTATION_BUILD="BuildWithAll" -DTEX_INTERACTION="batchmode" -DBUILD_FORTRAN=ON -DBUILD_PACKAGE:BOOL=ON .. + + - name: Build EnergyPlus + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: cmake --build . 
-j 2 + + - name: Run EnergyPlus Tests + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: ctest -j 2 + + # collect performance results here, upload them + + - name: Run EnergyPlus Integration Tests + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: ctest -j 2 -R "integration.*" + + # get coverage results here, upload them diff --git a/.github/unfinished-workflows/linux_test_debug.yml b/.github/unfinished-workflows/linux_test_debug.yml new file mode 100644 index 00000000000..23e90e2e8c8 --- /dev/null +++ b/.github/unfinished-workflows/linux_test_debug.yml @@ -0,0 +1,45 @@ +name: Linux Test - Debug Mode + +on: [push] + +jobs: + build: + runs-on: ubuntu-18.04 + + steps: + - uses: actions/checkout@v2 + + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + + # install boto to upload coverage results + + - name: Install APT dependencies + # install gcovr and coverage stuff + run: sudo apt-get update && sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev + + - name: Create Build Directory + run: cmake -E make_directory ${{runner.workspace}}/EnergyPlus/build + + - name: Configure CMake + working-directory: ${{runner.workspace}}/EnergyPlus/build + # turn on ENABLE_COVERAGE + run: cmake -DCMAKE_BUILD_TYPE=Debug -DBUILD_TESTING=ON -DLINK_WITH_PYTHON=ON -DDOCUMENTATION_BUILD="BuildWithAll" -DTEX_INTERACTION="batchmode" -DBUILD_FORTRAN=ON -DBUILD_PACKAGE:BOOL=ON .. + + - name: Build EnergyPlus + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: cmake --build . -j 2 + + - name: Run EnergyPlus Unit Tests + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: ctest -j 2 -E "integration.*" + + # get coverage results here, upload them, then clear them + + - name: Run EnergyPlus Integration Tests + working-directory: ${{runner.workspace}}/EnergyPlus/build + run: ctest -j 2 -R "integration.*" + + # get coverage results here, upload them diff --git a/.github/workflows/add_opened_pr_to_project.yml b/.github/workflows/add_opened_pr_to_project.yml new file mode 100644 index 00000000000..86ae4f597f3 --- /dev/null +++ b/.github/workflows/add_opened_pr_to_project.yml @@ -0,0 +1,32 @@ +name: 'Create Project Card for a New PR' + +on: + pull_request_target: + # So we need this action purely to add PRs to a project. Once in the project, + # there are built-in project workflows for assigning new items to a column, and + # moving closing and reopening items within a project. I think we just need to handle + # the opened event, and we'll be good to go. + types: [ opened ] + +env: + # this token must have typical repo write access plus full :project access + # Create this token with the correct permissions and add it as a repo secret + # It would be preferable to just use the GITHUB_TOKEN that Actions generates for a run, + # but I can't get the right projects (v2) write access to make it work. Eventually + # just use that, set permissions here, and delete the token. 
+ GH_TOKEN: ${{ secrets.ADD_TO_PROJECT_TOKEN }} + +jobs: + create-card: + runs-on: ubuntu-latest + steps: + # check out the repo to get the script downloaded + - uses: actions/checkout@v4 + # gets the current PR and stores it in a variable + - uses: jwalton/gh-find-current-pr@v1 + id: findPr + # adds the variable to the environment so that we can use it in the next step + - run: echo "PR=${{ steps.findPr.outputs.pr }}" >> $GITHUB_ENV + # interact with the GitHub API and manipulate the Project + # we should also probably pass the project id as an env variable / argument + - run: bash scripts/dev/add_to_project.sh ${{ env.PR }} diff --git a/.github/workflows/build_documentation.yml b/.github/workflows/build_documentation.yml deleted file mode 100644 index 991fac78ae0..00000000000 --- a/.github/workflows/build_documentation.yml +++ /dev/null @@ -1,228 +0,0 @@ -name: Documentation - -on: - push: - branches: [ develop ] - pull_request: - branches: [ develop ] - -defaults: - run: - shell: bash - -jobs: - build: - name: Build PDFs on ${{ matrix.pretty }} - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu-24.04 - generator: "Unix Makefiles" - pretty: 'Ubuntu 24.04' - - os: windows-2019 - generator: "Visual Studio 16 2019" - pretty: "Windows" - - runs-on: ${{ matrix.os }} - - steps: - - name: Checkout EnergyPlus - uses: actions/checkout@v4 - - - name: Set up Python 3.12 - uses: actions/setup-python@v5 - with: - python-version: '3.12' - - - name: Set up LaTeX on Linux - if: runner.os == 'Linux' - run: sudo apt update && sudo apt install -y texlive texlive-xetex texlive-science poppler-utils - - - name: Set up LaTeX on Windows - if: runner.os == 'Windows' - run: | - set -x - echo "Downloading MiKTeX CLI installer" - # We download from a specific miror already # TODO: Should store this setup package somewhere ourselves - curl -L -O https://ctan.math.illinois.edu/systems/win32/miktex/setup/windows-x64/miktexsetup-5.5.0%2B1763023-x64.zip - unzip miktexsetup-5.5.0%2B1763023-x64.zip - - echo "Setting up the local package directory via download" - ./miktexsetup_standalone --verbose \ - --local-package-repository=C:/MiKTeX-Repo \ - --remote-package-repository="https://ctan.math.illinois.edu/systems/win32/miktex/tm/packages/" \ - --package-set=essential \ - download - - echo "Installing from the local package directory previously set up" - ./miktexsetup_standalone --verbose \ - --local-package-repository=C:/MiKTeX-Repo \ - --package-set=essential \ - --shared=yes \ - install - - echo "Adding MiKTeX bin folder to PATH and to GITHUB_PATH" - echo "C:/Program Files/MiKTeX/miktex/bin/x64/" >> $GITHUB_PATH - export PATH="/c/Program Files/MiKTeX/miktex/bin/x64/:$PATH" - - echo "Configuring MiKTeX to install missing packages on the fly" - initexmf --admin --verbose --set-config-value='[MPM]AutoInstall=1' - - echo "Configure default mirror for packages" - mpm --admin --set-repository="https://ctan.math.illinois.edu/systems/win32/miktex/tm/packages/" - # If later we pre-package into a zip/tar.gz all the packages we need, we can preinstall them via - # mpm --admin --set-repository=C:/MiKTeX-Repo - # mpm --verbose --admin --repository=C:\MiKTeX-Repo --require=@C:\MiKTeX-Repo\energyplus_packages.lst - # Avoid annoying warning: "xelatex: major issue: So far, you have not checked for updates as a MiKTeX user." 
- mpm --find-updates - mpm --admin --find-updates - # initexmf --enable-installer --update-fndb - # initexmf --admin --enable-installer --update-fndb - # initexmf --enable-installer --dump-by-name=xelatex --engine=xetex - - # This will install all required packages and does that in a single thread. So later in cmake we can safely run in parallel - - name: Preinstall Packages on Windows - if: runner.os == 'Windows' - working-directory: ./doc/ - run: | - # It shaves about 1min40s to preinstall from the .lst so do it first - mpm --verbose --admin --require=@energyplus_packages_windows.lst - cd test/ - xelatex dependencies.tex - nwords=$(pdftotext -f 2 -l 2 dependencies.pdf - | wc -w) - [ "$nwords" -lt "10" ] && echo "TOC isn't available for pass NO. 1 (as expected)" || echo "TOC IS available for pass NO. 1" - xelatex dependencies.tex - nwords=$(pdftotext -f 2 -l 2 dependencies.pdf - | wc -w) - if [ "$nwords" -lt "10" ]; then - echo "TOC isn't available for pass NO. 2" - exit 1 - fi; - echo "TOC is available for pass NO.2" - - - name: Create Build Environment - run: cmake -E make_directory ./doc/build - - - name: Configure CMake - working-directory: ./doc/build - run: cmake -G "${{ matrix.generator }}" -DTEX_INTERACTION=batchmode -DDOCS_TESTING=ON .. - - - name: Add problem matcher - run: echo "::add-matcher::.github/workflows/doc-problem-match.json" - - - name: Build Docs - working-directory: ./doc/build - run: cmake --build . -j 4 - - - name: Upload Acknowledgments - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: Acknowledgments - path: ${{ github.workspace }}/doc/build/pdf/Acknowledgments.pdf - - - name: Upload AuxiliaryPrograms - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: AuxiliaryPrograms - path: ${{ github.workspace }}/doc/build/pdf/AuxiliaryPrograms.pdf - - - name: Upload EMSApplicationGuide - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: EMSApplicationGuide - path: ${{ github.workspace }}/doc/build/pdf/EMSApplicationGuide.pdf - - - name: Upload EnergyPlusEssentials - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: EnergyPlusEssentials - path: ${{ github.workspace }}/doc/build/pdf/EnergyPlusEssentials.pdf - - - name: Upload EngineeringReference - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: EngineeringReference - path: ${{ github.workspace }}/doc/build/pdf/EngineeringReference.pdf - - - name: Upload ExternalInterfacesApplicationGuide - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: ExternalInterfacesApplicationGuide - path: ${{ github.workspace }}/doc/build/pdf/ExternalInterfacesApplicationGuide.pdf - - - name: Upload GettingStarted - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: GettingStarted - path: ${{ github.workspace }}/doc/build/pdf/GettingStarted.pdf - - - name: Upload InputOutputReference - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: InputOutputReference - path: ${{ github.workspace }}/doc/build/pdf/InputOutputReference.pdf - - - name: Upload InterfaceDeveloper - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: InterfaceDeveloper - path: ${{ github.workspace }}/doc/build/pdf/InterfaceDeveloper.pdf - - - name: Upload ModuleDeveloper - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: ModuleDeveloper - path: ${{ 
github.workspace }}/doc/build/pdf/ModuleDeveloper.pdf - - - name: Upload OutputDetailsAndExamples - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: OutputDetailsAndExamples - path: ${{ github.workspace }}/doc/build/pdf/OutputDetailsAndExamples.pdf - - - name: Upload PlantApplicationGuide - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: PlantApplicationGuide - path: ${{ github.workspace }}/doc/build/pdf/PlantApplicationGuide.pdf - - - name: Upload UsingEnergyPlusForCompliance - uses: actions/upload-artifact@v4 - if: matrix.os == 'ubuntu-24.04' - with: - name: UsingEnergyPlusForCompliance - path: ${{ github.workspace }}/doc/build/pdf/UsingEnergyPlusForCompliance.pdf - - - name: Upload entire pdf folder - uses: actions/upload-artifact@v4 - if: always() && matrix.os == 'ubuntu-24.04' - with: - name: AllDocumentation - path: ${{ github.workspace }}/doc/build/pdf/ - - - name: Upload log folder upon failure - if: failure() && runner.os == 'Windows' - uses: actions/upload-artifact@v4 - with: - name: Xelatex_Logs_after_all - path: C:\Users\runneradmin\AppData\Local\MiKTeX\miktex\log\ - -# - name: Test -# working-directory: ${{runner.workspace}}/build -# shell: bash -# # Execute tests defined by the CMake configuration. -# # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail -# # run: ctest -C $BUILD_TYPE -# run: ls diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 19c8da226b7..bac60d82e37 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -16,7 +16,7 @@ on: jobs: build_wheel: - name: Build Python Wheels + strategy: fail-fast: false matrix: diff --git a/.github/workflows/build_checksums.yml b/.github/workflows/checksums.yml similarity index 98% rename from .github/workflows/build_checksums.yml rename to .github/workflows/checksums.yml index 7b3756b1aef..6cccc3433b2 100644 --- a/.github/workflows/build_checksums.yml +++ b/.github/workflows/checksums.yml @@ -12,7 +12,6 @@ on: jobs: build: - name: Compute and Post Checksums runs-on: ubuntu-latest permissions: # Needed permission to upload the release asset diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml new file mode 100644 index 00000000000..93cc23a3167 --- /dev/null +++ b/.github/workflows/clang-format-check.yml @@ -0,0 +1,28 @@ +name: clang-format + +on: + push: + branches: [ develop ] + # Sequence of patterns matched against refs/tags + tags: + - '*' + pull_request: + branches: [ develop ] + +jobs: + formatting-check: + name: Formatting Check + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + path: + - 'src/EnergyPlus' + - 'tst/EnergyPlus/unit' + steps: + - uses: actions/checkout@v4 + - name: Run clang-format style check for C/C++ programs. 
+ uses: jidicula/clang-format-action@v4.13.0 + with: + clang-format-version: '10' + check-path: ${{ matrix.path }} diff --git a/.github/workflows/cppcheck.yml b/.github/workflows/cppcheck.yml new file mode 100644 index 00000000000..ae66a455d10 --- /dev/null +++ b/.github/workflows/cppcheck.yml @@ -0,0 +1,84 @@ +name: CppCheck + +on: [push] + +env: + CPPCHECK_VERSION: '2.10' + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout EnergyPlus + uses: actions/checkout@v4 + + - name: Install cppcheck + shell: bash + run: | + mkdir cppcheck + cd cppcheck + wget https://github.com/danmar/cppcheck/archive/$CPPCHECK_VERSION.tar.gz + tar xfz $CPPCHECK_VERSION.tar.gz + mkdir build + cd build + cmake -DCMAKE_BUILD_TYPE=Release ../cppcheck-$CPPCHECK_VERSION/ + make -j $(nproc) + sudo make install + cd .. + rm -Rf cppcheck + + - name: Cache cppcheck-build-directory + id: cppcheckcache + uses: actions/cache@v4 + with: + path: .cppcheck-build-dir/ + key: cppcheckcache + + - name: cppcheck-build-directory not found + # If the build cache wasn't found in the cache + if: steps.cppcheckcache.outputs.cache-hit != 'true' + run: | + mkdir .cppcheck-build-dir + + - name: cppcheck-build-directory was found + # If the build cache wasn't found in the cache + if: steps.cppcheckcache.outputs.cache-hit == 'true' + run: | + ls .cppcheck-build-dir/ || true + + - name: Run CppCheck + run: | + cppcheck --cppcheck-build-dir=.cppcheck-build-dir \ + -D__cppcheck__ -UEP_Count_Calls -DEP_NO_OPENGL -UGROUND_PLOT -DLINK_WITH_PYTHON -DMSVC_DEBUG -DSKYLINE_MATRIX_REMOVE_ZERO_COLUMNS -U_OPENMP -Ugeneratetestdata \ + -DEP_cache_GlycolSpecificHeat -DEP_cache_PsyTsatFnPb -UEP_nocache_Psychrometrics -UEP_psych_errors -UEP_psych_stats \ + --force \ + --std=c++17 \ + --inline-suppr \ + --suppress=cppcheckError \ + --suppress=unusedFunction:src/EnergyPlus/api/autosizing.cc \ + --suppress=unusedFunction:src/EnergyPlus/api/datatransfer.cc \ + --suppress=unusedFunction:src/EnergyPlus/api/func.cc \ + --suppress=unusedFunction:src/EnergyPlus/api/runtime.cc \ + --suppress=unusedFunction:src/EnergyPlus/api/state.cc \ + --suppress=unusedFunction:src/EnergyPlus/Psychrometrics.cc \ + --enable=all \ + -i EnergyPlus/DXCoils.cc \ + -i EnergyPlus/RefrigeratedCase.cc \ + -i EnergyPlus/SolarShading.cc \ + -j $(nproc) \ + --template='[{file}:{line}]:({severity}),[{id}],{message}' \ + --suppress="uninitvar:*" \ + ./src \ + 3>&1 1>&2 2>&3 | tee cppcheck.txt + + - name: Parse and colorize cppcheck + shell: bash + run: python ./scripts/dev/colorize_cppcheck_results.py + + - name: Upload cppcheck results as artifact + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: EnergyPlus-${{ github.sha }}-cppcheck_results.txt + path: cppcheck.txt diff --git a/.github/workflows/custom_check.yml b/.github/workflows/custom_check.yml new file mode 100644 index 00000000000..552496e0f1b --- /dev/null +++ b/.github/workflows/custom_check.yml @@ -0,0 +1,22 @@ +name: Custom Check + +on: [push] + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Custom Check + shell: bash + run: ./scripts/dev/custom_check.sh . 
diff --git a/.github/workflows/documentation-windows.yml b/.github/workflows/documentation-windows.yml new file mode 100644 index 00000000000..0846a551028 --- /dev/null +++ b/.github/workflows/documentation-windows.yml @@ -0,0 +1,116 @@ +name: Documentation Windows + +on: + push: + branches: [ master, develop ] + +env: + CMAKE_Fortran_COMPILER: "/c/msys64/mingw64/bin/x86_64-w64-mingw32-gfortran.exe" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_TYPE: Release + +jobs: + build: + runs-on: windows-2019 + + steps: + - name: Checkout EnergyPlus + uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Install System dependencies and LaTeX + shell: bash + run: | + set -x + echo "Downloading MiKTeX CLI installer" + # We download from a specific miror already # TODO: Should store this setup package somewhere ourselves + curl -L -O https://ctan.math.illinois.edu/systems/win32/miktex/setup/windows-x64/miktexsetup-5.5.0%2B1763023-x64.zip + unzip miktexsetup-5.5.0%2B1763023-x64.zip + + echo "Setting up the local package directory via download" + ./miktexsetup_standalone --verbose \ + --local-package-repository=C:/MiKTeX-Repo \ + --remote-package-repository="https://ctan.math.illinois.edu/systems/win32/miktex/tm/packages/" \ + --package-set=essential \ + download + + echo "Installing from the local package directory previously set up" + ./miktexsetup_standalone --verbose \ + --local-package-repository=C:/MiKTeX-Repo \ + --package-set=essential \ + --shared=yes \ + install + + echo "Adding MiKTeX bin folder to PATH and to GITHUB_PATH" + echo "C:/Program Files/MiKTeX/miktex/bin/x64/" >> $GITHUB_PATH + export PATH="/c/Program Files/MiKTeX/miktex/bin/x64/:$PATH" + + echo "Configuring MiKTeX to install missing packages on the fly" + initexmf --admin --verbose --set-config-value='[MPM]AutoInstall=1' + + echo "Configure default mirror for packages" + mpm --admin --set-repository="https://ctan.math.illinois.edu/systems/win32/miktex/tm/packages/" + # If later we pre-package into a zip/tar.gz all the packages we need, we can preinstall them via + # mpm --admin --set-repository=C:/MiKTeX-Repo + # mpm --verbose --admin --repository=C:\MiKTeX-Repo --require=@C:\MiKTeX-Repo\energyplus_packages.lst + # Avoid annoying warning: "xelatex: major issue: So far, you have not checked for updates as a MiKTeX user." + mpm --find-updates + mpm --admin --find-updates + # initexmf --enable-installer --update-fndb + # initexmf --admin --enable-installer --update-fndb + # initexmf --enable-installer --dump-by-name=xelatex --engine=xetex + + # This will install all required packages and does that in a single thread. So later in cmake we can safely run in parallel + - name: Install required packages by building the Test document + shell: bash + working-directory: ./doc/ + run: | + # It shaves about 1min40s to preinstall from the .lst so do it first + mpm --verbose --admin --require=@energyplus_packages_windows.lst + cd test/ + xelatex dependencies.tex + nwords=$(pdftotext -f 2 -l 2 dependencies.pdf - | wc -w) + [ "$nwords" -lt "10" ] && echo "TOC isn't available for pass NO. 1 (as expected)" || echo "TOC IS available for pass NO. 1" + xelatex dependencies.tex + nwords=$(pdftotext -f 2 -l 2 dependencies.pdf - | wc -w) + if [ "$nwords" -lt "10" ]; then + echo "TOC isn't available for pass NO. 
2" + exit 1 + fi; + echo "TOC is available for pass NO.2" + + - name: Create Build Directory + run: cmake -E make_directory ./doc/build/ + + - name: Configure CMake + working-directory: ./doc/build + shell: bash + run: | + set -x + cmake -G "Visual Studio 16 2019" -A x64 -DTEX_INTERACTION=batchmode -DDOCS_TESTING=ON ../ + + - name: Add problem matcher + run: echo "::add-matcher::.github/workflows/doc-problem-match.json" + + - name: Build Docs + working-directory: ${{runner.workspace}}/EnergyPlus/doc/build + run: | + cmake --build . -j 4 + + - name: Upload entire pdf folder + if: ${{ always() }} + uses: actions/upload-artifact@v4 + with: + name: InputOutputReference + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/ + + - name: Upload log folder upon failure + if: ${{ failure() }} + uses: actions/upload-artifact@v4 + with: + name: Xelatex_Logs_after_all + path: C:\Users\runneradmin\AppData\Local\MiKTeX\miktex\log\ diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml new file mode 100644 index 00000000000..0bc7b56be03 --- /dev/null +++ b/.github/workflows/documentation.yml @@ -0,0 +1,119 @@ +name: Documentation + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout EnergyPlus + uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: '3.10' + + - name: Set up LaTeX + run: sudo apt update && sudo apt install -y texlive texlive-xetex texlive-science poppler-utils + + - name: Create Build Environment + run: cmake -E make_directory ${{runner.workspace}}/EnergyPlus/doc/build + + - name: Configure CMake + working-directory: ${{runner.workspace}}/EnergyPlus/doc/build + run: cmake -DTEX_INTERACTION=batchmode -DDOCS_TESTING=ON .. + + - name: Add problem matcher + run: echo "::add-matcher::.github/workflows/doc-problem-match.json" + + - name: Build Docs + working-directory: ${{runner.workspace}}/EnergyPlus/doc/build + run: cmake --build . 
-j 4 + + - name: Upload Acknowledgments + uses: actions/upload-artifact@v4 + with: + name: Acknowledgments + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/Acknowledgments.pdf + + - name: Upload AuxiliaryPrograms + uses: actions/upload-artifact@v4 + with: + name: AuxiliaryPrograms + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/AuxiliaryPrograms.pdf + + - name: Upload EMSApplicationGuide + uses: actions/upload-artifact@v4 + with: + name: EMSApplicationGuide + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/EMSApplicationGuide.pdf + + - name: Upload EnergyPlusEssentials + uses: actions/upload-artifact@v4 + with: + name: EnergyPlusEssentials + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/EnergyPlusEssentials.pdf + + - name: Upload EngineeringReference + uses: actions/upload-artifact@v4 + with: + name: EngineeringReference + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/EngineeringReference.pdf + + - name: Upload ExternalInterfacesApplicationGuide + uses: actions/upload-artifact@v4 + with: + name: ExternalInterfacesApplicationGuide + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/ExternalInterfacesApplicationGuide.pdf + + - name: Upload GettingStarted + uses: actions/upload-artifact@v4 + with: + name: GettingStarted + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/GettingStarted.pdf + + - name: Upload InputOutputReference + uses: actions/upload-artifact@v4 + with: + name: InputOutputReference + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/InputOutputReference.pdf + + - name: Upload InterfaceDeveloper + uses: actions/upload-artifact@v4 + with: + name: InterfaceDeveloper + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/InterfaceDeveloper.pdf + + - name: Upload ModuleDeveloper + uses: actions/upload-artifact@v4 + with: + name: ModuleDeveloper + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/ModuleDeveloper.pdf + + - name: Upload OutputDetailsAndExamples + uses: actions/upload-artifact@v4 + with: + name: OutputDetailsAndExamples + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/OutputDetailsAndExamples.pdf + + - name: Upload PlantApplicationGuide + uses: actions/upload-artifact@v4 + with: + name: PlantApplicationGuide + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/PlantApplicationGuide.pdf + + - name: Upload UsingEnergyPlusForCompliance + uses: actions/upload-artifact@v4 + with: + name: UsingEnergyPlusForCompliance + path: ${{runner.workspace}}/EnergyPlus/doc/build/pdf/UsingEnergyPlusForCompliance.pdf + +# - name: Test +# working-directory: ${{runner.workspace}}/build +# shell: bash +# # Execute tests defined by the CMake configuration. 
+# # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail +# # run: ctest -C $BUILD_TYPE +# run: ls diff --git a/.github/workflows/test_epjson.yml b/.github/workflows/epjson.yml similarity index 91% rename from .github/workflows/test_epjson.yml rename to .github/workflows/epjson.yml index 97ce25a0959..d754801964e 100644 --- a/.github/workflows/test_epjson.yml +++ b/.github/workflows/epjson.yml @@ -1,10 +1,7 @@ name: epJSON dependency on: - push: - branches: [ develop ] - pull_request: - branches: [ develop ] + push env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -12,19 +9,12 @@ env: jobs: release: - name: Testing on ${{ matrix.pretty }} runs-on: ${{ matrix.os }} strategy: # fail-fast: Default is true, switch to false to allow one platform to fail and still run others fail-fast: false matrix: - include: - - os: ubuntu-latest - pretty: "Ubuntu" - - os: windows-latest - pretty: "Windows" - - os: macos-latest - pretty: "Mac" + os: [ubuntu-latest, windows-latest, macos-latest] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/linux_build.yml b/.github/workflows/linux_build.yml new file mode 100644 index 00000000000..b5fddcc0a16 --- /dev/null +++ b/.github/workflows/linux_build.yml @@ -0,0 +1,84 @@ +name: Linux Build + +on: + push: + branches: [ develop ] + # Sequence of patterns matched against refs/tags + tags: + - 'v*' # Push events matching v*, i.e. v1.0, v20.15.10 + pull_request: + branches: [ develop ] + +jobs: +# base_build: +# runs-on: ${{ matrix.os }} +# strategy: +# matrix: +# os: [ubuntu-20.04] +# +# steps: +# - uses: actions/checkout@v4 +# +# - name: Set up Python 3.7 +# uses: actions/setup-python@v5 +# with: +# python-version: 3.7 +# +# - name: Install System Dependencies +# shell: bash +# run: | +# set -x +# echo "Using Apt to install dependencies" +# sudo apt-get update +# sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev +# +# - name: Create Build Directory +# run: cmake -E make_directory ./build/ +# +# - name: Configure CMake +# shell: bash +# working-directory: ./build/ +# run: cmake -DCMAKE_BUILD_TYPE=Release -DLINK_WITH_PYTHON=ON .. +# +# - name: Build EnergyPlus +# working-directory: ./build/ +# shell: bash +# run: cmake --build . --target energyplus -j 2 + + alternate_build_configurations: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v4 + + - name: Install System Dependencies + shell: bash + run: | + set -x + echo "Using Apt to install dependencies" + sudo apt-get update + + - name: Create Build Directory + run: cmake -E make_directory ./build/ + + - name: Configure CMake + shell: bash + working-directory: ./build/ + run: | + cmake -DCMAKE_BUILD_TYPE=Release \ + -DLINK_WITH_PYTHON=OFF \ + -DUSE_PSYCHROMETRICS_CACHING=OFF \ + -DUSE_GLYCOL_CACHING=OFF \ + -DOPENGL_REQUIRED=OFF \ + -DUSE_PSYCH_STATS=ON \ + -DUSE_PSYCH_ERRORS=OFF \ + -DENABLE_PCH=OFF \ + ../ + + - name: Build EnergyPlus + working-directory: ./build/ + shell: bash + run: cmake --build . 
--target energyplus -j 4 diff --git a/.github/workflows/release_linux.yml b/.github/workflows/linux_release.yml similarity index 95% rename from .github/workflows/release_linux.yml rename to .github/workflows/linux_release.yml index 6469fade466..e6a3ef8dfc5 100644 --- a/.github/workflows/release_linux.yml +++ b/.github/workflows/linux_release.yml @@ -1,4 +1,4 @@ -name: Releases +name: Linux Releases on: push: @@ -12,18 +12,14 @@ env: jobs: build_installer_artifact: - name: Build Packages for ${{ matrix.pretty }} + name: Build Installer and Upload # keeping upload in this job so we could potentially download problematic builds runs-on: ${{ matrix.os }} continue-on-error: false strategy: # fail-fast: Default is true, switch to false to allow one platform to fail and still run others fail-fast: false matrix: - include: - - os: ubuntu-22.04 - pretty: "Ubuntu 22.04" - - os: ubuntu-24.04 - pretty: "Ubuntu 24.04" + os: [ubuntu-20.04, ubuntu-22.04, ubuntu-24.04] permissions: # Needed permission to upload the release asset diff --git a/.github/workflows/release_mac.yml b/.github/workflows/mac_release.yml similarity index 99% rename from .github/workflows/release_mac.yml rename to .github/workflows/mac_release.yml index 9556e5e893c..bf1d0c015c4 100644 --- a/.github/workflows/release_mac.yml +++ b/.github/workflows/mac_release.yml @@ -1,4 +1,4 @@ -name: Releases +name: Mac Releases on: push: @@ -16,7 +16,6 @@ env: jobs: build_installer_artifact: - name: Build Packages for ${{ matrix.pretty }} runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.allow_failure }} strategy: @@ -30,14 +29,11 @@ jobs: allow_failure: false arch: x86_64 python-arch: x64 - pretty: "Mac x64" - macos_dev_target: 13.0 os: macos-14 allow_failure: false arch: arm64 python-arch: arm64 - pretty: "Mac arm64" - permissions: # Needed permission to upload the release asset contents: write diff --git a/.github/workflows/verify_pr_labels.yml b/.github/workflows/pr_labels.yml similarity index 88% rename from .github/workflows/verify_pr_labels.yml rename to .github/workflows/pr_labels.yml index 88e5ab98555..5b69ed1d488 100644 --- a/.github/workflows/verify_pr_labels.yml +++ b/.github/workflows/pr_labels.yml @@ -1,4 +1,4 @@ -name: Pull Request Labels +name: Verify Pull Request Labeling on: pull_request: @@ -6,7 +6,6 @@ on: jobs: check_pr_labels: - name: Verification runs-on: ubuntu-latest steps: - name: Verify PR label action diff --git a/.github/workflows/test_code_integrity.yml b/.github/workflows/test_code_integrity.yml deleted file mode 100644 index 40b09e63a17..00000000000 --- a/.github/workflows/test_code_integrity.yml +++ /dev/null @@ -1,105 +0,0 @@ -name: Code Integrity - -on: - push: - branches: [ develop ] - pull_request: - branches: [ develop ] - -jobs: - code_integrity_checks: - name: Static Code Analysis - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Run clang-format style check for C/C++ source code. - uses: jidicula/clang-format-action@v4.13.0 - if: always() - with: - clang-format-version: '10' - check-path: 'src/EnergyPlus' - - - name: Run clang-format style check for C/C++ unit test code. - uses: jidicula/clang-format-action@v4.13.0 - if: always() - with: - clang-format-version: '10' - check-path: 'tst/EnergyPlus/unit' - - - name: Custom Check - if: always() - run: ./scripts/dev/custom_check.sh . 
- - - name: Install cppcheck - if: always() - run: | - mkdir cppcheck - cd cppcheck - wget https://github.com/danmar/cppcheck/archive/2.10.tar.gz - tar xfz 2.10.tar.gz - mkdir build - cd build - cmake -DCMAKE_BUILD_TYPE=Release ../cppcheck-2.10/ - make -j $(nproc) - sudo make install - cd .. - rm -Rf cppcheck - - - name: Cache cppcheck-build-directory - if: always() - id: cppcheckcache - uses: actions/cache@v4 - with: - path: .cppcheck-build-dir/ - key: cppcheckcache - - - name: cppcheck-build-directory not found - # If the build cache wasn't found in the cache - if: always() && steps.cppcheckcache.outputs.cache-hit != 'true' - run: mkdir .cppcheck-build-dir - - - name: cppcheck-build-directory was found - # If the build cache wasn't found in the cache - if: always() && steps.cppcheckcache.outputs.cache-hit == 'true' - run: ls .cppcheck-build-dir/ || true - - - name: Run CppCheck - id: cpp_check_run - if: always() - # TODO: Evaluate the long list of flags here - run: > - cppcheck - --cppcheck-build-dir=.cppcheck-build-dir - -D__cppcheck__ -UEP_Count_Calls -DEP_NO_OPENGL -UGROUND_PLOT -DLINK_WITH_PYTHON -DMSVC_DEBUG -DSKYLINE_MATRIX_REMOVE_ZERO_COLUMNS -U_OPENMP -Ugeneratetestdata - -DEP_cache_GlycolSpecificHeat -DEP_cache_PsyTsatFnPb -UEP_nocache_Psychrometrics -UEP_psych_errors -UEP_psych_stats - --force - --std=c++17 - --inline-suppr - --suppress=cppcheckError - --suppress=unusedFunction:src/EnergyPlus/api/autosizing.cc - --suppress=unusedFunction:src/EnergyPlus/api/datatransfer.cc - --suppress=unusedFunction:src/EnergyPlus/api/func.cc - --suppress=unusedFunction:src/EnergyPlus/api/runtime.cc - --suppress=unusedFunction:src/EnergyPlus/api/state.cc - --suppress=unusedFunction:src/EnergyPlus/Psychrometrics.cc - --enable=all - -i EnergyPlus/DXCoils.cc - -i EnergyPlus/RefrigeratedCase.cc - -i EnergyPlus/SolarShading.cc - -j $(nproc) - --template='[{file}:{line}]:({severity}),[{id}],{message}' - --suppress="uninitvar:*" - ./src - 3>&1 1>&2 2>&3 | tee cppcheck.txt - - - name: Parse and colorize cppcheck - if: always() && steps.cpp_check_run.outcome == 'success' - run: python ./scripts/dev/colorize_cppcheck_results.py - - - name: Upload cppcheck results as artifact - if: always() - uses: actions/upload-artifact@v4 - with: - name: EnergyPlus-${{ github.sha }}-cppcheck_results.txt - path: cppcheck.txt diff --git a/.github/workflows/test_debug_builds.yml b/.github/workflows/test_debug_builds.yml deleted file mode 100644 index 6cee964ccf9..00000000000 --- a/.github/workflows/test_debug_builds.yml +++ /dev/null @@ -1,172 +0,0 @@ -name: Debug Testing - -on: - pull_request: - branches: [ develop ] # run this on any PR pointing to develop - push: - branches: [ develop ] # also run this on any commit to develop - -defaults: - run: - shell: bash - -env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - FC: gfortran-13 - Python_REQUIRED_VERSION: 3.12.3 # 3.12.2 not available on Ubuntu 24 GHA - -jobs: - run_debug_integration: - name: Integration Testing - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - env: - CI_FORCE_TIME_STEP: 'Y' # Force E+ to run integration tests at 30 minutes for CI time saving! 
- - steps: - - - name: Set up Python ${{ env.Python_REQUIRED_VERSION }} - id: setup-python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.Python_REQUIRED_VERSION }} - - - name: Install Dependencies for Linux - run: | - sudo apt-get update - sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev lcov gcovr - # https://github.com/actions/runner-images/issues/10025 - echo "FC=gfortran-13" >> $GITHUB_ENV - - - uses: actions/checkout@v4 - - - name: Create Build Directory - run: cmake -E make_directory ./build/ - - - name: Configure CMake - working-directory: ./build - run: > - cmake - -G "Unix Makefiles" - -DCMAKE_BUILD_TYPE:STRING=RelWithDebInfo - -DFORCE_DEBUG_ARITHM_GCC_OR_CLANG:BOOL=ON - -DLINK_WITH_PYTHON:BOOL=ON - -DPython_REQUIRED_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} - -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/x64/ - -DBUILD_TESTING:BOOL=ON - -DBUILD_FORTRAN:BOOL=ON - -DBUILD_PACKAGE:BOOL=OFF - -DDOCUMENTATION_BUILD:STRING=DoNotBuild - -DENABLE_OPENMP:BOOL=OFF - -DUSE_OpenMP:BOOL=OFF - ../ - - - name: Build - working-directory: ./build - run: cmake --build . -j 4 --target energyplus ExpandObjects ReadVarsESO Slab Basement AppGPostProcess ParametricPreprocessor - - - name: Run Integration Tests - working-directory: ./build - # skipping several here because they take a hideously long time in debug builds, would love to clean them up: - # integration.UnitaryHybridAC_DedicatedOutsideAir # 952 seconds - # integration.DirectIndirectEvapCoolersVSAS # 847 seconds - # integration.HospitalBaselineReheatReportEMS # 705 seconds - # integration.ASHRAE901_ApartmentHighRise_STD2019_Denver # 614 seconds - # integration.HospitalBaseline # 610 seconds - # integration.RefBldgOutPatientNew2004_Chicago # 586 seconds - # integration.UnitarySystem_MultiSpeedDX_EconoStaging # 536 seconds - # integration.RefrigeratedWarehouse # 445 seconds - # integration.RefBldgSecondarySchoolNew2004_Chicago # 378 seconds - # integration.ASHRAE901_OutPatientHealthCare_STD2019_Denver # 373 seconds - # after this, there is a big drop and everything is less than 5 minutes. 
- # by skipping these 10 tests, we remove over 6000 test seconds, and on a 4 core machine, that's almost a half hour - # I'm also skipping the SolarShadingTest_ImportedShading file because we force CI to a 30 minute timestep, but this causes a mismatch on importing that solar data - # I'm also skipping ShopWithPVandLiIonBattery because now that we are checking for NaNs aggressively, this one is failing because of NaN calculations inside SSC - run: > - ctest - -R "integration.*" - -E "UnitaryHybridAC_DedicatedOutsideAir|DirectIndirectEvapCoolersVSAS|HospitalBaselineReheatReportEMS|ASHRAE901_ApartmentHighRise_STD2019_Denver|HospitalBaseline|RefBldgOutPatientNew2004_Chicago|UnitarySystem_MultiSpeedDX_EconoStaging|RefrigeratedWarehouse|RefBldgSecondarySchoolNew2004_Chicago|ASHRAE901_OutPatientHealthCare_STD2019_Denver|SolarShadingTest_ImportedShading|ShopWithPVandLiIonBattery" - -j 4 - - run_unit_test_debug_coverage: - name: Unit Test Coverage - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - steps: - - - name: Set up Python ${{ env.Python_REQUIRED_VERSION }} - id: setup-python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.Python_REQUIRED_VERSION }} - - - name: Install Dependencies for Linux - run: | - sudo apt-get update - sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev lcov gcovr - # https://github.com/actions/runner-images/issues/10025 - echo "FC=gfortran-13" >> $GITHUB_ENV - - - uses: actions/checkout@v4 - - - name: Create Build Directory - run: cmake -E make_directory ./build/ - - - name: Configure CMake - working-directory: ./build - run: > - cmake - -G "Unix Makefiles" - -DCMAKE_BUILD_TYPE:STRING=RelWithDebInfo - -DFORCE_DEBUG_ARITHM_GCC_OR_CLANG:BOOL=ON - -DLINK_WITH_PYTHON:BOOL=ON - -DPython_REQUIRED_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} - -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/x64/ - -DBUILD_TESTING:BOOL=ON - -DBUILD_FORTRAN:BOOL=OFF - -DBUILD_PACKAGE:BOOL=OFF - -DDOCUMENTATION_BUILD:STRING=DoNotBuild - -DENABLE_OPENMP:BOOL=OFF - -DUSE_OpenMP:BOOL=OFF - -DENABLE_COVERAGE:BOOL=ON - ../ - - - name: Build - working-directory: ./build - run: > - cmake - --build . - -j 4 - --target - energyplus_tests energyplusapi energyplus parser ConvertInputFormat - TestAPI_DataTransfer_C TestAPI_Functional_C TestAPI_RuntimeDeleteState_C TestAPI_RuntimeResetState_C TestAPI_Runtime_C TestEnergyPlusCallbacks energyplusapi_tests - - - name: Run Unit Tests - working-directory: ./build - run: ctest -E "integration.*" -j 4 - - - name: Generate Raw Unit Test Coverage Results - working-directory: ./build - run: lcov -c -d . 
-o ./lcov.output --no-external --base-directory ../src/EnergyPlus/ - - - name: Generate Filtered Unit Test Coverage Results - working-directory: ./build - run: lcov -r ./lcov.output "${{ github.workspace }}/build/*" -o lcov.output.filtered - - - name: Generate HTML Unit Test Coverage Results - working-directory: ./build - run: genhtml ./lcov.output.filtered -o lcov-html --demangle-cpp --function-coverage | tee cover.txt - - - name: Process Unit Test Coverage Summary - working-directory: ./build - run: python ${{ github.workspace }}/scripts/dev/gha_coverage_summary.py - - - name: Generate Unit Test Artifact Summary - run: echo "$(cat ${{ github.workspace }}/build/cover.md)" >> $GITHUB_STEP_SUMMARY - - - uses: actions/upload-artifact@v4 - with: - name: "unit_test_coverage_results" - path: "${{ github.workspace }}/build/lcov-html" diff --git a/.github/workflows/test_develop_commits.yml b/.github/workflows/test_develop_commits.yml deleted file mode 100644 index e7669111172..00000000000 --- a/.github/workflows/test_develop_commits.yml +++ /dev/null @@ -1,143 +0,0 @@ -name: Develop Branch Testing - -on: - push: - branches: [ develop ] # run this on all commits to the develop branch - -defaults: - run: - shell: bash - -env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - FC: gfortran-13 - Python_REQUIRED_VERSION: 3.12.3 # 3.12.2 not available on Ubuntu 24 GHA - -jobs: - build_and_test: - name: ${{ matrix.pretty }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - os: macos-12 - macos_dev_target: 12.1 - arch: x86_64 - python-arch: x64 - generator: "Unix Makefiles" - nproc: 3 - pretty: "Standard Build on Mac x64" - alternate: false - - os: macos-14 - macos_dev_target: 13.0 - arch: arm64 - python-arch: arm64 - generator: "Unix Makefiles" - nproc: 3 - pretty: "Standard Build on Mac arm64" - alternate: false - - os: ubuntu-24.04 - arch: x86_64 - python-arch: x64 - generator: "Unix Makefiles" - nproc: 4 - pretty: "Standard Build on Ubuntu 24.04" - alternate: false - - os: windows-2019 - arch: x86_64 - python-arch: x64 - generator: "Visual Studio 16 2019" - nproc: 4 - pretty: "Windows VS 2019" - alternate: false - - os: windows-2022 - arch: x86_64 - python-arch: x64 - generator: "Visual Studio 17 2022" - nproc: 4 - pretty: "Standard Build on Windows VS 2022" - alternate: false - - os: ubuntu-24.04 - arch: x86_64 - python-arch: x64 - generator: "Unix Makefiles" - nproc: 4 - pretty: "Alternate Build on Ubuntu 24.04" - alternate: true - - steps: - - - name: Set up Python ${{ env.Python_REQUIRED_VERSION }} - id: setup-python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.Python_REQUIRED_VERSION }} - - - name: Install Dependencies for Mac - if: ${{ runner.os == 'macOS' }} - run: | - brew update - brew install gcc@13 - echo "FC=$(brew --prefix gcc@13)/bin/gfortran-13" >> $GITHUB_ENV - echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.macos_dev_target }} >> $GITHUB_ENV - - - name: Install Dependencies for Linux - if: ${{ runner.os == 'Linux' }} - run: | - sudo apt-get update - sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev - if [[ "${{ matrix.os }}" == "ubuntu-24.04" ]]; then - # https://github.com/actions/runner-images/issues/10025 - echo "FC=gfortran-13" >> $GITHUB_ENV - fi - - - uses: actions/checkout@v4 - - - name: Create Build Directory - run: cmake -E make_directory ./build/ - - - name: Configure CMake with Standard Flags - if: matrix.alternate == false - working-directory: ./build - run: > - cmake - -G "${{ matrix.generator }}" - 
-DCMAKE_BUILD_TYPE:STRING=Release - -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=${{ matrix.macos_dev_target }} - -DLINK_WITH_PYTHON:BOOL=ON - -DPython_REQUIRED_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} - -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/${{ matrix.python-arch }}/ - -DBUILD_TESTING:BOOL=ON - -DBUILD_FORTRAN:BOOL=ON - -DBUILD_PACKAGE:BOOL=OFF - -DDOCUMENTATION_BUILD:STRING=DoNotBuild - -DENABLE_OPENMP:BOOL=OFF - -DUSE_OpenMP:BOOL=OFF - ../ - - - name: Configure CMake with Alternative Flags - if: matrix.alternate == true - working-directory: ./build/ - run: > - cmake - -DCMAKE_BUILD_TYPE=Release - -DLINK_WITH_PYTHON=OFF - -DUSE_PSYCHROMETRICS_CACHING=OFF - -DUSE_GLYCOL_CACHING=OFF - -DOPENGL_REQUIRED=OFF - -DUSE_PSYCH_STATS=ON - -DUSE_PSYCH_ERRORS=OFF - -DENABLE_PCH=OFF - ../ - - - name: Build - id: build - working-directory: ./build - run: cmake --build . -j ${{ matrix.nproc }} --config Release - - - name: Test - # Not running test on alternate build yet, I need to test things - if: matrix.alternate == false - working-directory: ./build - run: ctest -C Release -j ${{ matrix.nproc }} diff --git a/.github/workflows/test_pull_requests.yml b/.github/workflows/test_pull_requests.yml deleted file mode 100644 index 8f58f736b18..00000000000 --- a/.github/workflows/test_pull_requests.yml +++ /dev/null @@ -1,195 +0,0 @@ -name: Build and Test - -on: - pull_request: - branches: [ develop ] # TODO: Run this on any PR, and compare to baseline branch, not necessarily develop. Also perhaps non-draft PRs only. - -defaults: - run: - shell: bash - -env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - FC: gfortran-13 - Python_REQUIRED_VERSION: 3.12.3 # 3.12.2 not available on Ubuntu 24 GHA - -jobs: - build_and_test: - name: Testing on ${{ matrix.pretty }} - runs-on: ${{ matrix.os }} - permissions: - pull-requests: write - strategy: - fail-fast: false - matrix: - include: - - os: macos-14 - macos_dev_target: 13.0 - arch: arm64 - python-arch: arm64 - generator: "Unix Makefiles" - nproc: 3 - run_regressions: true - pretty: "Mac arm64" - - os: ubuntu-24.04 - arch: x86_64 - python-arch: x64 - generator: "Unix Makefiles" - nproc: 4 - run_regressions: false - pretty: "Ubuntu 24.04" - - os: windows-2022 - arch: x86_64 - python-arch: x64 - generator: "Visual Studio 17 2022" - nproc: 4 - run_regressions: false - pretty: "Windows x64" - - steps: - - - name: Set up Python ${{ env.Python_REQUIRED_VERSION }} - id: setup-python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.Python_REQUIRED_VERSION }} - - - name: Install Dependencies for Mac # gcc13 reinstall may not be needed - if: runner.os == 'macOS' - run: | - brew update - brew reinstall gcc@13 - echo "FC=$(brew --prefix gcc@13)/bin/gfortran-13" >> $GITHUB_ENV - echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.macos_dev_target }} >> $GITHUB_ENV - - - name: Install Dependencies for Linux - if: runner.os == 'Linux' - run: | - sudo apt-get update - sudo apt-get install libxkbcommon-x11-0 xorg-dev libgl1-mesa-dev - if [[ "${{ matrix.os }}" == "ubuntu-24.04" ]]; then - # https://github.com/actions/runner-images/issues/10025 - echo "FC=gfortran-13" >> $GITHUB_ENV - fi - - # BUILD AND TEST INTEGRATION FILES ON THE BASELINE BRANCH - - - name: Baseline Checkout - if: matrix.run_regressions - uses: actions/checkout@v4 - with: - ref: develop - path: baseline - - - name: Baseline Create Build Directory - if: matrix.run_regressions - run: cmake -E make_directory ./baseline/build/ - - - name: 
Baseline Configure CMake - if: matrix.run_regressions - working-directory: ./baseline/build - run: > - cmake - -G "${{ matrix.generator }}" - -DCMAKE_BUILD_TYPE:STRING=Release - -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=${{ matrix.macos_dev_target }} - -DLINK_WITH_PYTHON:BOOL=ON - -DPython_REQUIRED_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} - -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/${{ matrix.python-arch }}/ - -DBUILD_TESTING:BOOL=ON - -DBUILD_FORTRAN:BOOL=ON - -DBUILD_PACKAGE:BOOL=OFF - -DDOCUMENTATION_BUILD:STRING=DoNotBuild - -DENABLE_OPENMP:BOOL=OFF - -DUSE_OpenMP:BOOL=OFF - ../ - - # During baseline builds, just build specific target list so that we don't waste time building the unit test binary - - - name: Baseline Build on Windows - if: matrix.run_regressions && runner.os == 'Windows' - working-directory: ./baseline/build - run: cmake --build . -j ${{ matrix.nproc }} --config Release --target energyplus ExpandObjects_build ReadVars_build Slab_build Basement_build AppGPostProcess_build ParametricPreprocessor_build - - - name: Baseline Build on Mac/Linux - if: matrix.run_regressions && runner.os != 'Windows' - working-directory: ./baseline/build - run: cmake --build . -j ${{ matrix.nproc }} --target energyplus ExpandObjects ReadVarsESO Slab Basement AppGPostProcess ParametricPreprocessor - - - name: Baseline Test - if: matrix.run_regressions - working-directory: ./baseline/build - run: ctest -C Release -R integration -j 3 # TODO: Speed up basement so we don't have to skip it. - - # BUILD AND TEST EVERYTHING ON THE CURRENT BRANCH - - - name: Branch Checkout - uses: actions/checkout@v4 - with: - path: branch - - - name: Branch Create Build Directory - run: cmake -E make_directory ./branch/build/ - - - name: Branch Configure CMake - working-directory: ./branch/build - run: > - cmake - -G "${{ matrix.generator }}" - -DCMAKE_BUILD_TYPE:STRING=Release - -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=${{ matrix.macos_dev_target }} - -DLINK_WITH_PYTHON:BOOL=ON - -DPython_REQUIRED_VERSION:STRING=${{ steps.setup-python.outputs.python-version }} - -DPython_ROOT_DIR:PATH=$RUNNER_TOOL_CACHE/Python/${{ steps.setup-python.outputs.python-version }}/${{ matrix.python-arch }}/ - -DBUILD_TESTING:BOOL=ON - -DBUILD_FORTRAN:BOOL=ON - -DBUILD_PACKAGE:BOOL=OFF - -DDOCUMENTATION_BUILD:STRING=DoNotBuild - -DENABLE_OPENMP:BOOL=OFF - -DUSE_OpenMP:BOOL=OFF - ../ - - - name: Branch Build - id: branch_build - working-directory: ./branch/build - run: cmake --build . 
-j ${{ matrix.nproc }} --config Release - - - name: Branch Test - working-directory: ./branch/build - run: ctest -C Release -j 3 - - - name: Install Regression Tool - if: always() && matrix.run_regressions && steps.branch_build.outcome != 'failure' # always run this step as long as we actually built - run: pip install energyplus-regressions - - - name: Run Regressions - if: always() && matrix.run_regressions && steps.branch_build.outcome != 'failure' # always run this step as long as we actually built - id: regressions - # steps.regressions.conclusion is always "success", but if no regressions, steps.regressions.outcome is "success" - continue-on-error: true - run: python ./branch/scripts/dev/gha_regressions.py ./baseline/build/testfiles ./branch/build/testfiles/ ./regressions - - - uses: actions/upload-artifact@v4 - id: upload_regressions - if: always() && matrix.run_regressions && steps.regressions.outcome == 'failure' # only run this if regressions were encountered "failed" - with: - name: "regressions-${{ matrix.os }}" - path: "${{ github.workspace }}/regressions" - - - name: Generate Regression Summary GitHub Script - if: always() && matrix.run_regressions && steps.regressions.outcome == 'failure' - run: > - python ./branch/scripts/dev/build_regression_summary.py - ${{ github.workspace }}/regressions/summary.md - ${{ github.workspace }}/regressions/summary.js - ${{ matrix.os }} - ${{ github.sha }} - ${{ github.run_id }} - ${{ steps.upload_regressions.outputs.artifact-url }} - - - uses: actions/github-script@v7 - if: always() && matrix.run_regressions && steps.regressions.outcome == 'failure' - with: - script: | - const script = require('${{ github.workspace }}/regressions/summary.js') - console.log(script({github, context})) diff --git a/.github/workflows/release_windows.yml b/.github/workflows/windows_release.yml similarity index 97% rename from .github/workflows/release_windows.yml rename to .github/workflows/windows_release.yml index 5885ca60967..a11f2bb5ac2 100644 --- a/.github/workflows/release_windows.yml +++ b/.github/workflows/windows_release.yml @@ -1,4 +1,4 @@ -name: Releases +name: Windows Releases on: push: @@ -13,7 +13,7 @@ env: jobs: build_installer_artifact: - name: Build Packages for ${{ matrix.pretty }} + name: Build Installer and Upload # keeping upload in this job so we could potentially download problematic builds runs-on: windows-2019 # to move to windows-2022, just make sure to set the Visual Studio generator build to "17 2022" continue-on-error: ${{ matrix.allow_failure }} @@ -31,7 +31,6 @@ jobs: vs-generator: x64 package-arch: x86_64 enable_hardened_runtime: OFF - pretty: "Windows x64" # - name: x86 # arch: x86 # allow_failure: false @@ -44,7 +43,6 @@ jobs: vs-generator: x64 package-arch: x86_64-HardenedRuntime enable_hardened_runtime: ON - pretty: "Windows x64 Hardened" permissions: # Needed permission to upload the release asset @@ -205,5 +203,8 @@ jobs: working-directory: package run: ls +# - name: Setup tmate session +# uses: mxschmitt/action-tmate@v3 + - name: Run Package Tests run: python checkout/scripts/package_tests/runner.py --verbose --msvc 2022 win64 package/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000000..734c4431f13 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,157 @@ +stages: + - build + - test + - package + +build:mac: + stage: build + tags: + - mac + variables: + CC: /usr/bin/clang + CXX: /usr/bin/clang++ + CXXFLAGS: -stdlib=libc++ + CMAKE_OSX_ARCHITECTURES: x86_64 + MACOSX_DEPLOYMENT_TARGET: "10.7" + 
SDKROOT: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk + script: + - mkdir build + - cd build + - cmake -DBUILD_FORTRAN=ON -DBUILD_PACKAGE=ON -DBUILD_TESTING=ON -DENABLE_GTEST_DEBUG_MODE=OFF -DCPACK_BINARY_PACKAGEMAKER=ON -DCPACK_BINARY_STGZ=OFF -DCPACK_BINARY_TGZ=OFF .. + - cmake .. + - make -j8 + artifacts: + expire_in: 6 hrs + paths: + - build/ + +build:ubuntu: + stage: build + tags: + - clang + - ubuntu + variables: + CC: /usr/bin/clang-5.0 + CXX: /usr/bin/clang++-5.0 + script: + - mkdir build + - cd build + - cmake -DBUILD_FORTRAN=ON -DBUILD_PACKAGE=ON -DBUILD_TESTING=ON -DENABLE_GTEST_DEBUG_MODE=OFF -DCPACK_BINARY_TGZ=OFF -DCPACK_BINARY_TZ=OFF .. + - cmake .. + - make -j8 + artifacts: + expire_in: 6 hrs + paths: + - build/ + +build:windows: + stage: build + tags: + - vs2017 + - windows + before_script: + - set PATH=%PATH:C:\Program Files\Git\usr\bin;=% + script: + - mkdir build + - cd build + - '"C:\Program Files\CMake\bin\cmake.exe" -G "Visual Studio 15 2017 Win64" -DBUILD_FORTRAN=ON -DBUILD_PACKAGE=ON -DBUILD_TESTING=ON -DENABLE_GTEST_DEBUG_MODE=OFF ..' + - '"C:\Program Files\CMake\bin\cmake.exe" ..' + - '"C:\Program Files\CMake\bin\cmake.exe" --build . --config Release --target ALL_BUILD' + artifacts: + expire_in: 6 hrs + paths: + - build/ + +test:mac: + stage: test + tags: + - mac + dependencies: + - build:mac + script: + - cd build + - ctest -j8 + +test:ubuntu: + stage: test + tags: + - clang + - ubuntu + variables: + CC: /usr/bin/clang-5.0 + CXX: /usr/bin/clang++-5.0 + dependencies: + - build:ubuntu + script: + - cd build + - ctest -j8 + +test:windows: + stage: test + tags: + - vs2017 + - windows + before_script: + - set PATH=%PATH:C:\Program Files\Git\usr\bin;=% + dependencies: + - build:windows + script: + - cd build + - '"C:\Program Files\CMake\bin\ctest.exe" -C Release -j8' + +package:mac: + stage: package + tags: + - mac + dependencies: + - build:mac + variables: + CC: /usr/bin/clang + CXX: /usr/bin/clang++ + CXXFLAGS: -std=c++11 -stdlib=libc++ + CMAKE_OSX_ARCHITECTURES: x86_64 + MACOSX_DEPLOYMENT_TARGET: "10.7" + SDKROOT: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk + script: + - cd build + - make package -j8 + artifacts: + expire_in: 1 week + paths: + - build/EnergyPlus-*.dmg + +# # Ubuntu 16.04 EnergyPlus packages are currently failing +# package:ubuntu: +# stage: package +# tags: +# - clang +# - ubuntu +# dependencies: +# - build:ubuntu +# variables: +# CC: /usr/bin/clang-5.0 +# CXX: /usr/bin/clang++-5.0 +# script: +# - cd build +# - make package -j8 +# artifacts: +# expire_in: 1 week +# paths: +# - build/EnergyPlus-*.sh + +package:windows: + stage: package + tags: + - vs2017 + - windows + before_script: + - set PATH=%PATH:C:\Program Files\Git\usr\bin;=% + dependencies: + - build:windows + script: + - cd build + - '"C:\Program Files\CMake\bin\cmake.exe" --build . 
--config Release --target PACKAGE' + artifacts: + expire_in: 1 week + paths: + - build/EnergyPlus-*.exe diff --git a/CMakeLists.txt b/CMakeLists.txt index 6929707fdac..b6cded6bc15 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -200,12 +200,52 @@ else() endif() if(BUILD_TESTING) + option(ENABLE_REGRESSION_TESTING "Enable Regression Tests" OFF) option(ENABLE_REVERSE_DD_TESTING "Enable Reverse Design Day Tests" OFF) option(BUILD_WCE_TESTING "Build testing targets for Windows Calculation Engine" OFF) + if(ENABLE_REGRESSION_TESTING) + if(CMAKE_CL_64) + set(ARCH_FLAG "-64bit") + else() + set(ARCH_FLAG "") + endif() + set(REGRESSION_BASELINE_SHA + "" + CACHE STRING "SHA of baseline comparison") + set(COMMIT_SHA + "" + CACHE STRING "Commit sha of this build") + set(REGRESSION_SCRIPT_PATH + "" + CACHE PATH "Path to regression scripts") + set(REGRESSION_BASELINE_PATH + "" + CACHE PATH "Path to energyplus baseline folder to compare against") + set(DEVICE_ID + "${CMAKE_SYSTEM_NAME}-${CMAKE_CXX_COMPILER_ID}${ARCH_FLAG}" + CACHE STRING "Identifier for this device configuration") + endif() set_property(GLOBAL PROPERTY CTEST_TARGETS_ADDED 1) # This avoids all the CTest Nightly, Continuous, etc. tests. enable_testing() endif() +if(ENABLE_REGRESSION_TESTING) + string(STRIP "${REGRESSION_SCRIPT_PATH}" REG_PATH_STRIPPED) + string(STRIP "${REGRESSION_BASELINE_PATH}" REG_BASELINE_STRIPPED) + if(REG_PATH_STRIPPED STREQUAL "" OR REG_BASELINE_STRIPPED STREQUAL "") + message(AUTHOR_WARNING "Regression testing is enabled, but no script or baseline has been provided, turning it off.") + set(DO_REGRESSION_TESTING OFF) + else() + if(BUILD_FORTRAN) + # No Problem + set(DO_REGRESSION_TESTING ON) + else() + message(AUTHOR_WARNING "For regression testing, you must enable BUILD_FORTRAN in order to call ReadVarEso, turning it off.") + set(DO_REGRESSION_TESTING OFF) + endif() + endif() +endif() + if(UNIX AND NOT APPLE) set(CMAKE_INSTALL_RPATH "$ORIGIN") endif() @@ -407,6 +447,7 @@ mark_as_advanced(RE2_BUILD_TESTING) mark_as_advanced(USEPCRE) mark_as_advanced(CCACHE_PROGRAM) mark_as_advanced(CMAKE_VERSION_BUILD) +mark_as_advanced(ENABLE_REGRESSION_TESTING) mark_as_advanced(TEST_ANNUAL_SIMULATION) mark_as_advanced(gtest_build_samples) mark_as_advanced(gtest_build_tests) diff --git a/cmake/CompilerFlags.cmake b/cmake/CompilerFlags.cmake index bdcc6ea5903..9acc1c3691a 100644 --- a/cmake/CompilerFlags.cmake +++ b/cmake/CompilerFlags.cmake @@ -119,8 +119,6 @@ elseif(CMAKE_COMPILER_IS_GNUCXX OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" O # depending on the level of overflow check selected, the stringop-overflow can also emit false positives # https://gcc.gnu.org/onlinedocs/gcc/Warning-Options.html#index-Wstringop-overflow target_compile_options(project_warnings INTERFACE -Wno-stringop-overflow) - # for RelWithDebInfo builds, lets turn OFF NDEBUG, which will re-enable assert statements - target_compile_options(project_options INTERFACE $<$:-UNDEBUG>) elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang") if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 13.0) # Suppress unused-but-set warnings until more serious ones are addressed diff --git a/cmake/ProjectMacros.cmake b/cmake/ProjectMacros.cmake index 071bc94f56c..6bd2aec73d7 100644 --- a/cmake/ProjectMacros.cmake +++ b/cmake/ProjectMacros.cmake @@ -112,6 +112,10 @@ function(ADD_SIMULATION_TEST) if(ADD_SIM_TEST_PERFORMANCE) # For performance testing, it's more problematic, because that'll cut on the ReadVarEso time 
message(WARNING "Will not be able to call ReadVarEso unless BUILD_FORTRAN=TRUE, skipping flag -r.") + elseif(DO_REGRESSION_TESTING) + # DO_REGRESSION_TESTING shouldn't really occur here since EnergyPlus/CMakeLists.txt will throw an error if BUILD_FORTRAN isn't enabled + # Not that bad, just a dev warning + message(AUTHOR_WARNING "Will not be able to call ReadVarEso unless BUILD_FORTRAN=TRUE, skipping flag -r.") endif() endif() @@ -160,6 +164,21 @@ function(ADD_SIMULATION_TEST) set_tests_properties("${TEST_CATEGORY}.${IDF_NAME}" PROPERTIES FAIL_REGULAR_EXPRESSION "ERROR;FAIL;Test Failed") endif() + if(DO_REGRESSION_TESTING AND (NOT ADD_SIM_TEST_EXPECT_FATAL)) + add_test( + NAME "regression.${IDF_NAME}" + COMMAND + ${CMAKE_COMMAND} -DBINARY_DIR=${PROJECT_BINARY_DIR} -DPYTHON_EXECUTABLE=${Python_EXECUTABLE} -DIDF_FILE=${ADD_SIM_TEST_IDF_FILE} + -DREGRESSION_SCRIPT_PATH=${REGRESSION_SCRIPT_PATH} -DREGRESSION_BASELINE_PATH=${REGRESSION_BASELINE_PATH} + -DREGRESSION_BASELINE_SHA=${REGRESSION_BASELINE_SHA} -DCOMMIT_SHA=${COMMIT_SHA} -DDEVICE_ID=${DEVICE_ID} -P + ${PROJECT_SOURCE_DIR}/cmake/RunRegression.cmake) + # Note, CMake / CTest doesn't seem to validate if this dependent name actually exists, + # but it does seem to honor the requirement + set_tests_properties("regression.${IDF_NAME}" PROPERTIES DEPENDS "${TEST_CATEGORY}.${IDF_NAME}") + set_tests_properties("regression.${IDF_NAME}" PROPERTIES PASS_REGULAR_EXPRESSION "Success") + set_tests_properties("regression.${IDF_NAME}" PROPERTIES FAIL_REGULAR_EXPRESSION "ERROR;FAIL;Test Failed") + endif() + if(ENABLE_REVERSE_DD_TESTING AND (NOT ADD_SIM_TEST_EXPECT_FATAL)) set(TEST_FILE_FOLDER "testfiles") set(ENERGYPLUS_FLAGS "-D -r") diff --git a/cmake/RunRegression.cmake b/cmake/RunRegression.cmake new file mode 100644 index 00000000000..4e693c91cd6 --- /dev/null +++ b/cmake/RunRegression.cmake @@ -0,0 +1,17 @@ +# These need to be defined by the caller +# IDF_FILE +# BINARY_DIR +# PYTHON_EXECUTABLE +# REGRESSION_SCRIPT_PATH +# REGRESSION_BASELINE_PATH +# REGRESSION_BASELINE_SHA +# COMMIT_SHA +# DEVICE_ID + +get_filename_component(IDF_NAME "${IDF_FILE}" NAME_WE) + +execute_process( + COMMAND + ${PYTHON_EXECUTABLE} "${REGRESSION_SCRIPT_PATH}/energyplus_regressions/diffs/ci_compare_script.py" "${IDF_NAME}" + "${REGRESSION_BASELINE_PATH}/testfiles/${IDF_NAME}" "${BINARY_DIR}/testfiles/${IDF_NAME}" ${REGRESSION_BASELINE_SHA} ${COMMIT_SHA} true + "${DEVICE_ID}") diff --git a/requirements.txt b/requirements.txt index 3d8b2a018cb..9968de6fe52 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,2 @@ # requirements for building an EnergyPlus wheel -wheel - -# requirements for the CI regression testing scripts -energyplus-regressions +wheel \ No newline at end of file diff --git a/scripts/dev/add_to_project.sh b/scripts/dev/add_to_project.sh new file mode 100755 index 00000000000..e5fc0210161 --- /dev/null +++ b/scripts/dev/add_to_project.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# call with the PR number as the only command line argument + +# get the PR num from the command line argument +PR_NUM=$1 + +# the GraphQL Project ID can be retrieved from a given organization's project, where the URL is: +# https://github.com/orgs/ORGANIZATION/projects/SOME_PROJECT_NUMBER/views/2 +# and the associated call to graphql is: +#gh api graphql -f query=' +# query{ +# organization(login: "ORGANIZATION"){ +# projectV2(number: SOME_PROJECT_NUMBER) { +# id +# } +# } +# }' +# TODO: Just specify the project organization and number and get the graphql ID in here 
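+# As a hypothetical sketch of that TODO (the org login and project number below are
+# placeholders, not the real values), the ID could be looked up at runtime instead of
+# being hard-coded, e.g.:
+#   PROJ_ID=$(gh api graphql -f query='
+#     query{ organization(login: "SOME_ORGANIZATION"){ projectV2(number: 1) { id } } }' \
+#       --jq '.data.organization.projectV2.id')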
+PROJ_ID=PVT_kwDOAB0YcM4AEWD7 +echo "Using PR Num as ${PR_NUM} and project ID as: ${PROJ_ID}" + +# get the current PR ID based on the this checkout +CONTENT=$(gh pr view "$PR_NUM" --json 'id' --jq '.id') +echo "Found PR node ID as: ${CONTENT}" + +# use the gh api command line to act on the Projects-v2 API and add the PR as a new card +# should also add more arguments for the column to use, etc. +gh api graphql -f query=" + mutation { + addProjectV2ItemById(input: {projectId: \"${PROJ_ID}\" contentId: \"${CONTENT}\"}) { + item { + id + } + } + }" diff --git a/scripts/dev/build_regression_summary.py b/scripts/dev/build_regression_summary.py deleted file mode 100644 index 641a228fa3a..00000000000 --- a/scripts/dev/build_regression_summary.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -# EnergyPlus, Copyright (c) 1996-2024, The Board of Trustees of the University -# of Illinois, The Regents of the University of California, through Lawrence -# Berkeley National Laboratory (subject to receipt of any required approvals -# from the U.S. Dept. of Energy), Oak Ridge National Laboratory, managed by UT- -# Battelle, Alliance for Sustainable Energy, LLC, and other contributors. All -# rights reserved. -# -# NOTICE: This Software was developed under funding from the U.S. Department of -# Energy and the U.S. Government consequently retains certain rights. As such, -# the U.S. Government has been granted for itself and others acting on its -# behalf a paid-up, nonexclusive, irrevocable, worldwide license in the -# Software to reproduce, distribute copies to the public, prepare derivative -# works, and perform publicly and display publicly, and to permit others to do -# so. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# (1) Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# (2) Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# (3) Neither the name of the University of California, Lawrence Berkeley -# National Laboratory, the University of Illinois, U.S. Dept. of Energy nor -# the names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# (4) Use of EnergyPlus(TM) Name. If Licensee (i) distributes the software in -# stand-alone form without changes from the version obtained under this -# License, or (ii) Licensee makes a reference solely to the software -# portion of its product, Licensee must refer to the software as -# "EnergyPlus version X" software, where "X" is the version number Licensee -# obtained under this License and may not use a different name for the -# software. Except as specifically required in this Section (4), Licensee -# shall not use in a company name, a product name, in advertising, -# publicity, or other promotional activities any name, trade name, -# trademark, logo, or other designation of "EnergyPlus", "E+", "e+" or -# confusingly similar designation, without the U.S. Department of Energy's -# prior written consent. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -from sys import argv - -summary_input_md_file = argv[1] -summary_output_js_file = argv[2] -matrix_os = argv[3] -github_sha = argv[4] -github_run_id = argv[5] -artifact_url = argv[6] - -with open(summary_input_md_file) as md: - md_contents = md.read() - -fixed_up_contents = f""" -### :warning: Regressions detected on {matrix_os} for commit {github_sha} - -{md_contents} - - - [View Results](https://github.com/NREL/EnergyPlus/actions/runs/{github_run_id}) - - [Download Regressions]({artifact_url}) -""" - -with open(summary_output_js_file, 'w') as js: - js_contents = f""" -module.exports = ({{github, context}}) => {{ - github.rest.issues.createComment({{ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: `{fixed_up_contents}` - }}) -}} -""" - js.write(js_contents) diff --git a/scripts/dev/gha_coverage_summary.py b/scripts/dev/gha_coverage_summary.py deleted file mode 100644 index 34bb39dcc3c..00000000000 --- a/scripts/dev/gha_coverage_summary.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python -# EnergyPlus, Copyright (c) 1996-2024, The Board of Trustees of the University -# of Illinois, The Regents of the University of California, through Lawrence -# Berkeley National Laboratory (subject to receipt of any required approvals -# from the U.S. Dept. of Energy), Oak Ridge National Laboratory, managed by UT- -# Battelle, Alliance for Sustainable Energy, LLC, and other contributors. All -# rights reserved. -# -# NOTICE: This Software was developed under funding from the U.S. Department of -# Energy and the U.S. Government consequently retains certain rights. As such, -# the U.S. Government has been granted for itself and others acting on its -# behalf a paid-up, nonexclusive, irrevocable, worldwide license in the -# Software to reproduce, distribute copies to the public, prepare derivative -# works, and perform publicly and display publicly, and to permit others to do -# so. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# (1) Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# (2) Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# (3) Neither the name of the University of California, Lawrence Berkeley -# National Laboratory, the University of Illinois, U.S. Dept. 
of Energy nor -# the names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# (4) Use of EnergyPlus(TM) Name. If Licensee (i) distributes the software in -# stand-alone form without changes from the version obtained under this -# License, or (ii) Licensee makes a reference solely to the software -# portion of its product, Licensee must refer to the software as -# "EnergyPlus version X" software, where "X" is the version number Licensee -# obtained under this License and may not use a different name for the -# software. Except as specifically required in this Section (4), Licensee -# shall not use in a company name, a product name, in advertising, -# publicity, or other promotional activities any name, trade name, -# trademark, logo, or other designation of "EnergyPlus", "E+", "e+" or -# confusingly similar designation, without the U.S. Department of Energy's -# prior written consent. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -# expecting to find a file called cover.txt in cwd -# need to generate a cover.md file in cwd -# cover.txt looks like: -# lines=48 hit=2 functions=2 hit=1 -# Processing file EnergyPlus/SurfaceGeometry.hh -# lines=44 hit=9 functions=4 hit=2 -# Overall coverage rate: -# lines......: 7.9% (28765 of 364658 lines) -# functions......: 19.6% (2224 of 11327 functions) - -from pathlib import Path -cover_input = Path.cwd() / 'cover.txt' -lines = cover_input.read_text().strip().split('\n') -line_coverage = lines[-2].strip().split(':')[1].strip() -line_percent = line_coverage.split(' ')[0] -function_coverage = lines[-1].strip().split(':')[1].strip() -cover_output = Path.cwd() / 'cover.md' -content = f""" -
- Coverage Summary | {line_percent} of lines | Download Coverage Artifact for Full Details
- {line_coverage} | {function_coverage}
""" -cover_output.write_text(content) diff --git a/scripts/dev/gha_regressions.py b/scripts/dev/gha_regressions.py deleted file mode 100644 index b3de9964190..00000000000 --- a/scripts/dev/gha_regressions.py +++ /dev/null @@ -1,508 +0,0 @@ -#!/usr/bin/env python -# EnergyPlus, Copyright (c) 1996-2024, The Board of Trustees of the University -# of Illinois, The Regents of the University of California, through Lawrence -# Berkeley National Laboratory (subject to receipt of any required approvals -# from the U.S. Dept. of Energy), Oak Ridge National Laboratory, managed by UT- -# Battelle, Alliance for Sustainable Energy, LLC, and other contributors. All -# rights reserved. -# -# NOTICE: This Software was developed under funding from the U.S. Department of -# Energy and the U.S. Government consequently retains certain rights. As such, -# the U.S. Government has been granted for itself and others acting on its -# behalf a paid-up, nonexclusive, irrevocable, worldwide license in the -# Software to reproduce, distribute copies to the public, prepare derivative -# works, and perform publicly and display publicly, and to permit others to do -# so. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# (1) Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# (2) Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# (3) Neither the name of the University of California, Lawrence Berkeley -# National Laboratory, the University of Illinois, U.S. Dept. of Energy nor -# the names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# (4) Use of EnergyPlus(TM) Name. If Licensee (i) distributes the software in -# stand-alone form without changes from the version obtained under this -# License, or (ii) Licensee makes a reference solely to the software -# portion of its product, Licensee must refer to the software as -# "EnergyPlus version X" software, where "X" is the version number Licensee -# obtained under this License and may not use a different name for the -# software. Except as specifically required in this Section (4), Licensee -# shall not use in a company name, a product name, in advertising, -# publicity, or other promotional activities any name, trade name, -# trademark, logo, or other designation of "EnergyPlus", "E+", "e+" or -# confusingly similar designation, without the U.S. Department of Energy's -# prior written consent. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -from collections import defaultdict -from datetime import datetime, UTC -import json -from shutil import copy -from pathlib import Path -import sys -from shutil import rmtree -from zoneinfo import ZoneInfo - -from energyplus_regressions.runtests import SuiteRunner -from energyplus_regressions.structures import TextDifferences, TestEntry, EndErrSummary - - -class RegressionManager: - - def __init__(self): - self.root_index_files_no_diff = [] - self.root_index_files_diffs = [] - self.diffs_by_idf = defaultdict(list) - self.diffs_by_type = defaultdict(list) - self.summary_results = {} - self.num_idf_inspected = 0 - # self.all_files_compared = [] TODO: need to get this from regression runner - import energyplus_regressions - self.threshold_file = str(Path(energyplus_regressions.__file__).parent / 'diffs' / 'math_diff.config') - - def single_file_regressions(self, baseline: Path, modified: Path) -> [TestEntry, bool]: - - idf = baseline.name - self.num_idf_inspected += 1 - this_file_diffs = [] - - entry = TestEntry(idf, "") - entry, message = SuiteRunner.process_diffs_for_one_case( - entry, - {'build_dir': str(baseline)}, - {'build_dir': str(modified)}, - "", - self.threshold_file, - ci_mode=True - ) # returns an updated entry - self.summary_results[idf] = entry.summary_result - - has_diffs = False - - text_diff_results = { - "Audit": entry.aud_diffs, - "BND": entry.bnd_diffs, - "DELightIn": entry.dl_in_diffs, - "DELightOut": entry.dl_out_diffs, - "DXF": entry.dxf_diffs, - "EIO": entry.eio_diffs, - "ERR": entry.err_diffs, - "Readvars_Audit": entry.readvars_audit_diffs, - "EDD": entry.edd_diffs, - "WRL": entry.wrl_diffs, - "SLN": entry.sln_diffs, - "SCI": entry.sci_diffs, - "MAP": entry.map_diffs, - "DFS": entry.dfs_diffs, - "SCREEN": entry.screen_diffs, - "GLHE": entry.glhe_diffs, - "MDD": entry.mdd_diffs, - "MTD": entry.mtd_diffs, - "RDD": entry.rdd_diffs, - "SHD": entry.shd_diffs, - "PERF_LOG": entry.perf_log_diffs, - "IDF": entry.idf_diffs, - "StdOut": entry.stdout_diffs, - "StdErr": entry.stderr_diffs, - } - for diff_type, diffs in text_diff_results.items(): - if diffs is None: - continue - if diffs.diff_type != TextDifferences.EQUAL: - has_diffs = True - this_file_diffs.append(diff_type) - self.diffs_by_type[diff_type].append(idf) - self.diffs_by_idf[idf].append(diff_type) - - numeric_diff_results = { - "ESO": entry.eso_diffs, - "MTR": entry.mtr_diffs, - "SSZ": entry.ssz_diffs, - "ZSZ": entry.zsz_diffs, - "JSON": entry.json_diffs, - } - for diff_type, diffs in numeric_diff_results.items(): - if diffs is None: - continue - if diffs.diff_type == 'Big Diffs': - has_diffs = True - this_file_diffs.append(f"{diff_type} Big Diffs") - self.diffs_by_type[f"{diff_type} Big Diffs"].append(idf) - self.diffs_by_idf[idf].append(f"{diff_type} Big Diffs") - elif diffs.diff_type == 'Small Diffs': - has_diffs = True - this_file_diffs.append(f"{diff_type} Small Diffs") - self.diffs_by_type[f"{diff_type} Small Diffs"].append(idf) - self.diffs_by_idf[idf].append(f"{diff_type} Small 
Diffs") - - if entry.table_diffs: - if entry.table_diffs.big_diff_count > 0: - has_diffs = True - this_file_diffs.append("Table Big Diffs") - self.diffs_by_type["Table Big Diffs"].append(idf) - self.diffs_by_idf[idf].append("Table Big Diffs") - elif entry.table_diffs.small_diff_count > 0: - has_diffs = True - this_file_diffs.append("Table Small Diffs") - self.diffs_by_type["Table Small Diffs"].append(idf) - self.diffs_by_idf[idf].append("Table Small Diffs") - if entry.table_diffs.string_diff_count > 1: # There's always one...the time stamp - has_diffs = True - this_file_diffs.append("Table String Diffs") - self.diffs_by_type["Table String Diffs"].append(idf) - self.diffs_by_idf[idf].append("Table String Diffs") - - return entry, has_diffs - - @staticmethod - def single_diff_html(contents: str) -> str: - return f""" - - - - - - - - -
-    {contents}
- - -""" - - @staticmethod - def regression_row_in_single_test_case_html(diff_file_name: str) -> str: - return f""" - - {diff_file_name} - download - view - """ - - @staticmethod - def single_test_case_html(contents: str) -> str: - return f""" - - - - - - - - - - - - - - - - -{contents} -
filename
- -""" - - def bundle_root_index_html(self, header_info: list[str]) -> str: - # set up header table - header_content = "" - for hi in header_info: - header_content += f"""
  • {hi}
  • \n""" - - # set up diff summary listings - num_no_diff = len(self.root_index_files_no_diff) - nds = 's' if num_no_diff == 0 or num_no_diff > 1 else '' - no_diff_content = "" - for nd in self.root_index_files_no_diff: - no_diff_content += f"""
  • {nd}
  • \n""" - num_diff = len(self.root_index_files_diffs) - ds = 's' if num_diff == 0 or num_diff > 1 else '' - diff_content = "" - for d in self.root_index_files_diffs: - diff_content += f"""{d}\n""" - - # set up diff type listing - diff_type_keys = sorted(self.diffs_by_type.keys()) - num_diff_types = len(diff_type_keys) - dt = 's' if num_diff_types == 0 or num_diff_types > 1 else '' - diff_type_content = "" - if num_diff_types > 0: - for k in diff_type_keys: - nice_type_key = k.lower().replace(' ', '') - diffs_this_type = self.diffs_by_type[k] - num_files_this_type = len(diffs_this_type) - dtt = 's' if num_diff_types == 0 or num_diff_types > 1 else '' - this_diff_type_list = "" - for idf in diffs_this_type: - this_diff_type_list += f"""{idf}\n""" - diff_type_content += f""" -
-      {this_diff_type_list}
    """ - - # set up runtime results table - run_time_rows_text = "" - sum_base_seconds = 0 - sum_branch_seconds = 0 - sorted_idf_keys = sorted(self.summary_results.keys()) - for idf in sorted_idf_keys: - summary = self.summary_results[idf] - case_1_success = summary.simulation_status_case1 == EndErrSummary.STATUS_SUCCESS - case_2_success = summary.simulation_status_case2 == EndErrSummary.STATUS_SUCCESS - if case_1_success: - base_time = summary.run_time_seconds_case1 - else: - base_time = "N/A" - if case_1_success: - branch_time = summary.run_time_seconds_case2 - else: - branch_time = "N/A" - if case_1_success and case_2_success: - sum_base_seconds += base_time - sum_branch_seconds += branch_time - - run_time_rows_text += f"""{idf}{base_time}{branch_time}""" - run_time_rows_text += f"""Runtime Total (Successes){sum_base_seconds:.1f}{sum_branch_seconds:.1f}""" - - return f""" - - - - - - - - - - -
-     EnergyPlus Regressions
-     Summary by File
-       {no_diff_content}
-       {diff_content}
-     Summary by Diff Type
-       {diff_type_content}
-     Run Times
-       {run_time_rows_text}
-     Filename | Base Case Runtime (seconds) | Branch Case Runtime (seconds)
    - - -""" - - def generate_markdown_summary(self, bundle_root: Path): - diff_lines = "" - for diff_type, idfs in self.diffs_by_type.items(): - diff_lines += f" - {diff_type}: {len(idfs)}\n" - content = f""" -
    - Regression Summary - -{diff_lines} -
    """ - (bundle_root / 'summary.md').write_text(content) - - def check_all_regressions(self, base_testfiles: Path, mod_testfiles: Path, bundle_root: Path) -> bool: - any_diffs = False - bundle_root.mkdir(exist_ok=True) - entries = sorted(base_testfiles.iterdir()) - for entry_num, baseline in enumerate(entries): - if not baseline.is_dir(): - continue - if baseline.name == 'CMakeFiles': # add more ignore dirs here - continue - modified = mod_testfiles / baseline.name - if not modified.exists(): - continue # TODO: Should we warn that it is missing? - entry, diffs = self.single_file_regressions(baseline, modified) - if diffs: - self.root_index_files_diffs.append(baseline.name) - any_diffs = True - potential_diff_files = baseline.glob("*.*.*") # TODO: Could try to get this from the regression tool - target_dir_for_this_file_diffs = bundle_root / baseline.name - if potential_diff_files: - if target_dir_for_this_file_diffs.exists(): - rmtree(target_dir_for_this_file_diffs) - target_dir_for_this_file_diffs.mkdir() - index_contents_this_file = "" - for potential_diff_file in potential_diff_files: - copy(potential_diff_file, target_dir_for_this_file_diffs) - diff_file_with_html = target_dir_for_this_file_diffs / (potential_diff_file.name + '.html') - if potential_diff_file.name.endswith('.htm'): - # already a html file, just upload the raw contents but renamed as ...htm.html - copy(potential_diff_file, diff_file_with_html) - else: - # it's not an HTML file, wrap it inside an HTML wrapper in a temp file and send it - contents = potential_diff_file.read_text() - wrapped_contents = self.single_diff_html(contents) - diff_file_with_html.write_text(wrapped_contents) - index_contents_this_file += self.regression_row_in_single_test_case_html(potential_diff_file.name) - index_file = target_dir_for_this_file_diffs / 'index.html' - index_this_file = self.single_test_case_html(index_contents_this_file) - index_file.write_text(index_this_file) - else: - self.root_index_files_no_diff.append(baseline.name) - so_far = ' Diffs! 
' if any_diffs else 'No diffs' - if entry_num % 40 == 0: - print(f"On file #{entry_num}/{len(entries)} ({baseline.name}), Diff status so far: {so_far}") - meta_data = [ - f"Regression time stamp in UTC: {datetime.now(UTC)}", - f"Regression time stamp in Central Time: {datetime.now(ZoneInfo('America/Chicago'))}", - f"Number of input files evaluated: {self.num_idf_inspected}", - ] - bundle_root_index_file_path = bundle_root / 'index.html' - bundle_root_index_content = self.bundle_root_index_html(meta_data) - bundle_root_index_file_path.write_text(bundle_root_index_content) - print() - print(f"* Files with Diffs *:\n{"\n ".join(self.root_index_files_diffs)}\n") - print(f"* Diffs by File *:\n{json.dumps(self.diffs_by_idf, indent=2, sort_keys=True)}\n") - print(f"* Diffs by Type *:\n{json.dumps(self.diffs_by_type, indent=2, sort_keys=True)}\n") - if any_diffs: - self.generate_markdown_summary(bundle_root) - # print("::warning title=Regressions::Diffs Detected") - return any_diffs - - -if __name__ == "__main__": # pragma: no cover - testing function, not the __main__ entry point - - if len(sys.argv) != 4: - print("syntax: %s base_dir mod_dir regression_dir" % sys.argv[0]) - sys.exit(1) - arg_base_dir = Path(sys.argv[1]) - arg_mod_dir = Path(sys.argv[2]) - arg_regression_dir = Path(sys.argv[3]) - rm = RegressionManager() - response = rm.check_all_regressions(arg_base_dir, arg_mod_dir, arg_regression_dir) - sys.exit(1 if response else 0) diff --git a/src/EnergyPlus/DataSystemVariables.cc b/src/EnergyPlus/DataSystemVariables.cc index dd2100e87d6..def6ab83745 100644 --- a/src/EnergyPlus/DataSystemVariables.cc +++ b/src/EnergyPlus/DataSystemVariables.cc @@ -111,7 +111,17 @@ namespace DataSystemVariables { constexpr const char * cDisplayInputInAuditEnvVar("DISPLAYINPUTINAUDIT"); // environmental variable that enables the echoing of the input file into the audit file - constexpr const char *ciForceTimeStepEnvVar("CI_FORCE_TIME_STEP"); // environment var forcing 30 minute time steps on CI for efficiency + // DERIVED TYPE DEFINITIONS + // na + + // INTERFACE BLOCK SPECIFICATIONS + // na + + // MODULE VARIABLE DECLARATIONS: + + // Shading methods + + // Functions fs::path CheckForActualFilePath(EnergyPlusData &state, fs::path const &originalInputFilePath, // path (or filename only) as input for object @@ -301,9 +311,6 @@ namespace DataSystemVariables { get_environment_variable(cDisplayInputInAuditEnvVar, cEnvValue); if (!cEnvValue.empty()) state.dataGlobal->DisplayInputInAudit = env_var_on(cEnvValue); // Yes or True - - get_environment_variable(ciForceTimeStepEnvVar, cEnvValue); - if (!cEnvValue.empty()) state.dataSysVars->ciForceTimeStep = env_var_on(cEnvValue); // Yes or True } } // namespace DataSystemVariables diff --git a/src/EnergyPlus/DataSystemVariables.hh b/src/EnergyPlus/DataSystemVariables.hh index f1d57148038..5d4eedc1c54 100644 --- a/src/EnergyPlus/DataSystemVariables.hh +++ b/src/EnergyPlus/DataSystemVariables.hh @@ -150,7 +150,6 @@ struct SystemVarsData : BaseGlobalStruct int NumberIntRadThreads = 1; int iNominalTotSurfaces = 0; bool Threading = false; - bool ciForceTimeStep = false; void init_state([[maybe_unused]] EnergyPlusData &state) override { diff --git a/src/EnergyPlus/SimulationManager.cc b/src/EnergyPlus/SimulationManager.cc index 4f5808e0f65..f28b5f9843a 100644 --- a/src/EnergyPlus/SimulationManager.cc +++ b/src/EnergyPlus/SimulationManager.cc @@ -765,9 +765,6 @@ namespace SimulationManager { state.dataIPShortCut->cAlphaFieldNames, state.dataIPShortCut->cNumericFieldNames); 
state.dataGlobal->NumOfTimeStepInHour = Number(1); - if (state.dataSysVars->ciForceTimeStep) { - state.dataGlobal->NumOfTimeStepInHour = 2; // Force 30 minute time steps on CI - } if (state.dataGlobal->NumOfTimeStepInHour <= 0 || state.dataGlobal->NumOfTimeStepInHour > 60) { Alphas(1) = fmt::to_string(state.dataGlobal->NumOfTimeStepInHour); ShowWarningError(state, format("{}: Requested number ({}) invalid, Defaulted to 4", CurrentModuleObject, Alphas(1))); diff --git a/third_party/.gitignore b/third_party/.gitignore index f684023a3d0..e1afacd951f 100644 --- a/third_party/.gitignore +++ b/third_party/.gitignore @@ -32,7 +32,6 @@ btwxt/.gitmodules btwxt/vendor/fmt btwxt/vendor/courierr/vendor/fmt btwxt/vendor/courierr/.gitmodules -btwxt/vendor/courierr/vendor/googletest libtk205/.gitmodules # unused repo directories/files diff --git a/tst/EnergyPlus/unit/CommandLineInterface.unit.cc b/tst/EnergyPlus/unit/CommandLineInterface.unit.cc index 7fb32b7e246..d9d23566260 100644 --- a/tst/EnergyPlus/unit/CommandLineInterface.unit.cc +++ b/tst/EnergyPlus/unit/CommandLineInterface.unit.cc @@ -385,7 +385,7 @@ TEST_F(CommandLineInterfaceFixture, runReadVars) } } -TEST_F(CommandLineInterfaceFixture, DISABLED_numThread) +TEST_F(CommandLineInterfaceFixture, numThread) { struct TestCase { diff --git a/tst/EnergyPlus/unit/Timer.unit.cc b/tst/EnergyPlus/unit/Timer.unit.cc index ac1878fc137..6817974a17f 100644 --- a/tst/EnergyPlus/unit/Timer.unit.cc +++ b/tst/EnergyPlus/unit/Timer.unit.cc @@ -60,7 +60,7 @@ using namespace EnergyPlus; -TEST_F(EnergyPlusFixture, DISABLED_Timer_ticktock) +TEST_F(EnergyPlusFixture, Timer_ticktock) { constexpr std::chrono::milliseconds::rep sleep_time_ms = 100; @@ -70,7 +70,7 @@ TEST_F(EnergyPlusFixture, DISABLED_Timer_ticktock) std::this_thread::sleep_for(std::chrono::milliseconds(sleep_time_ms)); t.tock(); // In some occurrences CI is reporting slightly above than 100 values, probably system was quite busy at that time, - // but we don't want to have the test failing occasionally + // but we don't want to have the test failing occassionally EXPECT_GE(t.duration().count(), sleep_time_ms); EXPECT_LT(t.duration().count(), sleep_time_ms * 2); EXPECT_GE(t.elapsedSeconds(), sleep_time_s);
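// A minimal standalone sketch (not the EnergyPlus Timer class or test fixture) of the tolerance
// pattern the re-enabled Timer_ticktock test above relies on: a busy CI runner can report a
// duration slightly above the requested sleep, so the measurement is bounded between the sleep
// time and twice the sleep time rather than compared for equality.
#include <cassert>
#include <chrono>
#include <thread>

int main()
{
    using Clock = std::chrono::steady_clock;
    constexpr auto sleep_time = std::chrono::milliseconds(100);

    auto const start = Clock::now();                 // tick
    std::this_thread::sleep_for(sleep_time);
    auto const stop = Clock::now();                  // tock
    auto const elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(stop - start);

    assert(elapsed >= sleep_time);     // never shorter than the requested sleep
    assert(elapsed < sleep_time * 2);  // allow some scheduling slack, but not unbounded
    return 0;
}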