Dump header hash and slot no for events #14

Workflow file for this run

name: Haskell Linux CI
on:
push:
merge_group:
# DO NOT DELETE.
# This is required for nightly builds and is invoked by nightly-trigger.yml
# on a schedule trigger.
workflow_dispatch:
inputs:
reason:
description: 'Reason'
required: false
default: manual
tests:
description: 'Tests'
required: false
default: some
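
# The inputs above only apply to manual (workflow_dispatch) runs. As a rough sketch, assuming the
# GitHub CLI is available in a checkout of this repository, such a run could be dispatched with:
#
#   gh workflow run "Haskell Linux CI" -f reason="manual smoke test" -f tests=some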
jobs:
linux_ci:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
ghc: ["8.10.7", "9.2.7"]
cabal: ["3.10.1.0"]
os: [ubuntu-latest]
env:
# Modify this value to "invalidate" the cabal cache.
CABAL_CACHE_VERSION: "2023-06-01"
# Modify this value to "invalidate" the secp cache.
SECP_CACHE_VERSION: "2022-12-30"
      # Current secp256k1 ref as of 27.02.2022.
SECP256K1_REF: ac83be33d0956faf6b7f61a60ab524ef7d6a473a
concurrency:
group: >
a+${{ github.event_name }}
b+${{ github.workflow_ref }}
c+${{ github.job }}
d+${{ matrix.ghc }}
e+${{ matrix.cabal }}
f+${{ matrix.os }}
g+${{ (startsWith(github.ref, 'refs/heads/gh-readonly-queue/') && github.run_id) || github.event.pull_request.number || github.ref }}
cancel-in-progress: true
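
    # How the concurrency group above resolves: merge-queue runs (refs/heads/gh-readonly-queue/...)
    # use the unique run_id so they never cancel each other, pull request runs group by PR number,
    # and any other run groups by its ref. With cancel-in-progress: true, a newer run in the same
    # group cancels the one still in progress.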
steps:
- name: Concurrency group
run: >
echo
a+${{ github.event_name }}
b+${{ github.workflow_ref }}
c+${{ github.job }}
d+${{ matrix.ghc }}
e+${{ matrix.cabal }}
f+${{ matrix.os }}
g+${{ (startsWith(github.ref, 'refs/heads/gh-readonly-queue/') && github.run_id) || github.event.pull_request.number || github.ref }}
- name: Workaround runner image issue
# https://github.com/actions/runner-images/issues/7061
run: sudo chown -R $USER /usr/local/.ghcup
- name: Install Haskell
uses: input-output-hk/setup-haskell@v1
id: setup-haskell
with:
ghc-version: ${{ matrix.ghc }}
cabal-version: ${{ matrix.cabal }}
- uses: actions/checkout@v3
- name: "[Bash] Add build script path"
run: echo "$(pwd)/.github/bin" >> $GITHUB_PATH
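    # Appending to $GITHUB_PATH makes $(pwd)/.github/bin part of PATH for every subsequent step,
    # so the build scripts in that directory can be invoked by name.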
- name: "LINUX: Install build environment (apt-get)"
run: |
sudo apt-get update
sudo apt-get -y install libsodium23 libsodium-dev
sudo apt-get -y install libsystemd0 libsystemd-dev
sudo apt-get -y remove --purge software-properties-common
sudo apt-get -y autoremove
- name: "LINUX: Install build environment (for secp256k1)"
run: sudo apt-get -y install autoconf automake libtool
- name: Install secp256k1
uses: input-output-hk/setup-secp256k1@v1
with:
git-ref: ${{ env.SECP256K1_REF }}
cache-version: ${{ env.SECP_CACHE_VERSION }}
- name: Cabal update
run: cabal update
- name: Configure build
shell: bash
run: |
cp ".github/workflows/cabal.project.local.ci.$(uname -s)" cabal.project.local
echo "# cabal.project.local"
cat cabal.project.local
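    # On ubuntu-latest, "uname -s" prints "Linux", so the file copied above is
    # .github/workflows/cabal.project.local.ci.Linux.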
- name: Generate dist-newstyle/cache/plan.json
run: cabal build all --dry-run --minimize-conflict-set
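    # The dry-run above produces dist-newstyle/cache/plan.json, which the cabal-cache steps below
    # read to decide what to sync, and which the "Tar artifacts" step later queries with jq.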
    # Users who fork cardano-node and want a writable cache can set up their own S3 bucket and
    # then define the following in their forked repository settings:
    #
    # Secrets:
    #   AWS_ACCESS_KEY_ID
    #   AWS_SECRET_ACCESS_KEY
    # Variables:
    #   BINARY_CACHE_URI
    #   BINARY_CACHE_REGION
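    #
    # As a sketch, assuming the GitHub CLI and treating the bucket URI and region below as
    # placeholders for your own values, those settings can be created from the command line:
    #
    #   gh secret set AWS_ACCESS_KEY_ID
    #   gh secret set AWS_SECRET_ACCESS_KEY
    #   gh variable set BINARY_CACHE_URI --body "s3://my-cabal-cache"
    #   gh variable set BINARY_CACHE_REGION --body "us-east-1"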
- name: Cabal cache over S3
uses: action-works/cabal-cache-s3@v1
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
with:
region: ${{ vars.BINARY_CACHE_REGION }}
dist-dir: dist-newstyle
store-path: ${{ steps.setup-haskell.outputs.cabal-store }}
threads: ${{ vars.BINARY_CACHE_THREADS }}
archive-uri: ${{ vars.BINARY_CACHE_URI }}/${{ env.CABAL_CACHE_VERSION }}/${{ runner.os }}/${{ matrix.cabal }}/${{ matrix.ghc }}
skip: "${{ vars.BINARY_CACHE_URI == '' }}"
# It's important to ensure that people who fork this repository can not only successfully build in
# CI by default, but also have meaningful cabal store caching.
#
    # Because syncing with S3 requires credentials, we cannot rely on S3 for this. For this reason an
    # HTTPS fallback is used. The HTTPS server mirrors the contents of the S3 bucket. The HTTPS cabal
    # store archive is read-only for security reasons.
#
    # Users who fork this repository and want a writable cabal store archive are encouraged to set
    # up their own S3 bucket.
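    #
    # Note that the two cache steps are mutually exclusive: the S3 step above is skipped when the
    # BINARY_CACHE_URI variable is unset, and this HTTPS step is skipped when it is set, so exactly
    # one of them runs in a given job.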
- name: Cabal cache over HTTPS
uses: action-works/cabal-cache-s3@v1
with:
dist-dir: dist-newstyle
store-path: ${{ steps.setup-haskell.outputs.cabal-store }}
threads: ${{ vars.BINARY_CACHE_THREADS }}
archive-uri: https://iohk.cache.haskellworks.io/${{ env.CABAL_CACHE_VERSION }}/${{ runner.os }}/${{ matrix.cabal }}/${{ matrix.ghc }}
skip: "${{ vars.BINARY_CACHE_URI != '' }}"
enable-save: false
- name: Build remaining components
run: cabal build all
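    # TMPDIR/TMP point the test workspaces at the runner's temp directory, and KEEP_WORKSPACE=1 is
    # assumed to tell the hedgehog-extras test harness not to delete them, so the failure steps
    # below can archive whatever is left behind.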
- name: Run tests
env:
TMPDIR: ${{ runner.temp }}
TMP: ${{ runner.temp }}
KEEP_WORKSPACE: 1
run: cabal test cardano-testnet cardano-node cardano-node-chairman cardano-cli cardano-submit-api
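    # The step below assumes every kept workspace contains a 'module' marker file: find locates
    # those files, basename/sort -u reduces them to the distinct workspace directory names, and
    # tar bundles those directories into workspaces.tgz.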
- name: Tar failed tests workspaces
if: ${{ failure() }}
env:
TMP: ${{ runner.temp }}
shell: bash
run: |
cd $TMP
find . -name 'module' -type f -exec dirname {} \; | xargs -L1 basename | sort -u | xargs tar -czvf workspaces.tgz
- name: Upload workspaces on tests failure
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: failed-test-workspaces-${{ matrix.os }}-ghc${{ matrix.ghc }}-cabal${{ matrix.cabal }}.tgz
path: ${{ runner.temp }}/workspaces.tgz
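    # The jq query below picks the locally built executable components (style "local", component
    # name starting with "exe:") out of plan.json and tars each reported binary individually into
    # ./artifacts.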
- name: "Tar artifacts"
shell: bash
run: |
mkdir -p artifacts
for exe in $(cat dist-newstyle/cache/plan.json | jq -r '."install-plan"[] | select(.style == "local" and (."component-name" | startswith("exe:"))) | ."bin-file"'); do
if [ -f $exe ]; then
echo "Including artifact $exe"
( cd artifacts
tar -C "$(dirname $exe)" -czf "$(basename $exe).tar.gz" "$(basename $exe)"
)
else
echo "Skipping artifact $exe"
fi
done
- name: Save Artifact
if: matrix.ghc == '8.10.7'
uses: actions/upload-artifact@v1
with:
name: artifacts-${{ matrix.os }}
path: ./artifacts
- name: Delete socket files in chairman tests in preparation for uploading artifacts
if: ${{ always() }}
shell: bash
run: |
if [ -d "${{ runner.temp }}/chairman" ]; then
find "${{ runner.temp }}/chairman" -type s -exec rm -f {} \;
fi
- uses: actions/upload-artifact@v2
if: ${{ always() }}
continue-on-error: true
with:
name: chairman-test-artifacts-${{ matrix.os }}-${{ matrix.ghc }}
path: ${{ runner.temp }}/chairman/
# Uncomment the following back in for debugging. Remember to launch a `pwsh` from
# the tmux session to debug `pwsh` issues. And be reminded that the `/msys2` and
# `/msys2/mingw64` paths are not in PATH by default for the workflow, but tmate
# will put them in.
# You may also want to run
#
# $env:PATH=("C:\Program Files\PowerShell\7;{0}" -f $env:ORIGINAL_PATH)
#
    # to restore the original path. Do note that some tests might need msys2
# and will silently fail if msys2 is not in path. See the "Run tests" step.
#
# - name: Setup tmate session
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3
# with:
# limit-access-to-actor: true
build-complete:
needs: [linux_ci]
runs-on: ubuntu-latest
steps:
- name: Build complete
run: echo 'Build complete'
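  # A job like this is commonly used as the single required status check for branch protection,
  # standing in for every (ghc, cabal, os) combination of the matrix above; it only requires
  # linux_ci to have succeeded.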
release:
needs: [linux_ci]
if: ${{ startsWith(github.ref, 'refs/tags') }}
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Create Release Tag
id: create_release_tag
run: |
echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: Release ${{ github.ref }}
draft: true
prerelease: false
- name: Download Artifact
uses: actions/download-artifact@v1
with:
name: artifacts-ubuntu-latest
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./artifacts-ubuntu-latest/cardano-submit-api.tar.gz
asset_name: cardano-submit-api_${{ steps.create_release_tag.outputs.TAG }}-linux.tar.gz
asset_content_type: application/gzip